| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
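Each row below stores one fill-in-the-middle (FIM) sample split into the `prefix`, `middle`, and `suffix` columns described above. A minimal sketch of how such a row is typically reassembled into a single training string; the `<fim_*>` sentinel names are illustrative placeholders, not something this dump specifies:

```python
# Hypothetical reassembly of one row; column names come from the schema above.
def to_fim_string(row):
    # Sentinel tokens are placeholders; real tokenizers define their own.
    return ("<fim_prefix>" + row["prefix"]
            + "<fim_suffix>" + row["suffix"]
            + "<fim_middle>" + row["middle"])

example = {"prefix": "def add(a, b):\n    return ", "middle": "a + b", "suffix": "\n"}
print(to_fim_string(example))
```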
| Rhombik/rhombik-object-repository | project/views.py | Python | agpl-3.0 | 13,528 | 0.016263 |
from os import path
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.core.urlresolvers import reverse
from django.shortcuts import render_to_response, render, redirect
from django.http import HttpResponseRedirect, HttpResponse
from django.template import RequestContext
from django.views.decorators.csrf import ensure_csrf_cookie
import thumbnailer.thumbnailer as thumbnailer
from filemanager.models import fileobject, thumbobject, htmlobject, zippedobject
from project.models import Project
from project.forms import ProjectForm
from django import forms
from django.contrib.contenttypes.models import ContentType
from django.http import Http404
from gitHooks.models import githubAccount
"""~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"""
##obviously ignoring csrf is a bad thing. Get this fixed.
"""~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"""
from django.views.decorators.csrf import csrf_exempt, csrf_protect,requires_csrf_token
from django.core.context_processors import csrf
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.core.urlresolvers import reverse
def searchtest(*args, **kwargs):
project = Project.objects.filter(pk=1).get()
return render_to_response('search/indexes/project/project_text.txt', dict(object=project))
from django.shortcuts import redirect
def api(request):
projects=[]
##Get all the project drafts if "selectAll" is selected
if request.POST.get('all'):
projects=Project.objects.filter(author=request.user,draft=True)
else:
##Get selected projects.
projects = []
for i in request.POST.getlist('p'):
##This query is all we need for authentication! But I am sleep deprived.
projects.append(Project.objects.filter(pk=i,author=request.user)[0])
response_data=""
##These should ideally just pass it on to one or more specific functions.
#It would be nice if they returned JSON data, and put it into response_data. So we can at least pretend this is a real api.
if request.POST['action'] == "Publish":
for i in projects:
i.draft=False
i.save()
if request.POST['action'] == "Delete":
for i in projects:
i.delete()
if "application/json" in request.META['HTTP_ACCEPT_ENCODING']:
mimetype = 'application/json'
return HttpResponse(response_data, mimetype=mimetype)
return redirect(request.POST['redirect'])
def delete(request, pk):
project = Project.objects.get(pk=pk)
response_data = False
if str(project.author) == str(request.user):
project.delete()
response_data = True
if "application/json" in request.META['HTTP_ACCEPT_ENCODING']:
mimetype = 'application/json'
return HttpResponse(response_data, mimetype=mimetype)
return redirect("/mydrafts/")
## this function is for the mydrafts page to publish things. It relies on the project.valid bool, which should be set within editOrCreateStuff.
def publish(request, pk):
project = Project.objects.get(pk=pk)
response_data = False
if str(project.author) == str(request.user) and project.valid == True:
project.draft = False
project.save()
response_data = True
if "application/json" in request.META['HTTP_ACCEPT_ENCODING']:
mimetype = 'application/json'
return HttpResponse(response_data, mimetype=mimetype)
return redirect("/mydrafts/")
"""______________________________"""
## project_list_get takes a list of projects and returns a list of lists containing:
## -a thumbnail object for the project.
## -the project.
"""~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"""
def project_list_get(projects, purge=True):
listdata = []
for project in projects:
if project and (project.enf_consistancy() == True or purge == False):
object_type = ContentType.objects.get_for_model(project)
try:
project.thumbnail
thumbnail = project.thumbnail.get_thumb(300,200)
except:
thumbnail = [""]
listdata += [[project, thumbnail[0]]]
return listdata
def project(request, pk):
    ## Try to get the project from the database. If this fails then return 404.
try:
project = Project.objects.exclude(draft=True).get(pk=pk)
except:
raise Http404
object_type = ContentType.objects.get_for_model(project)
projectfiles = fileobject.objects.filter(content_type=object_type,object_id=project.id)
    if project.enf_consistancy() == False:
raise Http404
else:
mainthumb = project.thumbnail.get_thumb(650,500)
images=[]# Images in the project; will be handed to template
# Get readme as first item in the list of texts to hand to the template.
try:
        #This is in a try statement because the file backend might name the readme "ReadMe_1.md" or similar. Need to switch it out for a "bodyfile" foreign key at some point.
readme = project.bodyFile
htmlreadme=htmlobject.objects.get_or_create(fileobject = readme )[0]
texts = [[htmlreadme, path.split(str(readme.filename))[1]]]
except:
texts = []
pass
    # norenders: the number of files in the project that are not rendered. We currently do nothing with it... unless someone changed that and not this note.
    norenders = 0
for i in projectfiles:
thumb=i.get_thumb(65,50)
renderer=i.filetype
if renderer != "norender" and renderer != "text":
images.append(thumb)
if renderer == "norender":
norenders +=1
if renderer == "text" and i != project.bodyFile:
htmlmodel=htmlobject.objects.get_or_create(fileobject = i)[0]
texts.append([htmlmodel, path.split(str(i.filename))[1]])
download=zippedobject.objects.filter(project=project)
if download:
download=download[0]
else:
download=None
author = project.author
from userProfile.models import userProfile
authorprofile = userProfile.objects.filter(user=author)[0]
try:
authorpic=authorprofile.userpic.get_thumb(128,128)[0]
except:
authorpic=False
## get the root comment of the project and use it to get all the projects comments.
from threadedComments.models import CommentRoot
object_type = ContentType.objects.get(model="project")
commentRoot = CommentRoot.objects.get_or_create(commenter=project.author, content_type=object_type, object_id=project.pk)[0]
nodes = commentRoot.get_descendants(include_self=False)
    if download and download.filename:
downloadurl=download.filename.url
else:
downloadurl=None
c = RequestContext(request, dict(project=project,
user=request.user,
author=author,
authorprofile=authorprofile,
authorpic=authorpic,
commentRootId=commentRoot.id,
nodes=nodes,
moreobjects=norenders,
images=images,
                                     texts=texts,
galleryname="base",
mainthumb=[mainthumb],
downloadurl=downloadurl))
    return render_to_response('article.html', c)
def front(request):
return render_to_response('list.html', dict(project=project, user=request.user,))
'''
- Needs to generate a list of the most popular printables of the day and/or week and/or month. The overhead of this is beyond me, but I imagine some sort of algorithm to factor in upvotes/downloads/comments and staff interest is needed to decide what is "popular".
'''
def list(request):
"""Main listing."""
### get all the projects! ###
newprojects = Project.objects.exclude(draft=True).order_by("-created")
paginator = Paginator(newprojects, 8)
try: page = int(request.GET.get("page", '1'))
except ValueError: page = 1
try:
#only get thumbnails for projects on the current page.
listdata = project_list_get(paginator.page(page))
except (InvalidPage, EmptyPage):
listdata = paginator.page(paginator.num_pages)
| Tjorriemorrie/housing | src/settings.py | Python | mit | 496 | 0.002016 |
from os.path import dirname, realpath
from jinja2 import Environment, FileSystemLoader
from google.appengine.ext import ndb
DEBUG = True
SECRET_KEY = 'asdfjasdflkjsfewi23kjl3kjl45kjl56jk6hjb76vsjsa'
CONFIG = {
}
SRC_ROOT = dirname(realpath(__file__))
JINJA_ENVIRONMENT = Environment(
    loader=FileSystemLoader(SRC_ROOT),
    extensions=['jinja2.ext.autoescape'],
autoescape=True,
)
REGIONS = ['NSW', 'VIC', 'QLD', 'WA', 'SA', 'TAS', 'ACT', 'NT']
PARENT_KEY = ndb.Key('daddy', 'oz')
| Danielhiversen/home-assistant | tests/components/subaru/test_config_flow.py | Python | apache-2.0 | 8,146 | 0.000123 |
"""Tests for the Subaru component config flow."""
# pylint: disable=redefined-outer-name
from copy import deepcopy
from unittest import mock
from unittest.mock import patch
import pytest
from subarulink.exceptions import InvalidCredentials, InvalidPIN, SubaruException
from homeassistant import config_entries
from homeassistant.components.subaru import config_flow
from homeassistant.components.subaru.const import CONF_UPDATE_ENABLED, DOMAIN
from homeassistant.const import CONF_DEVICE_ID, CONF_PIN
from homeassistant.setup import async_setup_component
from .conftest import (
MOCK_API_CONNECT,
MOCK_API_IS_PIN_REQUIRED,
MOCK_API_TEST_PIN,
MOCK_API_UPDATE_SAVED_PIN,
TEST_CONFIG,
TEST_CREDS,
TEST_DEVICE_ID,
TEST_PIN,
TEST_USERNAME,
)
from tests.common import MockConfigEntry
ASYNC_SETUP_ENTRY = "homeassistant.components.subaru.async_setup_entry"
async def test_user_form_init(user_form):
"""Test the initial user form for first step of the config flow."""
assert user_form["description_placeholders"] is None
assert user_form["errors"] is None
assert user_form["handler"] == DOMAIN
assert user_form["step_id"] == "user"
assert user_form["type"] == "form"
async def test_user_form_repeat_identifier(hass, user_form):
"""Test we handle repeat identifiers."""
entry = MockConfigEntry(
domain=DOMAIN, title=TEST_USERNAME, data=TEST_CREDS, options=None
)
entry.add_to_hass(hass)
with patch(
MOCK_API_CONNECT,
return_value=True,
) as mock_connect:
result = await hass.config_entries.flow.async_configure(
user_form["flow_id"],
TEST_CREDS,
)
assert len(mock_connect.mock_calls) == 0
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_user_form_cannot_connect(hass, user_form):
"""Test we handle cannot connect error."""
with patch(
MOCK_API_CONNECT,
side_effect=SubaruException(None),
) as mock_connect:
result = await hass.config_entries.flow.async_configure(
user_form["flow_id"],
TEST_CREDS,
)
assert len(mock_connect.mock_calls) == 1
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
async def test_user_form_invalid_auth(hass, user_form):
"""Test we handle invalid auth."""
with patch(
MOCK_API_CONNECT,
side_effect=InvalidCredentials("invalidAccount"),
) as mock_connect:
result = await hass.config_entries.flow.async_configure(
user_form["flow_id"],
TEST_CREDS,
)
assert len(mock_connect.mock_calls) == 1
assert result["type"] == "form"
assert result["errors"] == {"base": "invalid_auth"}
async def test_user_form_pin_not_required(hass, user_form):
"""Test successful login when no PIN is required."""
with patch(MOCK_API_CONNECT, return_value=True,) as mock_connect, patch(
MOCK_API_IS_PIN_REQUIRED,
return_value=False,
) as mock_is_pin_required, patch(
ASYNC_SETUP_ENTRY, return_value=True
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
user_form["flow_id"],
TEST_CREDS,
)
assert len(mock_connect.mock_calls) == 1
assert len(mock_is_pin_required.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
expected = {
"title": TEST_USERNAME,
"description": None,
"description_placeholders": None,
"flow_id": mock.ANY,
"result": mock.ANY,
"handler": DOMAIN,
"type": "create_entry",
"version": 1,
"data": deepcopy(TEST_CONFIG),
"options": {},
}
expected["data"][CONF_PIN] = None
result["data"][CONF_DEVICE_ID] = TEST_DEVICE_ID
assert result == expected
async def test_pin_form_init(pin_form):
"""Test the pin entry form for second step of the config flow."""
expected = {
"data_schema": config_flow.PIN_SCHEMA,
"description_placeholders": None,
"errors": None,
"flow_id": mock.ANY,
"handler": DOMAIN,
"step_id": "pin",
"type": "form",
"last_step": None,
}
assert pin_form == expected
async def test_pin_form_bad_pin_format(hass, pin_form):
"""Test we handle invalid pin."""
with patch(MOCK_API_TEST_PIN,) as mock_test_pin, patch(
MOCK_API_UPDATE_SAVED_PIN,
return_value=True,
) as mock_update_saved_pin:
result = await hass.config_entries.flow.async_configure(
pin_form["flow_id"], user_input={CONF_PIN: "abcd"}
)
assert len(mock_test_pin.mock_calls) == 0
assert len(mock_update_saved_pin.mock_calls) == 1
assert result["type"] == "form"
assert result["errors"] == {"base": "bad_pin_format"}
async def test_pin_form_success(hass, pin_form):
"""Test successful PIN entry."""
with patch(MOCK_API_TEST_PIN, return_value=True,) as mock_test_pin, patch(
MOCK_API_UPDATE_SAVED_PIN,
return_value=True,
) as mock_update_saved_pin, patch(
ASYNC_SETUP_ENTRY, return_value=True
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
pin_form["flow_id"], user_input={CONF_PIN: TEST_PIN}
)
assert len(mock_test_pin.mock_calls) == 1
assert len(mock_update_saved_pin.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
expected = {
"title": TEST_USERNAME,
"description": None,
"description_placeholders": None,
"flow_id": mock.ANY,
"result": mock.ANY,
"handler": DOMAIN,
"type": "create_entry",
"version": 1,
"data": TEST_CONFIG,
"options": {},
}
result["data"][CONF_DEVI
|
CE_ID] = TEST_DEVICE_ID
assert result == expected
async def test_pin_form_incorrect_pin(hass, pin_form):
"""Test we handle invalid pin."
|
""
with patch(
MOCK_API_TEST_PIN,
side_effect=InvalidPIN("invalidPin"),
) as mock_test_pin, patch(
MOCK_API_UPDATE_SAVED_PIN,
return_value=True,
) as mock_update_saved_pin:
result = await hass.config_entries.flow.async_configure(
pin_form["flow_id"], user_input={CONF_PIN: TEST_PIN}
)
assert len(mock_test_pin.mock_calls) == 1
assert len(mock_update_saved_pin.mock_calls) == 1
assert result["type"] == "form"
assert result["errors"] == {"base": "incorrect_pin"}
async def test_option_flow_form(options_form):
"""Test config flow options form."""
assert options_form["description_placeholders"] is None
assert options_form["errors"] is None
assert options_form["step_id"] == "init"
assert options_form["type"] == "form"
async def test_option_flow(hass, options_form):
"""Test config flow options."""
result = await hass.config_entries.options.async_configure(
options_form["flow_id"],
user_input={
CONF_UPDATE_ENABLED: False,
},
)
assert result["type"] == "create_entry"
assert result["data"] == {
CONF_UPDATE_ENABLED: False,
}
@pytest.fixture
async def user_form(hass):
"""Return initial form for Subaru config flow."""
return await hass.config_entries.flow.async_init(
config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER}
)
@pytest.fixture
async def pin_form(hass, user_form):
"""Return second form (PIN input) for Subaru config flow."""
with patch(MOCK_API_CONNECT, return_value=True,), patch(
MOCK_API_IS_PIN_REQUIRED,
return_value=True,
):
return await hass.config_entries.flow.async_configure(
user_form["flow_id"], user_input=TEST_CREDS
)
@pytest.fixture
async def options_form(hass):
"""Return options form for Subaru config flow."""
entry = MockConfigEntry(domain=DOMAIN, data={}, options=None)
entry.add_to_hass(hass)
await async_setup_component(hass, DOMAIN, {})
return await hass.config_entries.options.async_init(entry.entry_id)
| Vvucinic/Wander | venv_2_7/lib/python2.7/site-packages/Django-1.9-py2.7.egg/django/contrib/gis/maps/google/overlays.py | Python | artistic-2.0 | 11,955 | 0.000836 |
from __future__ import unicode_literals
from functools import total_ordering
from django.contrib.gis.geos import (
LinearRing, LineString, Point, Polygon, fromstr,
)
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import html_safe
@html_safe
@python_2_unicode_compatible
class GEvent(object):
"""
A Python wrapper for the Google GEvent object.
Events can be attached to any object derived from GOverlayBase with the
add_event() call.
For more information please see the Google Maps API Reference:
https://developers.google.com/maps/documentation/javascript/reference#event
Example:
from django.shortcuts import render_to_response
from django.contrib.gis.maps.google import GoogleMap, GEvent, GPolyline
def sample_request(request):
polyline = GPolyline('LINESTRING(101 26, 112 26, 102 31)')
event = GEvent('click',
'function() { location.href = "http://www.google.com"}')
polyline.add_event(event)
return render_to_response('mytemplate.html',
{'google' : GoogleMap(polylines=[polyline])})
"""
def __init__(self, event, action):
"""
Initializes a GEvent object.
Parameters:
event:
string for the event, such as 'click'. The event must be a valid
event for the object in the Google Maps API.
There is no validation of the event type within Django.
action:
string containing a Javascript function, such as
'function() { location.href = "newurl";}'
The string must be a valid Javascript function. Again there is no
        validation of the function within Django.
"""
self.event = event
self.action = action
def __str__(self):
"Returns the parameter part of a GEvent."
return '"%s", %s' % (self.event, self.action)
@html_safe
@python_2_unicode_compatible
class GOverlayBase(object):
def __init__(self):
self.events = []
def latlng_from_coords(self, coords):
"Generates a JavaScript array of GLatLng objects for the given coordinates."
return '[%s]' % ','.join('new GLatLng(%s,%s)' % (y, x) for x, y in coords)
def add_event(self, event):
"Attaches a GEvent to the overlay object."
self.events.append(event)
def __str__(self):
"The string representation is the JavaScript API call."
return '%s(%s)' % (self.__class__.__name__, self.js_params)
class GPolygon(GOverlayBase):
"""
A Python wrapper for the Google GPolygon object. For more information
please see the Google Maps API Reference:
https://developers.google.com/maps/documentation/javascript/reference#Polygon
"""
def __init__(self, poly,
stroke_color='#0000ff', stroke_weight=2, stroke_opacity=1,
fill_color='#0000ff', fill_opacity=0.4):
"""
The GPolygon object initializes on a GEOS Polygon or a parameter that
        may be instantiated into GEOS Polygon. Please note that this will not
        depict a Polygon's internal rings.
Keyword Options:
stroke_color:
The color of the polygon outline. Defaults to '#0000ff' (blue).
stroke_weight:
The width of the polygon outline, in pixels. Defaults to 2.
stroke_opacity:
The opacity of the polygon outline, between 0 and 1. Defaults to 1.
fill_color:
The color of the polygon fill. Defaults to '#0000ff' (blue).
fill_opacity:
The opacity of the polygon fill. Defaults to 0.4.
"""
if isinstance(poly, six.string_types):
poly = fromstr(poly)
if isinstance(poly, (tuple, list)):
poly = Polygon(poly)
if not isinstance(poly, Polygon):
raise TypeError('GPolygon may only initialize on GEOS Polygons.')
# Getting the envelope of the input polygon (used for automatically
# determining the zoom level).
self.envelope = poly.envelope
# Translating the coordinates into a JavaScript array of
# Google `GLatLng` objects.
self.points = self.latlng_from_coords(poly.shell.coords)
# Stroke settings.
self.stroke_color, self.stroke_opacity, self.stroke_weight = stroke_color, stroke_opacity, stroke_weight
# Fill settings.
self.fill_color, self.fill_opacity = fill_color, fill_opacity
super(GPolygon, self).__init__()
@property
def js_params(self):
return '%s, "%s", %s, %s, "%s", %s' % (self.points, self.stroke_color, self.stroke_weight, self.stroke_opacity,
self.fill_color, self.fill_opacity)
class GPolyline(GOverlayBase):
"""
A Python wrapper for the Google GPolyline object. For more information
please see the Google Maps API Reference:
https://developers.google.com/maps/documentation/javascript/reference#Polyline
"""
def __init__(self, geom, color='#0000ff', weight=2, opacity=1):
"""
        The GPolyline object may be initialized on GEOS LineString, LinearRing,
        and Polygon objects (internal rings not supported) or a parameter that
        may be instantiated into one of the above geometries.
Keyword Options:
color:
The color to use for the polyline. Defaults to '#0000ff' (blue).
weight:
The width of the polyline, in pixels. Defaults to 2.
opacity:
The opacity of the polyline, between 0 and 1. Defaults to 1.
"""
# If a GEOS geometry isn't passed in, try to construct one.
if isinstance(geom, six.string_types):
geom = fromstr(geom)
if isinstance(geom, (tuple, list)):
geom = Polygon(geom)
# Generating the lat/lng coordinate pairs.
if isinstance(geom, (LineString, LinearRing)):
self.latlngs = self.latlng_from_coords(geom.coords)
elif isinstance(geom, Polygon):
self.latlngs = self.latlng_from_coords(geom.shell.coords)
else:
raise TypeError('GPolyline may only initialize on GEOS LineString, LinearRing, and/or Polygon geometries.')
# Getting the envelope for automatic zoom determination.
self.envelope = geom.envelope
self.color, self.weight, self.opacity = color, weight, opacity
super(GPolyline, self).__init__()
@property
def js_params(self):
return '%s, "%s", %s, %s' % (self.latlngs, self.color, self.weight, self.opacity)
@total_ordering
class GIcon(object):
"""
Creates a GIcon object to pass into a Gmarker object.
The keyword arguments map to instance attributes of the same name. These,
in turn, correspond to a subset of the attributes of the official GIcon
javascript object:
https://developers.google.com/maps/documentation/javascript/reference#Icon
Because a Google map often uses several different icons, a name field has
been added to the required arguments.
Required Arguments:
varname:
A string which will become the basis for the js variable name of
the marker, for this reason, your code should assign a unique
name for each GIcon you instantiate, otherwise there will be
name space collisions in your javascript.
Keyword Options:
image:
The url of the image to be used as the icon on the map defaults
to 'G_DEFAULT_ICON'
iconsize:
a tuple representing the pixel size of the foreground (not the
shadow) image of the icon, in the format: (width, height) ex.:
GIcon('fast_food',
image="/media/icon/star.png",
iconsize=(15,10))
            Would indicate your custom icon was 15px wide and 10px high.
shadow:
the url of the image of the icon's shadow
shadowsize:
a tuple representing the pixel size of the shadow image, format is
| rspavel/spack | var/spack/repos/builtin/packages/xsdk/package.py | Python | lgpl-2.1 | 9,658 | 0.003417 |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import sys
class Xsdk(BundlePackage):
"""Xsdk is a suite of Department of Energy (DOE) packages for numerical
simulation. This is a Spack bundle package that installs the xSDK
packages
"""
homepage = "http://xsdk.info"
maintainers = ['balay', 'luszczek']
version('develop')
version('0.5.0')
version('0.4.0')
version('0.3.0')
version('xsdk-0.2.0')
variant('debug', default=False, description='Compile in debug mode')
variant('cuda', default=False, description='Enable CUDA dependent packages')
variant('trilinos', default=True, description='Enable trilinos package build')
variant('omega-h', default=True, description='Enable omega-h package build')
variant('strumpack', default=True, description='Enable strumpack package build')
variant('dealii', default=True, description='Enable dealii package build')
variant('phist', default=True, description='Enable phist package build')
variant('ginkgo', default=True, description='Enable ginkgo package build')
variant('libensemble', default=True, description='Enable py-libensemble package build')
variant('precice', default=(sys.platform != 'darwin'),
description='Enable precice package build')
variant('butterflypack', default=True, description='Enable butterflypack package build')
depends_on('hypre@develop+superlu-dist+shared', when='@develop')
depends_on('hypre@2.18.2+superlu-dist+shared', when='@0.5.0')
depends_on('hypre@2.15.1~internal-superlu', when='@0.4.0')
depends_on('hypre@2.12.1~internal-superlu', when='@0.3.0')
depends_on('hypre@xsdk-0.2.0~internal-superlu', when='@xsdk-0.2.0')
depends_on('mfem@develop+mpi+superlu-dist+petsc~sundials+examples+miniapps', when='@develop')
depends_on('mfem@4.0.1-xsdk+mpi~superlu-dist+petsc+sundials+examples+miniapps', when='@0.5.0')
depends_on('mfem@3.4.0+mpi+superlu-dist+petsc+sundials+examples+miniapps', when='@0.4.0')
depends_on('mfem@3.3.2+mpi+superlu-dist+petsc+sundials+examples+miniapps', when='@0.3.0')
depends_on('superlu-dist@develop', when='@develop')
depends_on('superlu-dist@6.1.1', when='@0.5.0')
depends_on('superlu-dist@6.1.0', when='@0.4.0')
depends_on('superlu-dist@5.2.2', when='@0.3.0')
depends_on('superlu-dist@xsdk-0.2.0', when='@xsdk-0.2.0')
depends_on('trilinos@develop+hypre+superlu-dist+metis+hdf5~mumps+boost~suite-sparse+tpetra+nox+ifpack2+zoltan2+amesos2~exodus+dtk+intrepid2+shards',
when='@develop +trilinos')
depends_on('trilinos@12.18.1+hypre+superlu-dist+metis+hdf5~mumps+boost~suite-sparse+tpetra+nox+ifpack2+zoltan2+amesos2~exodus+dtk+intrepid2+shards',
when='@0.5.0 +trilinos')
depends_on('trilinos@12.14.1+hypre+superlu-dist+metis+hdf5~mumps+boost~suite-sparse+tpetra+nox+ifpack2+zoltan2+amesos2~exodus+dtk+intrepid2+shards',
when='@0.4.0 +trilinos')
depends_on('trilinos@12.12.1+hypre+superlu-dist+metis+hdf5~mumps+boost~suite-sparse~tpetra~ifpack2~zoltan2~amesos2~exodus',
when='@0.3.0 +trilinos')
depends_on('trilinos@xsdk-0.2.0+hypre+superlu-dist+metis+hdf5~mumps+boost~suite-sparse~tpetra~ifpack2~zoltan2~amesos2~exodus',
when='@xsdk-0.2.0 +trilinos')
depends_on('petsc +trilinos', when='+trilinos')
depends_on('petsc ~trilinos', when='~trilinos')
depends_on('petsc +batch', when='platform=cray @0.5.0:')
depends_on('petsc@develop+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
when='@develop')
depends_on('petsc@3.12.1+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
when='@0.5.0')
depends_on('petsc@3.10.3+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
when='@0.4.0')
depends_on('petsc@3.8.2+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
when='@0.3.0')
depends_on('petsc@xsdk-0.2.0+mpi+hypre+superlu-dist+metis+hdf5~mumps+double~int64',
when='@xsdk-0.2.0')
depends_on('dealii +trilinos', when='+trilinos +dealii')
depends_on('dealii ~trilinos', when='~trilinos +dealii')
depends_on('dealii@develop~assimp~python~doc~gmsh+petsc+slepc+mpi~int64+hdf5~netcdf+metis~sundials~ginkgo~symengine', when='@develop +dealii')
depends_on('dealii@9.1.1~assimp~python~doc~gmsh+petsc+slepc+mpi~int64+hdf5~netcdf+metis~sundials~ginkgo~symengine', when='@0.5.0 +dealii')
    depends_on('dealii@9.0.1~assimp~python~doc~gmsh+petsc~slepc+mpi~int64+hdf5~netcdf+metis~ginkgo~symengine', when='@0.4.0 +dealii')
depends_on('pflotran@develop', when='@develop')
depends_on('pflotran@xsdk-0.5.0', when='@0.5.0')
depends_on('pflotran@xsdk-0.4.0', when='@0.4.0')
    depends_on('pflotran@xsdk-0.3.0', when='@0.3.0')
depends_on('pflotran@xsdk-0.2.0', when='@xsdk-0.2.0')
depends_on('alquimia@develop', when='@develop')
depends_on('alquimia@xsdk-0.5.0', when='@0.5.0')
depends_on('alquimia@xsdk-0.4.0', when='@0.4.0')
depends_on('alquimia@xsdk-0.3.0', when='@0.3.0')
depends_on('alquimia@xsdk-0.2.0', when='@xsdk-0.2.0')
depends_on('sundials+superlu-dist', when='@0.5.0: %gcc@6.1:')
depends_on('sundials@develop~int64+hypre+petsc', when='@develop')
depends_on('sundials@5.0.0~int64+hypre+petsc', when='@0.5.0')
depends_on('sundials@3.2.1~int64+hypre', when='@0.4.0')
depends_on('sundials@3.1.0~int64+hypre', when='@0.3.0')
depends_on('plasma@19.8.1:', when='@develop %gcc@6.0:')
depends_on('plasma@19.8.1:', when='@0.5.0 %gcc@6.0:')
depends_on('plasma@18.11.1:', when='@0.4.0 %gcc@6.0:')
depends_on('magma@2.5.1', when='@develop +cuda')
depends_on('magma@2.5.1', when='@0.5.0 +cuda')
depends_on('magma@2.4.0', when='@0.4.0 +cuda')
depends_on('magma@2.2.0', when='@0.3.0 +cuda')
depends_on('amrex@develop', when='@develop %intel')
depends_on('amrex@develop', when='@develop %gcc')
depends_on('amrex@19.08', when='@0.5.0 %intel')
depends_on('amrex@19.08', when='@0.5.0 %gcc')
depends_on('amrex@18.10.1', when='@0.4.0 %intel')
depends_on('amrex@18.10.1', when='@0.4.0 %gcc')
depends_on('slepc@develop', when='@develop')
depends_on('slepc@3.12.0', when='@0.5.0')
depends_on('slepc@3.10.1', when='@0.4.0')
depends_on('omega-h +trilinos', when='+trilinos +omega-h')
depends_on('omega-h ~trilinos', when='~trilinos +omega-h')
depends_on('omega-h@develop', when='@develop +omega-h')
depends_on('omega-h@9.29.0', when='@0.5.0 +omega-h')
depends_on('omega-h@9.19.1', when='@0.4.0 +omega-h')
depends_on('strumpack@master', when='@develop +strumpack')
depends_on('strumpack@3.3.0', when='@0.5.0 +strumpack')
depends_on('strumpack@3.1.1', when='@0.4.0 +strumpack')
depends_on('pumi@develop', when='@develop')
depends_on('pumi@2.2.1', when='@0.5.0')
depends_on('pumi@2.2.0', when='@0.4.0')
tasmanian_openmp = '~openmp' if sys.platform == 'darwin' else '+openmp'
depends_on('tasmanian@develop+xsdkflags+blas' + tasmanian_openmp, when='@develop')
depends_on('tasmanian@develop+xsdkflags+blas+cuda+magma' + tasmanian_openmp, when='@develop +cuda')
depends_on('tasmanian@7.0+xsdkflags+mpi+blas' + tasmanian_openmp, when='@0.5.0')
depends_on('tasmanian@7.0+xsdkflags+mpi+blas+cuda+magma' + tasmanian_openmp, when='@0.5.0 +cuda')
depends_on('tasmanian@6.0+xsdkflags+blas~openmp', when='@0.4.0')
depends_on('tasmanian@6.0+xsdkflags+blas+cuda+magma~openmp', when='@0.4.0 +cuda')
# the Fortran 2003 bindings of phist require python@3:, but this
# creates a conflict with other packages like petsc@develop. Actually
# these are type='build' dependencies, but spack reports a conflict anyway.
# This will be fixed once the new concretizer becomes available
# (says @adamjstewart)
depends_on('phist kernel_lib=tpetra', when='+trilinos +phist')
depends_on('phist kernel_lib=petsc', when='~trilinos +phist')
depends
| endthestart/schwag | schwag/schwag/urls.py | Python | mit | 1,849 | 0.005949 |
from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
    url(r'^$', 'schwag.views.home', name='home'),
url(r'^about/$', 'schwag.views.about', name='about'),
url(r'^location/$', 'schwag.views.location', name='location'),
url(r'^contact/$', 'schwag.views.contact', name='contact'),
url(r'^bmx/$', 'schwag.views.bmx', name='bmx'),
url(r'^account/login/$', 'schwag.views.login', name='login'),
url(r'^account/logout/$', 'schwag.views.logout', name='logout'),
url(r'^account/register/$', 'schwag.views.register', name='register'),
url(r'^account/', include('django.contrib.auth.urls')),
url(r'^checkout/', include('senex_shop.checkout.urls')),
url(r'^cart/', include('senex_shop.cart.urls')),
url(r'^shop/', include('senex_shop.urls')),
url(r'^news/', include('senex_shop.news.urls')),
url(r'^admin/', include(admin.site.urls)),
)
# Uncomment the next line to serve media files in dev.
# urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
try:
import debug_toolbar
urlpatterns += patterns('',
url(r'^__debug__/', include(debug_toolbar.urls)),
)
except ImportError:
pass
if settings.DEBUG:
urlpatterns += patterns('django.views.static',
(r'media/(?P<path>.*)', 'serve', {'document_root': settings.MEDIA_ROOT}),
)
| random-forests/tensorflow-workshop | archive/extras/cat_dog_estimator/extract_cats_dogs.py | Python | apache-2.0 | 1,545 | 0.01165 |
"""One-time script for extracting all the cat and dog images from CIFAR-10."""
import cPickle
import numpy as np
from PIL import Image
TRAIN_FILES = ['cifar-10-batches-py/data_batch_%d' % i for i in range(1,6)]
TEST_FILE = 'test_batch'
CAT_INPUT_LABEL = 3
DOG_INPUT_LABEL = 5
CAT_OUTPUT_LABEL = 1
DOG_OUTPUT_LABEL = 0
def unpickle(file):
with open(file, 'rb') as fo:
dict = cPickle.load(fo)
return dict
data = []
# Count number of cats/dogs
num_cats = 0
num_dogs = 0
for data_file in TRAIN_FILES:
d = unpickle(data_file)
data.append(d)
for label in d['labels']:
if label == CAT_INPUT_LABEL:
num_cats += 1
if label == DOG_INPUT_LABEL:
num_dogs += 1
# Copy the cats/dogs into new array
images = np.empty((num_cats + num_dogs, 32, 32, 3), dtype=np.uint8)
labels = np.empty((num_cats + num_dogs), dtype=np.uint8)
index = 0
for data_batch in data:
for batch_index, label in enumerate(data_batch['labels']):
if label == CAT_INPUT_LABEL or label == DOG_INPUT_LABEL:
# Data is stored in B x 3072 format, convert to B' x 32 x 32 x 3
images[index, :, :, :] = np.transpose(
                np.reshape(data_batch['data'][batch_index, :],
newshape=(3, 32, 32)),
axes=(1, 2, 0))
if label == CAT_INPUT_LABEL:
labels[index] = CAT_OUTPUT_LABEL
else:
labels[index] = DOG_OUTPUT_LABEL
index += 1
np.save('catdog_data.npy', {'images': images, 'labels': labels})
# Make sure images look correct
img = Image.fromarray(images[10, :, :, :])
img.show()
| hustbeta/python-web-recipes | server-sent-events/bottle-sse.py | Python | mit | 517 | 0.005803 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import bottle
import datetime
import time
@bottle.get('/')
def index():
return bottle.static_file('index.html', root='.')
@bottle.get('/stream')
def stream():
bottle.response.content_type = 'text/event-stream'
bottle.response.cache_control = 'no-cache'
while True:
yield 'data: %s\n\n' % str(datetime.datetime.now())
time.sleep(5)
if __name__ == '__main__':
bottle.run(host='0.0.0.0', port=8080, debug=True)
| apanda/modeling | mcnet/components/aclfirewall.py | Python | bsd-3-clause | 2,245 | 0.015145 |
from . import NetworkObject
import z3
class AclFirewall (NetworkObject):
def _init(self, node, network, context):
super(AclFirewall, self).init_fail(node)
self.fw = node.z3Node
self.ctx = context
self.constraints = list ()
self.acls = list ()
network.SaneSend (self)
self._firewallSendRules ()
@property
def z3Node (self):
return self.fw
def SetPolicy (self, policy):
"""Wrap add acls"""
self.AddAcls(policy)
def AddAcls(self, acls):
if not isinstance(acls, list):
acls = [acls]
self.acls.extend(acls)
@property
def ACLs(self):
return self.acls
def _addConstraints(self, solver):
solver.add(self.constraints)
self._aclConstraints(solver)
def _firewallSendRules(self):
p_0 = z3.Const('%s_firewall_send_p_0'%(self.fw), self.ctx.packet)
n_0 = z3.Const('%s_firewall_send_n_0'%(self.fw), self.ctx.node)
n_1 = z3.Const('%s_firewall_send_n_1'%(self.fw), self.ctx.node)
t_0 = z3.Int('%s_firewall_send_t_0'%(self.fw))
t_1 = z3.Int('%s_firewall_send_t_1'%(self.fw))
        self.acl_func = z3.Function('%s_acl_func'%(self.fw), self.ctx.address, self.ctx.address, z3.BoolSort())
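        # The constraint below encodes the firewall's send rule: any packet p_0
        # sent at time t_0 must have been received at some earlier time t_1
        # (with the firewall not failed at t_1 or t_0), and the packet's
        # (src, dest) address pair must not match a configured ACL entry.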
self.constraints.append(z3.ForAll([n_0, p_0, t_0],
z3.Implies(self.ctx.send(self.fw, n_0, p_0, t_0), \
z3.Exists([t_1], \
z3.And(t_1 < t_0, \
z3.Not(self.failed(t_1)), \
                                        z3.Not(self.failed(t_0)), \
z3.Exists([n_1], \
self.ctx.recv(n_1, self.fw, p_0, t_1)), \
z3.Not(self.acl_func(self.ctx.packet.src(p_0), self.ctx.packet.dest(p_0))))))))
def _aclConstraints(self, solver):
if len(self.acls) == 0:
return
a_0 = z3.Const('%s_firewall_acl_a_0'%(self.fw), self.ctx.address)
a_1 = z3.Const('%s_firewall_acl_a_1'%(self.fw), self.ctx.address)
acl_map = map(lambda (a, b): z3.Or(z3.And(a_0 == a, a_1 == b), z3.And(a_0 == b, a_1 == a)), self.acls)
solver.add(z3.ForAll([a_0, a_1], self.acl_func(a_0, a_1) == z3.Or(acl_map)))
| latrop/GRCF | GRCFlibs/GRCFifaceFunctions.py | Python | gpl-3.0 | 80,548 | 0.004407 |
#! /usr/bin/env python
import os
import Tkinter as Tk
import tkFileDialog, tkMessageBox
import shelve
import time
from scipy.odr.odrpack import *
from scipy.ndimage import minimum_position
from pylab import *
import pylab
from PIL import Image
from PIL import ImageTk
from GRCFcommonFunctions import fig2img, fig2data
haloFirstLabels = {"isoterm": "Rc", "NFW": "C", "Burkert": u"\u03C1"}
haloSecondLabels = {"isoterm": "V(inf)", "NFW": "V200", "Burkert": "h"}
haloFirstLowerValues = {"isoterm": 1.0, "NFW": 3.0, "Burkert": 0.1}
haloFirstUpperValues = {"isoterm": 10.0, "NFW": 7.0, "Burkert": 10.0}
haloSecondLowerValues = {"isoterm": 150.0, "NFW": 100.0, "Burkert": 0.1}
haloSecondUpperValues = {"isoterm": 250.0, "NFW": 250.0, "Burkert": 50.0}
def fitByLine(xxx, yyy):
f = lambda B, x: B[0]*x + B[1]
fitting = ODR(RealData(xxx, yyy),
Model(f),
[-1.0, 0])
fitting.set_job()
result = fitting.run()
return result.beta[0], result.beta[1]
def geomLine(B, x):
return B[0]*x +B[1]
def mouse_wheel_up(event):
try:
oldvalue = float(event.widget.get())
except ValueError:
return None
newvalue = str(oldvalue+0.1)
event.widget.setvar(name=event.widget.cget("textvariable"), value=newvalue)
def mouse_wheel_down(event):
try:
oldvalue = float(event.widget.get())
except ValueError:
return None
newvalue = str(oldvalue-0.1)
event.widget.setvar(name=event.widget.cget("textvariable"), value=newvalue)
def onoffPanel(panel, newstate):
if newstate:
for c in panel.winfo_children():
if c.winfo_class() in ("Entry", "Radiobutton", "Spinbox", "Menubutton"):
c.config(state="normal")
else:
for c in panel.winfo_children():
if c.winfo_class() in ("Entry", "Radiobutton", "Spinbox", "Menubutton"):
c.config(state="disabled")
###### List of passbands for solar absolute magnitudes #######
mSunBandsList = ["Buser U (= Johnson) Vega",
"Buser U (= Johnson) AB",
"Straizys B (= Johnson) Vega",
"Straizys B (= Johnson) AB",
"Straizys V (= Johnson) Vega",
"Straizys V (= Johnson) AB",
"Bessell R Vega",
"Bessell R AB",
"Bessell I Vega",
"Bessell I AB",
"SDSS u' Vega",
"SDSS u' AB",
"SDSS g' Vega",
"SDSS g' AB",
"SDSS r' Vega",
"SDSS r' AB",
"SDSS i' Vega",
"SDSS i' AB",
"SDSS z' Vega",
"SDSS z' AB"
]
mSunBands = {"Buser U (= Johnson) Vega": 5.59,
"Buser U (= Johnson) AB": 6.32,
"Straizys B (= Johnson) Vega": 5.45,
"Straizys B (= Johnson) AB": 5.36,
"Straizys V (= Johnson) Vega": 4.78,
"Straizys V (= Johnson) AB": 4.79,
"Bessell R Vega": 4.46,
"Bessell R AB": 4.65,
"Bessell I Vega": 4.11,
"Bessell I AB": 4.55,
"SDSS u' Vega": 5.46,
"SDSS u' AB": 6.45,
"SDSS g' Vega": 5.22,
"SDSS g' AB": 5.14,
"SDSS r' Vega": 4.50,
"SDSS r' AB": 4.65,
"SDSS i' Vega": 4.16,
"SDSS i' AB": 4.56,
"SDSS z' Vega": 4.01,
"SDSS z' AB": 4.52
}
# filter Vega AB
#6 F300W 6.09 7.52
#7 F450W 5.32 5.25
#8 F555W 4.85 4.85
#9 F606W 4.66 4.75
#10 F702W 4.32 4.58
#11 F814W 4.15 4.57
#12 CFHT U 5.57 6.38
#13 CFHT B 5.49 5.32
#14 CFHT V 4.81 4.81
#15 CFHT R 4.44 4.64
#16 CFHT I 4.06 4.54
#17 KPNO U 5.59 6.32
#18 KPNO B 5.49 5.43
#19 KPNO V 4.79 4.79
#20 KPNO R 4.47 4.66
#21 KPNO I 4.11 4.55
# 22 Koo & Kron U 5.58 6.29
# 23 Koo & Kron J 5.31 5.26
# 24 Koo & Kron F 4.58 4.69
# 25 Koo & Kron N 4.11 4.53
# 31 ACS old z 3.99 4.52
# 32 FIS555 4.84 4.84
# 33 FIS606 4.63 4.72
# 34 FIS702 4.32 4.59
# 35 FIS814 4.12 4.53
# 36 LRIS B 5.46 5.42
# 37 LRIS V 4.82 4.83
# 38 LRIS R 4.46 4.63
# 39 LRIS Rs 4.33 4.59
# 40 LRIS I 4.04 4.53
# 41 LRIS Z 4.00 4.52
# 42 SPH Un 5.43 6.49
# 43 SPH G 5.21 5.11
# 44 SPH Rs 4.39 4.61
# 45 12k B 5.45 5.34
# 46 12k R 4.39 4.60
# 47 12k I 4.10 4.53
# 48 12k V 4.85 4.85
# 49 uh8K i 4.06 4.53
# 50 ACS B435 5.49 5.40
# 51 ACS V606 4.67 4.75
# 52 ACS SDSS i 4.14 4.54
# 53 ACS I814 4.11 4.53
# 54 ACS SDSS z 4.00 4.52
# 55 Bessell U 5.55 6.36
# 56 Bessell B 5.45 5.36
# 57 Bessell V 4.80 4.82
# 58 Bessell J 3.67 4.57
# 59 Bessell H 3.33 4.71
# 60 Bessell K 3.29 5.19
# 61 KPNO J 3.66 4.57
# 62 KPNO H 3.33 4.71
# 63 KPNO K 3.29 5.18
# 64 2500 7.96 9.80
# 65 2800 6.67 8.23
# 66 APM Bj 5.29 5.21
# 67 FOCA UV 10.50 12.39
# 68 DEIMOS R 4.44 4.62
# 69 Galex FUV 13.97 16.42
# 70 Galex NUV 8.45 10.31
# 71 SDSS u z=0.1 5.83 6.77
# 72 SDSS g z=0.1 5.46 5.36
# 73 SDSS r z=0.1 4.53 4.67
# 74 SDSS i z=0.1 4.12 4.48
# 75 SDSS z z=0.1 3.90 4.42
# 76 NIRI J 3.64 4.57
# 77 NIRI H 3.33 4.71
# 78 NIRI K 3.29 5.18
def saveParams(master, params):
"""Store all parameters of galaxy in a file"""
fileName = tkFileDialog.asksaveasfilename(parent=master,
filetypes=[("Data Base files", "*.db")],
title="Open file to save parameters")
if not fileName:
return
try:
os.remove(fileName)
except OSError:
pass
gParams = params[0]
bParams = params[1]
dParams = params[2]
hParams = params[3]
dataBase = shelve.open(fileName)
dataBase["gParams"] = gParams
dataBase["bParams"] = bParams
dataBase["dParams"] = dParams
dataBase["hParams"] = hParams
dataBase.close()
def loadParams(master, fName):
"""Load prevoiusly saved parameters from file"""
if fName is None:
fName = tkFileDialog.askopenfilename(parent=master,
filetypes=[("Data Base files", "*.db")],
title="Open file to load parameters")
if not fName:
return [None, None, None, None]
dataBase = shelve.open(fName)
gParams = dataBase["gParams"]
bParams = dataBase["bParams"]
dParams = dataBase["dParams"]
hParams = dataBase["hParams"]
dataBase.close()
return gParams, bParams, dParams, hParams
def saveVelocity(master, rotCurve):
    """Save all computed velocity curves (and the observed one) to a text file"""
    if rotCurve.parametersChanged:
        tkMessageBox.showerror("Save error", "Some parameters were changed.\nRun computation and try again.")
        return
fileName = tkFileDialog.asksaveasfilename(parent=master,
filetypes=[("Data files", "*.dat"),
("Text files", "*.txt"),
("Velocity files", "*.vel")],
title="Open file to save velocity curves")
if not fileName:
return None
fout = open(fileName, "w", buffering=0)
fout.truncate(0)
includeBulge = int(rotCurve.bParams["include"])
includeDisc = int(rotCurve.dParams["include"])
includeHalo = int(rotCurve.hParams["include"])
    delimiter = "#" * 40
    fout.write("%s\n#\n" % (delimiter))
    fout.write("# File generated by the GRCF program (see...) \n# at %s\n#\n" % time.ctime())
fout.write("# Input data file is %s \n#\n" % os.path.split(rotCurve.dataFileName)[1])
fout.write("# Parameters are:\n")
fout.write("# scale = %s\n" % (rotCurve.gParams["scale"]))
fout.write("# H0 = %s\n" % (rotCurve.gParams["hubble"]))
fout.write("# Msun = %s\n" % (rotCurve.gParams["Msun"]))
fout.write("#\n")
fout.write("# Parameters of bulge:\n")
| bootleg/ret-sync | ext_bn/retsync/__init__.py | Python | gpl-3.0 | 2,930 | 0.003413 |
#!/usr/bin/env python3
"""
Copyright (C) 2020, Alexandre Gazet.
This file is part of ret-sync plugin for Binary Ninja.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from collections import namedtuple
import binaryninjaui
if 'qt_major_version' in binaryninjaui.__dict__ and binaryninjaui.qt_major_version == 6:
from PySide6.QtCore import Qt
from PySide6.QtGui import QKeySequence
else:
from PySide2.QtCore import Qt
from PySide2.QtGui import QKeySequence
from binaryninjaui import UIAction, UIActionHandler
from .sync import SyncPlugin
from .retsync.rsconfig import rs_log
def add_commands(plugin):
DbgAction = namedtuple('DbgAction', 'name, key_seq, handler')
plugin_actions = (
DbgAction("SyncEnable", QKeySequence(Qt.ALT + Qt.Key_S), UIAction(plugin.cmd_sync)),
DbgAction("SyncDisable", QKeySequence(Qt.ALT + Qt.SHIFT + Qt.Key_S), UIAction(plugin.cmd_syncoff)),
DbgAction("SyncGo", QKeySequence(Qt.ALT + Qt.Key_F5), UIAction(plugin.cmd_go)),
DbgAction("SyncStepOver", QKeySequence(Qt.Key_F10), UIAction(plugin.cmd_so)),
DbgAction("SyncStepInto", QKeySequence(Qt.Key_F11), UIAction(plugin.cmd_si)),
DbgAction("SyncTranslate", QKeySequence(Qt.ALT + Qt.Key_F2), UIAction(plugin.cmd_translate)),
DbgAction("SyncBp", QKeySequence(Qt.Key_F2), UIAction(plugin.cmd_bp)),
DbgAction("SyncHwBp", QKeySequence(Qt.CTRL + Qt.Key_F2), UIAction(plugin.cmd_hwbp)),
DbgAction("SyncBpOneShot", QKeySequence(Qt.ALT + Qt.Key_F3), UIAction(plugin.cmd_bp1)),
DbgAction("SyncHwBpOneShot", QKeySequence(Qt.CTRL + Qt.Key_F3), UIAction(plugin.cmd_hwbp1))
)
for action in plugin_actions:
UIAction.registerAction(action.name, action.key_seq)
UIActionHandler.globalActions().bindAction(action.name, action.handler)
rs_log('commands added')
retsync_plugin = SyncPlugin()
retsync_plugin.init_widget()
add_commands(retsync_plugin)
| stefanseefeld/numba | numba/cuda/tests/cudapy/test_operator.py | Python | bsd-2-clause | 899 | 0 |
from __future__ import print_function, absolute_import, division
import numpy as np
from numba.cuda.testing import unittest
from numba import cuda
import operator
class TestOperatorModule(unittest.TestCase):
"""
Test if operator module is supported by the CUDA target.
"""
def operator_template(self, op):
@cuda.jit
def foo(a, b):
i = 0
            a[i] = op(a[i], b[i])
a = np.ones(1)
b = np.ones(1)
res = a.copy()
foo[1, 1](res, b)
np.testing.assert_equal(res, op(a, b))
def test_add(self):
self.operator_template(operator.add)
def test_sub(self):
self.operator_template(operator.sub)
def test_mul(self):
self.operator_template(operator.mul)
def test_truediv(self):
self.operator_template(operator.truediv)
if __name__ == '__main__':
unittest.main()
| satuma777/evoltier | evoltier/selection/nes_selection.py | Python | gpl-3.0 | 510 | 0.003922 |
import numpy as np
from ..weight import RankingBasedSelection
class NESSelection(RankingBasedSelection):
"""
    This selection scheme is the non-increasing transformation used for NES weights. See also
    [Wierstra et al., 2014]<http://jmlr.org/papers/v15/wierstra14a.html>
"""
def transform(self, rank_based_vals, xp=np):
lam = len(rank_based_vals)
        weight = xp.maximum(0, xp.log((lam / 2) + 1) - xp.log(rank_based_vals))
weight /= weight.sum()
return weight - 1. / lam
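A quick numeric illustration of the transform above (an added sketch, assuming plain NumPy and ranks 1..lam for lam = 4; not part of the original file):

```python
import numpy as np

# Worked example of the NES weight transform for lam = 4, ranks 1..4.
ranks = np.array([1.0, 2.0, 3.0, 4.0])
w = np.maximum(0, np.log(4 / 2 + 1) - np.log(ranks))
w /= w.sum()
u = w - 1.0 / 4    # approx [ 0.480,  0.020, -0.25, -0.25]
print(u, u.sum())  # utilities sum to ~0; the best rank gets the largest weight
```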
| banacer/door-wiz | src/identification/Identifier.py | Python | mit | 1,449 | 0.006901 |
import numpy as np
import pandas as pd
from pandas import Series, DataFrame
from scipy.spatial import distance
import matplotlib.pyplot as plt
from sklearn.cluster import DBSCAN
from sklearn import metrics
from sklearn.datasets.samples_generator import make_blobs
from sklearn.preprocessing import StandardScaler
from sklearn import decomposition # PCA
from sklearn.metrics import confusion_matrix
import json
import ml.Features as ft
from utils import Utils
class Identifier(object):
def __init__(self):
        columns = ['mean_height', 'min_height', 'max_height', 'mean_width', 'min_width', 'max_width', 'time', 'girth', 'id']
self.data = DataFrame(columns=columns)
self.event = []
@staticmethod
def subscribe(ch, method, properties, body):
"""
prints the body message. It's the default callback method
:param ch: keep null
:param method: keep null
:param properties: keep null
        :param body: the message
:return:
"""
#first we get the JSON from body
#we check if it's part of the walking event
#if walking event is completed, we
if __name__ == '__main__':
# we setup needed params
MAX_HEIGHT = 203
MAX_WIDTH = 142
SPEED = 3
SAMPLING_RATE = 8
mq_host = '172.26.56.122'
queue_name = 'door_data'
# setting up MQTT subscriber
    Utils.sub(queue_name=queue_name, callback=Identifier.subscribe, host=mq_host)
| nbro/ands | ands/algorithms/dp/subset_sum.py | Python | mit | 3,075 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
# Meta-info
Author: Nelson Brochado
Created: 03/09/2015
Updated: 07/03/2018
# Description
# TODO
- Add description.
- Add complexity analysis.
- Add documentation to functions.
"""
__all__ = ["recursive_subset_sum", "bottom_up_subset_sum"]
from pprint import pprint
def _check_negativeness(subset):
"""Returns the largest negative number in the subset.
If no negative number exists, it returns 0."""
s = 0
for n in subset:
if n < s: # Initially s is 0.
s = n
return s
def _shift_numbers(subset, smallest):
m = -smallest
for i, _ in enumerate(subset):
subset[i] += m
def _recursive_subset_sum_aux(subset, current_sum, index, n, solution):
if current_sum == n: # Solution was found...
print("Subset found.")
for i, s in enumerate(solution):
if s == 1:
print(subset[i])
elif index == len(subset):
return
else:
# Include the current ith element.
solution[index] = 1
current_sum += subset[index]
_recursive_subset_sum_aux(subset, current_sum, index + 1, n, solution)
        # Do not include the ith element.
solution[index] = 0
current_sum -= subset[index]
_recursive_subset_sum_aux(subset, current_sum, index + 1, n, solution)
def recursive_subset_sum(subset, s):
# Allows negative numbers too...
c_sum = 0
i = 0
solution = [0] * len(subset)
return _recursive_subset_sum_aux(subset, c_sum, i, s, solution)
def _get_subset_sum_matrix(subset, s):
m = [[0 for _ in range(s + 1)] for _ in range(len(subset) + 1)]
for i in range(1, s + 1):
m[0][i] = 0
for j in range(0, len(subset) + 1):
m[j][0] = 1
return m
def bottom_up_subset_sum(subset: list, s: int, return_matrix: bool = False):
"""Returns 1 if there's a subset whose sum of the numbers is equal to s, if
return_matrix == True, else it returns the matrix used during the
computation.
Note: the subset can only contain positive integers."""
m = _get_subset_sum_matrix(subset, s)
for i in range(1, len(subset) + 1):
for j in range(1, s + 1):
            if subset[i - 1] == j:
m[i][j] = 1
else:
                # We can include the current element,
                # because it is not greater than the current number j.
if subset[i - 1] <= j:
m[i][j] = max(m[i - 1][j], m[i - 1][j - subset[i - 1]])
else:
m[i][j] = m[i - 1][j]
return m[-1][-1] if not return_matrix else m
if __name__ == "__main__":
    # print(bottom_up_subset_sum((1, 3, 5, 5, 2, 1, 1, 6), 12))
pprint(bottom_up_subset_sum([2, 2, 2, 6], 6, return_matrix=True))
print(bottom_up_subset_sum((1, 1, 6), 2, return_matrix=True))
recursive_subset_sum([-2, 8, 6], 6)
# recursive_subset_sum((4, 2, 6), 6)
# recursive_subset_sum((0, 0, 6), 6)
# recursive_subset_sum((1, 3, 5, 5, 2, 1, 1, 6), 12)
| EricssonResearch/calvin-base | calvinextras/calvinsys/media/audio/play/BasePlay.py | Python | apache-2.0 | 1,328 | 0.002259 |
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.runtime.south.calvinsys import base_calvinsys_object
class BasePlay(base_calvinsys_object.BaseCalvinsysObject):
"""
Play audio file
"""
init_schema = {
"type": "object",
"properties": {
"audiofile": {
"description": "location of file to play",
"type": "string"
}
},
"description": "Set up audio player"
}
can_write_schema = {
"description": "True iff audio is ready to be played",
"type": "boolean"
}
write_schema = {
"description": "Play specified audio file, either specified in argument or at init",
"type": ["null", "boolean", "string"]
}
| nicolas-petit/clouder | clouder/clouder_runner_docker/runner.py | Python | gpl-3.0 | 7,557 | 0.000662 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Buron
# Copyright 2015, TODAY Clouder SASU
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License with Attribution
# clause as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License with
# Attribution clause along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, _, modules
from openerp.exceptions import except_orm
import time
import logging
_logger = logging.getLogger(__name__)
class ClouderImageVersion(models.Model):
"""
Add methods to manage the docker build specificity.
"""
_inherit = 'clouder.image.version'
@api.multi
def hook_build(self, dockerfile):
res = super(ClouderImageVersion, self).hook_build(dockerfile)
if self.registry_id.application_id.type_id.name == 'registry':
tmp_dir = '/tmp/' + self.image_id.name + '_' + self.fullname
server = self.registry_id.server_id
server.execute(['rm', '-rf', tmp_dir])
server.execute(['mkdir', '-p', tmp_dir])
if self.image_id.type_id:
if self.image_id.type_id.name == 'backup':
sources_path = \
modules.get_module_path('clouder') + '/sources'
else:
sources_path = modules.get_module_path(
'clouder_template_' + self.image_id.type_id.name
) + '/sources'
if self.local_dir_exist(sources_path):
server.send_dir(sources_path, tmp_dir + '/sources')
server.execute([
'echo "' + dockerfile.replace('"', '\\"') +
'" >> ' + tmp_dir + '/Dockerfile'])
server.execute(
['docker', 'build', '--pull', '--no-cache', '-t', self.fullname, tmp_dir])
server.execute(['docker', 'tag', self.fullname,
self.fullpath])
server.execute(
['docker', 'push', self.fullpath])
# TODO
# server.execute(['docker', 'rmi', self.fullname])
# server.execute(['docker', 'rmi', self.fullpath_localhost])
server.execute(['rm', '-rf', tmp_dir])
return res
@api.multi
def purge(self):
"""
Delete an image from the private registry.
"""
res = super(ClouderImageVersion, self).purge()
if self.registry_id.application_id.type_id.name == 'registry':
img_address = self.registry_id and 'localhost:' + \
self.registry_id.ports['registry']['localport'] +\
'/v1/repositories/' + self.image_id.name + \
'/tags/' + self.name
self.registry_id.execute(
['curl', '-o curl.txt -X', 'DELETE', img_address])
return res
class ClouderContainer(models.Model):
"""
Add methods to manage the docker container specificities.
"""
_inherit = 'clouder.container'
@api.multi
def hook_deploy_source(self):
res = super(ClouderContainer, self).hook_deploy_source()
if res:
return res
else:
if self.server_id == self.image_version_id.registry_id.server_id:
return self.image_version_id.fullpath
else:
folder = '/etc/docker/certs.d/' +\
self.image_version_id.registry_address
certfile = folder + '/ca.crt'
tmp_file = '/tmp/' + self.fullname
self.server_id.execute(['rm', certfile])
self.image_version_id.registry_id.get(
'/etc/ssl/certs/docker-registry.crt', tmp_file)
self.server_id.execute(['mkdir', '-p', folder])
self.server_id.send(tmp_file, certfile)
self.server_id.execute_local(['rm', tmp_file])
return self.image_version_id.fullpath
@api.multi
def hook_deploy_special_args(self, cmd):
return cmd
@api.multi
def hook_deploy(self, ports, volumes):
"""
Deploy the container in the server.
"""
res = super(ClouderContainer, self).hook_deploy(ports, volumes)
if not self.server_id.runner_id or \
self.server_id.runner_id.application_id.type_id.name \
== 'docker':
cmd = ['docker', 'run', '-d', '-t', '--restart=always']
for port in ports:
cmd.extend(
['-p',
(self.server_id.public_ip and self.server_id.ip + ':' or '') \
+ str(port.hostport) + ':' + port.localport \
+ (port.udp and '/udp' or '')])
volumes_from = {}
for volume in volumes:
if volume.hostpath:
arg = volume.hostpath + ':' + volume.name
if volume.readonly:
arg += ':ro'
cmd.extend(['-v', arg])
if volume.from_id:
volumes_from[volume.from_id.name] = volume.from_id.name
for key, volume in volumes_from.iteritems():
cmd.extend(['--volumes-from', volume])
for link in self.link_ids:
if link.name.make_link \
and link.target.server_id == self.server_id:
cmd.extend(['--link', link.target.name +
':' + link.name.name.code])
cmd = self.hook_deploy_special_args(cmd)
cmd.extend(['--name', self.name])
cmd.extend([self.hook_deploy_source()])
# Run container
self.server_id.execute(cmd)
return res
@api.multi
def hook_purge(self):
"""
Remove the container.
"""
res = super(ClouderContainer, self).hook_purge()
if not self.server_id.runner_id or \
self.server_id.runner_id.application_id.type_id.name\
== 'docker':
self.server_id.execute(['docker', 'rm', self.name])
return res
@api.multi
def stop_exec(self):
"""
Stop the container.
"""
res = super(ClouderContainer, self).stop_exec()
if not self.server_id.runner_id or \
self.server_id.runner_id.application_id.type_id.name\
== 'docker':
self.server_id.execute(['docker', 'stop', self.name])
return res
@api.multi
def start_exec(self):
"""
Restart the container.
"""
res = super(ClouderContainer, self).start_exec()
if not self.server_id.runner_id or \
self.server_id.runner_id.application_id.type_id.name\
== 'docker':
self.server_id.execute(['docker', 'start', self.name])
time.sleep(3)
return res
|
lbryio/lbryum-server
|
benchmarks/test_claims.py
|
Python
|
agpl-3.0
| 1,512
| 0.000661
|
import os
from ConfigParser import ConfigParser
from lbryumserver import deserialize
from lbryumserver.claims_storage import ClaimsStorage
from lbryumserver.processor import Dispatcher
from .fixtures import raw_tx_with_claim
def _get_config_for_test_storage(tmpdir):
config = ConfigParser()
config.add_section('leveldb')
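    # Cache sizes below are byte counts; ConfigParser stores option values
    # as strings, hence the str() wrapping.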
    config.set('leveldb', 'utxo_cache', str(64 * 1024 * 1024))
config.set('leveldb', 'hist_cache', str(80 * 1024))
config.set('leveldb', 'addr_cache', str(16 * 1024 * 1024))
config.set('leveldb', 'claimid_cache', str(16 * 1024 * 1024 * 8))
config.set('leveldb', 'claim_value_cache', str(1024 * 1024 * 1024))
config.set('leveldb', 'profiler', 'no')
    config.set('leveldb', 'path', os.path.join(tmpdir.strpath, 'lbryum_db'))
return config
def setup_claim_storage(tmpdir):
config = _get_config_for_test_storage(tmpdir)
dispatcher = Dispatcher(config)
shared = dispatcher.shared
return ClaimsStorage(config, shared, False)
def deserialize_raw_tx(raw_tx):
vds = deserialize.BCDataStream()
vds.write(raw_tx.decode('hex'))
return deserialize.parse_Transaction(vds, is_coinbase=False)
def test_import_claim(tmpdir, benchmark):
claim_storage = setup_claim_storage(tmpdir)
txid = "6236b6fc441cf42161051a03041a251f62a23e52b3d3348d6c33244770f265b4"
block_height = 313486
tx_with_claim = deserialize_raw_tx(raw_tx_with_claim)
benchmark(claim_storage.import_claim_transaction, txid, tx_with_claim, block_height)
|
dscottcs/superluminal
|
superluminal/sample/forward_sample.py
|
Python
|
apache-2.0
| 510
| 0.001961
|
import requests
import json
import logging
LOG = logging.getLogger(__name__)
class Forwarder(object):
def __init__(self):
        self.fwd_url = 'http://localhost:9999/forward'
def forward(self, reason, host=None, data=None):
body = {
'reason': reason
}
if host is not None:
body['host'] = host
if data is not None:
body['data'] = data
        resp = requests.post(self.fwd_url,
                             data=json.dumps(body))
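if __name__ == '__main__':
    # Minimal demo, not part of the original file: it assumes something is
    # listening at the forwarder URL above, and the host name is made up.
    logging.basicConfig(level=logging.INFO)
    Forwarder().forward('sample-event', host='web-01', data={'ok': True})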
|
SuLab/genewiki
|
old-assets/scripts/create_template.py
|
Python
|
mit
| 480
| 0.004167
|
# -*- coding: utf-8 -*-
'''
Creates a Gene Wiki protein box template around a gene specified by
the first argument passed to it on the command line.
Usage: `python create_template.py <entrez_id>`
'''
import sys
from genewiki.mygeneinfo import parse
if len(sys.argv) > 1:
    entrez = sys.argv[1]
try: int(entrez)
except ValueError:
sys.stderr.write("Entrez ids must contain only digits.")
sys.exit(1)
sys.stdout.write(str(parse(entrez)))
|
ramnes/qtile
|
libqtile/widget/gmail_checker.py
|
Python
|
mit
| 2,825
| 0.000354
|
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014, 2019 zordsdavini
# Copyright (c) 2014 Alexandr Kriptonov
# Copyright (c) 2014 Tycho Andersen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import imaplib
import re
from libqtile.log_utils import logger
from libqtile.widget import base
class GmailChecker(base.ThreadPoolText):
"""A simple gmail checker. If 'status_only_unseen' is True - set 'fmt' for one argument, ex. 'unseen: {0}'"""
orientations = base.ORIENTATION_HORIZONTAL
defaults = [
("update_interval", 30, "Update time in seconds."),
("username", None, "username"),
("password", None, "password"),
("email_path", "INBOX", "email_path"),
("display_fmt", "inbox[{0}],unseen[{1}]", "Display format"),
("status_only_unseen", False, "Only show unseen messages"),
]
def __init__(self, **config):
base.ThreadPoolText.__init__(self, "", **config)
self.add_defaults(GmailChecker.defaults)
def poll(self):
self.gmail = imaplib.IMAP4_SSL('imap.gmail.com')
self.gmail.login(self.username, self.password)
answer, raw_data = self.gmail.status(self.email_path,
'(MESSAGES UNSEEN)')
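        # A successful STATUS reply typically looks like
        # ('OK', [b'"INBOX" (MESSAGES 42 UNSEEN 3)']); the regexes below
        # pull the two counts out of that string.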
if answer == "OK":
dec = raw_data[0].decode()
            messages = int(re.search(r'MESSAGES\s+(\d+)', dec).group(1))
unseen = int(re.search(r'UNSEEN\s+(\d+)', dec).group(1))
            if self.status_only_unseen:
return self.display_fmt.format(unseen)
else:
                return self.display_fmt.format(messages, unseen)
else:
logger.exception(
'GmailChecker UNKNOWN error, answer: %s, raw_data: %s',
answer, raw_data)
return "UNKNOWN ERROR"
|
tensorflow/tensorboard
|
tensorboard/plugins/hparams/download_data_test.py
|
Python
|
apache-2.0
| 8,571
| 0.00035
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for download_data."""
from unittest import mock
from google.protobuf import text_format
import tensorflow as tf
from tensorboard.backend.event_processing import plugin_event_multiplexer
from tensorboard.plugins import base_plugin
from tensorboard.plugins.hparams import api_pb2
from tensorboard.plugins.hparams import backend_context
from tensorboard.plugins.hparams import download_data
EXPERIMENT = """
description: 'Test experiment'
user: 'Test user'
hparam_infos: [
  {
    name: 'initial_temp'
type: DATA_TYPE_FLOAT64
},
{
name: 'final_temp'
    type: DATA_TYPE_FLOAT64
},
{ name: 'string_hparam' },
{ name: 'bool_hparam' },
{ name: 'optional_string_hparam' }
]
metric_infos: [
{ name: { tag: 'current_temp' } },
{ name: { tag: 'delta_temp' } },
{ name: { tag: 'optional_metric' } }
]
"""
SESSION_GROUPS = """
session_groups {
name: "group_1"
hparams { key: "bool_hparam" value { bool_value: true } }
hparams { key: "final_temp" value { number_value: 150.0 } }
hparams { key: "initial_temp" value { number_value: 270.0 } }
hparams { key: "string_hparam" value { string_value: "a string" } }
metric_values {
name { tag: "current_temp" }
value: 10
training_step: 1
wall_time_secs: 1.0
}
metric_values { name { tag: "delta_temp" } value: 15
training_step: 2
wall_time_secs: 10.0
}
metric_values { name { tag: "optional_metric" } value: 33
training_step: 20
wall_time_secs: 2.0
}
sessions {
name: "session_1"
start_time_secs: 314159
end_time_secs: 314164
status: STATUS_SUCCESS
metric_values {
name { tag: "current_temp" }
value: 10
training_step: 1
wall_time_secs: 1.0
}
metric_values {
name { tag: "delta_temp" }
value: 15
training_step: 2
wall_time_secs: 10.0
}
metric_values {
name { tag: "optional_metric" }
value: 33
training_step: 20
wall_time_secs: 2.0
}
}
}
session_groups {
name: "group_2"
hparams { key: "bool_hparam" value { bool_value: false } }
hparams { key: "final_temp" value { number_value: 100.0 } }
hparams { key: "initial_temp" value { number_value: 280.0 } }
hparams { key: "string_hparam" value { string_value: "AAAAA"}}
metric_values {
name { tag: "current_temp" }
value: 51.0
training_step: 1
wall_time_secs: 1.0
}
metric_values {
name { tag: "delta_temp" }
value: 44.5
training_step: 2
wall_time_secs: 10.3333333
}
sessions {
name: "session_2"
start_time_secs: 314159
end_time_secs: 314164
status: STATUS_SUCCESS
metric_values {
name { tag: "current_temp" }
value: 100
training_step: 1
wall_time_secs: 1.0
}
metric_values { name { tag: "delta_temp" }
value: 150
training_step: 3
wall_time_secs: 11.0
}
}
sessions {
name: "session_3"
start_time_secs: 314159
end_time_secs: 314164
status: STATUS_FAILURE
metric_values {
name { tag: "current_temp" }
value: 1.0
training_step: 1
wall_time_secs: 1.0
}
metric_values { name { tag: "delta_temp" }
value: 1.5
training_step: 2
wall_time_secs: 10.0
}
}
sessions {
name: "session_5"
start_time_secs: 314159
end_time_secs: 314164
status: STATUS_SUCCESS
metric_values {
name { tag: "current_temp" }
value: 52.0
training_step: 1
wall_time_secs: 1.0
}
metric_values { name { tag: "delta_temp" }
value: -18
training_step: 2
wall_time_secs: 10.0
}
}
}
session_groups {
name: "group_3"
hparams { key: "bool_hparam" value { bool_value: true } }
hparams { key: "final_temp" value { number_value: 0.000012 } }
hparams { key: "initial_temp" value { number_value: 300.0 } }
hparams { key: "string_hparam" value { string_value: "a string_3"}}
hparams {
key: 'optional_string_hparam' value { string_value: 'BB' }
}
metric_values {
name { tag: "current_temp" }
value: 101.0
training_step: 1
wall_time_secs: 1.0
}
metric_values { name { tag: "delta_temp" } value: -15100000.0
training_step: 2
wall_time_secs: 10.0
}
sessions {
name: "session_4"
start_time_secs: 314159
end_time_secs: 314164
status: STATUS_UNKNOWN
metric_values {
name { tag: "current_temp" }
value: 101.0
training_step: 1
wall_time_secs: 1.0
}
metric_values { name { tag: "delta_temp" } value: -151000000.0
training_step: 2
wall_time_secs: 10.0
}
}
}
total_size: 3
"""
EXPECTED_LATEX = r"""\begin{table}[tbp]
\begin{tabular}{llllllll}
initial\_temp & final\_temp & string\_hparam & bool\_hparam & optional\_string\_hparam & current\_temp & delta\_temp & optional\_metric \\ \hline
$270$ & $150$ & a string & $1$ & & $10$ & $15$ & $33$ \\
$280$ & $100$ & AAAAA & $0$ & & $51$ & $44.5$ & - \\
$300$ & $1.2\cdot 10^{-5}$ & a string\_3 & $1$ & BB & $101$ & $-1.51\cdot 10^{7}$ & - \\
\hline
\end{tabular}
\end{table}
"""
EXPECTED_CSV = """initial_temp,final_temp,string_hparam,bool_hparam,optional_string_hparam,current_temp,delta_temp,optional_metric\r
270.0,150.0,a string,True,,10.0,15.0,33.0\r
280.0,100.0,AAAAA,False,,51.0,44.5,\r
300.0,1.2e-05,a string_3,True,BB,101.0,-15100000.0,\r
"""
class DownloadDataTest(tf.test.TestCase):
def setUp(self):
self._mock_multiplexer = mock.create_autospec(
plugin_event_multiplexer.EventMultiplexer
)
self._mock_tb_context = base_plugin.TBContext(
multiplexer=self._mock_multiplexer
)
def _run_handler(self, experiment, session_groups, response_format):
experiment_proto = text_format.Merge(experiment, api_pb2.Experiment())
session_groups_proto = text_format.Merge(
session_groups, api_pb2.ListSessionGroupsResponse()
)
num_columns = len(experiment_proto.hparam_infos) + len(
experiment_proto.metric_infos
)
handler = download_data.Handler(
backend_context.Context(self._mock_tb_context),
experiment_proto,
session_groups_proto,
response_format,
[True] * num_columns,
)
return handler.run()
def test_csv(self):
body, mime_type = self._run_handler(
EXPERIMENT, SESSION_GROUPS, download_data.OutputFormat.CSV
)
self.assertEqual("text/csv", mime_type)
self.assertEqual(EXPECTED_CSV, body)
def test_latex(self):
body, mime_type = self._run_handler(
EXPERIMENT, SESSION_GROUPS, download_data.OutputFormat.LATEX
)
self.assertEqual("application/x-latex", mime_type)
self.assertEqual(EXPECTED_LATEX, body)
def test_json(self):
body, mime_type = self._run_handler(
EXPERIMENT, SESSION_GROUPS, download_data.OutputFormat.JSON
)
self.assertEqual("application/json", mime_type)
expected_result = {
"header": [
"initial_temp",
"final_temp",
"string_hparam",
"bool_hparam",
"optional_string_hparam",
"current_temp",
"delta_temp",
"optional_metric",
],
"rows": [
[270.0, 150.0, "a string", True, "", 10.0, 15.0, 33.0],
[280.0, 100.0, "AAAAA", False, "", 51.0, 44.5, No
|
FEniCS/dolfin
|
test/unit/python/mesh/test_manifold_point_search.py
|
Python
|
lgpl-3.0
| 971
| 0
|
#!/usr/bin/env py.test
import pytest
import numpy
from dolfin import *
def test_manifold_point_search():
# Simple two-triangle surface in 3d
vertices = [
        (0.0, 0.0, 1.0),
(1.0, 1.0, 1.0),
(1.0, 0.0, 0.0),
(0.0, 1.0, 0.0),
]
cells = [
(0, 1, 2),
(0, 1, 3),
]
mesh = Mesh()
me = MeshEditor()
me.open(mesh, "triangle", 2, 3)
me.init_vertices(len(vertices))
for i, v in enumerate(vertices):
me.add_vertex(i, numpy.array(v, dtype='float'))
me.init_cells(len(cells))
for i, c in enumerate(cells):
        me.add_cell(i, numpy.array(c, dtype='uint'))
me.close()
mesh.init_cell_orientations(Expression(("0.0", "0.0", "1.0"), degree=0))
bb = mesh.bounding_box_tree()
p = Point(2.0/3.0, 1.0/3.0, 2.0/3.0)
assert bb.compute_first_entity_collision(p) == 0
p = Point(1.0/3.0, 2.0/3.0, 2.0/3.0)
assert bb.compute_first_entity_collision(p) == 1
|
epage/Gonvert
|
gonvert/constants.py
|
Python
|
gpl-2.0
| 161
| 0
|
__pretty_app_name__ = "Gonvert"
__app_name__ = "gonvert"
__version__ = "1.1.6"
__build__ = 0
__app_magic__ = 0xdeadbeef
PROFILE_STARTUP = False
IS_MAEMO = True
|
GeeteshKhatavkar/gh0st_kernel_samsung_royxx
|
arm-2010.09/arm-none-eabi/lib/armv6-m/libstdc++.a-gdb.py
|
Python
|
gpl-2.0
| 2,346
| 0.00682
|
# -*- python -*-
# Copyright (C) 2009 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import gdb
import os
import os.path
pythondir = '/opt/codesourcery/arm-none-eabi/share/gcc-4.5.1/python'
libdir = '/opt/codesourcery/arm-none-eabi/lib/armv6-m'
# This file might be loaded when there is no current objfile. This
# can happen if the user loads it manually. In this case we don't
# update sys.path; instead we just hope the user managed to do that
# beforehand.
if gdb.current_objfile () is not None:
# Update module path. We want to find the relative path from libdir
# to pythondir, and then we want to apply that relative path to the
# directory holding the objfile with which this file is associated.
# This preserves relocatability of the gcc tree.
# Do a simple normalization that removes duplicate separators.
pythondir = os.path.normpath (pythondir)
libdir = os.path.normpath (libdir)
prefix = os.path.commonprefix ([libdir, pythondir])
# In some bizarre configuration we might have found a match in the
# middle of a directory name.
if prefix[-1] != '/':
prefix = os.path.dirname (prefix) + '/'
# Strip off the prefix.
pythondir = pythondir[len (prefix):]
libdir = libdir[len (prefix):]
# Compute the ".."s needed to get from libdir to the prefix.
dotdots = ('..' + os.sep) * len (libdir.split (os.sep))
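    # Worked example with the paths above: the common prefix is
    # '/opt/codesourcery/arm-none-eabi/', so pythondir becomes
    # 'share/gcc-4.5.1/python', libdir becomes 'lib/armv6-m', and
    # dotdots is '../../' -- two levels up from libdir to the prefix.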
objfile = gdb.current_objfile ().filename
dir = os.path.join (os.path.dirname (objfile), dotdots, pythondir)
if not dir in sys.path:
sys.path.insert(0, dir)
# Load the pretty-printers.
from libstdcxx.v6.printers import register_libstdcxx_printers
register_libstdcxx_printers (gdb.current_objfile ())
|
jdmonaco/vmo-feedback-model
|
src/remapping/trends.py
|
Python
|
mit
| 9,633
| 0.005294
|
# encoding: utf-8
"""
trends.py -- Analysis of trends in response changes across mismatch angle
Exported namespace: MismatchTrends
Created by Joe Monaco on 2010-02-17.
Copyright (c) 2009-2011 Johns Hopkins University. All rights reserved.
This software is provided AS IS under the terms of the Open Source MIT License.
See http://www.opensource.org/licenses/mit-license.php.
"""
# Library imports
import numpy as np
# Package imports
from . import CL_LABELS, CL_COLORS
from ..core.analysis import BaseAnalysis
from ..tools.stats import smooth_pdf
from ..tools import circstat
class MismatchTrends(BaseAnalysis):
"""
Collate data from multiple MismatchAnalysis results to show trends in
response changes across mismatch angles.
"""
label = "mismatch trends"
def collect_data(self, *args):
"""Collate data from previous results for analysis and visualization
"""
# Sort analysis results data and get list of mismatch angles
self.out('Sorting results data according to mismatch...')
sort_data = []
for session in args:
data = session.results
angle = data['mismatch']
if type(angle) is tuple:
label = ', '.join([str(a) for a in angle])
angle = min(angle)
elif type(angle) is int:
label = str(angle)
else:
raise TypeError, \
'bad mismatch angle type (%s)'%str(type(angle))
sort_data.append((angle, label, data))
sort_data.sort()
mismatch, angle_labels, data_list = np.array(sort_data, 'O').T
self.results['mismatch_labels'] = angle_labels
self.results['N_mismatch'] = N = len(data_list)
# Mean and SEM of rotation angles and peak correlations
self.out('Computing statistics of rotations and correlations...')
self.results['rotations_mean'] = rot_mean = np.empty(N, 'd')
self.results['rotations_sem'] = rot_sem = np.empty(N, 'd')
self.results['correlations_mean'] = corr_mean = np.empty(N, 'd')
self.results['correlations_sem'] = corr_sem = np.empty(N, 'd')
self.results['N_common'] = N_common = np.empty(N, 'd')
for i, data in enumerate(data_list):
rots, corrs = data['rotcorr']
rots = (np.pi/180)*rots
N_common[i] = rots.shape[0]
rot_mean[i] = circstat.mean(rots)
rot_sem[i] = circstat.std(rots) / np.sqrt(N_common[i])
corr_mean[i] = np.mean(corrs)
corr_sem[i] = np.std(corrs) / np.sqrt(N_common[i])
rot_mean[rot_mean>np.pi] -= 2*np.pi # make distal rots negative
rot_mean *= 180/np.pi # convert back to degrees
        # Smoothed density distributions
self.out('Computing smoothed density estimates...')
rot_pdf = []
corr_pdf = []
for data in data_list:
rots, corrs = data['rotcorr'].copy()
rots[rots>180] -= 360 # make distal rots negative
rot_pdf.append(smooth_pdf(rots))
            corr_pdf.append(smooth_pdf(corrs))
self.results['rotations_pdf'] = np.array(rot_pdf, 'O')
self.results['correlations_pdf'] = np.array(corr_pdf, 'O')
# Population code rotation via correlation diagonals
self.out('Collating correlation diagonals...')
diags = [data['diags_MIS'] for data in data_list]
self.results['diagonals'] = np.array(diags, 'O')
# Response category distribution
self.out('Collating categorical response distributions...')
self.results['categories'] = categories = {}
N_total = []
for data in data_list:
N_total.append(sum([data['response_tallies'][key] for key in
CL_LABELS]))
for key in CL_LABELS:
categories[key] = \
np.array(
[data_list[i]['response_tallies'][key]/float(N_total[i])
for i in xrange(N)])
self.results['N_total'] = np.array(N_total)
# Good-bye!
self.out('All done!')
def create_plots(self):
"""Create trends plots for rotations, peak correlations and categorical
remapping statistics.
"""
from pylab import figure, subplot, rcParams, draw
from ..tools.images import tiling_dims
self.figure = {}
res = self.results
labels = res['mismatch_labels']
N = res['N_mismatch']
N_total = res['N_total']
diagonals_figsize = 10, 5
rotation_figsize = 13, 9
category_figsize = 10, 7
category_pie_figsize = 10, 10
# Population mismatch correlation diagonals
rcParams['figure.figsize'] = diagonals_figsize
self.figure['diagonals_trends'] = f = figure()
f.set_size_inches(diagonals_figsize)
f.suptitle('Population Mismatch Correlations', fontsize=16)
line_kwargs = dict(lw=2, aa=True)
ax = subplot(111)
ax.hold(True)
for i in xrange(N):
ax.plot(*res['diagonals'][i], label=labels[i], **line_kwargs)
ax.axis('tight')
ax.set_ylim(0,1)
ax.set_xlabel('Rotation (degrees)')
ax.set_ylabel('Diagonal Correlation')
ax.legend(fancybox=True, loc=2)
# Population and cluster rotations and correlations
rcParams['figure.figsize'] = rotation_figsize
self.figure['rotation_trends'] = f = figure()
f.set_size_inches(rotation_figsize)
f.suptitle('Cluster Rotation and Peak Correlation', fontsize=16)
ax = subplot(221)
ax.hold(True)
for i in xrange(N):
ax.plot(*res['rotations_pdf'][i], label=labels[i], **line_kwargs)
ax.set_xlim(-180, 180)
ax.set_xlabel('Rotation (degrees)')
ax.set_ylabel('Pr[Rotation]')
ax.legend(fancybox=True, loc=2)
ax = subplot(223)
ax.hold(True)
for i in xrange(N):
ax.plot(*res['correlations_pdf'][i], label=labels[i], **line_kwargs)
ax.set_xlim(0, 1)
ax.set_xlabel('Peak Correlation')
ax.set_ylabel('Pr[Peak Correlation]')
err_kwargs = dict(fmt='-', ecolor='k', capsize=5, ms=3, elinewidth=1,
lw=2, aa=True)
ax1 = subplot(122)
ax2 = ax1.twinx()
x = np.arange(1, N+1)
rh = ax1.errorbar(x, res['rotations_mean'], yerr=res['rotations_sem'],
c=(0.2, 0.0, 0.8), **err_kwargs)
ch = ax2.errorbar(x, res['correlations_mean'], yerr=res['correlations_sem'],
c=(0.8, 0.0, 0.2), **err_kwargs)
ax2.set_ylabel('Peak Correlation', rotation=270)
ax2.set_ylim(0, 1)
ax1.set_ylabel('Rotation (degrees)')
ax1.set_xlabel('Mismatch')
ax1.set_xlim(0.5, N+0.5)
ax1.set_xticks(x)
ax1.set_xticklabels(labels)
ax1.legend((rh[0], ch[0]), ('Rotation', 'Peak Correlation'),
fancybox=True, loc=0)
# Category distribution stacked bar chart
rcParams['figure.figsize'] = category_figsize
self.figure['response_trends'] = f = figure()
f.set_size_inches(category_figsize)
f.suptitle('Response Changes: Trends', fontsize=16)
plot_category_chart(subplot(111), res)
# Category distribution pie charts
rcParams['figure.figsize'] = category_pie_figsize
self.figure['response_trends_pie'] = f = figure()
f.set_size_inches(category_pie_figsize)
f.suptitle('Response Changes', fontsize=16)
r, c = tiling_dims(N)
pie_kwargs = dict(labels=CL_LABELS, colors=CL_COLORS, autopct='%.1f',
pctdistance=0.8, labeldistance=100)
for i in xrange(N):
ax = subplot(r, c, i+1)
            tally = [res['categories'][k][i] for k in CL_LABELS]
ax.pie(tally, **pie_kwargs)
ax.axis('equal')
ax.set_xlim(-1.04, 1.04) # fixes weird edge clipping
ax.set_title('%s (%d total)'%(labels[i], N_total[i]))
if i == 0:
|
czielinski/portfolioopt
|
portfolioopt/portfolioopt.py
|
Python
|
mit
| 9,550
| 0.001152
|
# The MIT License (MIT)
#
# Copyright (c) 2015 Christian Zielinski
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""PortfolioOpt: Financial Portfolio Optimization
This module provides a set of functions for financial portfolio
optimization, such as construction of Markowitz portfolios, minimum
variance portfolios and tangency portfolios (i.e. maximum Sharpe ratio
portfolios) in Python. The construction of long-only, long/short and
market neutral portfolios is supported."""
import numpy as np
import pandas as pd
import cvxopt as opt
import cvxopt.solvers as optsolvers
import warnings
__all__ = ['markowitz_portfolio',
'min_var_portfolio',
'tangency_portfolio',
'max_ret_portfolio',
'truncate_weights']
def markowitz_portfolio(cov_mat, exp_rets, target_ret,
allow_short=False, market_neutral=False):
"""
Computes a Markowitz portfolio.
Parameters
----------
cov_mat: pandas.DataFrame
Covariance matrix of asset returns.
exp_rets: pandas.Series
Expected asset returns (often historical returns).
target_ret: float
Target return of portfolio.
allow_short: bool, optional
If 'False' construct a long-only portfolio.
If 'True' allow shorting, i.e. negative weights.
market_neutral: bool, optional
If 'False' sum of weights equals one.
If 'True' sum of weights equal zero, i.e. create a
market neutral portfolio (implies allow_short=True).
Returns
-------
weights: pandas.Series
Optimal asset weights.
"""
if not isinstance(cov_mat, pd.DataFrame):
raise ValueError("Covariance matrix is not a DataFrame")
if not isinstance(exp_rets, pd.Series):
raise ValueError("Expected returns is not a Series")
if not isinstance(target_ret, float):
raise ValueError("Target return is not a float")
if not cov_mat.index.equals(exp_rets.index):
raise ValueError("Indices do not match")
if market_neutral and not allow_short:
warnings.warn("A market neutral portfolio implies shorting")
allow_short=True
n = len(cov_mat)
P = opt.matrix(cov_mat.values)
q = opt.matrix(0.0, (n, 1))
# Constraints Gx <= h
if not allow_short:
# exp_rets*x >= target_ret and x >= 0
G = opt.matrix(np.vstack((-exp_rets.values,
-np.identity(n))))
h = opt.matrix(np.vstack((-target_ret,
+np.zeros((n, 1)))))
else:
# exp_rets*x >= target_ret
G = opt.matrix(-exp_rets.values).T
h = opt.matrix(-target_ret)
# Constraints Ax = b
# sum(x) = 1
A = opt.matrix(1.0, (1, n))
if not market_neutral:
b = opt.matrix(1.0)
else:
b = opt.matrix(0.0)
# Solve
optsolvers.options['show_progress'] = False
sol = optsolvers.qp(P, q, G, h, A, b)
if sol['status'] != 'optimal':
warnings.warn("Convergence problem")
# Put weights into a labeled series
weights = pd.Series(sol['x'], index=cov_mat.index)
return weights
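# Hedged usage sketch, not part of the original module; the asset names and
# numbers below are illustrative assumptions.
def _example_markowitz_usage():
    """Illustrative only: run markowitz_portfolio on synthetic data."""
    assets = ['A', 'B']
    cov = pd.DataFrame([[0.04, 0.006], [0.006, 0.09]],
                       index=assets, columns=assets)
    rets = pd.Series([0.08, 0.12], index=assets)
    # Returns a pandas.Series of weights summing to one.
    return markowitz_portfolio(cov, rets, target_ret=0.10)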
def min_var_portfolio(cov_mat, allow_short=False):
"""
Computes the minimum variance portfolio.
Note: As the variance is not invariant with respect
to leverage, it is not possible to construct non-trivial
market neutral minimum variance portfolios. This is because
the variance approaches zero with decreasing leverage,
i.e. the market neutral portfolio with minimum variance
is not invested at all.
Parameters
----------
cov_mat: pandas.DataFrame
Covariance matrix of asset returns.
allow_short: bool, optional
If 'False' construct a long-only portfolio.
If 'True' allow shorting, i.e. negative weights.
Returns
-------
weights: pandas.Series
Optimal asset weights.
"""
if not isinstance(cov_mat, pd.DataFrame):
raise ValueError("Covariance matrix is not a DataFrame")
n = len(cov_mat)
P = opt.matrix(cov_mat.values)
q = opt.matrix(0.0, (n, 1))
# Constraints Gx <= h
if not allow_short:
# x >= 0
G = opt.matrix(-np.identity(n))
h = opt.matrix(0.0, (n, 1))
else:
G = None
h = None
# Constraints Ax = b
# sum(x) = 1
A = opt.matrix(1.0, (1, n))
b = opt.matrix(1.0)
# Solve
optsolvers.options['show_progress'] = False
sol = optsolvers.qp(P, q, G, h, A, b)
if sol['status'] != 'optimal':
warnings.warn("Convergence problem")
# Put weights into a labeled series
weights = pd.Series(sol['x'], index=cov_mat.index)
return weights
def tangency_portfolio(cov_mat, exp_rets, allow_short=False):
"""
Computes a tangency portfolio, i.e. a maximum Sharpe ratio portfolio.
Note: As the Sharpe ratio is not invariant with respect
to leverage, it is not possible to construct non-trivial
market neutral tangency portfolios. This is because for
a positive initial Sharpe ratio the sharpe grows unbound
with increasing leverage.
Parameters
----------
cov_mat: pandas.DataFrame
Covariance matrix of asset returns.
exp_rets: pandas.Series
Expected asset returns (often historical returns).
allow_short: bool, optional
If 'False' construct a long-only portfolio.
If 'True' allow shorting, i.e. negative weights.
Returns
-------
weights: pandas.Series
Optimal asset weights.
"""
if not isinstance(cov_mat, pd.DataFrame):
raise ValueError("Covariance matrix is not a DataFrame")
if not isinstance(exp_rets, pd.Series):
raise ValueError("Expected returns is not a Series")
if not cov_mat.index.equals(exp_rets.index):
raise ValueError("Indices do not match")
n = len(cov_mat)
P = opt.matrix(cov_mat.values)
q = opt.matrix(0.0, (n, 1))
# Constraints Gx <= h
if not allow_short:
# exp_rets*x >= 1 and x >= 0
        G = opt.matrix(np.vstack((-exp_rets.values,
                                  -np.identity(n))))
h = opt.matrix(np.vstack((-1.0,
np.zeros((n, 1)))))
else:
# exp_rets*x >= 1
G = opt.matrix(-exp_rets.values).T
h = opt.matrix(-1.0)
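    # This QP minimizes x' Sigma x subject to mu' x >= 1 (plus x >= 0 when
    # shorting is disallowed). Because the Sharpe ratio is invariant under
    # positive scaling of the weights, normalizing the solution to sum to
    # one (below) yields the maximum Sharpe ratio portfolio.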
# Solve
optsolvers.options['show_progress'] = False
sol = optsolvers.qp(P, q, G, h)
if sol['status'] != 'optimal':
warnings.warn("Convergence problem")
    # Put weights into a labeled series
weights = pd.Series(sol['x'], index=cov_mat.index)
# Rescale weights, so that sum(weights) = 1
weights /= weights.sum()
return weights
def max_ret_portfolio(exp_rets):
"""
Computes a long-only maximum return portfolio, i.e. selects
the assets with maximal return. If there is more than one
asset with maximal return, equally weight all of them.
Parameters
----------
exp_rets: pandas.Series
Expected asset returns (often historical returns).
Returns
-------
weights: pandas.Series
Optimal as
|
arbn/pysaml2
|
tests/sp_2_conf.py
|
Python
|
bsd-2-clause
| 1,571
| 0.007638
|
from pathutils import full_path
CONFIG = {
"entityid" : "urn:mace:example.com:saml:roland:sp",
"name" : "urn:mace:example.com:saml:roland:sp",
"description": "My own SP",
"service": {
"sp": {
|
"endpoints":{
"assertion_consumer_service": ["http://lingon.catalogix.se:8087/"],
},
"required_attributes": ["surName", "givenName", "mail"],
"optional_attribu
|
tes": ["title"],
"idp": ["urn:mace:example.com:saml:roland:idp"],
}
},
"debug" : 1,
"key_file" : full_path("test.key"),
"cert_file" : full_path("test.pem"),
"xmlsec_binary" : None,
"metadata": {
"local": [full_path("idp_2.xml")],
},
"virtual_organization" : {
"urn:mace:example.com:it:tek":{
"nameid_format" : "urn:oid:1.3.6.1.4.1.1466.115.121.1.15-NameID",
"common_identifier": "umuselin",
}
},
"subject_data": full_path("subject_data.db"),
"accepted_time_diff": 60,
"attribute_map_dir" : full_path("attributemaps"),
"organization": {
"name": ("AB Exempel", "se"),
"display_name": ("AB Exempel", "se"),
"url": "http://www.example.org",
},
"contact_person": [{
"given_name": "Roland",
"sur_name": "Hedberg",
"telephone_number": "+46 70 100 0000",
"email_address": ["tech@eample.com", "tech@example.org"],
"contact_type": "technical"
},
],
"secret": "0123456789",
"only_use_keys_in_metadata": True
}
|
darkonie/dcos
|
dcos_installer/test_backend.py
|
Python
|
apache-2.0
| 12,196
| 0.002378
|
import json
import os
import subprocess
import uuid
import passlib.hash
import pytest
import gen
import gen.build_deploy.aws
import release
from dcos_installer import backend
from dcos_installer.config import Config, make_default_config_if_needed, to_config
os.environ["BOOTSTRAP_ID"] = "12345"
@pytest.fixture(scope='module')
def config():
if not os.path.exists('dcos-release.config.yaml'):
pytest.skip("Skipping because there is no configuration in dcos-release.config.yaml")
return release.load_config('dcos-release.config.yaml')
@pytest.fixture(scope='module')
def config_testing(config):
if 'testing' not in config:
pytest.skip("Skipped because there is no `testing` configuration in dcos-release.config.yaml")
return config['testing']
@pytest.fixture(scope='module')
def config_aws(config_testing):
if 'aws' not in config_testing:
pytest.skip("Skipped because there is no `testing.aws` configuration in dcos-release.config.yaml")
return config_testing['aws']
def test_password_hash():
"""Tests that the password hashing method creates de-cryptable hash
"""
password = 'DcosTestingPassword!@#'
# only reads from STDOUT
hash_pw = subprocess.check_output(['dcos_installer', '--hash-password', password])
print(hash_pw)
hash_pw = hash_pw.decode('ascii').strip('\n')
assert passlib.hash.sha512_crypt.verify(password, hash_pw), 'Hash does not match password'
def test_set_superuser_password(tmpdir):
"""Test that --set-superuser-hash works"""
with tmpdir.as_cwd():
        tmpdir.join('genconf').ensure(dir=True)
        # TODO(cmaloney): Add tests for the behavior around a non-existent config.yaml
# Setting in a non-empty config.yaml which has no password set
make_default_config_if_needed('genconf/config.yaml')
assert 'superuser_password_hash' not in Config('genconf/config.yaml').config
# Set the password
create_fake_build_artifacts(tmpdir)
subprocess.check_call(['dcos_installer', '--set-superuser-password', 'foo'], cwd=str(tmpdir))
# Check that config.yaml has the password set
config = Config('genconf/config.yaml')
assert passlib.hash.sha512_crypt.verify('foo', config['superuser_password_hash'])
def test_generate_node_upgrade_script(tmpdir, monkeypatch):
upgrade_config = """
---
    # The name of your DC/OS cluster. Visible in the DC/OS user interface.
cluster_name: 'DC/OS'
master_discovery: static
exhibitor_storage_backend: 'static'
resolvers:
- 8.8.8.8
- 8.8.4.4
ssh_port: 22
process_timeout: 10000
bootstrap_url: file:///opt/dcos_install_tmp
master_list: ['10.0.0.1', '10.0.0.2', '10.0.0.5']
"""
monkeypatch.setenv('BOOTSTRAP_VARIANT', '')
create_config(upgrade_config, tmpdir)
create_fake_build_artifacts(tmpdir)
output = subprocess.check_output(['dcos_installer', '--generate-node-upgrade-script', 'fake'], cwd=str(tmpdir))
assert output.decode('utf-8').splitlines()[-1].split("Node upgrade script URL: ", 1)[1]\
.endswith("dcos_node_upgrade.sh")
try:
subprocess.check_output(['dcos_installer', '--generate-node-upgrade-script'], cwd=str(tmpdir))
except subprocess.CalledProcessError as e:
print(e.output)
assert e.output.decode('ascii') == "Must provide the version of the cluster upgrading from\n"
else:
raise Exception("Test passed, this should not pass without specifying a version number")
def test_version(monkeypatch):
monkeypatch.setenv('BOOTSTRAP_VARIANT', 'some-variant')
version_data = subprocess.check_output(['dcos_installer', '--version']).decode()
assert json.loads(version_data) == {
'version': '1.9-dev',
'variant': 'some-variant'
}
def test_good_create_config_from_post(tmpdir):
"""
Test that it creates the config
"""
# Create a temp config
workspace = tmpdir.strpath
temp_config_path = workspace + '/config.yaml'
make_default_config_if_needed(temp_config_path)
temp_ip_detect_path = workspace + '/ip-detect'
f = open(temp_ip_detect_path, "w")
f.write("#/bin/bash foo")
good_post_data = {
"agent_list": ["10.0.0.2"],
"master_list": ["10.0.0.1"],
"cluster_name": "Good Test",
"resolvers": ["4.4.4.4"],
"ip_detect_filename": temp_ip_detect_path
}
expected_good_messages = {}
create_fake_build_artifacts(tmpdir)
with tmpdir.as_cwd():
messages = backend.create_config_from_post(
post_data=good_post_data,
config_path=temp_config_path)
assert messages == expected_good_messages
def test_bad_create_config_from_post(tmpdir):
# Create a temp config
workspace = tmpdir.strpath
temp_config_path = workspace + '/config.yaml'
make_default_config_if_needed(temp_config_path)
bad_post_data = {
"agent_list": "foo",
"master_list": ["foo"],
}
expected_bad_messages = {
"agent_list": "Must be a JSON formatted list, but couldn't be parsed the given value `foo` as "
"one because of: Expecting value: line 1 column 1 (char 0)",
"master_list": 'Invalid IPv4 addresses in list: foo',
}
create_fake_build_artifacts(tmpdir)
with tmpdir.as_cwd():
messages = backend.create_config_from_post(
post_data=bad_post_data,
config_path=temp_config_path)
assert messages == expected_bad_messages
def test_do_validate_config(tmpdir, monkeypatch):
monkeypatch.setenv('BOOTSTRAP_VARIANT', 'test_variant')
# Create a temp config
genconf_dir = tmpdir.join('genconf')
genconf_dir.ensure(dir=True)
temp_config_path = str(genconf_dir.join('config.yaml'))
    # Initialize with defaults
make_default_config_if_needed(temp_config_path)
create_fake_build_artifacts(tmpdir)
expected_output = {
'ip_detect_contents': 'ip-detect script `genconf/ip-detect` must exist',
'ssh_user': 'Must set ssh_user, no way to calculate value.',
'master_list': 'Must set master_list, no way to calculate value.',
'ssh_key_path': 'could not find ssh private key: genconf/ssh_key'
}
with tmpdir.as_cwd():
assert Config(config_path='genconf/config.yaml').do_validate(include_ssh=True) == expected_output
def test_get_config(tmpdir):
workspace = tmpdir.strpath
temp_config_path = workspace + '/config.yaml'
expected_data = {
'cluster_name': 'DC/OS',
'master_discovery': 'static',
'exhibitor_storage_backend': 'static',
'resolvers': ['8.8.8.8', '8.8.4.4'],
'ssh_port': 22,
'process_timeout': 10000,
'bootstrap_url': 'file:///opt/dcos_install_tmp'
}
make_default_config_if_needed(temp_config_path)
config = Config(temp_config_path)
assert expected_data == config.config
def test_determine_config_type(tmpdir):
# Ensure the default created config is of simple type
workspace = tmpdir.strpath
temp_config_path = workspace + '/config.yaml'
make_default_config_if_needed(temp_config_path)
got_output = backend.determine_config_type(config_path=temp_config_path)
expected_output = {
'message': '',
'type': 'minimal',
}
assert got_output == expected_output
def test_success():
mock_config = to_config({
'master_list': ['10.0.0.1', '10.0.0.2', '10.0.0.5'],
'agent_list': ['10.0.0.3', '10.0.0.4']
})
expected_output = {
"success": "http://10.0.0.1",
"master_count": 3,
"agent_count": 2
}
expected_output_bad = {
"success": "",
"master_count": 0,
"agent_count": 0
}
got_output, code = backend.success(mock_config)
mock_config.update({'master_list': '', 'agent_list': ''})
bad_out, bad_code = backend.success(mock_config)
assert got_output == expected_output
assert code == 200
assert bad_out == expected_output_bad
assert bad_code == 400
def test_accept_overrides_for_undefined_config_params(tmpdir):
temp_config_path = tmpdir.strpath + '/config.yaml'
par
|
darkwing/kuma
|
kuma/users/tests/test_templates.py
|
Python
|
mpl-2.0
| 13,321
| 0.00015
|
import requests_mock
from django.conf import settings
from jingo.helpers import urlparams
from nose.tools import eq_, ok_
from pyquery import PyQuery as pq
from waffle.models import Flag
from kuma.core.urlresolvers import reverse
from . import UserTestCase
from .test_views import TESTUSER_PASSWORD
def add_persona_verify_response(mock_requests, data):
mock_requests.post(
settings.PERSONA_VERIFIER_URL,
json=data,
headers={
'content_type': 'application/json',
}
)
@requests_mock.mock()
class SignupTests(UserTestCase):
localizing_client = False
def test_signup_page(self, mock_requests):
add_persona_verify_response(mock_requests, {
'status': 'okay',
'email': 'newuser@test.com',
'audience': 'https://developer-local.allizom.org',
})
url = reverse('persona_login')
response = self.client.post(url, follow=True)
self.assertNotContains(response, 'Sign In Failure')
test_strings = ['Create your MDN profile to continue',
'choose a username',
'having trouble',
'I agree',
'to Mozilla',
'Terms',
'Privacy Notice']
for test_string in test_strings:
self.assertContains(response, test_string)
def test_signup_page_disabled(self, mock_requests):
add_persona_verify_response(mock_requests, {
'status': 'okay',
'email': 'newuser@test.com',
'audience': 'https://developer-local.allizom.org',
})
url = reverse('persona_login')
registration_disabled = Flag.objects.create(
name='registration_disabled',
everyone=True
)
response = self.client.post(url, follow=True)
self.assertNotContains(response, 'Sign In Failure')
self.assertContains(response, 'Profile Creation Disabled')
# re-enable registration
registration_disabled.everyone = False
registration_disabled.save()
response = self.client.post(url, follow=True)
test_strings = ['Create your MDN profile to continue',
'choose a username',
'having trouble']
for test_string in test_strings:
self.assertContains(response, test_string)
class AccountEmailTests(UserTestCase):
localizing_client = True
def test_account_email_page_requires_signin(self):
url = reverse('account_email')
response = self.client.get(url, follow=True)
self.assertContains(response, 'Please sign in')
ok_(len(response.redirect_chain) > 0)
def test_account_email_page_single_email(self):
u = self.user_model.objects.get(username='testuser')
self.client.login(username=u.username, password=TESTUSER_PASSWORD)
url = reverse('account_email')
response = self.client.get(url)
self.assertContains(response, 'is your <em>primary</em> email address')
for test_string in ['Make Primary',
'Re-send Confirmation',
'Remove']:
self.assertNotContains(response, test_string)
def test_account_email_page_multiple_emails(self):
u = self.user_model.objects.get(username='testuser2')
self.client.login(username=u.username, password=TESTUSER_PASSWORD)
url = reverse('account_email')
response = self.client.get(url)
for test_string in ['Make Primary',
'Re-send Confirmation',
'Remove',
'Add Email',
'Edit profile']:
self.assertContains(response, test_string)
class SocialAccountConnectionsTests(UserTestCase):
localizing_client = True
def test_account_connections_page_requires_signin(self):
url = reverse('socialaccount_connections')
response = self.client.get(url, follow=True)
self.assertContains(response, 'Please sign in')
ok_(len(response.redirect_chain) > 0)
def test_account_connections_page(self):
u = self.user_model.objects.get(username='testuser')
self.client.login(username=u.username, password=TESTUSER_PASSWORD)
url = reverse('socialaccount_connections')
response = self.client.get(url)
for test_string in ['Disconnect', 'Connect a new account',
'Edit profile', 'Connect with']:
self.assertContains(response, test_string)
class AllauthPersonaTestCase(UserTestCase):
existing_persona_email = 'testuser@test.com'
existing_persona_username = 'testuser'
localizing_client = False
@requests_mock.mock()
def test_persona_auth_failure_copy(self, mock_requests):
"""
The explanatory page for failed Persona auth contains the
failure copy, and does not contain success messages or a form
to choose a username.
"""
add_persona_verify_response(mock_requests, {
'status': 'failure',
'reason': 'this email address has been naughty'
})
response = self.client.post(reverse('persona_login'), follow=True)
for expected_string in ('Account Sign In Failure',
'An error occurred while attempting to sign '
'in with your account.'):
self.assertContains(response, expected_string)
for unexpected_string in (
'Thanks for signing in to MDN with Persona.',
('<form class="submission readable-line-length" method="post" '
'action="/en-US/users/account/signup">'),
('<input name="username" maxlength="30" type="text"'
' autofocus="autofocus" required="required" '
'placeholder="Username" id="id_username" />'),
'<input type="hidden" name="email" value="',
'" id="id_email" />'):
self.assertNotContains(response, unexpected_string)
@requests_mock.mock()
def test_persona_auth_success_copy(self, mock_requests):
"""
Successful Persona auth of a new user displays a success
message and the Persona-specific signup form, correctly
populated, and does not display the failure copy.
"""
persona_signup_email = 'templates_persona_auth_copy@example.com'
add_persona_verify_response(mock_requests, {
'status': 'okay',
'email': persona_signup_email,
})
response = self.client.post(reverse('persona_login'),
follow=True)
for expected_string in (
# Test that we got:
#
# * Persona sign-in success message
#
# * Form with action set to the account-signup URL.
#
# * Username field, blank
#
# * Hidden email address field, pre-populated with the
                # address used to authenticate to Persona.
'Thanks for signing in to MDN with Persona.',
('<form class="submission readable-line-length" method="post" '
'action="/en-US/users/account/signup">'),
('<input autofocus="autofocus" id="id_username" '
'maxlength="30" name="username" placeholder="Username" '
'required="required" type="text" />'),
('<input id="id_email" name="email" type="hidden" '
'value="%s" />' % persona_signup_email)):
self.assertContains(response, expected_string)
for unexpected_string in (
'<Account Sign In Failure',
'<An error occurred while attempting to sign '
'in with your account.'):
self.assertNotContains(response, unexpected_string)
@requests_mock.mock()
def test_persona_signin_copy(self, mock_requests):
"""
After an existing user successfully authenticates with
Persona, their username, an indication that Persona was used
|
vdloo/raptiformica
|
tests/unit/raptiformica/shell/consul/test_ensure_latest_consul_release.py
|
Python
|
mit
| 2,741
| 0.002189
|
from raptiformica.settings import conf
from raptiformica.shell.consul import ensure_latest_consul_release
from tests.testcase import TestCase
class TestEnsureLatestConsulRelease(TestCase):
def setUp(self):
self.log = self.set_up_patch('raptiformica.shell.consul.log')
self.execute_process = self.set_up_patch(
'raptiformica.shell.execute.execute_process'
)
self.remove = self.set_up_patch(
'raptiformica.shell.consul.remove'
)
self.process_output = (0, 'standard out output', '')
self.execute_process.return_value = self.process_output
def test_ensure_latest_consul_release_logs_ensuring_latest_consul_release_message(self):
ensure_latest_consul_release('1.2.3.4', port=2222)
self.assertTrue(self.log.info.called)
def test_ensure_latest_consul_release_removes_previously_existing_zip(self):
ensure_latest_consul_release('1.2.3.4', port=2222)
if conf().MACHINE_ARCH == 'armv7l':
consul_zip = 'consul_1.0.2_linux_arm.zip'
else:
consul_zip = 'consul_1.0.2_linux_amd64.zip'
self.remove.assert_called_once_with(
consul_zip
)
    def test_ensure_latest_consul_release_ignores_no_previously_existing_zip(self):
self.remove.side_effect = FileNotFoundError
# Does not raise FileNotFoundError
ensure_latest_consul_release('1.2.3.4', port=2222)
def test_ensure_latest_consul_release_downloads_latest_consul_release_with_no_clobber(self):
ensure_latest_consul_release('1.2.3.4', port=2222)
        if conf().MACHINE_ARCH == 'armv7l':
consul_zip = 'consul_1.0.2_linux_arm.zip'
else:
consul_zip = 'consul_1.0.2_linux_amd64.zip'
expected_binary_command = [
'/usr/bin/env', 'ssh', '-A',
'-o', 'ConnectTimeout=5',
'-o', 'StrictHostKeyChecking=no',
'-o', 'ServerAliveInterval=10',
'-o', 'ServerAliveCountMax=3',
'-o', 'UserKnownHostsFile=/dev/null',
'-o', 'PasswordAuthentication=no',
'root@1.2.3.4',
'-p', '2222', 'wget', '-4', '-nc',
'https://releases.hashicorp.com/consul/1.0.2/{}'.format(
consul_zip
)
]
self.execute_process.assert_called_once_with(
expected_binary_command, buffered=False, shell=False, timeout=15
)
def test_ensure_latest_consul_release_raises_error_when_ensuring_latest_release_fails(self):
self.execute_process.return_value = (1, 'standard out output', '')
with self.assertRaises(RuntimeError):
ensure_latest_consul_release('1.2.3.4', port=2222)
|
AntonSax/plantcv
|
plantcv/invert.py
|
Python
|
mit
| 794
| 0.001259
|
# Invert gray image
import cv2
from . import print_image
from . import plot_image
def invert(img, device, debug=None):
"""Inverts grayscale images.
Inputs:
img = image object, grayscale
device = device number. Used to count steps in the pipeline
    debug = None, print, or plot. Print = save to file, Plot = print to screen.
Returns:
device = device number
img_inv = inverted image
:param img: numpy array
:param device: int
:param debug: str
:return device: int
:return img_inv: numpy array
"""
device += 1
img_inv = cv2.bitwise_not(img)
if debug == 'print':
print_image(img_inv, (str(device) + '_invert.png'))
elif debug == 'plot':
        plot_image(img_inv, cmap='gray')
return device, img_inv
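# Hedged usage sketch (illustrative; the module's relative imports mean it
# cannot run as a standalone script, and 'leaf.png' is an assumed input):
#
#     from plantcv import invert
#     gray = cv2.imread('leaf.png', cv2.IMREAD_GRAYSCALE)
#     device, img_inv = invert(gray, device=0, debug='print')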
|
mitsuhiko/flask
|
src/flask/app.py
|
Python
|
bsd-3-clause
| 82,515
| 0.000267
|
import functools
import inspect
import logging
import os
import sys
import typing as t
import weakref
from datetime import timedelta
from itertools import chain
from threading import Lock
from types import TracebackType
from werkzeug.datastructures import Headers
from werkzeug.datastructures import ImmutableDict
from werkzeug.exceptions import BadRequest
from werkzeug.exceptions import BadRequestKeyError
from werkzeug.exceptions import HTTPException
from werkzeug.exceptions import InternalServerError
from werkzeug.local import ContextVar
from werkzeug.routing import BuildError
from werkzeug.routing import Map
from werkzeug.routing import MapAdapter
from werkzeug.routing import RequestRedirect
from werkzeug.routing import RoutingException
from werkzeug.routing import Rule
from werkzeug.wrappers import Response as BaseResponse
from . import cli
from . import json
from .config import Config
from .config import ConfigAttribute
from .ctx import _AppCtxGlobals
from .ctx import AppContext
from .ctx import RequestContext
from .globals import _request_ctx_stack
from .globals import g
from .globals import request
from .globals import session
from .helpers import _split_blueprint_path
from .helpers import get_debug_flag
from .helpers import get_env
from .helpers import get_flashed_messages
from .helpers import get_load_dotenv
from .helpers import locked_cached_property
from .helpers import url_for
from .json import jsonify
from .logging import create_logger
from .scaffold import _endpoint_from_view_func
from .scaffold import _sentinel
from .scaffold import find_package
from .scaffold import Scaffold
from .scaffold import setupmethod
from .sessions import SecureCookieSessionInterface
from .signals import appcontext_tearing_down
from .signals import got_request_exception
from .signals import request_finished
from .signals import request_started
from .signals import request_tearing_down
from .templating import DispatchingJinjaLoader
from .templating import Environment
from .typing import AfterRequestCallable
from .typing import BeforeFirstRequestCallable
from .typing import BeforeRequestCallable
from .typing import ResponseReturnValue
from .typing import TeardownCallable
from .typing import TemplateContextProcessorCallable
from .typing import TemplateFilterCallable
from .typing import TemplateGlobalCallable
from .typing import TemplateTestCallable
from .typing import URLDefaultCallable
from .typing import URLValuePreprocessorCallable
from .wrappers import Request
from .wrappers import Response
if t.TYPE_CHECKING:
import typing_extensions as te
from .blueprints import Blueprint
from .testing import FlaskClient
from .testing import FlaskCliRunner
from .typing import ErrorHandlerCallable
if sys.version_info >= (3, 8):
iscoroutinefunction = inspect.iscoroutinefunction
else:
def iscoroutinefunction(func: t.Any) -> bool:
while inspect.ismethod(func):
func = func.__func__
while isinstance(func, functools.partial):
func = func.func
return inspect.iscoroutinefunction(func)
def _make_timedelta(value: t.Optional[timedelta]) -> t.Optional[timedelta]:
if value is None or isinstance(value, timedelta):
return value
return timedelta(seconds=value)
class Flask(Scaffold):
"""The flask object implements a WSGI application and acts as the central
object. It is passed the name of the module or package of the
application. Once it is created it will act as a central registry for
the view functions, the URL rules, template configuration and much more.
The name of the package is used to resolve resources from inside the
package or the folder the module is contained in depending on if the
package parameter resolves to an actual python package (a folder with
an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file).
For more information about resource loading, see :func:`open_resource`.
Usually you create a :class:`Flask` instance in your main module or
in the :file:`__init__.py` file of your package like this::
from flask import Flask
app = Flask(__name__)
.. admonition:: About the First Parameter
The idea of the first parameter is to give Flask an idea of what
belongs to your application. This name is used to find resources
on the filesystem, can be used by extensions to improve debugging
information and a lot more.
So it's important what you provide there. If you are using a single
module, `__name__` is always the correct value. If you however are
using a package, it's usually recommended to hardcode the name of
your package there.
For example if your application is defined in :file:`yourapplication/app.py`
you should create it with one of the two versions below::
app = Flask('yourapplication')
app = Flask(__name__.split('.')[0])
Why is that? The application will work even with `__name__`, thanks
to how resources are looked up. However it will make debugging more
painful. Certain extensions can make assumptions based on the
import name of your application. For example the Flask-SQLAlchemy
extension will look for the code in your application that triggered
an SQL query in debug mode. If the import name is not properly set
up, that debugging information is lost. (For example it would only
pick up SQL queries in `yourapplication.app` and not
`yourapplication.views.frontend`)
.. versionadded:: 0.7
The `static_url_path`, `static_folder`, and `template_folder`
parameters were added.
.. versionadded:: 0.8
The `instance_path` and `instance_relative_config` parameters were
added.
.. versionadded:: 0.11
The `root_path` parameter was added.
    .. versionadded:: 1.0
The ``host_matching`` and ``static_host`` parameters were added.
.. versionadded:: 1.0
The ``subdomain_matching`` parameter was added. Subdomain
matching needs to be enabled manually now. Setting
:data:`SERVER_NAME` does not implicitly enable it.
:param import_name: the name of the application package
:param static_url_path: can be used to specify a different path for the
static files on the web. Defaults to the name
of the `static_folder` folder.
:param static_folder: The folder with static files that is served at
``static_url_path``. Relative to the application ``root_path``
or an absolute path. Defaults to ``'static'``.
:param static_host: the host to use when adding the static route.
Defaults to None. Required when using ``host_matching=True``
with a ``static_folder`` configured.
:param host_matching: set ``url_map.host_matching`` attribute.
Defaults to False.
:param subdomain_matching: consider the subdomain relative to
:data:`SERVER_NAME` when matching routes. Defaults to False.
:param template_folder: the folder that contains the templates that should
be used by the application. Defaults to
``'templates'`` folder in the root path of the
application.
:param instance_path: An alternative instance path for the application.
By default the folder ``'instance'`` next to the
package or module is assumed to be the instance
path.
:param instance_relative_config: if set to ``True`` relative filenames
for loading the config are assumed to
be relative to the instance path instead
of the application root.
:param root_path: The path to the root of the application files.
This should only be set manually when it can't be detected
automatically, such as for namespace packages.
"""
#: The class that is used for request objec
|
fredokun/TikZ-Editor
|
setup.py
|
Python
|
gpl-2.0
| 1,449
| 0.031056
|
#!/usr/bin/env python
# Copyright 2012 (C) Mickael Menu <mickael.menu@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
# automatically downloads setuptools if needed
import distribute_setup
distribute_setup.use_setuptools()
from setuptools import setup, find_packages
import tikz_editor.globals as globals
setup(
name = globals.APPLICATION_NAME,
version = globals.VERSION,
packages = find_packages(),
include_package_data = True,
package_data = {'': ['*.png', '*.html']},
# metadatas
	author = globals.AUTHORS,
author_email = globals.EMAIL,
description = globals.APPLICATION_DESCRIPTION,
license = "GPL v2",
keywords = "tikz code editor latex preview",
url = globals.WEBSITE,
# auto-creates a GUI Python script to launch the application
entry_points = {'gui_scripts': ['tikz-editor = tikz_editor:start']}
)
|
pmdarrow/locust
|
locust/test/test_web.py
|
Python
|
mit
| 3,957
| 0.009351
|
import csv
import json
import sys
import traceback
from six.moves import StringIO
import requests
import mock
import gevent
from gevent import wsgi
from locust import web, runners, stats
from locust.runners import LocustRunner
from locust.main import parse_options
from .testcases import LocustTestCase
class TestWebUI(LocustTestCase):
def setUp(self):
super(TestWebUI, self).setUp()
stats.global_stats.clear_all()
parser = parse_options()[0]
options = parser.parse_args([])[0]
runners.locust_runner = LocustRunner([], options)
self._web_ui_server = wsgi.WSGIServer(('127.0.0.1', 0), web.app, log=None)
gevent.spawn(lambda: self._web_ui_server.serve_forever())
gevent.sleep(0.01)
self.web_port = self._web_ui_server.server_port
def tearDown(self):
super(TestWebUI, self).tearDown()
self._web_ui_server.stop()
def test_index(self):
self.assertEqual(200, requests.get("http://127.0.0.1:%i/" % self.web_port).status_code)
def test_stats_no_data(self):
self.assertEqual(200, requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port).status_code)
def test_stats(self):
stats.global_stats.get("/test", "GET").log(120, 5612)
response = requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port)
self.assertEqual(200, response.status_code)
data = json.loads(response.text)
self.assertEqual(2, len(data["stats"])) # one entry plus Total
self.assertEqual("/test", data["stats"][0]["name"])
self.assertEqual("GET", data["stats"][0]["method"])
self.assertEqual(120, data["stats"][0]["avg_response_time"])
def test_stats_cache(self):
stats.global_stats.get("/test", "GET").log(120, 5612)
response = requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port)
self.assertEqual(200, response.status_code)
data = json.loads(response.text)
self.assertEqual(2, len(data["stats"])) # one entry plus Total
# add another entry
stats.global_stats.get("/test2", "GET").log(120, 5612)
data = json.loads(requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port).text)
self.assertEqual(2, len(data["stats"])) # old value should be cached now
web.request_stats.clear_cache()
data = json.loads(requests.get("http://127.0.0.1:%i/stats/requests" % self.web_port).text)
self.assertEqual(3, len(data["stats"])) # this should no longer be cached
def test_request_stats_csv(self):
stats.global_stats.get("/test", "GET").log(120, 5612)
        response = requests.get("http://127.0.0.1:%i/stats/requests/csv" % self.web_port)
self.assertEqual(200, response.status_code)
def test_distribution_stats_csv(self):
stats.global_stats.get("/test", "GET").log(120, 5612)
response = requests.get("http://127.0.0.1:%i/stats/distribution/csv" % self.web_port)
self.assertEqual(200, response.status_code)
def test_exceptions_csv(self):
try:
raise Exception("Test exception")
except Exception as e:
            tb = sys.exc_info()[2]
runners.locust_runner.log_exception("local", str(e), "".join(traceback.format_tb(tb)))
runners.locust_runner.log_exception("local", str(e), "".join(traceback.format_tb(tb)))
response = requests.get("http://127.0.0.1:%i/exceptions/csv" % self.web_port)
self.assertEqual(200, response.status_code)
reader = csv.reader(StringIO(response.text))
rows = []
for row in reader:
rows.append(row)
self.assertEqual(2, len(rows))
self.assertEqual("Test exception", rows[1][1])
self.assertEqual(2, int(rows[1][0]), "Exception count should be 2")
|
kartikshah1/Test
|
user_profile/countries.py
|
Python
|
mit
| 6,689
| 0
|
from model_utils import Choices
COUNTRIES = Choices(
('AF', 'Afghanistan'),
('AX', 'Aland Islands'),
('AL', 'Albania'),
('DZ', 'Algeria'),
('AS', 'American Samoa'),
('AD', 'Andorra'),
('AO', 'Angola'),
('AI', 'Anguilla'),
('AQ', 'Antarctica'),
('AG', 'Antigua and Barbuda'),
('AR', 'Argentina'),
('AM', 'Armenia'),
('AW', 'Aruba'),
('AU', 'Australia'),
('AT', 'Austria'),
('AZ', 'Azerbaijan'),
('BS', 'Bahamas'),
('BH', 'Bahrain'),
('BD', 'Bangladesh'),
('BB', 'Barbados'),
('BY', 'Belarus'),
('BE', 'Belgium'),
('BZ', 'Belize'),
('BJ', 'Benin'),
('BM', 'Bermuda'),
('BT', 'Bhutan'),
('BO', 'Bolivia'),
('BA', 'Bosnia and Herzegovina'),
('BW', 'Botswana'),
('BV', 'Bouvet Island'),
('BR', 'Brazil'),
('IO', 'British Indian Ocean Territory'),
('BN', 'Brunei Darussalam'),
('BG', 'Bulgaria'),
('BF', 'Burkina Faso'),
('BI', 'Burundi'),
('KH', 'Cambodia'),
('CM', 'Cameroon'),
('CA', 'Canada'),
('CV', 'Cape Verde'),
('KY', 'Cayman Islands'),
('CF', 'Central African Republic'),
('TD', 'Chad'),
('CL', 'Chile'),
('CN', 'China'),
('CX', 'Christmas Island'),
('CC', 'Cocos (Keeling) Islands'),
('CO', 'Colombia'),
('KM', 'Comoros'),
('CG', 'Congo'),
('CD', 'Congo, The Democratic Republic of the'),
('CK', 'Cook Islands'),
('CR', 'Costa Rica'),
('CI', 'Cote d\'Ivoire'),
('HR', 'Croatia'),
('CU', 'Cuba'),
('CY', 'Cyprus'),
('CZ', 'Czech Republic'),
('DK', 'Denmark'),
('DJ', 'Djibouti'),
('DM', 'Dominica'),
('DO', 'Dominican Republic'),
('EC', 'Ecuador'),
('EG', 'Egypt'),
('SV', 'El Salvador'),
('GQ', 'Equatorial Guinea'),
('ER', 'Eritrea'),
('EE', 'Estonia'),
('ET', 'Ethiopia'),
('FK', 'Falkland Islands (Malvinas)'),
('FO', 'Faroe Islands'),
('FJ', 'Fiji'),
('FI', 'Finland'),
('FR', 'France'),
('GF', 'French Guiana'),
('PF', 'French Polynesia'),
('TF', 'French Southern Territories'),
('GA', 'Gabon'),
('GM', 'Gambia'),
('GE', 'Georgia'),
('DE', 'Germany'),
('GH', 'Ghana'),
('GI', 'Gibraltar'),
('GR', 'Greece'),
('GL', 'Greenland'),
('GD', 'Grenada'),
('GP', 'Guadeloupe'),
('GU', 'Guam'),
('GT', 'Guatemala'),
('GG', 'Guernsey'),
('GN', 'Guinea'),
('GW', 'Guinea-Bissau'),
('GY', 'Guyana'),
('HT', 'Haiti'),
('HM', 'Heard Island and McDonald Islands'),
('VA', 'Holy See (Vatican City State)'),
('HN', 'Honduras'),
('HK', 'Hong Kong'),
('HU', 'Hungary'),
('IS', 'Iceland'),
('IN', 'India'),
('ID', 'Indonesia'),
('IR', 'Iran, Islamic Republic of'),
('IQ', 'Iraq'),
('IE', 'Ireland'),
('IM', 'Isle of Man'),
('IL', 'Israel'),
('IT', 'Italy'),
('JM', 'Jamaica'),
('JP', 'Japan'),
('JE', 'Jersey'),
('JO', 'Jordan'),
('KZ', 'Kazakhstan'),
('KE', 'Kenya'),
('KI', 'Kiribati'),
('KP', 'Korea, Democratic People\'s Republic of'),
('KR', 'Korea, Republic of'),
('KW', 'Kuwait'),
('KG', 'Kyrgyzstan'),
('LA', 'Lao People\'s Democratic Republic'),
('LV', 'Latvia'),
('LB', 'Lebanon'),
('LS', 'Lesotho'),
('LR', 'Liberia'),
('LY', 'Libyan Arab Jamahiriya'),
('LI', 'Liechtenstein'),
('LT', 'Lithuania'),
('LU', 'Luxembourg'),
('MO', 'Macao'),
('MK', 'Macedonia, The Former Yugoslav Republic of'),
('MG', 'Madagascar'),
('MW', 'Malawi'),
('MY', 'Malaysia'),
('MV', 'Maldives'),
('ML', 'Mali'),
('MT', 'Malta'),
('MH', 'Marshall Islands'),
('MQ', 'Martinique'),
('MR', 'Mauritania'),
('MU', 'Mauritius'),
('YT', 'Mayotte'),
('MX', 'Mexico'),
('FM', 'Micronesia, Federated States of'),
('MD', 'Moldova'),
('MC', 'Monaco'),
('MN', 'Mongolia'),
('ME', 'Montenegro'),
('MS', 'Montserrat'),
('MA', 'Morocco'),
('MZ', 'Mozambique'),
('MM', 'Myanmar'),
('NA', 'Namibia'),
('NR', 'Nauru'),
('NP', 'Nepal'),
('NL', 'Netherlands'),
('AN', 'Netherlands Antilles'),
('NC', 'New Caledonia'),
('NZ', 'New Zealand'),
('NI', 'Nicaragua'),
('NE', 'Niger'),
('NG', 'Nigeria'),
('NU', 'Niue'),
('NF', 'Norfolk Island'),
('MP', 'Northern Mariana Islands'),
('NO', 'Norway'),
('OM', 'Oman'),
('PK', 'Pakistan'),
('PW', 'Palau'),
('PS', 'Palestinian Territory, Occupied'),
('PA', 'Panama'),
('PG', 'Papua New Guinea'),
('PY', 'Paraguay'),
('PE', 'Peru'),
('PH', 'Philippines'),
('PN', 'Pitcairn'),
('PL', 'Poland'),
('PT', 'Portugal'),
('PR', 'Puerto Rico'),
('QA', 'Qatar'),
('RE', 'Reunion'),
('RO', 'Romania'),
('RU', 'Russian Federation'),
('RW', 'Rwanda'),
('BL', 'Saint Barthelemy'),
('SH', 'Saint Helena'),
('KN', 'Saint Kitts and Nevis'),
('LC', 'Saint Lucia'),
('MF', 'Saint Martin'),
('PM', 'Saint Pierre and Miquelon'),
('VC', 'Saint Vincent and the Grenadines'),
('WS', 'Samoa'),
('SM', 'San Marino'),
('ST', 'Sao Tome and Principe'),
('SA', 'Saudi Arabia'),
('SN', 'Senegal'),
('RS', 'Serbia'),
('SC', 'Seychelles'),
('SL', 'Sierra Leone'),
('SG', 'Singapore'),
('SK', 'Slovakia'),
('SI', 'Slovenia'),
('SB', 'Solomon Islands'),
('SO', 'Somalia'),
('ZA', 'South Africa'),
('GS', 'South Georgia and the South Sandwich Islands'),
('ES', 'Spain'),
('LK', 'Sri Lanka'),
('SD', 'Sudan'),
('SR', 'Suriname'),
('SJ', 'Svalbard and Jan Mayen'),
('SZ', 'Swaziland'),
('SE', 'Sweden'),
('CH', 'Switzerland'),
('SY', 'Syrian Arab Republic'),
('TW', 'Taiwan, Province of China'),
('TJ', 'Tajikistan'),
('TZ', 'Tanzania, United Republic of'),
('TH', 'Thailand'),
('TL', 'Timor-Leste'),
('TG', 'Togo'),
('TK', 'Tokelau'),
('TO', 'Tonga'),
('TT', 'Trinidad and Tobago'),
('TN', 'Tunisia'),
('TR', 'Turkey'),
('TM', 'Turkmenistan'),
('TC', 'Turks and Caicos Islands'),
('TV', 'Tuvalu'),
('UG', 'Uganda'),
('UA', 'Ukraine'),
('AE', 'United Arab Emirates'),
('GB', 'United Kingdom'),
('US', 'United States'),
('UM', 'United States Minor Outlying Islands'),
('UY', 'Uruguay'),
('UZ', 'Uzbekistan'),
('VU', 'Vanuatu'),
('VE', 'Venezuela'),
('VN', 'Viet Nam'),
('VG', 'Virgin Islands, British'),
('VI', 'Virgin Islands, U.S.'),
('WF', 'Wallis and Futuna'),
('EH', 'Western Sahara'),
('YE', 'Yemen'),
('ZM', 'Zambia'),
('ZW', 'Zimbabwe'),
)
|
henrysher/spec4pypi
|
pyp2rpm/utils.py
|
Python
|
mit
| 1,076
| 0.004647
|
import functools
from pyp2rpm import settings
def memoize_by_args(func):
"""Memoizes return value of a func based on args."""
memory = {}
@functools.wraps(func)
def memoized(*args):
        if args not in memory:
value = func(*args)
memory[args] = value
return memory[args]
return memoized
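# Illustrative usage (hypothetical function name):
#   @memoize_by_args
#   def slow_square(x):
#       return x * x
#   slow_square(4)   # computed and cached
#   slow_square(4)   # returned from the cache; func is not called again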
def license_from_trove(trove):
"""Finds out license from list of trove classifiers.
Args:
        trove: list of trove classifiers
Returns:
Fedora name of the package license or empty string, if no licensing information is found in trove classifiers.
"""
license = []
for classifier in trove:
        if classifier is None:
            continue
        if 'License' in classifier:
stripped = classifier.strip()
# if taken from EGG-INFO, begins with Classifier:
stripped = stripped[stripped.find('License'):]
if stripped in settings.TROVE_LICENSES:
license.append(settings.TROVE_LICENSES[stripped])
return ' and '.join(license)
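# Illustrative: for trove = ['License :: OSI Approved :: MIT License'] the
# stripped classifier is looked up in settings.TROVE_LICENSES, so the result
# would be that mapping's short name (e.g. 'MIT'), assuming the key exists.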
|
ahmedaljazzar/edx-platform
|
cms/celery.py
|
Python
|
agpl-3.0
| 1,791
| 0.003908
|
"""
Import celery, load its settings from the django settings
and auto discover tasks in all installed django apps.
Taken from: https://celery.readthedocs.org/en/latest/django/first-steps-with-django.html
"""
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
from openedx.core.lib.celery.routers import AlternateEnvironmentRouter
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj.settings')
APP = Celery('proj')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
APP.config_from_object('django.conf:settings')
APP.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
class Router(AlternateEnvironmentRouter):
"""
An implementation of AlternateEnvironmentRouter, for routing tasks to non-cms queues.
"""
@property
def alternate_env_tasks(self):
"""
Defines alternate environment tasks, as a dict of form { task_name: alternate_queue }
"""
# The tasks below will be routed to the default lms queue.
return {
'completion_aggregator.tasks.update_aggregators': 'lms',
'openedx.core.djangoapps.content.block_structure.tasks.update_course_in_cache': 'lms',
'openedx.core.djangoapps.content.block_structure.tasks.update_course_in_cache_v2': 'lms',
}
@property
def explicit_queues(self):
"""
Defines specific queues for tasks to run in (typically outside of the cms environment),
as a dict of form { task_name: queue_name }.
"""
return {
'lms.djangoapps.grades.tasks.compute_all_grades_for_course': settings.POLICY_CHANGE_GRADES_ROUTING_KEY,
}
|
abacuspix/NFV_project
|
Flask_By_Example/chapter6/crimemap.py
|
Python
|
mit
| 726
| 0
|
from dbhelper import DBHelper
from flask import Flask
from flask import render_template
from flask import request
app = Flask(__name__)
DB = DBHelper()
@app.route("/")
def home():
try:
data = DB.get_all_inputs()
except Exception as e:
print e
data = None
return render_template("home.html", data=data)
@app.route("/add", methods=["POST"])
def add():
try:
data = request.form.get("userinput")
DB.add_input(data)
except Exception as e:
print e
    return home()
@app.route("/clear")
def clear():
try:
DB.clear_all()
except Exception as e:
print e
return home()
if __name__ == '__main__':
app.run(port=5000, debug=True)
|
hwoods723/script.gamescenter
|
main.py
|
Python
|
gpl-2.0
| 2,988
| 0.005689
|
# -*- coding: utf-8 -*-
'''
script.matchcenter - Football information for Kodi
A program addon that can be mapped to a key on your remote to display football information.
Livescores, Event details, Line-ups, League tables, next and previous matches by team. Follow what
others are saying about the match on Twitter.
Copyright (C) 2016 enen92
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import thesportsdb
import xbmcgui
import xbmcplugin
import xbmc
import sys
from resources.lib import livescores
from resources.lib import leaguetables
from resources.lib import eventdetails
from resources.lib import mainmenu
from resources.lib import ignoreleagues
from resources.lib import football_league_selection
from resources.lib import leagueselection
from resources.lib import matchhistory
from resources.lib.utilities.cache import Addon
|
Cache
from resources.lib.utilities import keymapeditor
from resources.lib.utilities.common_addon import *
def get_params():
pairsofparams = []
if len(sys.argv) >= 2:
params=sys.argv[1]
pairsofparams=params.split('/')
pairsofparams = [parm for parm in pairsofparams if parm]
return pairsofparams
params=get_params()
if not params:
if "script-matchcenter-MainMenu.xml" not in xbmc.getInfoLabel('Window.Property(xmlfile)'):
mainmenu.start()
else:
#Integration patterns below
'''
Eg: xbmc.executebuiltin("RunScript(script.matchcenter, /eventdetails/506227)")
'''
if params[0] == 'ignoreleagues':
ignoreleagues.start()
elif params[0] == 'keymapeditor':
keymapeditor.run()
elif params[0] == 'removecache':
AddonCache.removeCachedData()
elif params[0] == 'livescores':
livescores.start(standalone=True)
elif params[0] == 'leagueselection':
leagueselection.start(standalone=True)
elif params[0] == 'football_league_selection':
football_league_selection.start(standalone=True)
elif params[0] == 'leaguetables' and params[1]:
leaguetables.start_table(leagueid=params[1])
elif params[0] == 'matchhistory' and params[1]:
matchhistory.start(teamid=params[1])
elif params[0] == 'eventdetails' and params[1]:
eventdetails.showDetails(match=None,matchid=params[1])
try: xbmcplugin.endOfDirectory(int(sys.argv[1]))
except: sys.exit(0)
|
FlashXT/XJTU_WorkLog
|
2017.10/Engineering/PyGame/alien_invasion/ship.py
|
Python
|
gpl-3.0
| 1,131
| 0.052166
|
#coding=utf-8
# 2017.10.3, Flash, ship class
import pygame
class Ship():
def __init__(self,ai_settings,screen):
"""³õʼ»¯·É´¬²¢ÉèÖÃÆä³õʼλÖÃ"""
self.screen=screen
self.ai_settings = ai_settings
        # Load the ship image and get its bounding rectangle.
self.image=pygame.image.load("images/ship.bmp")
self.rect=self.image.get_rect()
self.screen_rect=screen.get_rect()
        # Place each new ship at the bottom center of the screen.
self.rect.centerx=self.screen_rect.centerx
self.rect.bottom=self.screen_rect.bottom
        # Store a decimal value for the ship's center attribute.
        self.center=float(self.rect.centerx)
        # Movement flags.
self.moving_right = False
self.moving_left = False
def update(self):
"""¸ù¾ÝÒÆ¶¯±êÖ¾µ÷Õû·É´¬µÄλÖÃ"""
#¸üзɴ¬µÄcenterÖµ£¬¶ø²»ÊÇrect
if self.moving_right and self.rect.right < self.screen_rect.right:
self.center += self.ai_settings.ship_speed_factor
elif self.moving_left and self.rect.left > 0:
self.center -= self.ai_settings.ship_speed_factor
        # Update the rect object from self.center.
        self.rect.centerx = self.center
def blitme(self):
"""ÔÚÖ¸¶¨Î»ÖûæÖÆ·É´¬"""
self.screen.blit(self.image,self.rect)
|
fifoforlifo/pynja
|
packages/pynja/build.py
|
Python
|
apache-2.0
| 15,654
| 0.005366
|
import sys
import os
from . import io
from . import root_paths
from abc import *
def ninja_esc_path(path):
return path.replace('$','$$').replace(' ','$ ').replace(':', '$:')
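# Illustrative: ninja_esc_path('a b:c$d') -> 'a$ b$:c$$d' ('$' is doubled
# first, then spaces and colons gain a '$' escape).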
def xlat_path(project, path):
"""Translate common prefix to variable reference."""
if path.startswith(project.projectDir):
return path.replace(project.projectDir, '$'+project._pdirName)
if path.startswith(project.builtDir):
return path.replace(project.builtDir, '$'+project._bdirName)
return ninja_esc_path(path)
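# Illustrative: for a hypothetical project of class Foo with
# projectDir == '/src/Foo', xlat_path(project, '/src/Foo/bar.c') yields
# '$Foo_pdir/bar.c'; paths under neither known prefix fall back to
# ninja_esc_path().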
def translate_path_list(ninjaFile, project, paths, separator = " ", prefix = None):
if prefix and (len(paths) > 0):
ninjaFile.write(prefix)
for path in paths:
pathEsc = xlat_path(project, path)
ninjaFile.write(separator)
ninjaFile.write(pathEsc)
def translate_extra_deps(ninjaFile, project, task, needPipe):
prefix = " |" if needPipe else ""
translate_path_list(ninjaFile, project, task.extraDeps, " $\n ", prefix)
def translate_order_only_deps(ninjaFile, project, task, needPipe):
prefix = " ||" if needPipe else ""
translate_path_list(ninjaFile, project, task.orderOnlyDeps, " $\n ", prefix)
def get_loaded_modules(rootDir):
modules = []
for name, module in sorted(sys.modules.items()):
path = getattr(module, "__file__", None)
if not path:
continue
if path.endswith("<frozen>"):
continue
if not os.path.isabs(path):
path = os.path.join(rootDir, path)
modules.append(path)
return modules
class Variant:
def __init__(self, string, fieldDefs):
self.__str = string
parts = self.__str.split("-")
for i in range(len(parts)):
fieldValue = parts[i]
fieldName = fieldDefs[i * 2 + 0]
fieldOptions = fieldDefs[i * 2 + 1]
if not (fieldValue in fieldOptions):
errstr = "%s is not valid for field %s\n" % (fieldValue, fieldName)
errstr = errstr + "Valid options are:\n"
for option in fieldOptions:
errstr = errstr + (" %s\n" % (option,))
raise Exception(errstr)
setattr(self, fieldName, fieldValue)
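    # Illustrative (hypothetical field definitions):
    #   Variant("debug-x86", ["config", ["debug", "release"],
    #                         "arch", ["x86", "x64"]])
    # sets self.config = "debug" and self.arch = "x86".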
def __lt__(self, other):
return self.__str < other.__str
def __str__(self, ):
return self.__str
class BuildTask(metaclass = ABCMeta):
def __init__(self, project):
self.project = project
self.extraDeps = []
self.extraOutputs = []
self.orderOnlyDeps = []
        self.phonyTarget = None # name of phony target to declare with this
self._emitted = False
def __enter__(self):
if self._emitted:
raise Exception("A task should not be re-used in a with statement.")
return self
def __exit__(self, type, value, traceback):
self._emit_once()
def _emit_once(self):
if not self._emitted:
self._emitted = True
self.emit()
    @abstractmethod
def emit(self):
pass
class BuildTasks:
def __init__(self, tasks):
self.__dict__["_tasks"] = tasks
self.__dict__["_emitted"] = False
def __len__(self):
return self._tasks.__len__()
def __getitem__(self, index):
return self._tasks[index]
def __iter__(self):
return self._tasks.__iter__()
def __setattr__(self, name, value):
for task in self._tasks:
setattr(task, name, value)
def __enter__(self):
if self._emitted:
raise Exception("Tasks should not be re-used in a with statement.")
return self
def __exit__(self, type, value, traceback):
self._emit_once()
def _emit_once(self):
if not self._emitted:
self.__dict__["_emitted"] = True
for task in self._tasks:
task._emit_once()
class Project(metaclass = ABCMeta):
def __init__(self, projectMan, variant):
if not isinstance(variant, Variant):
raise Exception("variant must be instanceof(Variant)")
self.projectMan = projectMan
self.variant = variant
self.projectDir = self.get_project_dir()
self.projectRelDir = self.get_project_rel_dir()
self.builtDir = self.get_built_dir()
self.makeFiles = []
self._runtimeDeps = {}
self._cbProjectRefs = set()
def get_project_dir(self):
return getattr(root_paths.rootPaths, type(self).__name__)
def get_project_rel_dir(self):
return getattr(root_paths.rootPathsRel, type(self).__name__)
def get_built_dir(self):
return os.path.join(root_paths.rootPaths.built, self.get_project_rel_dir(), str(self.variant), type(self).__name__)
@abstractmethod
def emit(self):
pass
def get_project(self, projName, variant):
return self.projectMan.get_project(projName, variant)
def get_abs_path(self, path):
if os.path.isabs(path):
return path
else:
return os.path.join(self.projectDir, path)
def custom_command(self, command, desc = None, inputs = [], outputs = []):
self.projectMan.emit_custom_command(command, desc, inputs, outputs)
def copy(self, orig, dest, phonyTarget = None):
origPath = self.get_abs_path(orig)
destPath = self.get_abs_path(dest)
self.projectMan.emit_copy(origPath, destPath, phonyTarget)
def _add_runtime_dep(self, destPath, srcPath):
curPath = self._runtimeDeps.get(destPath, None)
if curPath:
if curPath != srcPath:
raise Exception("Conflicting runtime dependencies to destPath=%s;\nold = %s\nnew = %s" % (destPath, curPath, srcPath))
else:
self._runtimeDeps[destPath] = srcPath
def add_runtime_dependency(self, srcPath, destPath = None, destDir = None):
srcPath = os.path.normpath(srcPath)
if not os.path.isabs(srcPath):
raise Exception("srcPath is not an absolute path; srcPath=%s" % srcPath)
if not destPath:
destPath = os.path.basename(srcPath)
if destDir and not os.path.isabs(destPath):
destPath = os.path.join(destDir, destPath)
destPath = os.path.normpath(destPath)
return self._add_runtime_dep(destPath, srcPath)
def add_runtime_dependency_project(self, project, destDir = None):
for destPath, srcPath in project._runtimeDeps.items():
self.add_runtime_dependency(srcPath, destPath, destDir)
self.add_cb_project_reference(project)
def add_cb_project_reference(self, project):
self._cbProjectRefs.add(project)
for ref in project._cbProjectRefs:
self._cbProjectRefs.add(ref)
class ToolChain(metaclass = ABCMeta):
def __init__(self, name):
self.name = name
@abstractmethod
def emit_rules(self, file):
pass
class ProjectMan:
def __init__(self, ninjaFile, ninjaPath):
self.ninjaFile = ninjaFile
self.ninjaPath = ninjaPath
self._projects = {}
self._toolchains = {}
self._phonyTargets = {}
self._ninjaVars = set()
self._copyCommand = os.path.join(os.path.dirname(__file__), "scripts", "copy-file.py")
self._deployFiles = {}
self._cbProjectRoots = {} # map projName -> slnName
def _define_project_ninja_vars(self, project):
ninjaFile = self.ninjaFile
project._pdirName = type(project).__name__ + '_pdir'
if project._pdirName not in self._ninjaVars:
self._ninjaVars.add(project._pdirName)
ninjaFile.write('%s = %s\n' % (project._pdirName, project.projectDir))
project._bdirName = type(project).__name__ + str(project.variant)
ninjaFile.write('%s = %s\n' % (project._bdirName, project.builtDir))
def get_project(self, projName, variant):
if not isinstance(variant, Variant):
raise Exception("variant must be instanceof(Variant)")
variantName = str(variant)
variants = self._projects.get(projName)
        if variants is None:
            variants = {}
            self._projects[projName] = variants
|
manufacturedba/pinax
|
pinax/apps/signup_codes/stats.py
|
Python
|
mit
| 548
| 0.00365
|
import datetime
from pinax.apps.signup_codes.models import SignupCode
def stats():
return {
"signup_codes_total": SignupCode.objects.count(),
"signup_codes_sent": SignupCode.objects.filter(sent__isnull=True).count(
|
),
"signup_codes_used": SignupCode.objects.filter(use_count__gt=0).count(),
"signup_codes_expired": SignupCode.object
|
s.exclude(
expiry__isnull=True
).filter(
expiry__lte=datetime.datetime.now(),
use_count=0
).count()
}
|
xiangke/pycopia
|
SMI/setup.py
|
Python
|
lgpl-2.1
| 1,514
| 0.044914
|
#!/usr/bin/python2.4
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
import ez_setup
ez_setup.use_setuptools()
from glob import glob
from setuptools import setup, Extension
NAME = "pycopia-SMI"
VERSION = "1.0a2"
ENAME = NAME.replace("-", "_")
DNAME = NAME.split("-", 1)[-1]
_libsmi = Extension("_libsmi", ["libsmi_wrap.c"], libraries=["smi"])
setup (name=NAME, version=VERSION,
ext_modules = [_libsmi],
py_modules = ["libsmi"], # stock SWIG wrapper
    namespace_packages = ["pycopia"],
    packages = ["pycopia", "pycopia.SMI"], # custom Python wrapper - use this one.
install_requires = ['pycopia-aid>=1.0a1,==dev'],
scripts = glob("bin/*"),
zip_safe = False,
test_suite = "test.SMITests",
author = "Keith Dart",
author_email = "keith@kdart.com",
description = "Python wrapper for libsmi, providing access to MIB/SMI data files.",
long_description = """Python wrapper for libsmi, providing access to MIB/SMI data files.
Also provides a nicer API that is more object-oriented. Includes node interators.""",
license = "LGPL",
keywords = "SMI MIB SNMP",
url = "http://www.pycopia.net/",
download_url = "http://pycopia.googlecode.com/svn/trunk/%s#egg=%s-dev" % (DNAME, ENAME),
#download_url = "ftp://ftp.pycopia.net/pub/python/%s.%s.tar.gz" % (NAME, VERSION),
classifiers = ["Operating System :: POSIX",
"Topic :: System :: Networking :: Monitoring",
"Intended Audience :: Developers"],
)
|
alexkasko/krakatau-java
|
krakatau-lib/src/main/resources/Lib/Krakatau/ssa/constraints/obj_c.py
|
Python
|
gpl-3.0
| 4,428
| 0.005194
|
import itertools
from ..mixin import ValueType
from .int_c import IntConstraint
from .. import objtypes
array_supers = 'java/lang/Object','java/lang/Cloneable','java/io/Serializable'
obj_fset = frozenset([objtypes.ObjectTT])
def isAnySubtype(env, x, seq):
return any(objtypes.isSubtype(env,x,y) for y in seq)
class TypeConstraint(ValueType):
__slots__ = "env supers exact isBot".split()
def __init__(self, env, supers, exact):
self.env, self.supers, self.exact = env, frozenset(supers), frozenset(exact)
self.isBot = objtypes.ObjectTT in supers
temp = self.supers | self.exact
assert(objtypes.NullTT not in temp)
assert(all(objtypes.isBaseTClass(tt) for tt in supers))
assert(all(objtypes.dim(tt) < 999 for tt in exact))
@staticmethod
def fromTops(*args):
return TypeConstraint(*args)
def _key(self): return self.supers, self.exact
def __nonzero__(self): return bool(self.supers or self.exact)
def getSingleTType(self):
#comSuper doesn't care about order so we can freely pass in nondeterministic order
return objtypes.commonSupertype(self.env, list(self.supers) + list(self.exact))
def isBoolOrByteArray(self):
if self.supers or len(self.exact) != 2:
return False
tt1, tt2 = self.exact
bases = objtypes.baset(tt1), objtypes.baset(tt2)
return objtypes.dim(tt1) == objtypes.dim(tt2) and sorted(bases) == [objtypes.baset(objtypes.BoolTT), objtypes.baset(objtypes.ByteTT)]
@staticmethod
def reduce(env, supers, exact):
newsupers = []
for x in supers:
if not isAnySubtype(env, x, newsupers):
newsupers = [y for y in newsupers if not objtypes.isSubtype(env, y, x)]
newsupers.append(x)
newexact = [x for x in exact if not isAnySubtype(env, x, newsupers)]
return TypeConstraint(env, newsupers, newexact)
def join(*cons):
assert(len(set(map(type, cons))) == 1)
env = cons[0].env
#optimize for the common case of joining with itself or with bot
cons = set(c for c in cons if not c.isBot)
if not cons:
return TypeConstraint(env, obj_fset, [])
elif len(cons) == 1:
return cons.pop()
assert(len(cons) == 2) #joining more than 2 not currently supported
supers_l, exact_l = zip(*(c._key() for c in cons))
newsupers = set()
for t1,t2 in itertools.product(*supers_l):
if objtypes.isSubtype(env, t1, t2):
newsupers.add(t1)
elif objtypes.isSubtype(env, t2, t1):
newsupers.add(t2)
else: #TODO: need to add special handling for interfaces here
pass
newexact = frozenset.union(*exact_l)
for c in cons:
newexact = [x for x in newexact if x in c.exact or isAnySubtype(env, x, c.supers)]
return TypeConstraint.reduce(env, newsupers, newexact)
def meet(*cons):
supers = frozenset.union(*(c.supers for c in cons))
exact = frozenset.union(*(c.exact for c in cons))
return TypeConstraint.reduce(cons[0].env, supers, exact)
class ObjectConstraint(ValueType):
__slots__ = "null types arrlen isBot".split()
def __init__(self, null, types):
self.null, self.types = null, types
        self.isBot = null and types.isBot
@staticmethod
def constNull(env):
        return ObjectConstraint(True, TypeConstraint(env, [], []))
@staticmethod
def fromTops(env, supers, exact, nonnull=False):
types = TypeConstraint(env, supers, exact)
if nonnull and not types:
return None
return ObjectConstraint(not nonnull, types)
def _key(self): return self.null, self.types
def isConstNull(self): return self.null and not self.types
def getSingleTType(self):
return self.types.getSingleTType() if self.types else objtypes.NullTT
def join(*cons):
null = all(c.null for c in cons)
types = TypeConstraint.join(*(c.types for c in cons))
if not null and not types:
return None
res = ObjectConstraint(null, types)
return cons[0] if cons[0] == res else res
def meet(*cons):
null = any(c.null for c in cons)
types = TypeConstraint.meet(*(c.types for c in cons))
return ObjectConstraint(null, types)
|
serghei/kde3-kdeutils
|
superkaramba/examples/setIncomingData/2.py
|
Python
|
gpl-2.0
| 2,246
| 0.007124
|
#
# Written by Luke Kenneth Casson Leighton <lkcl@lkcl.net>
# This theme demonstrates how to receive data sent from another theme.
#this import statement allows access to the karamba functions
import karamba
drop_txt = None
#this is called when you widget is initialized
def initWidget(widget):
# this resets the text to "" so we know we've never
# received anything yet from the other theme
name = karamba.getPrettyThemeName(widget)
print "2.py name: ", name
karamba.setIncomingData(widget, name, "")
karamba.redrawWidget(widget)
# this is a pain. in order to avoid memory-related threading problems,
# and also locking, with the communication between themes, the
# communication is done asynchronously. so you have to POLL for the
# information, by reading getIncomingData().
#
# therefore, you must set an interval=time_in_ms in your receiving .theme
# file (see 2.theme) and then call getIncomingData() in here.
#
# it's not ideal - but it works.
#
# NOTE: the data received - by getIncomingData - is NOT, i repeat NOT
# deleted when you call getIncomingData.
# so, obviously, you need to take care to not activate repeatedly.
# you could do this in several ways. one of them is to send, in
# the calling theme (the one that calls setIncomingData) a sequential
# number as part of the message.
#
# alternatively, you could reset the text to "" (see above)
expected_seq = 0
def widgetUpdated(widget):
global expected_seq # i hate globals. please write better code than this example.
# get the "message"...
disp = karamba.getIncomingData(widget)
if disp == "":
return
# decode it...
(seq, x, y, button) = eval(disp)
# if it's been seen before, skip it...
if seq <= expected_seq:
        return
expected_seq += 1
message = "seq:%d x:%d y:%d btn:%d" % (seq, x, y, button)
# delete previous text if exists.
global drop_txt
if drop_txt is not None:
karamba.deleteText(widget, drop_txt)
# display it...
drop_txt = karamba.createText(widget, 0, 20, 300, 20, message)
karamba.changeTextColor(widget, drop_txt, 252,252,252)
    karamba.redrawWidget(widget)
pass
# This will be printed when the widget loads.
print "Loaded my python
|
2.py extension!"
|
seims/SEIMS
|
scenario_analysis/util.py
|
Python
|
gpl-2.0
| 1,939
| 0.00361
|
import os, platform
sysstr = platform.system()
if sysstr == "Windows":
LF = '\r\n'
elif sysstr == "Linux":
LF = '\n'
def StripStr(str):
    # @Function: Remove spaces (' ') and tabs ('\t') at the beginning and end of the string
oldStr = ''
newStr = str
while oldStr != newStr:
oldStr = newStr
newStr = oldStr.strip('\t')
newStr = newStr.strip(' ')
return newStr
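# Illustrative: StripStr('\t  value \t') -> 'value'; only leading and
# trailing spaces/tabs are removed, inner whitespace is kept.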
def SplitStr(str, spliters=None):
    # @Function: Split the string by the given spliters; space (' ') and tab ('\t') by default
# spliters = [' ', '\t']
# spliters = []
# if spliter is not None:
# spliters.append(spliter)
if spliters is None:
spliters = [' ', '\t']
destStrs = []
srcStrs = [str]
while True:
oldDestStrs = srcStrs[:]
for s in spliters:
for srcS in srcStrs:
tempStrs = srcS.split(s)
for tempS in tempStrs:
tempS = StripStr(tempS)
if tempS != '':
destStrs.append(tempS)
srcStrs = destStrs[:]
destStrs = []
if oldDestStrs == srcStrs:
destStrs = srcStrs[:]
break
return destStrs
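# Illustrative: SplitStr(' a\tb  c ') -> ['a', 'b', 'c'] with the default
# spliters (space and tab); empty fragments are dropped.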
def isPathExists(path):
    if os.path.isdir(path):
if os.path.exists(path):
return True
else:
return False
else:
return False
def WriteLog(logfile, contentlist, MODE='replace'):
if os.path.exists(logfile):
if MODE == 'replace':
os.remove(logfile)
            logStatus = open(logfile, 'w')
else:
logStatus = open(logfile, 'a')
else:
logStatus = open(logfile, 'w')
if isinstance(contentlist, list) or isinstance(contentlist,tuple):
for content in contentlist:
logStatus.write("%s%s" % (content, LF))
else:
logStatus.write(contentlist)
logStatus.flush()
logStatus.close()
|
tensorflow/tensorflow
|
tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/shapes_for_arguments.py
|
Python
|
apache-2.0
| 1,730
| 0.009827
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# RUN: %p/shapes_for_arguments | FileCheck %s
# pylint: disable=missing-docstring,line-too-long
import tensorflow.compat.v2 as tf
from tensorflow.compiler.mlir.tensorflow.tests.tf_saved_model import common
class TestModule(tf.Module):
# Check that we get shapes annotated on function arguments.
#
# Besides checking the shape on the function input argument, this test also
# checks that the shape on the input argument is propagated to the return
# value.
# We eventually want to move the shape inference to a pass separate from
# the initial import, in which case that aspect of this test doesn't make much
  # sense and will be superseded by MLIR->MLIR shape inference tests.
#
# CHECK: func {{@[a-zA-Z_0-9]+}}(%arg0: tensor<f32> {{.*}}) -> (tensor<f32> {{.*}})
# CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["some_function"]
@tf.function(input_signature=[tf.TensorSpec([], tf.float32)])
def some_function(self, x):
return x
if __name__ == '__main__':
common.do_test(TestModule)
|
puyilio/Application-for-PyBoard
|
About_author.py
|
Python
|
gpl-3.0
| 1,254
| 0.011962
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
from PyQt4 import QtGui,QtCore
from Ui_about_author import Ui_About
_IME = "<p
|
>Author: Bojan Ili""ć</p>"
_FAKULTET = "Faculty
|
of Electrical Engineering, University of Belgrade - ETF"
_MAIL = "https.rs@gmail.com"
_URL = "<a href = ""https://www.facebook.com/puzicius>Facebook link</a>"
#-------------------------------------------------------------------------------
class AboutWindow2(QtGui.QDialog):
""" Class wrapper for about window ui """
def __init__(self):
super(AboutWindow2,self).__init__()
self.setupUI()
#print sys.stdout.encoding
def setupUI(self):
#create window from ui
self.ui=Ui_About()
self.ui.setupUi(self)
self.ui.lblVersion.setText("{}".format(_IME))
self.ui.lblVersion2.setText("{}".format(_FAKULTET))
self.ui.lblVersion3.setText("E-mail: {}".format(_MAIL))
self.ui.lblURL.setText(_URL)
#-------------------------------------------------------------------------------
def main():
app = QtGui.QApplication(sys.argv)
form = AboutWindow2()
form.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()
|
rajeev001114/Grade-Recording-System
|
project/MSG/migrations/0018_auto_20150917_0722.py
|
Python
|
gpl-3.0
| 550
| 0
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('MSG', '0017_auto_20150917_0707'),
]
operations = [
migrations.RemoveField(
model_name='content',
name='filename',
),
migrations.AddField(
model_name='content',
name='file',
field=models.FileField(default=12, upload_to=b'.'),
preserve_default=False,
),
]
| |
pylanglois/Social-Network-Harvester
|
SocialNetworkHarvester/snh/management/commands/cronharvester/youtubech.py
|
Python
|
bsd-3-clause
| 8,013
| 0.009485
|
# coding=UTF-8
from datetime import timedelta
import resource
import time
import urllib
from snh.models.youtubemodel import *
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
import snhlogger
logger = snhlogger.init_logger(__name__, "youtube.log")
def run_youtube_harvester():
harvester_list = YoutubeHarvester.objects.all()
for harvester in harvester_list:
logger.info(u"The harvester %s is %s" %
(unicode(harvester),
"active" if harvester.is_active else "inactive"))
if harvester.is_active:
run_harvester_v1(harvester)
def sleeper(retry_count):
retry_delay = 1
wait_delay = retry_count*retry_delay
wait_delay = 10 if wait_delay > 10 else wait_delay
time.sleep(wait_delay)
def get_timedelta(dm_time):
ts = datetime.strptime(dm_time,'%Y-%m-%dT%H:%M:%S+0000')
return (datetime.utcnow() - ts).days
def get_existing_user(param):
user = None
try:
user = YTUser.objects.get(**param)
except MultipleObjectsReturned:
user = YTUser.objects.filter(**param)[0]
logger.warning(u"Duplicated user in DB! %s, %s" % (user, user.fid))
except ObjectDoesNotExist:
pass
return user
def update_user(harvester, userid):
snh_user = None
try:
uniuserid = urllib.urlencode({"k":userid.encode('utf-8')}).split("=")[1:][0]
ytuser = harvester.api_call("GetYouTubeUserEntry",{"username":uniuserid})
split_uri = ytuser.id.text.split("/")
fid = split_uri[len(split_uri)-1]
snh_user = get_existing_user({"fid__exact":fid})
if not snh_user:
snh_user = get_existing_user({"username__exact":userid})
if not snh_user:
snh_user = YTUser(
fid=fid,
username=userid,
)
snh_user.save()
logger.info(u"New user created in status_from_search! %s", snh_user)
snh_user.update_from_youtube(ytuser)
except gdata.service.RequestError, e:
msg = u"RequestError on user %s. Trying to update anyway" % (userid)
logger.info(msg)
if e[0]["status"] == 403 or e[0]["status"] == 400:
snh_user = get_existing_user({"username__exact":userid})
if not snh_user:
snh_user = YTUser(
username=userid,
)
snh_user.save()
logger.info(u"New user created in status_from_search! %s", snh_user)
else:
msg = u"RequestError on user %s!!! Force update failed!!!" % (userid)
logger.exception(msg)
except:
msg = u"Cannot update user %s" % (userid)
logger.exception(msg)
return snh_user
def update_users(harvester):
all_users = harvester.ytusers_to_harvest.all()
for snhuser in all_users:
if not snhuser.error_triggered:
uid = snhuser.fid if snhuser.fid else snhuser.username
update_user(harvester, uid)
else:
logger.info(u"Skipping user update: %s(%s) because user has triggered the error flag." % (unicode(snhuser), snhuser.fid if snhuser.fid else "0"))
usage = resource.getrusage(resource.RUSAGE_SELF)
logger.info(u"User harvest completed %s Mem:%s MB" % (harvester,unicode(getattr(usage, "ru_maxrss")/(1024.0))))
def update_video(snhuser, ytvideo):
split_uri = ytvideo.id.text.split("/")
fid = split_uri[len(split_uri)-1]
snhvideo = None
try:
try:
snhvideo = YTVideo.objects.get(fid__exact=fid)
except ObjectDoesNotExist:
snhvideo = YTVideo(fid=fid, user=snhuser)
snhvideo.save()
snhvideo.update_from_youtube(snhuser, ytvideo)
except:
msg = u"Cannot update video %s" % (unicode(ytvideo.id.text,'UTF-8'))
logger.exception(msg)
return snhvideo
def update_comment(harvester, snhvideo, ytcomment):
    author_name = ytcomment.author[0].name.text
snhuser = update_user(harvester, author_name)
split_uri = ytcomment.id.text.split("/")
fid = split_uri[len(split_uri)-1]
try:
try:
snhcomment = YTComment.objects.get(fid__exact=fid)
except ObjectDoesNotExist:
snhcomment = YTComment(fid=fid, video=snhvideo)
snhcomment.save()
snhcomment.update_from_youtube(snhvideo, snhuser, ytcomment)
except:
msg = u"Cannot update comment %s" % (unicode(ytcomment.id.text,'UTF-8'))
logger.exception(msg)
usage = resource.getrusage(resource.RUSAGE_SELF)
logger.debug(u"Commment updated: comid:%s vidid:%s %s Mem:%s MB" % (snhcomment.fid,snhvideo.fid, harvester,unicode(getattr(usage, "ru_maxrss")/(1024.0))))
return snhcomment
def update_all_comment_helper(harvester, snhvideo, comment_list):
for comment in comment_list.entry:
update_comment(harvester, snhvideo, comment)
get_next_comment_uri = comment_list.GetNextLink().href if comment_list.GetNextLink() else None
return get_next_comment_uri
def update_all_comment(harvester,snhvideo):
comment_list = harvester.api_call("GetYouTubeVideoCommentFeed",{"video_id":snhvideo.fid})
get_next_comment_uri = update_all_comment_helper(harvester, snhvideo, comment_list)
while get_next_comment_uri:
comment_list = harvester.api_call("GetYouTubeVideoCommentFeed",{"uri":get_next_comment_uri})
get_next_comment_uri = update_all_comment_helper(harvester, snhvideo, comment_list)
usage = resource.getrusage(resource.RUSAGE_SELF)
logger.info(u"Comment harvest completed for this video: %s %s Mem:%s MB" % (snhvideo.fid, harvester,unicode(getattr(usage, "ru_maxrss")/(1024.0))))
def update_all_videos(harvester):
all_users = harvester.ytusers_to_harvest.all()
for snhuser in all_users:
out_of_window = False
if not snhuser.error_triggered:
logger.info(u"Will update user: %s(%s)" % (unicode(snhuser), snhuser.fid if snhuser.fid else "0"))
get_vid_url = 'http://gdata.youtube.com/feeds/api/users/%s/uploads?' % snhuser.username
while get_vid_url and not out_of_window:
video_list = harvester.api_call("GetYouTubeVideoFeed",{"uri":get_vid_url})
for video in video_list.entry:
published = datetime.strptime(video.published.text,'%Y-%m-%dT%H:%M:%S.000Z')
if published < harvester.harvest_window_to:
snhvideo = update_video(snhuser, video)
update_all_comment(harvester, snhvideo)
if published < harvester.harvest_window_from:
out_of_window = True
break
if not out_of_window:
get_vid_url = video_list.GetNextLink().href if video_list.GetNextLink() else None
else:
logger.info(u"Skipping user update: %s(%s) because user has triggered the error flag." % (unicode(snhuser), snhuser.fid if snhuser.fid else "0"))
usage = resource.getrusage(resource.RUSAGE_SELF)
logger.info(u"Video harvest completed %s Mem:%s MB" % (harvester,unicode(getattr(usage, "ru_maxrss")/(1024.0))))
def run_harvester_v1(harvester):
harvester.start_new_harvest()
try:
start = time.time()
update_users(harvester)
update_all_videos(harvester)
logger.info(u"Results computation complete in %ss" % (time.time() - start))
except:
logger.exception(u"EXCEPTION: %s" % harvester)
finally:
usage = resource.getrusage(resource.RUSAGE_SELF)
harvester.end_current_harvest()
logger.info(u"End: %s Stats:%s Mem:%s MB" % (harvester,unicode(harvester.get_stats()),unicode(getattr(usage, "ru_maxrss")/(1024.0))))
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2017_11_01/models/azure_reachability_report_parameters_py3.py
|
Python
|
mit
| 2,234
| 0.000895
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AzureReachabilityReportParameters(Model):
"""Geographic and time constraints for Azure reachability report.
All required parameters must be populated in order to send to Azure.
:param provider_location: Required.
:type provider_location:
~azure.mgmt.network.v2017_11_01.models.AzureReachabilityReportLocation
:param providers: List of Internet service providers.
:type providers: list[str]
:param azure_locations: Optional Azure regions to scope the query to.
:type azure_locations: list[str]
:param start_time: Required. The start time for the Azure reachability
report.
:type start_time: datetime
:param end_time: Required. The end time for the Azure reachability report.
:type end_time: datetime
"""
_validation = {
'provider_location': {'required': True},
'start_time': {'required': True},
'end_time': {'required': True},
}
_attribute_map = {
'provider_location': {'key': 'providerLocation', 'type': 'AzureReachabilityReportLocation'},
'providers': {'key': 'providers', 'type': '[str]'},
'azure_locations': {'key': 'azureLocations', 'type': '[str]'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
}
def __init__(self, *, provider_location, start_time, end_time, providers=None, azure_locations=None, **kwargs) -> None:
super(AzureReachabilityReportParameters, self).__init__(**kwargs)
self.provider_location = provider_location
self.providers = providers
self.azure_locations = azure_locations
self.start_time = start_time
self.end_time = end_time
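# Illustrative construction (values are placeholders):
#   params = AzureReachabilityReportParameters(
#       provider_location=some_location, start_time=start, end_time=end,
#       providers=['SomeProvider'])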
|
julietalucia/page-objects
|
page_objects/__init__.py
|
Python
|
mit
| 4,653
| 0.001075
|
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By
# Map PageElement constructor arguments to webdriver locator enums
_LOCATOR_MAP = {'css': By.CSS_SELECTOR,
'id_': By.ID,
'name': By.NAME,
'xpath': By.XPATH,
'link_text': By.LINK_TEXT,
'partial_link_text': By.PARTIAL_LINK_TEXT,
'tag_name': By.TAG_NAME,
'class_name': By.CLASS_NAME,
}
class PageObject(object):
"""Page Object pattern.
:param webdriver: `selenium.webdriver.WebDriver`
Selenium webdriver instance
:param root_uri: `str`
        Root URI to base any calls to the ``PageObject.get`` method. If not defined
        in the constructor it will try to look it up from the webdriver object.
"""
def __init__(self, webdriver, root_uri=None):
self.w = webdriver
self.root_uri = root_uri if root_uri else getattr(self.w, 'root_uri', None)
def get(self, uri):
"""
:param uri: URI to GET, based off of the root_uri attribute.
"""
root_uri = self.root_uri or ''
self.w.get(root_uri + uri)
class PageElement(object):
"""Page Element descriptor.
:param css: `str`
Use this css locator
:param id_: `str`
Use this element ID locator
:param name: `str`
Use this element name locator
:param xpath: `str`
Use this xpath locator
:param link_text: `str`
Use this link text locator
:param partial_link_text: `str`
Use this partial link text locator
:param tag_name: `str`
Use this tag name locator
:param class_name: `str`
Use this class locator
:param context: `bool`
This element is expected to be called with context
    Page Elements are used to access elements on a page. They are constructed
    using this factory method to specify the locator for the element.
>>> from page_objects import PageObject, PageElement
>>> class MyPage(PageObject):
elem1 = PageElement(css='div.myclass')
elem2 = PageElement(id_='foo')
elem_with_context = PageElement(name='bar', context=True)
    Page Elements act as property descriptors for their Page Object; you can get
and set them as normal attributes.
"""
def __init__(self, context=False, **kwargs):
if not kwargs:
raise ValueError("Please specify a locator")
        if len(kwargs) > 1:
            raise ValueError("Please specify only one locator")
k, v = next(iter(kwargs.items()))
self.locator = (_LOCATOR_MAP[k], v)
self.has_context = bool(context)
def find(self, context):
try:
return context.find_element(*self.locator)
except NoSuchElementException:
return None
def __get__(self, instance, owner, context=None):
if not instance:
return None
if not context and self.has_context:
return lambda ctx: self.__get__(instance, owner, context=ctx)
if not context:
context = instance.w
return self.find(context)
def __set__(self, instance, value):
if self.has_context:
raise ValueError("Sorry, the set descriptor doesn't support elements with context.")
elem = self.__get__(instance, instance.__class__)
if not elem:
raise ValueError("Can't set value, element not found")
elem.send_keys(value)
class MultiPageElement(PageElement):
""" Like `PageElement` but returns multiple results.
>>> from page_objects import PageObject, MultiPageElement
>>> class MyPage(PageObject):
all_table_rows = MultiPageElement(tag='tr')
elem2 = PageElement(id_='foo')
elem_with_context = PageElement(tag='tr', context=True)
"""
def find(self, context):
try:
return context.find_elements(*self.locator)
except NoSuchElementException:
return []
def __set__(self, instance, value):
if self.has_context:
raise ValueError("Sorry, the set descriptor doesn't support elements with context.")
elems = self.__get__(instance, instance.__class__)
if not elems:
raise ValueError("Can't set value, no elements found")
[elem.send_keys(value) for elem in elems]
# Backwards compatibility with previous versions that used factory methods
page_element = PageElement
multi_page_element = MultiPageElement
|
zhuangjun1981/retinotopic_mapping
|
retinotopic_mapping/DisplayStimulus.py
|
Python
|
gpl-3.0
| 29,197
| 0.002432
|
'''
Visual Stimulus codebase implements several classes to display stimulus routines.
Can display frame by frame or compress data for certain stimulus routines and
display by index. Used to manage information between experimental devices and
interact with `StimulusRoutines` to produce visual display and log data. May also
be used to save and export movies of experimental stimulus routines for
presentation.
'''
from psychopy import visual, event
import PIL
import os
import datetime
import numpy as np
import matplotlib.pyplot as plt
import time
from tools import FileTools as ft
from tools.IO import nidaq as iodaq
try:
import skimage.external.tifffile as tf
except ImportError:
import tifffile as tf
def analyze_frames(ts_start, ts_end, refresh_rate, check_point=(0.02, 0.033, 0.05, 0.1)):
"""
Analyze frame durations of time stamp data.
Computes relevant statistics with respect to the presentation
of a given stimulus. The statistics are computed in order
to understand the timing of the frames since the monitor refresh
rate isn't always an accurate tool for timing.
Parameters
----------
ts_start : 1d array
list of time stamps of each frame start (in seconds).
ts_end: 1d array
list of time stamps of each frame end (in seconds).
refresh_rate : float
the refresh rate of imaging monitor measured (in Hz).
check_point : tuple, optional
Returns
-------
frame_duration : ndarray
list containing the length of each time stamp.
frame_stats : str
string containing a statistical analysis of the image frames.
"""
frame_interval = np.diff(ts_start)
plt.figure()
plt.hist(frame_interval, bins=np.linspace(0.0, 0.05, num=51))
refresh_rate = float(refresh_rate)
num_frames = ts_start.shape[0]
disp_true = ts_end[-1] - ts_start[0]
disp_expect = float(num_frames) / refresh_rate
avg_frame_time = np.mean(frame_interval) * 1000
sdev_frame_time = np.std(frame_interval) * 1000
short_frame = min(frame_interval) * 1000
short_frame_ind = np.where(frame_interval == np.min(frame_interval))[0][0]
long_frame = max(frame_interval) * 1000
long_frame_ind = np.where(frame_interval == np.max(frame_interval))[0][0]
frame_stats = ''
frame_stats += '\nTotal number of frames : {}.'.format(num_frames)
frame_stats += '\nTotal length of display : {:.5f} second.'.format(disp_true)
frame_stats += '\nExpected length of display : {:.5f} second.'.format(disp_expect)
frame_stats += '\nMean of frame intervals : {:.2f} ms.'.format(avg_frame_time)
frame_stats += '\nS.D. of frame intervals : {:.2f} ms.'.format(sdev_frame_time)
frame_stats += '\nShortest frame: {:.2f} ms, index: {}.'.format(short_frame, short_frame_ind)
frame_stats += '\nLongest frame : {:.2f} ms, index: {}.'.format(long_frame, long_frame_ind)
for i in range(len(check_point)):
check_number = check_point[i]
frame_number = len(frame_interval[frame_interval > check_number])
frame_stats += '\nNumber of frames longer than {:5.3f} second: {}; {:6.2f}%'. \
format(check_number, frame_number, (float(frame_number) * 100 / num_frames))
print(frame_stats)
return frame_interval, frame_stats
class DisplaySequence(object):
"""
Display the stimulus routine from memory.
Takes care of high level management of your computer
hardware with respect to its interactions within a given experiment.
Stimulus presentation routines are specified and external connection
to National Instuments hardware devices is provided. Also takes care
of the logging of relevant experimental data collected and where it
will be stored on the computer used for the experiment.
Parameters
----------
log_dir : str
system directory path to where log display will be saved.
backupdir : str, optional
copy of directory path to save backup, defaults to `None`.
identifier: str, optional
identifing string for this particular experiment, this will
show up in the name of log file when display is done.
display_iter : int, optional
defaults to `1`
mouse_id : str, optional
label for mouse, defaults to 'Test'.
user_id : str, optional
label for person performing experiment, defaults to 'Name'.
psychopy_mon : str, optional
label for monitor used for displaying the stimulus, defaults to
'testMonitor'.
is_interpolate : bool, optional
defaults to `False`.
is_triggered : bool, optional
if `True`, stimulus will not display until triggered. if `False`,
stimulus will display automatically. defaults to `False`.
is_by_index : bool, optional
determines if stimulus is displayed by index which saves memory
and should speed up routines. Note that not every stimulus can be
displayed by index and hence the default value is `False`.
is_save_sequence : bool, optional
defaults to False
if True, the class will save the sequence of images to be displayed
as a tif file, in the same folder of log file. If self.is_by_index
is True, only unique frames will be saved. Note, this will save
the whole sequence even if the display is interrupted in the middle.
trigger_NI_dev : str, optional
defaults to 'Dev1'.
trigger_NI_port : int, optional
defaults to `1`.
trigger_NI_line : int, optional
defaults to `0`.
is_sync_pulse : bool, optional
defaults to `True`.
sync_pulse_NI_dev : str, optional
defaults to 'Dev1'.
sync_pulse_NI_port : int, optional
defaults to 1.
sync_pulse_NI_line : int, optional
defaults to 1.
trigger_event : str
should be one of "negative_edge", "positive_edge", "high_level",
or "low_level". defaults to "negative_edge".
display_screen : int
determines which monitor to display stimulus on. defaults to `0`.
initial_background_color : float
defaults to `0`. should be in the range from -1. (black) to 1. (white)
color_weights : tuple, optional
defaults to (1., 1., 1.)
This should be a tuple with 3 elements. Each element specifies the
weight of each color channel (R, G, B). The value range of each
element is in the range [0., 1.]. This is designed in such way that
if you want to suppress a certain channel i.e. red channel, you can
change this parameter to (0., 1., 1.)
"""
def __init__(self,
log_dir,
backupdir=None,
identifier='000',
display_iter=1,
mouse_id='Test',
user_id='Name',
psychopy_mon='testMonitor',
is_by_index=True,
is_interpolate=False,
is_triggered=False,
is_save_sequence=False,
trigger_event="negative_edge",
trigger_NI_dev='Dev1',
trigger_NI_port=1,
trigger_NI_line=0,
is_sync_pulse=False,
sync_pulse_NI_dev='Dev1',
sync_pulse_NI_port=1,
sync_pulse_NI_line=1,
display_screen=0,
initial_background_color=0.,
color_weights=(1., 1., 1.)):
"""
initialize `DisplaySequence` object
"""
self.sequence = None
self.seq_log = {}
self.identifier = str(identifier)
self.psychopy_mon = psychopy_mon
self.is_interpolate = is_interpolate
self.is_triggered = is_triggered
self.is_by_index = is_by_index
self.is_save_sequence = is_save_sequence
self.trigger_NI_dev = trigger_NI_dev
        self.trigger_NI_port = trigger_NI_port
|
soarpenguin/python-scripts
|
www-url.py
|
Python
|
gpl-3.0
| 823
| 0.013564
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Import system libs
import re
class WWW():
def __init__(self):
pass
def get_domain(self, site):
        if site.startswith('https://'):
            site = site[8:]
        elif site.startswith('http://'):
            site = site[7:]
        return site.split('/')[0]
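    # Illustrative: get_domain('https://example.com/a/b') -> 'example.com'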
def is_url_format(self, url):
        regex = """
            ^                                               # must match from the start of the string
(?:http(?:s)?://)? #protocol
(?:[\w]+(?::[\w]+)?@)? #user@password
([-\w]+\.)+[\w-]+(?:.)? #domain
(?::\d{2,5})? #port
(/?[-:\w;\./?%&=#]*)? #params
$
"""
result = re.search(regex, url, re.X|re.I)
if result:
return True
else:
return False
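
# A minimal usage sketch for the WWW helper above; the sample URLs are
# illustrative and not part of the original script.
if __name__ == '__main__':
    w = WWW()
    print w.get_domain('https://example.com/some/path')                    # example.com
    print w.is_url_format('http://user:pass@example.com:8080/index?x=1')   # True
    print w.is_url_format('not a url')                                     # False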
|
volkodav1985/volkodavpython
|
model/contact.py
|
Python
|
apache-2.0
| 957
| 0.015674
|
from sys import maxsize
class Contact:
def __init__(self, firstname=None, lastname=None, company=None, homephone=None, workphone=None, mobilephone=None, secondphone=None, address=None, year=None, secondaddress=None, id=None):
self.firstname=firstname
self.lastname=lastname
self.company=company
self.homephone=homephone
self.mobilephone=mobilephone
self.workphone=workphone
self.secondphone=secondphone
self.address=address
self.year=year
self.secondaddress=secondaddress
self.id=id
def __repr__(self):
    return "%s:%s:%s" % (self.id, self.firstname, self.lastname)
def __eq__(self, other):
    return ((self.id is None or other.id is None or self.id == other.id)
            and self.lastname == other.lastname and self.firstname == other.firstname)
def id_or_max(self):
if self.id:
return int(self.id)
else:
return maxsize
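
# A minimal usage sketch for the Contact model above; the field values and
# the id are illustrative only.
if __name__ == '__main__':
    contacts = [Contact(firstname='Ann', lastname='Lee', id='7'),
                Contact(firstname='Bob', lastname='Roe')]
    # id_or_max() sorts contacts without a database id after those with one
    print(sorted(contacts, key=Contact.id_or_max))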
|
andir/ganeti
|
lib/tools/burnin.py
|
Python
|
bsd-2-clause
| 47,078
| 0.00822
|
#!/usr/bin/python
#
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Burnin program
"""
import sys
import optparse
import time
import socket
import urllib
import random
import string # pylint: disable=W0402
from itertools import izip, islice, cycle
from cStringIO import StringIO
from operator import or_
from ganeti import opcodes
from ganeti import constants
from ganeti import cli
from ganeti import errors
from ganeti import utils
from ganeti import hypervisor
from ganeti import compat
from ganeti import pathutils
from ganeti.confd import client as confd_client
from ganeti.runtime import (GetClient)
USAGE = ("\tburnin -o OS_NAME [options...] instance_name ...")
MAX_RETRIES = 3
LOG_HEADERS = {
0: "- ",
1: "* ",
2: "",
}
#: Disk templates supporting a single node
_SINGLE_NODE_DISK_TEMPLATES = compat.UniqueFrozenset([
constants.DT_DISKLESS,
constants.DT_PLAIN,
constants.DT_FILE,
constants.DT_SHARED_FILE,
constants.DT_EXT,
constants.DT_RBD,
constants.DT_GLUSTER
])
_SUPPORTED_DISK_TEMPLATES = compat.UniqueFrozenset([
constants.DT_DISKLESS,
constants.DT_DRBD8,
constants.DT_EXT,
constants.DT_FILE,
constants.DT_PLAIN,
constants.DT_RBD,
constants.DT_SHARED_FILE,
constants.DT_GLUSTER
])
#: Disk templates for which import/export is tested
_IMPEXP_DISK_TEMPLATES = (_SUPPORTED_DISK_TEMPLATES - frozenset([
constants.DT_DISKLESS,
constants.DT_FILE,
constants.DT_SHARED_FILE,
constants.DT_GLUSTER
]))
class InstanceDown(Exception):
"""The checked instance was not up"""
class BurninFailure(Exception):
"""Failure detected during burning"""
def Usage():
"""Shows program usage information and exits the program."""
print >> sys.stderr, "Usage:"
print >> sys.stderr, USAGE
sys.exit(2)
def Log(msg, *args, **kwargs):
"""Simple function that prints out its argument.
"""
if args:
msg = msg % args
indent = kwargs.get("indent", 0)
sys.stdout.write("%*s%s%s\n" % (2 * indent, "",
LOG_HEADERS.get(indent, " "), msg))
sys.stdout.flush()
def Err(msg, exit_code=1):
"""Simple error logging that prints to stderr.
"""
sys.stderr.write(msg + "\n")
sys.stderr.flush()
sys.exit(exit_code)
def RandomString(size=8, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for x in range(size))
class SimpleOpener(urllib.FancyURLopener):
"""A simple url opener"""
# pylint: disable=W0221
def prompt_user_passwd(self, host, realm, clear_cache=0):
"""No-interaction version of prompt_user_passwd."""
# we follow parent class' API
# pylint: disable=W0613
return None, None
def http_error_default(self, url, fp, errcode, errmsg, headers):
"""Custom error handling"""
# make sure sockets are not left in CLOSE_WAIT, this is similar
# but with a different exception to the BasicURLOpener class
_ = fp.read() # throw away data
fp.close()
raise InstanceDown("HTTP error returned: code %s, msg %s" %
(errcode, errmsg))
OPTIONS = [
cli.cli_option("-o", "--os", dest="os", default=None,
help="OS to use during burnin",
metavar="<OS>",
completion_suggest=cli.OPT_COMPL_ONE_OS),
cli.HYPERVISOR_OPT,
cli.OSPARAMS_OPT,
cli.cli_option("--disk-size", dest="disk_size",
help="Disk size (determines disk count)",
default="1G", type="string", metavar="<size,size,...>",
completion_suggest=("512M 1G 4G 1G,256M"
" 4G,1G,1G 10G").split()),
cli.cli_option("--disk-growth", dest="disk_growth", help="Disk growth",
default="128m", type="string", metavar="<size,size,...>"),
cli.cli_option("--mem-size", dest="mem_size", help="Memory size",
default=None, type="unit", metavar="<size>",
completion_suggest=("128M 256M 512M 1G 4G 8G"
" 12G 16G").split()),
cli.cli_option("--maxmem-size", dest="maxmem_size", help="Max Memory size",
default=256, type="unit", metavar="<size>",
completion_suggest=("128M 256M 512M 1G 4G 8G"
" 12G 16G").split()),
cli.cli_option("--minmem-size", dest="minmem_size", help="Min Memory size",
default=128, type="unit", metavar="<size>",
completion_suggest=("128M 256M 512M 1G 4G 8G"
" 12G 16G").split()),
cli.cli_option("--vcpu-count", dest="vcpu_count", help="VCPU count",
default=3, type="unit", metavar="<count>",
completion_suggest=("1 2 3 4").split()),
cli.DEBUG_OPT,
cli.VERBOSE_OPT,
cli.NOIPCHECK_OPT,
cli.NONAMECHECK_OPT,
cli.EARLY_RELEASE_OPT,
cli.cli_option("--no-replace1", dest="do_replace1",
help="Skip disk replacement with the same secondary",
action="store_false", default=True),
cli.cli_option("--no-replace2", dest="do_replace2",
help="Skip disk replacement with a different secondary",
action="store_false", default=True),
cli.cli_option("--no-failover", dest="do_failover",
help="Skip instance failovers", action="store_false",
default=True),
cli.cli_option("--no-migrate", dest="do_migrate",
help="Skip instance live migration",
action="store_false", default=True),
cli.cli_option("--no-move", dest="do_move",
help="Skip instance moves", action="store_false",
default=True),
cli.cli_option("--no-importexport", dest="do_importexport",
help="Skip instance export/import", action="store_false",
default=True),
cli.cli_option("--no-startstop", dest="do_startstop",
help="Skip instance stop/start", action="store_false",
default=True),
cli.cli_option("--no-reinstall", dest="do_reinstall",
help="Skip instance reinstall", action="store_false",
default=True),
cli.cli_option("--no-reboot", dest="do_reboot",
help="Skip instance reboot", action="store_false",
default=True),
cli.cli_option("--no-renamesame", dest="do_renamesame",
help="Skip instance rename to same name", action="store_false",
default=True),
cli.cli_option("--reboot-types", dest="reboot_types",
help="Specify the reboot types", default=None),
cli.cli_option("--no-activate-disks", dest="do_activate_disks",
help="Skip disk activation/deactivation",
action="store_false", default=True),
cli.cli_option("--no-add-disks", dest="do_addremove_disks",
help="Skip disk addition/removal",
|
CPedrini/TateTRES
|
gspread/__init__.py
|
Python
|
apache-2.0
| 475
| 0
|
# -*- coding: utf-8 -*-
"""
gspread
~~~~~~~
Google Spreadsheets client library.
"""
__version__ = '0.2.1'
__author__ = 'Anton Burnashev'
from .client import Client, login
from .models import Spreadsheet, Worksheet, Cell
from .exceptions import (GSpreadException, AuthenticationError,
SpreadsheetNotFound, NoValidUrlKeyFound,
IncorrectCellLabel, WorksheetNotFound,
UpdateCellError, RequestError)
|
emgirardin/compassion-modules
|
child_compassion/controllers/web_children_hold.py
|
Python
|
agpl-3.0
| 2,889
| 0
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Michael Sandoz <michaelsandoz87@gmail.com>, Emanuel Cino
#
# The licence is in the file __openerp__.py
#
##############################################################################
import logging
from datetime import datetime
from datetime import timedelta
from openerp import http
from openerp.http import request
from openerp.addons.connector.queue.job import job
from openerp.addons.connector.session import ConnectorSession
from werkzeug.wrappers import Response
from werkzeug.datastructures import Headers
_logger = logging.getLogger(__name__)
class RestController(http.Controller):
@http.route('/web_children_hold', type='http', auth='public', methods=['GET'])
def handler_web_children_hold(self):
headers = request.httprequest.headers
self._validate_headers(headers)
# load children via a research on childpool
child_research = request.env['compassion.childpool.search'].sudo()
research = child_research.create({'take': 5})
research.rich_mix()
# create a hold for all children found
session = ConnectorSession.from_env(request.env)
hold_children_job.delay(session, research.id)
data = ""
# return principal children info
for child in research.global_child_ids:
if child.image_url:
data += '<img src="' + child.image_url + '"/> <br>'
data += child.name + ' ' + child.birthdate + '<br>'
headers = Headers()
response = Response(data, content_type='text/html', headers=headers)
return response
def _validate_headers(self, headers):
pass
##############################################################################
# CONNECTOR METHODS #
##############################################################################
@job(default_channel='root.global_pool')
def hold_children_job(session, research_id):
"""Job for holding requested children on the web."""
child_hold = session.env['child.hold.wizard'].with_context(
active_id=research_id).sudo()
expiration_date = datetime.now() + timedelta(minutes=15)
user_id = session.env['res.users'].\
search([('name', '=', 'Reber Rose-Marie')]).id
holds = child_hold.create({
'type': 'E-Commerce Hold',
'hold_expiration_date': expiration_date.strftime(
"%Y-%m-%dT%H:%M:%SZ"),
'primary_owner': user_id,
'secondary_owner': 'Carole Rochat',
'no_money_yield_rate': '1.1',
'yield_rate': '1.1',
'channel': 'Website',
})
holds.send()
|
neoranger/ActionLauncher
|
action_launcher.py
|
Python
|
gpl-3.0
| 8,866
| 0.012407
|
# -*- coding: utf-8 -*-
# Action Launcher Bot: a bot that can execute different actions depending on the command received
# Code written by Zagur of PortalLinux.es and modified by NeoRanger of neositelinux.com
# For proper use of the bot, please read the README file
import telebot
from telebot import types
import time
import random
import datetime
import token
import user
import os
import subprocess
import commands
import sys
from os.path import exists
import StringIO
import sys
reload(sys)
sys.setdefaultencoding('utf8')
#user = [line.rstrip('\n') for line in open('user.txt','rt')]
TOKEN = token.token_id
bot = telebot.TeleBot(TOKEN) # Creating our bot object.
bot.skip_pending=True # Skip the pending messages
##################################################################
#LISTENER #
##################################################################
def printUser(msg): #debug function (will print every message sent by any user to the console)
print str(msg.chat.first_name) + " [" + str(msg.chat.id) + "]: " + msg.text
def listener(messages):
for m in messages:
cid = m.chat.id
if m.content_type == 'text':
text = m.text
printUser(m) #print the sent message to the console
bot.set_update_listener(listener) #
##################################################################
#FUNCIONES PRINCIPALES DEL BOT (CON SEGURIDAD) #
##################################################################
@bot.message_handler(commands=['temp'])
def command_temp(m):
temp = commands.getoutput('sudo vcgencmd measure_temp')
send_message_checking_permission(m, temp)
@bot.message_handler(commands=['df'])
def command_espacio(m):
info = commands.getoutput('inxi -p')
send_message_checking_permission(m, info)
@bot.message_handler(commands=['uptime'])
def command_tiempo(m):
tiempo = commands.getoutput('uptime')
send_message_checking_permission(m, tiempo)
@bot.message_handler(commands=['free'])
def command_libre(m):
libre = commands.getoutput('free -m')
send_message_checking_permission(m, libre)
@bot.message_handler(commands=['info'])
def command_info(m):
screenfetch = commands.getoutput('screenfetch -n')
send_message_checking_permission(m, unicode(screenfetch))
@bot.message_handler(commands=['who'])
def command_who(m):
who = commands.getoutput('who')
send_message_checking_permission(m, who)
@bot.message_handler(commands=['shutdown'])
def command_shutdown(m):
shutdown = commands.getoutput('sudo shutdown -h now')
send_message_checking_permission(m, shutdown)
@bot.message_handler(commands=['reboot'])
def command_reboot(m):
reboot = commands.getoutput('sudo reboot')
send_message_checking_permission(m, reboot)
@bot.message_handler(commands=['repoup'])
def command_repoup(m):
repoup = commands.getoutput('sudo apt-get update')
send_message_checking_permission(m, repoup)
@bot.message_handler(commands=['sysup'])
def command_sysup(m):
sysup = commands.getoutput('sudo apt-get upgrade -y')
send_message_checking_permission(m, sysup)
@bot.message_handler(commands=['distup'])
def command_distup(m):
distup = commands.getoutput('sudo apt-get dist-upgrade -y')
send_message_checking_permission(m, distup)
@bot.message_handler(commands=['osversion'])
def command_osversion(m):
osversion = commands.getoutput('lsb_release -a')
send_message_checking_permission(m, osversion)
#Otra forma: osversion = commands.getoutput('cat /etc/os-release')
@bot.message_handler(commands=['screens'])
def command_screens(m):
screens = commands.getoutput('screen -ls | grep "pi" ')
send_message_checking_permission(m, screens)
@bot.message_handler(commands=['weather'])
def command_weather(m):
weather = commands.getoutput('inxi -w')
send_message_checking_permission(m, weather)
def extract_arg(arg):
return arg.split()[1:]
@bot.message_handler(commands=['tv_on_off'])
def command_tv_on_off(m):
status = extract_arg(m.text)
tv_on_off = commands.getoutput('sudo ./tv_on_off.sh ' + ' '.join(status))
# tv_on_off = subprocess(["sudo ./tv_on_off", status])
send_message_checking_permission(m, tv_on_off)
@bot.message_handler(commands=['ps_ram'])
def command_ps_ram(m):
ps_ram = commands.getoutput("ps aux | awk '{print $2, $4, $11}' | sort -k2r | head -n 10")
send_message_checking_permission(m, ps_ram)
@bot.message_handler(commands=['ps_cpu'])
def command_ps_cpu(m):
ps_cpu = commands.getoutput('ps -Ao user,uid,comm,pid,pcpu,tty --sort=-pcpu | head -n 6')
send_message_checking_permission(m, ps_cpu)
@bot.message_handler(commands=['server_torrent_restart'])
def command_torrent_res(m):
torrent_res = commands.getoutput('sudo service transmission-daemon restart')
send_message_checking_permission(m, torrent_res)
##################################################################
#FUNCIONES SIN SEGURIDAD (SIMPLES) #
##################################################################
@bot.message_handler(commands=['id'])
def command_id(m):
cid = m.chat.id
bot.send_message(cid, cid)
@bot.message_handler(commands=['ping'])
def command_test(m):
cid = m.chat.id
bot.send_message(cid, "Pong")
@bot.message_handler(commands=['help'])
def command_ayuda(m):
cid = m.chat.id
bot.send_message(cid, "Comandos Disponibles: /help\n /ping\n /temp(admin)\n /free(admin)\n /df(admin)\n /uptime(admin)\n /info(admin)\n /who\n /repoup(admin)\n /sysup(admin)\n /distup(admin)\n /screens(admin)\n /osversion(admin)\n /weather(admin)\n /nmap_all(admin)\n /nmap_active(admin)\n /start_nginx(admin)\n /stop_nginx(admin)\n /restart_nginx(admin)\n /bot_update(admin)\n /shutdown(admin)\n /reboot(admin)\n /ps_ram(admin)\n /ps_cpu(admin)\n")
#@bot.message_handler(commands=['apache'])
#def command_test(m):
# cid = m.chat.id
# bot.send_document(cid, '/home/ubuntu/apache2.conf','rb')
##################################################################
#MANEJO DEL SERVIDOR NGINX Y UPDATE DEL BOT VIA GIT #
##################################################################
@bot.message_handler(commands=['start_nginx'])
def command_start_nginx(m):
nginx_start = commands.getoutput('sudo service nginx start')
send_message_checking_permission(m, nginx_start)
@bot.message_handler(commands=['stop_nginx'])
def command_stop_nginx(m):
nginx_stop = commands.getoutput('sudo service nginx stop')
send_message_checking_permission(m, nginx_stop)
@bot.message_handler(commands=['restart_nginx'])
def command_restart_nginx(m):
nginx_restart = commands.getoutput('sudo service nginx restart')
send_message_checking_permission(m, nginx_restart)
@bot.message_handler(commands=['bot_update'])
def command_bot_update(m):
git_pull = commands.getoutput('git pull')
send_message_checking_permission(m, git_pull)
@bot.message_handler(commands=['nmap_all'])
def command_nmap_all(m):
nmap_all = commands.getoutput('sudo nast -m -i eth0')
send_message_checking_permission(m, nmap_all)
@bot.message_handler(commands=['nmap_active'])
def command_nmap_active(m):
nmap_active = commands.getoutput('sudo nast -g -i eth0')
send_message_checking_permission(m, nmap_active)
##################################################################
# FUNCION PARA CHEQUEAR PERMISOS A LA HORA DE EJECUTAR COMANDOS #
##################################################################
def send_message_checking_permission(m, response):
cid = m.chat.id
uid = m.from_user.id
if uid != user.user_id:
bot.send_message(cid, "You can't use the bot")
return
else:
bot.send_message(cid, "Triggering...")
bot.send_message(cid, response)
#def send_message_checking_permission(m, response):
# try:
# cid = m.chat.id
# uid = m.from_user.id
# if uid != user.
|
klebercode/lionsclub
|
eventi/subscriptions/tests/test_models.py
|
Python
|
mit
| 1,854
| 0.001618
|
# coding: utf-8
from django.test import TestCase
from django.db import IntegrityError
from datetime import datetime
from eventex.subscriptions.models import Subscription
class SubscriptionTest(TestCase):
def setUp(self):
self.obj = Subscription(
name='Henrique Bastos',
cpf='12345678901',
email='henrique@bastos.net',
phone='21-96186180'
)
def test_create(self):
"""
Subscription must have name, cpf, email, phone
"""
self.obj.save()
self.assertEqual(1, self.obj.pk)
def test_has_created_at(self):
"""
Subscription must have automatica created_at.
"""
self.obj.save()
self.assertIsInstance(self.obj.created_at, datetime)
def test_unicode(self):
self.assertEqual(u'Henrique Bastos', unicode(self.obj))
def test_paid_default_value_is_False(self):
"""
By default paid must be False.
"""
self.assertEqual(False, self.obj.paid)
class SubscriptionUniqueTest(TestCase):
def setUp(self):
# Create a first entry to force collision.
Subscription.objects.create(name='Henrique Bastos', cpf='12345678901',
email='henrique@bastos.net', phone='21-96186180')
def test_cpf_unique(self):
"""
CPF must be unique
"""
s = Subscription(name='Henrique Bastos', cpf='12345678901',
email='outro@email.com', phone='21-96186180')
self.assertRaises(IntegrityError, s.save)
def test_email_can_repeat(self):
"""
Email is not unique anymore.
"""
s = Subscription.objects.create(name='Henrique Bastos', cpf='00000000011',
email='henrique@bastos.net')
self.assertEqual(2, s.pk)
|
landism/pants
|
src/python/pants/backend/codegen/antlr/java/register.py
|
Python
|
apache-2.0
| 791
| 0.005057
|
# coding=utf-8
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.backend.codegen.antlr.java.antlr_java_gen import AntlrJavaGen
from pants.backend.codegen.antlr.java.java_antlr_library import JavaAntlrLibrary
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.goal.task_registrar import TaskRegistrar as task
def build_file_aliases():
return BuildFileAliases(
targets={
'java_antlr_library': JavaAntlrLibrary,
}
)
def register_goals():
task(name='antlr-java', action=AntlrJavaGen).install('gen')
|
thiagof/treeio
|
treeio/identities/forms.py
|
Python
|
mit
| 15,660
| 0.00166
|
# encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of Treeio.
# License www.tree.io/license
"""
Identities module forms
"""
from django import forms
from django.core.files.storage import default_storage
from django.template import defaultfilters
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from treeio.core.conf import settings
from treeio.core.models import AccessEntity, Object, ModuleSetting
from treeio.core.decorators import preprocess_form
from treeio.identities.models import Contact, ContactValue, ContactType, ContactField
from unidecode import unidecode
from PIL import Image
import re
preprocess_form()
class MassActionForm(forms.Form):
""" Mass action form for Reports """
delete = forms.ChoiceField(label=_("With selected"), choices=(('', '-----'), ('delete', _('Delete Completely')),
('trash', _('Move to Trash'))), required=False)
instance = None
def __init__(self, user, *args, **kwargs):
if 'instance' in kwargs:
self.instance = kwargs['instance']
del kwargs['instance']
super(MassActionForm, self).__init__(*args, **kwargs)
self.fields['delete'] = forms.ChoiceField(label=_("With selected"),
choices=(('', '-----'), ('delete', _('Delete Completely')),
('trash', _('Move to Trash'))), required=False)
def save(self, *args, **kwargs):
"Process form"
if self.instance:
if self.is_valid():
if self.cleaned_data['delete']:
if self.cleaned_data['delete'] == 'delete':
self.instance.delete()
if self.cleaned_data['delete'] == 'trash':
self.instance.trash = True
self.instance.save()
class ContactFieldForm(forms.ModelForm):
"Contact Field Form"
def clean_name(self):
"Ensure the name of the field only contains alphanumeric"
name = self.cleaned_data['name']
if not re.match(r'^[a-zA-Z0-9-_]+$', name):
raise forms.ValidationError(
_("Sorry, field names can only contain letters, numbers, hyphens (-) and underscores (_)"))
return name
def __init__(self, *args, **kwargs):
super(ContactFieldForm, self).__init__(*args, **kwargs)
self.fields['name'].label = _("Name")
self.fields['label'].label = _("Label")
self.fields['field_type'].label = _("Field type")
self.fields['required'].label = _("Required")
self.fields['details'].label = _("Details")
class Meta:
"Fields Form"
model = ContactField
fields = ('name', 'label', 'field_type', 'required', 'details')
class ContactTypeForm(forms.ModelForm):
"Contact Type Form"
def __init__(self, user, *args, **kwargs):
super(ContactTypeForm, self).__init__(*args, **kwargs)
self.fields['name'].label = _("Name")
self.fields['fields'].queryset = Object.filter_permitted(
user, ContactField.objects.all())
self.fields['fields'].help_text = ''
self.fields['fields'].label = _("Fields")
self.fields['details'].label = _("Details")
def clean_name(self):
"Ensures a contact with the same name doesn't already exists"
instance = getattr(self, 'instance', None)
name = self.cleaned_data['name']
if instance and not instance.id:
slug = unicode(name).replace(" ", "-")
slug = defaultfilters.slugify(unidecode(slug))
if ContactType.objects.filter(slug=slug).exists():
raise forms.ValidationError(
_("Contact Type with such name already exists."))
return name
class Meta:
"Contact Type Form"
model = ContactType
fields = ('name', 'fields', 'details')
class ContactForm(forms.Form):
""" ContactForm """
name = forms.CharField(
max_length=256, widget=forms.TextInput(attrs={'size': '50'}))
instance = None
files = {}
def _get_form_field(self, field, value=None):
"Generate a Django-friendly field from Hardtree spec in DB"
form_field = None
if field.field_type == 'text':
form_field = forms.CharField(label=field.label, max_length=512,
widget=forms.TextInput(attrs={'size': '30'}))
elif field.field_type == 'textarea':
form_field = forms.CharField(label=field.label,
widget=forms.Textarea(attrs={'class': 'no-editor'}))
elif field.field_type == 'details':
form_field = forms.CharField(
label=field.label, widget=forms.Textarea())
elif field.field_type == 'email':
form_field = forms.EmailField(
label=field.label, widget=forms.TextInput(attrs={'size': '30'}))
elif field.field_type == 'url':
form_field = forms.URLField(
label=field.label, widget=forms.TextInput(attrs={'size': '50'}))
elif field.field_type == 'phone':
form_field = forms.CharField(label=field.label, max_length=256,
widget=forms.TextInput(attrs={'size': '30'}))
elif field.field_type == 'picture':
form_field = forms.ImageField(
label=field.label, widget=forms.FileInput)
elif field.field_type == 'date':
form_field = forms.DateTimeField(label=field.label)
form_field.widget.attrs.update({'class': 'datetimepicker'})
form_field.required = field.required
if value:
if isinstance(form_field, forms.FileField) and value.value:
form_field = forms.ChoiceField(
label=field.label, widget=forms.RadioSelect())
filename = full_filename = value.value
match = re.match('.*[a-z0-9]{32}__(?P<filename>.+)$', filename)
if match:
filename = match.group('filename')
form_field.choices = ((full_filename, _("Keep existing: ") + unicode(filename)),
('delete', _("Delete ")))
form_field.initial = full_filename
form_field.required = False
else:
form_field.initial = value.value
return form_field
def _get_free_field_name(self, field):
"Generate an available name for a field"
num = 0
field_name = unicode(field.name) + u"___" + unicode(num)
while field_name in self.fields:
num += 1
field_name = unicode(field.name) + u"___" + unicode(num)
return field_name
def _get_upload_name(self, filename):
"Returns an upload_to path to a new file"
import hashlib
import random
while True:
hasher = hashlib.md5()
hasher.update(str(random.random()))
filepath = u"identities/" + hasher.hexdigest() + u"__" + filename
fullpath = settings.MEDIA_ROOT + filepath
if not default_storage.exists(fullpath):
return filepath
def _handle_uploaded_file(self, field_name):
"Process an uploaded file"
try:
file = self.files[field_name]
filepath = self._get_upload_name(file.name)
except KeyError:
return ''
destination = open(settings.MEDIA_ROOT + filepath, 'wb+')
for chunk in file.chunks():
destination.write(chunk)
destination.close()
return settings.MEDIA_URL + filepath
def _image_resize(self, filepath):
"Resizes Image if it's over the maximum dimension"
filepath = filepath.replace(settings.MEDIA_URL, '')
filepath = settings.MEDIA_ROOT + filepath
try:
img = Image.open(filepath)
expected_size = getattr(
settings, 'HARDT
|
googleapis/python-securitycenter
|
samples/generated_samples/securitycenter_v1_generated_security_center_get_iam_policy_sync.py
|
Python
|
apache-2.0
| 1,489
| 0.000672
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for GetIamPolicy
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-securitycenter
# [START securitycenter_v1_generated_SecurityCenter_GetIamPolicy_sync]
from google.cloud import securitycenter_v1
def sample_get_iam_policy():
# Create a client
client = securitycenter_v1.SecurityCenterClient()
# Initialize request argument(s)
request = securitycenter_v1.GetIamPolicyRequest(
resource="resource_value",
)
# Make the request
response = client.get_iam_policy(request=request)
# Handle the response
print(response)
# [END securitycenter_v1_generated_SecurityCenter_GetIamPolicy_sync]
|
davyx8/Python
|
PRODICTIVE/tfidf.py
|
Python
|
gpl-3.0
| 2,184
| 0.005495
|
import nltk
import string
import operator
from collections import Counter
#
# def get_tokens():
# with open('/home/davyx8/Downloads/data sets/lebowski/fargo.txt', 'r') as shakes:
# text = shakes.read()
# lowers = text.lower()
# #remove the punctuation using the character deletion step of translate
# no_punctuation = lowers.translate(None, string.punctuation)
# tokens = nltk.word_tokenize(no_punctuation)
# return tokens
#
# tokens = get_tokens()
# count = Counter(tokens)
#
# stopwords = ['a', 'the', 'of', 'at', 'it','and','i','in','is','on','his','he','you','we','to','with','out','are','that']
#
# filtered = [w for w in tokens if not w in stopwords]
# count = Counter(filtered)
# print count.most_common(100)
import os
from sklearn.feature_extraction.text import TfidfVectorizer
from nltk.stem.porter import PorterStemmer
path = '/home/davyx8/Downloads/medical data'
token_dict = {}
stemmer = PorterStemmer()
def stem_tokens(tokens, stemmer):
stemmed = []
for item in tokens:
stemmed.append(stemmer.stem(item))
return stemmed
def tokenize(text):
tokens = nltk.word_tokenize(text)
stems = stem_tokens(tokens, stemmer)
return stems
import xml.etree.ElementTree as ET
for subdir, dirs, files in os.walk(path):
for file in files:
file_path = subdir + os.path.sep + file
shakes = open(file_path, 'r')
text = shakes.read()
lowers = text.lower()
no_punctuation = lowers.translate(None, string.punctuation)
token_dict[file] = no_punctuation
# this can take some time
tfidf = TfidfVectorizer(tokenizer=tokenize, stop_words='english')
tfs = tfidf.fit_transform(token_dict.values())
feature_names = tfidf.get_feature_names()
sortedx = {}
for filename in token_dict:
print filename
file = token_dict[filename]
sortedx = {}
response = tfidf.transform([file])
for col in response.nonzero()[1]:
sortedx[feature_names[col]] = response[0, col]
sortedx2 = sorted(sortedx.items(), key=operator.itemgetter(1))
f = open('medicalTFIDF/'+filename,"w")
for item in sortedx2:
f.write(str(item[0])+ '- ' +str(item[1])+'\n')
f.close()
|
abranches/backmonitor
|
backmonitor/tests/frame_tests.py
|
Python
|
apache-2.0
| 1,806
| 0.001661
|
import unittest
import random
from ..frame import Frame, decode_frame
from ..message import MessageType
from ..utilslib.strings import random_bytes
class FrameTestCase(unittest.TestCase):
MSG_TYPE = MessageType.HELLO
PAYLOAD = "Hello World!"
def frame_setup(self, msg_type, payload):
self.msg_type = msg_type
self.payload = payload
self.frame = Frame(self.msg_type, self.payload)
def setUp(self):
self.frame_setup(self.MSG_TYPE, self.PAYLOAD)
def tearDown(self):
self.msg_type = None
self.payload = None
self.frame = None
def test_eq_at_decode_after_encode(self):
consumed_bytes, decoded = decode_frame(self.frame.encode())
self.assertIsNotNone(decoded)
self.assertEqual(self.frame, decoded,
"!!! decode(encode(X)) != X !!!")
def test_raw_eq_at_decode_after_encode(self):
consumed_bytes, decoded = decode_frame(self.frame.encode())
self.assertIsNotNone(decoded)
self.assertEqual(decoded.msg_type, self.msg_type)
self.assertEqual(decoded.payload, self.payload)
def test_encoded_size_matches_expected_calcsize(self):
self.assertEqual(len(self.frame.encode()), self.frame.calcsize())
class RandomFrameTestCase(FrameTestCase):
MIN_PAYLOAD_SIZE = 1
MAX_PAYLOAD_SIZE = 512
def setUp(self):
msg_type = random.choice(MessageType.values())
payload_size = random.randrange(self.MIN_PAYLOAD_SIZE,
self.MAX_PAYLOAD_SIZE+1)
payload = random_bytes(payload_size)
self.frame_setup(msg_type, payload)
class EmptyPayloadFrameTestCase(RandomFrameTestCase):
def setUp(self):
self.frame_setup(random.choice(MessageType.values()), b"")
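
# A minimal round-trip sketch using the same Frame API the tests above
# exercise; it assumes decode_frame consumes the whole encoded buffer.
def _round_trip_sketch():
    frame = Frame(MessageType.HELLO, "Hello World!")
    consumed_bytes, decoded = decode_frame(frame.encode())
    assert decoded == frame
    assert consumed_bytes == frame.calcsize()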
|
fudanchii/archie
|
archie/handlers/restore.py
|
Python
|
mit
| 753
| 0.003984
|
import os
import tarfile
from contextlib import closing
from archie import helpers
def find_backup(cfg):
files = []
rcfiles = cfg.options('rcfiles')
for rc in rcfiles:
backup = helpers.get_backupfile(cfg, rc)
rcfile = helpers.get_rcfile(cfg, rc)
if os.path.lexists(backup) and tarfile.is_tarfile(backup):
files.append((backup, rcfile))
return files
def gunzip_and_restore(cfg, backupfiles):
for backup, rc in backupfiles:
if os.path.islink(rc):
os.unlink(rc)
with closing(tarfile.open(backup, 'r:gz')) as tar:
tar.extractall('/')
return backupfiles
def Restore(cfg):
backupfiles = find_backup(cfg)
return gunzip_and_restore(cfg, backupfiles)
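
# A minimal usage sketch for Restore() above. It assumes an INI-style
# config with an [rcfiles] section (find_backup reads cfg.options('rcfiles'));
# 'archie.ini' is a made-up path for illustration.
if __name__ == '__main__':
    try:
        import configparser                  # Python 3
    except ImportError:
        import ConfigParser as configparser  # Python 2
    cfg = configparser.ConfigParser()
    cfg.read('archie.ini')
    print(Restore(cfg))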
|
JackyChou/SGRS
|
SGRS/urls.py
|
Python
|
gpl-2.0
| 359
| 0.005571
|
from django.conf.urls import include, url
from django.contrib import admin
def i18n_javascript(request):
return admin.site.i18n_javascript(request)
urlpatterns = [
url(r'^$', 'GeneralReport.views.index'),
url(r'^sgrs/', include('GeneralReport.urls')),
url(r'^admin/jsi18n', i18n_javascript),
url(r'^admin/', include(admin.site.urls)),
]
|
OpenSoccerManager/opensoccermanager
|
uigtk/window.py
|
Python
|
gpl-3.0
| 3,411
| 0.002345
|
#!/usr/bin/env python3
# This file is part of OpenSoccerManager.
#
# OpenSoccerManager is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# OpenSoccerManager is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# OpenSoccerManager. If not, see <http://www.gnu.org/licenses/>.
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
from gi.repository import GdkPixbuf
import os
import data
import uigtk.filedialog
import uigtk.helpdialog
import uigtk.mainscreen
import uigtk.quitdialog
import uigtk.screen
import uigtk.welcome
class Window(Gtk.Window):
def __init__(self):
data.preferences.read_from_config()
iconpath = os.path.join("resources", "logo.svg")
self.logo = GdkPixbuf.Pixbuf.new_from_file(iconpath)
Gtk.Window.__init__(self)
self.set_title("OpenSoccerManager")
self.set_default_icon(self.logo)
self.connect("delete-event", self.on_quit_game)
self.connect("window-state-event", self.on_window_state_event)
if data.preferences.window_maximized:
self.maximize()
else:
self.move(*data.preferences.window_position)
self.set_default_size(*data.preferences.window_size)
self.accelgroup = Gtk.AccelGroup()
self.add_accel_group(self.accelgroup)
def on_window_state_event(self, *args):
'''
Handle move to original position on unmaximize event.
'''
if self.is_maximized():
self.move(*data.preferences.window_position)
def on_quit_game(self, *args):
'''
Quit game, displaying confirmation prompt if set to show.
'''
data.preferences.write_to_config()
if data.preferences.confirm_quit:
dialogQuit = uigtk.quitdialog.QuitDialog()
if dialogQuit.run() == Gtk.ResponseType.OK:
Gtk.main_quit()
else:
dialogQuit.destroy()
else:
if data.unsaved:
dialogUnsaved = uigtk.quitdialog.UnsavedDialog()
response = dialogUnsaved.run()
if response == Gtk.ResponseType.ACCEPT:
dialogUnsaved.destroy()
dialogSave = uigtk.filedialog.SaveDialog()
response = dialogSave.run()
if response == Gtk.ResponseType.OK:
Gtk.main_quit()
else:
dialogSave.destroy()
elif response == Gtk.ResponseType.REJECT:
Gtk.main_quit()
else:
dialogUnsaved.destroy()
else:
Gtk.main_quit()
return True
def run(self):
self.mainscreen = uigtk.mainscreen.MainScreen()
self.screen = uigtk.screen.Screen()
self.help_dialog = uigtk.helpdialog.HelpDialog()
self.welcome = uigtk.welcome.Welcome()
self.add(self.welcome)
self.show_all()
Gtk.main()
|
andrewbates09/FERGUS
|
fergus/__init__.py
|
Python
|
gpl-3.0
| 291
| 0.024055
|
'''
Library for doing fun things with computers.
'''
__author__ = 'Andrew M Bates'
__version__ = '0.001'
import io, os, sys
# the core imports go here
# this should go in the mods dir
try:
'''IF RASPBERRY PI & HAS A GPIO BOARD'''
import RPi.GPIO as RPi
except ImportError:
pass
|
ruaultadrien/canpy
|
canpy/defects_movie.py
|
Python
|
gpl-3.0
| 2,116
| 0.025083
|
# -*- coding: utf-8 -*-
"""
Created on Sun Feb 14 20:43:12 2016
@author: Adrien
"""
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection
import canpy.point_defects
def defects_movie(dl,t_start='standard',t_end='standard'):
'''
Produce a movie in mp4 format of the defects.
t_start is the time at which we want the movie to begin and t_end is the time
at which we want the movie to end.
If t_start and t_end are kept at the 'standard' value the movie will start
at 0ps and end at the last available snapshot. The user is free to change the
beginning and finishing time. If so these times have to be provided in ps.
'''
if t_start == 'standard':
snap_start=0
else:
snap_start = round(t_start/(dl.inter*dl.time_step))
if t_end == 'standard':
snap_end=dl.nb_snapshots-1
else:
snap_end = round(t_end/(dl.inter*dl.time_step))
i=snap_start
while i <= snap_end:
a = canpy.point_defects.point_defects(dl,i*dl.inter*dl.time_step)  # assumes the point_defects module exposes a point_defects() callable
fig = plt.figure()
ax = fig.gca(projection='3d')
def init():
ax.set_xlabel('x axis[Angström]')
ax.set_ylabel('y axis[Angström]')
ax.set_zlabel('z axis[Angström]')
ax.set_xlim3d(0,self.length)
ax.set_ylim3d(0,self.length)
ax.set_zlim3d(0,self.length)
ax.legend(frameon = True, fancybox = True, ncol = 1, fontsize = 'x-small', loc = 'lower right')
def animate(i):
ax.scatter(xi, yi, zi, label='Interstitials', c='r', marker='^')
ax.scatter(xv, yv, zv, label='Vacancies', c='b', marker='o')
ax.set_title('Interstitials and vacancies at '+str(self.time)+' ps')
# Animate
anim = animation.FuncAnimation(fig, animate, init_func=init,frames=360, interval=speed*20, blit=True)
# Save
anim.save('rot_frame_anim_'+str(self.time)+'ps.mp4', fps=30, extra_args=['-vcodec', 'libx264'])
i += 1  # advance to the next snapshot so the while loop terminates
|
m110/pastevim
|
manage.py
|
Python
|
gpl-2.0
| 251
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pastevim.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Ckrisirkc/Red-DiscordBot
|
cogs/customcom.py
|
Python
|
gpl-3.0
| 5,602
| 0.003213
|
import discord
from discord.ext import commands
from .utils.dataIO import fileIO
from .utils import checks
from __main__ import user_allowed, send_cmd_help
import os
class CustomCommands:
"""Custom commands."""
def __init__(self, bot):
self.bot = bot
self.c_commands = fileIO("data/customcom/commands.json", "load")
@commands.command(pass_context=True, no_pm=True)
@checks.mod_or_permissions(administrator=True)
async def addcom(self, ctx, command : str, *, text):
"""Adds a custom command
Example:
!addcom yourcommand Text you want
"""
server = ctx.message.server
command = command.lower()
if command in self.bot.commands.keys():
await self.bot.say("That command is already a standard command.")
return
if not server.id in self.c_commands:
self.c_commands[server.id] = {}
cmdlist = self.c_commands[server.id]
if command not in cmdlist:
cmdlist[command] = text
self.c_commands[server.id] = cmdlist
fileIO("data/customcom/commands.json", "save", self.c_commands)
await self.bot.say("Custom command successfully added.")
else:
await self.bot.say("This command already exists. Use editcom to edit it.")
@commands.command(pass_context=True, no_pm=True)
@checks.mod_or_permissions(administrator=True)
async def editcom(self, ctx, command : str, *, text):
"""Edits a custom command
Example:
!editcom yourcommand Text you want
"""
server = ctx.message.server
command = command.lower()
if server.id in self.c_commands:
cmdlist = self.c_commands[server.id]
if command in cmdlist:
cmdlist[command] = text
self.c_commands[server.id] = cmdlist
fileIO("data/customcom/commands.json", "save", self.c_commands)
await self.bot.say("Custom command successfully edited.")
else:
await self.bot.say("That command doesn't exist. Use addcom [command] [text]")
else:
await self.bot.say("There are no custom commands in this server. Use addcom [command] [text]")
@commands.command(pass_context=True, no_pm=True)
@checks.mod_or_permissions(administrator=True)
async def delcom(self, ctx, command : str):
"""Deletes a custom command
Example:
!delcom yourcommand"""
server = ctx.message.server
command = command.lower()
if server.id in self.c_commands:
cmdlist = self.c_commands[server.id]
if command in cmdlist:
cmdlist.pop(command, None)
self.c_commands[server.id] = cmdlist
fileIO("data/customcom/commands.json", "save", self.c_commands)
await self.bot.say("Custom command successfully deleted.")
else:
await self.bot.say("That command doesn't exist.")
else:
await self.bot.say("There are no custom commands in this server. Use addcom [command] [text]")
@commands.command(pass_context=True, no_pm=True)
async def customcommands(self, ctx):
"""Shows custom commands list"""
server = ctx.message.server
if server.id in self.c_commands:
cmdlist = self.c_commands[server.id]
if cmdlist:
i = 0
msg = ["```Custom commands:\n"]
for cmd in sorted([cmd for cmd in cmdlist.keys()]):
if len(msg[i]) + len(ctx.prefix) + len(cmd) + 5 > 2000:
msg[i] += "```"
i += 1
msg.append("``` {}{}\n".format(ctx.prefix, cmd))
else:
msg[i] += " {}{}\n".format(ctx.prefix, cmd)
msg[i] += "```"
for cmds in msg:
await self.bot.whisper(cmds)
else:
await self.bot.say("There are no custom commands in this server. Use addcom [command] [text]")
else:
await self.bot.say("There are no custom commands in this server. Use addcom [command] [text]")
async def checkCC(self, message):
if message.author.id == self.bot.user.id or len(message.content) < 2 or message.channel.is_private:
return
if not user_allowed(message):
return
msg = message.content
server = message.server
prefix = self.get_prefix(msg)
if prefix and server.id in self.c_commands.keys():
cmdlist = self.c_commands[server.id]
cmd = msg[len(prefix):]
if cmd in cmdlist.keys():
await self.bot.send_message(message.channel, cmdlist[cmd])
elif cmd.lower() in cmdlist.keys():
await self.bot.send_message(message.channel, cmdlist[cmd.lower()])
def get_prefix(self, msg):
for p in self.bot.command_prefix:
if msg.startswith(p):
return p
return False
def check_folders():
if not os.path.exists("data/customcom"):
print("Creating data/customcom folder...")
os.makedirs("data/customcom")
def check_files():
f = "data/customcom/commands.json"
if not fileIO(f, "check"):
print("Creating empty commands.json...")
fileIO(f, "save", {})
def setup(bot):
check_folders()
check_files()
n = CustomCommands(bot)
bot.add_listener(n.checkCC, "on_message")
bot.add_cog(n)
|
rdio/sentry
|
tests/sentry/nodestore/django/backend/tests.py
|
Python
|
bsd-3-clause
| 2,382
| 0.001679
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from sentry.nodestore.django.models import Node
from sentry.nodestore.django.backend import DjangoNodeStorage
from sentry.testutils import TestCase
class DjangoNodeStorageTest(TestCase):
def setUp(self):
self.ns = DjangoNodeStorage()
def test_get(self):
node = Node.objects.create(
id='d2502ebbd7df41ceba8d3275595cac33',
data={
'foo': 'bar',
}
)
result = self.ns.get(node.id)
assert result == node.data
def test_get_multi(self):
nodes = [
Node.objects.create(
id='d2502ebbd7df41ceba8d3275595cac33',
data={
'foo': 'bar',
}
),
Node.objects.create(
id='5394aa025b8e401ca6bc3ddee3130edc',
data={
'foo': 'baz',
}
),
]
result = self.ns.get_multi([
'd2502ebbd7df41ceba8d3275595cac33', '5394aa025b8e401ca6bc3ddee3130edc'
])
assert result == dict((n.id, n.data) for n in nodes)
def test_set(self):
self.ns.set('d2502ebbd7df41ceba8d3275595cac33', {
'foo': 'bar',
})
assert Node.objects.get(id='d2502ebbd7df41ceba8d3275595cac33').data == {
'foo': 'bar',
}
def test_set_multi(self):
self.ns.set_multi({
'd2502ebbd7df41ceba8d3275595cac33': {
'foo': 'bar',
},
'5394aa025b8e401ca6bc3ddee3130edc': {
'foo': 'baz',
},
})
assert Node.objects.get(id='d2502ebbd7df41ceba8d3275595cac33').data == {
'foo': 'bar',
}
assert Node.objects.get(id='5394aa025b8e401ca6bc3ddee3130edc').data == {
'foo': 'baz',
}
def test_create(self):
node_id = self.ns.create({
'foo': 'bar',
})
assert Node.objects.get(id=node_id).data == {
'foo': 'bar',
}
def test_delete(self):
node = Node.objects.create(
id='d2502ebbd7df41ceba8d3275595cac33',
data={
'foo': 'bar',
}
)
self.ns.delete(node.id)
assert not Node.objects.filter(id=node.id).exists()
|
hezuoguang/Yeps-Server
|
Yeps/Yep/tools.py
|
Python
|
mit
| 943
| 0.009967
|
#coding:utf-8
import hashlib, datetime, pdb
from Yep import system_tags, system_schools
MD5PERFIX = "YEPS_WEIMI"
def md5_pwd(pwd):
return hashlib.new("md5", MD5PERFIX + pwd).hexdigest()
# Check whether the tags are valid
def check_user_tag(tags):
tag_list = system_tags.tag_list
for tag in tags:
if tag not in tag_list:
    return False
return True
# Check whether the school is valid
def check_school(school):
school_list = system_schools.school_list
if school in school_list:
return True
return False
# Compute the sha1 digest of the concatenated arguments
def sha1_with_args(*args):
    sha1 = hashlib.sha1()
    for arg in args:
        sha1.update(arg)
    return sha1.hexdigest()
# Compute the access_token
def init_access_token(user_sha1, pwd):
return hashlib.new("md5", MD5PERFIX + user_sha1 + pwd).hexdigest()
# datetime to "XXXX-XX-XX XX:XX:XX"
def date_time_to_str(t):
return t.strftime("%Y-%m-%d %H:%M:%S")
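
# A minimal usage sketch for the helpers above; the password and arguments
# are illustrative only (these hashlib calls expect str, i.e. Python 2).
if __name__ == '__main__':
    pwd = md5_pwd('s3cret')
    user_sha1 = sha1_with_args('alice', '2014-09-01')
    print init_access_token(user_sha1, pwd)
    print date_time_to_str(datetime.datetime.now())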
|
ojengwa/grr
|
lib/flows/general/registry.py
|
Python
|
apache-2.0
| 6,980
| 0.008739
|
#!/usr/bin/env python
"""Gather information from the registry on windows."""
import re
import stat
from grr.lib import aff4
from grr.lib import artifact
from grr.lib import artifact_lib
from grr.lib import flow
from grr.lib import rdfvalue
from grr.lib import utils
from grr.proto import flows_pb2
class RegistryFinderCondition(rdfvalue.RDFProtoStruct):
protobuf = flows_pb2.RegistryFinderCondition
class RegistryFinderArgs(rdfvalue.RDFProtoStruct):
protobuf = flows_pb2.RegistryFinderArgs
class RegistryFinder(flow.GRRFlow):
"""This flow looks for registry items matching given criteria."""
friendly_name = "Registry Finder"
category = "/Registry/"
args_type = RegistryFinderArgs
behaviours = flow.GRRFlow.behaviours + "BASIC"
def ConditionsToFileFinderConditions(self, conditions):
ff_condition_type_cls = rdfvalue.FileFinderCondition.Type
result = []
for c in conditions:
if c.condition_type == RegistryFinderCondition.Type.MODIFICATION_TIME:
result.append(rdfvalue.FileFinderCondition(
condition_type=ff_condition_type_cls.MODIFICATION_TIME,
modification_time=c.modification_time))
elif c.condition_type == RegistryFinderCondition.Type.VALUE_LITERAL_MATCH:
result.append(rdfvalue.FileFinderCondition(
condition_type=ff_condition_type_cls.CONTENTS_LITERAL_MATCH,
contents_literal_match=c.value_literal_match))
elif c.condition_type == RegistryFinderCondition.Type.VALUE_REGEX_MATCH:
result.append(rdfvalue.FileFinderCondition(
condition_type=ff_condition_type_cls.CONTENTS_REGEX_MATCH,
contents_regex_match=c.value_regex_match))
elif c.condition_type == RegistryFinderCondition.Type.SIZE:
result.append(rdfvalue.FileFinderCondition(
condition_type=ff_condition_type_cls.SIZE,
size=c.size))
else:
raise ValueError("Unknown condition type: %s", c.condition_type)
return result
@classmethod
def GetDefaultArgs(cls, token=None):
_ = token
return cls.args_type(keys_paths=["HKEY_USERS/%%users.sid%%/Software/"
"Microsoft/Windows/CurrentVersion/Run/*"])
@flow.StateHandler(next_state="Done")
def Start(self):
self.CallFlow("FileFinder",
paths=self.args.keys_paths,
pathtype=rdfvalue.PathSpec.PathType.REGISTRY,
conditions=self.ConditionsToFileFinderConditions(
self.args.conditions),
action=rdfvalue.FileFinderAction(
action_type=rdfvalue.FileFinderAction.Action.STAT),
next_state="Done")
@flow.StateHandler()
def Done(self, responses):
if not responses.success:
raise flow.FlowError("Registry search failed %s" % responses.status)
for response in responses:
self.SendReply(response)
# TODO(user): replace this flow with chained artifacts once the capability
# exists.
class CollectRunKeyBinaries(flow.GRRFlow):
"""Collect the binaries used by Run and RunOnce keys on the system.
We use the RunKeys artifact to get RunKey command strings for all users and
System. This flow guesses file paths from the strings, expands any
windows system environment variables, and attempts to retrieve the files.
"""
category = "/Registry/"
behaviours = flow.GRRFlow.behaviours + "BASIC"
@flow.StateHandler(next_state="ParseRunKeys")
def Start(self):
"""Get runkeys via the ArtifactCollectorFlow."""
self.CallFlow("ArtifactCollectorFlow", artifact_list=["WindowsRunKeys"],
use_tsk=True, store_results_in_aff4=False,
next_state="ParseRunKeys")
def _IsExecutableExtension(self, path):
return path.endswith(("exe", "com", "bat", "dll", "msi", "sys", "scr",
"pif"))
@flow.StateHandler(next_state="Done")
def ParseRunKeys(self, responses):
"""Get filenames from the RunKeys and download the files."""
filenames = []
client = aff4.FACTORY.Open(self.client_id, mode="r", token=self.token)
kb = artifact.GetArtifactKnowledgeBase(client)
for response in responses:
runkey = response.registry_data.string
path_guesses = utils.GuessWindowsFileNameFromString(runkey)
path_guesses = filter(self._IsExecutableExtension, path_guesses)
if not path_guesses:
self.Log("Couldn't guess path for %s", runkey)
for path in path_guesses:
full_path = artifact_lib.ExpandWindowsEnvironmentVariables(path, kb)
filenames.append(rdfvalue.PathSpec(
path=full_path, pathtype=rdfvalue.PathSpec.PathType.TSK))
if filenames:
self.CallFlow("MultiGetFile", pathspecs=filenames,
next_state="Done")
@flow.StateHandler()
def Done(self, responses):
for response in responses:
self.SendReply(response)
class GetMRU(flow.GRRFlow):
"""Collect a list of the Most Recently Used files for all users."""
category = "/Registry/"
behaviours = flow.GRRFlow.behaviours + "BASIC"
@flow.StateHandler(next_state="StoreMRUs")
def Start(self):
"""Call the find flow to get the MRU data for each user."""
fd = aff4.FACTORY.Open(self.client_id, mode="r", token=self.token)
for user in fd.Get(fd.Schema.USER):
mru_path = ("HKEY_USERS/%s/Software/Microsoft/Windows"
"/CurrentVersion/Explorer/ComDlg32"
"/OpenSavePidlMRU" % user.sid)
findspec = rdfvalue.FindSpec(max_depth=2, path_regex=".")
findspec.iterator.number = 1000
findspec.pathspec.path = mru_path
findspec.pathspec.pathtype = rdfvalue.PathSpec.PathType.REGISTRY
self.CallFlow("FindFiles", findspec=findspec, output=None,
next_state="StoreMRUs",
request_data=dict(username=user.username))
@flow.StateHandler()
def StoreMRUs(self, responses):
"""Store the MRU data for each user in a special structure."""
for response in responses:
urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(
response.pathspec, self.client_id)
if stat.S_ISDIR(response.st_mode):
obj_type = "VFSDirectory"
else:
obj_type = "VFSFile"
fd = aff4.FACTORY.Create(urn, obj_type, mode="w", token=self.token)
fd.Set(fd.Schema.STAT(response))
fd.Close(sync=False)
username = responses.request_data["username"]
m = re.search("/([^/]+)/\\d+$", unicode(urn))
if m:
extension = m.group(1)
fd = aff4.FACTORY.Create(
rdfvalue.ClientURN(self.client_id)
.Add("analysis/MRU/Explorer")
.Add(extension)
.Add(username),
"MRUCollection", token=self.token,
mode="rw")
# TODO(user): Implement the actual parsing of the MRU.
mrus = fd.Get(fd.Schema.LAST_USED_FOLDER)
mrus.Append(filename="Foo")
fd.Set(mrus)
fd.Close()
|
exleym/simpaq
|
solvers/regressions.py
|
Python
|
mit
| 390
| 0
|
import statsmodels.api as sm
import numpy as np
class LSM(object):
def __init__(self, lambdas):
self.lambdas = lambdas
def calc(self, y, x):
X = np.zeros((len(x), len(self.lambdas)))
for i in range(0, len(self.lambdas)):
X[:, i] = self.lambdas[i](x)
ols = sm.OLS(y, sm.add_constant(X, prepend=False)).fit()
return ols.params
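
# A minimal usage sketch for LSM above: regress y on the basis functions
# x and x**2 plus a constant (the data is synthetic, for illustration).
if __name__ == '__main__':
    x = np.linspace(0.0, 1.0, 50)
    y = 1.0 + 2.0 * x + 3.0 * x ** 2
    lsm = LSM([lambda v: v, lambda v: v ** 2])
    print(lsm.calc(y, x))  # ~ [2., 3., 1.]; the constant is appended last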
|
sarbi127/inviwo
|
data/scripts/loadtransferfunction.py
|
Python
|
bsd-2-clause
| 150
| 0.026667
|
# Inviwo Python script
import inviwo
inviwo.loadTransferFunction("SimpleRaycaster.transferFunction", inviwo.getDataPath() + "transferfunction.itf")
|
smallyear/linuxLearn
|
salt/salt/pillar/git_pillar.py
|
Python
|
apache-2.0
| 16,836
| 0
|
# -*- coding: utf-8 -*-
'''
Use a git repository as a Pillar source
---------------------------------------
.. note::
This external pillar has been rewritten for the :doc:`2015.8.0
</topics/releases/2015.8.0>` release. The old method of configuring this
external pillar will be maintained for a couple releases, allowing time for
configurations to be updated to reflect the new usage.
This external pillar allows for a Pillar top file and Pillar SLS files to be
sourced from a git repository.
However, since git_pillar does not have an equivalent to the
:conf_master:`pillar_roots` parameter, configuration is slightly different. The
Pillar top file must still contain the relevant environment, like so:
.. code-block:: yaml
base:
'*':
- foo
The branch/tag which maps to that environment must then be specified along with
the repo's URL. Configuration details can be found below.
.. _git-pillar-pre-2015-8-0:
Configuring git_pillar for Salt releases before 2015.8.0
========================================================
For Salt releases earlier than :doc:`2015.8.0 </topics/releases/2015.8.0>`,
GitPython is the only supported provider for git_pillar. Individual
repositories can be configured under the :conf_master:`ext_pillar`
configuration parameter like so:
.. code-block:: yaml
ext_pillar:
- git: master https://gitserver/git-pillar.git root=subdirectory
The repository is specified in the format ``<branch> <repo_url>``, with an
optional ``root`` parameter (added in the :doc:`2014.7.0
</topics/releases/2014.7.0>` release) which allows the pillar SLS files to be
served up from a subdirectory (similar to :conf_master:`gitfs_root` in gitfs).
To use more than one branch from the same repo, multiple lines must be
specified under :conf_master:`ext_pillar`:
.. code-block:: yaml
ext_pillar:
- git: master https://gitserver/git-pillar.git
- git: dev https://gitserver/git-pillar.git
To remap a specific branch to a specific Pillar environment, use the format
``<branch>:<env>``:
.. code-block:: yaml
ext_pillar:
- git: develop:dev https://gitserver/git-pillar.git
- git: master:prod https://gitserver/git-pillar.git
In this case, the ``develop`` branch would need its own ``top.sls`` with a
``dev`` section in it, like this:
.. code-block:: yaml
dev:
'*':
- bar
The ``master`` branch would need its own ``top.sls`` with a ``prod`` section in
it:
.. code-block:: yaml
prod:
'*':
- bar
If ``__env__`` is specified as the branch name, then git_pillar will use the
branch specified by :conf_master:`gitfs_base`:
.. code-block:: yaml
ext_pillar:
- git: __env__ https://gitserver/git-pillar.git root=pillar
The corresponding Pillar top file would look like this:
.. code-block:: yaml
{{env}}:
'*':
- bar
.. _git-pillar-2015-8-0-and-later:
Configuring git_pillar for Salt releases 2015.8.0 and later
===========================================================
.. note::
In version 2015.8.0, the method of configuring git external pillars has
changed, and now more closely resembles that of the :ref:`Git Fileserver
Backend <tutorial-gitfs>`. If Salt detects the old configuration schema, it
will use the pre-2015.8.0 code to compile the external pillar. A warning
will also be logged.
Beginning with Salt version 2015.8.0, pygit2_ is now supported in addition to
GitPython_ (Dulwich_ will not be supported for the foreseeable future). The
requirements for GitPython_ and pygit2_ are the same as for gitfs, as described
:ref:`here <gitfs-dependencies>`.
.. important::
git_pillar has its own set of global configuration parameters. While it may
seem intuitive to use the global gitfs configuration parameters
(:conf_master:`gitfs_base`, etc.) to manage git_pillar, this will not work.
    The main reason for this is that the different components which use
    Salt's git backend code do not all function identically. For instance,
    in git_pillar it is necessary to specify which branch/tag is to be used
    for each git_pillar remote. This is the reverse of gitfs, where
    branches/tags make up your environments.
See :ref:`here <git_pillar-config-opts>` for documentation on the
git_pillar configuration options and their usage.
Here is an example git_pillar configuration:
.. code-block:: yaml
    ext_pillar:
      - git:
        # Use 'prod' instead of the branch name 'production' as the environment
        - production https://gitserver/git-pillar.git:
          - env: prod
        # Use 'dev' instead of the branch name 'develop' as the environment
        - develop https://gitserver/git-pillar.git:
          - env: dev
        # No per-remote config parameters (and no trailing colon), 'qa' will
        # be used as the environment
        - qa https://gitserver/git-pillar.git
        # SSH key authentication
        - master git@other-git-server:pillardata-ssh.git:
          # Pillar SLS files will be read from the 'pillar' subdirectory in
          # this repository
          - root: pillar
          - privkey: /path/to/key
          - pubkey: /path/to/key.pub
          - passphrase: CorrectHorseBatteryStaple
        # HTTPS authentication
        - master https://other-git-server/pillardata-https.git:
          - user: git
          - password: CorrectHorseBatteryStaple
The main difference between this and the old way of configuring git_pillar is
that multiple remotes can be configured under one ``git`` section under
:conf_master:`ext_pillar`. More than one ``git`` section can be used, but it is
not necessary. Remotes will be evaluated sequentially.
Per-remote configuration parameters are supported (similar to :ref:`gitfs
<gitfs-per-remote-config>`), and global versions of the git_pillar
configuration parameters can also be set.
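For illustration, here is a hedged sketch (repo URLs and subdirectory names
are placeholders, and the global form ``git_pillar_root`` is assumed from the
per-remote ``root`` parameter described above) showing a global value set once
and overridden for a single remote:
.. code-block:: yaml
    # Global default: serve pillar SLS from the 'pillar' subdirectory
    git_pillar_root: pillar
    ext_pillar:
      - git:
        # inherits the global git_pillar_root
        - master https://gitserver/pillar-one.git
        # per-remote override of the global value
        - master https://gitserver/pillar-two.git:
          - root: pillar/stage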
With the addition of pygit2_ support, git_pillar can now interact with
authenticated remotes. Authentication works just like in gitfs (as outlined in
the :ref:`Git Fileserver Backend Walkthrough <gitfs-authentication>`), only
with the global authentication parameter names prefixed with ``git_pillar``
instead of ``gitfs`` (e.g. :conf_master:`git_pillar_pubkey`,
:conf_master:`git_pillar_privkey`, :conf_master:`git_pillar_passphrase`, etc.).
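As a brief sketch using the parameter names cited above (the key paths and
passphrase are placeholders), an SSH key pair shared by all git_pillar
remotes could be configured globally in the master config:
.. code-block:: yaml
    git_pillar_pubkey: /root/.ssh/id_rsa.pub
    git_pillar_privkey: /root/.ssh/id_rsa
    git_pillar_passphrase: CorrectHorseBatteryStaple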
.. _GitPython: https://github.com/gitpython-developers/GitPython
.. _pygit2: https://github.com/libgit2/pygit2
.. _Dulwich: https://www.samba.org/~jelmer/dulwich/
'''
from __future__ import absolute_import
# Import python libs
import copy
import logging
import hashlib
import os
from distutils.version import LooseVersion
# Import salt libs
import salt.utils.gitfs
import salt.utils.dictupdate
from salt.exceptions import FileserverConfigError
from salt.pillar import Pillar
# Import third party libs
import salt.ext.six as six
# pylint: disable=import-error
try:
import git
HAS_GITPYTHON = True
except ImportError:
HAS_GITPYTHON = False
# pylint: enable=import-error
PER_REMOTE_OVERRIDES = ('env', 'root', 'ssl_verify')
# Set up logging
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'git'
def __virtual__():
'''
Only load if gitpython is available
'''
git_ext_pillars = [x for x in __opts__['ext_pillar'] if 'git' in x]
if not git_ext_pillars:
# No git external pillars were configured
return False
for ext_pillar in git_ext_pillars:
if isinstance(ext_pillar['git'], six.string_types):
# Verification of legacy git pillar configuration
if not HAS_GITPYTHON:
log.error(
'Git-based ext_pillar is enabled in configuration but '
'could not be loaded, is GitPython installed?'
)
return False
            if LooseVersion(git.__version__) <= LooseVersion('0.3.0'):
                # GitPython 0.3.0 or newer is required for git_pillar
                return False
return __virtualname__
else:
# Verification of new git pillar configuration
try:
salt.utils.gitfs.GitPillar(__opts__)
# Initialization of the GitPillar object did not fail, so we
                # know we have a valid configuration, and that a valid
                # provider was detected.
                return __virtualname__
            except FileserverConfigError:
                pass
    return False
SEL-Columbia/commcare-hq | corehq/apps/cloudcare/touchforms_api.py | Python | bsd-3-clause | 4,169 | 0.003118
from casexml.apps.case.models import CommCareCase
from dimagi.utils.decorators.memoized import memoized
from touchforms.formplayer.api import post_data
import json
from django.conf import settings
from corehq.apps.cloudcare import CLOUDCARE_DEVICE_ID
from django.core.urlresolvers import reverse
from corehq.apps.users.models import CommCareUser
DELEGATION_STUB_CASE_TYPE = "cc_delegation_stub"
class SessionDataHelper(object):
def __init__(self, domain, couch_user, case_id=None, delegation=False, offline=False):
self.domain = domain
self.couch_user = couch_user
        self.case_id = case_id
self._delegation = delegation
self.offline = offline
@property
@memoized
def case(self):
return CommCareCase.get(self.case_id)
@property
def case_type(self):
return self.case.type
@property
def _case_parent_id(self):
"""Only makes sense if the case is a delegation s
|
tub"""
return self.case.get_index_map().get('parent')['case_id']
@property
def delegation(self):
if self._delegation and self.case_id:
assert self.case_type == DELEGATION_STUB_CASE_TYPE
return self._delegation
def get_session_data(self, device_id=CLOUDCARE_DEVICE_ID):
"""
Get session data used by touchforms.
"""
# NOTE: Better to use isinstance(self.couch_user, CommCareUser) here rather than
# self.couch_user.is_commcare_user() since this function is reused by smsforms where
# the recipient can be a case.
session_data = {
'device_id': device_id,
'app_version': '2.0',
'username': self.couch_user.raw_username,
'user_id': self.couch_user.get_id,
'domain': self.domain,
'user_data': self.couch_user.user_data if isinstance(self.couch_user, CommCareUser) else {},
}
if self.case_id:
if self.delegation:
session_data["delegation_id"] = self.case_id
session_data["case_id"] = self._case_parent_id
else:
session_data["case_id"] = self.case_id
return session_data
def filter_cases(self, xpath, additional_filters=None, auth=None, extra_instances=None):
"""
Filter a list of cases by an xpath expression + additional filters
"""
session_data = self.get_session_data()
session_data["additional_filters"] = additional_filters or {}
session_data['extra_instances'] = extra_instances or []
data = {
"action": "touchcare-filter-cases",
"filter_expr": xpath,
"session_data": session_data,
}
response = post_data(
json.dumps(data),
url=settings.XFORMS_PLAYER_URL,
content_type="text/json", auth=auth
)
return json.loads(response)
def get_full_context(self, root_extras=None, session_extras=None):
"""
Get the entire touchforms context for a given user/app/module/form/case
"""
root_extras = root_extras or {}
session_extras = session_extras or {}
session_data = self.get_session_data()
# always tell touchforms to include footprinted cases in its case db
session_data["additional_filters"] = {"footprint": True}
session_data.update(session_extras)
online_url = reverse("xform_player_proxy")
offline_url = 'http://localhost:%d' % settings.OFFLINE_TOUCHFORMS_PORT
ret = {
"session_data": session_data,
"xform_url": offline_url if self.offline else online_url,
}
ret.update(root_extras)
return ret
def get_session_data(domain, couch_user, case_id=None, device_id=CLOUDCARE_DEVICE_ID, delegation=False):
return SessionDataHelper(domain, couch_user, case_id, delegation=delegation).get_session_data(device_id)
def filter_cases(domain, couch_user, xpath, additional_filters=None, auth=None, delegation=False):
return SessionDataHelper(domain, couch_user, delegation=delegation).filter_cases(xpath, additional_filters, auth)
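# Hedged usage sketch (the domain string, couch_user object, and case id are
# hypothetical, not part of this module):
#
#   helper = SessionDataHelper("demo-domain", couch_user, case_id="abc123")
#   session = helper.get_session_data()   # dict handed to touchforms
#   context = helper.get_full_context(
#       root_extras={"app_id": "my-app"},  # merged into the returned dict
#   )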
cspode/SU2 | SU2_PY/SU2/util/filter_adjoint.py | Python | lgpl-2.1 | 16,651 | 0.025404
#!/usr/bin/env python
## \file filter_adjoint.py
# \brief Applies various filters to the adjoint surface sensitivities of an airfoil
# \author T. Lukaczyk, F. Palacios
# \version 5.0.0 "Raven"
#
# SU2 Lead Developers: Dr. Francisco Palacios (Francisco.D.Palacios@boeing.com).
# Dr. Thomas D. Economon (economon@stanford.edu).
#
# SU2 Developers: Prof. Juan J. Alonso's group at Stanford University.
# Prof. Piero Colonna's group at Delft University of Technology.
# Prof. Nicolas R. Gauger's group at Kaiserslautern University of Technology.
# Prof. Alberto Guardone's group at Polytechnic University of Milan.
# Prof. Rafael Palacios' group at Imperial College London.
# Prof. Edwin van der Weide's group at the University of Twente.
# Prof. Vincent Terrapon's group at the University of Liege.
#
# Copyright (C) 2012-2017 SU2, the open-source CFD code.
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
#  License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
import os, math
from numpy import pi
from optparse import OptionParser
import numpy as np
import libSU2, libSU2_mesh
# plotting with matplotlib
try:
import pylab as plt
pylab_imported = True
except ImportError:
pylab_imported = False
# -------------------------------------------------------------------
# MAIN
# -------------------------------------------------------------------
def main():
# Command Line Options
parser=OptionParser()
parser.add_option( "-f", "--file", dest="filename",
help="read config from FILE", metavar="FILE" )
parser.add_option( "-t", "--type", dest="filter_type", default='LAPLACE',
help="apply filter TYPE", metavar="TYPE" )
parser.add_option( "-m", "--marker", dest="marker_name", default='airfoil',
help="use marker named TAG", metavar="TAG" )
parser.add_option( "-c", "--chord", dest="chord_length", default=1.0,
help="reference CHORD length", metavar="CHORD" )
(options, args)=parser.parse_args()
options.chord_length = float( options.chord_length )
# run filter
process_surface_adjoint( options.filename ,
options.filter_type ,
options.marker_name ,
options.chord_length )
#: def main()
# -------------------------------------------------------------------
# PROCESS SURFACE ADJOINT
# -------------------------------------------------------------------
def process_surface_adjoint( config_filename ,
filter_type='LAPLACE' ,
marker_name='airfoil' ,
chord_length=1.0 ):
print('')
print('-------------------------------------------------------------------------')
print('| SU2 Suite (Process Surface Adjoint) |')
print('-------------------------------------------------------------------------')
print('')
# some other defaults
c_clip = 0.01 # percent chord to truncate
fft_copy = 5 # number of times to copy the fft signal
smth_len = 0.05 # percent chord smoothing window length
lapl_len = 1e-4 # laplace smoothing parameter
# read config file
config_data = libSU2.Get_ConfigParams(config_filename)
surface_filename = config_data['SURFACE_ADJ_FILENAME'] + '.csv'
    print(surface_filename)
mesh_filename = config_data['MESH_FILENAME']
gradient = config_data['OBJECTIVE_FUNCTION']
print('Config filename = %s' % config_filename)
print('Surface filename = %s' % surface_filename)
print('Filter Type = %s' % filter_type)
# read adjoint data
adj_data = np.genfromtxt( surface_filename ,
dtype = float ,
delimiter = ',' ,
skip_header = 1 )
# read mesh data
mesh_data = libSU2_mesh.Read_Mesh(mesh_filename)
    # process adjoint data
P = map(int, adj_data[:,0] )
X = adj_data[:,6].copy()
Y = adj_data[:,7].copy()
Sens = adj_data[:,1].copy()
PsiRho = adj_data[:,2].copy()
    I = range(0,len(P)) # important - for unsorting during write
# store in dict by point index
adj_data_dict = dict( zip( P , zip(X,Y,Sens,PsiRho,I) ) )
# sort airfoil points
iP_sorted,_ = libSU2_mesh.sort_Airfoil(mesh_data,marker_name)
assert(len(iP_sorted) == len(P))
# rebuild airfoil loop
i = 0
for this_P in iP_sorted:
# the adjoint data entry
this_adj_data = adj_data_dict[this_P]
# re-sort
P[i] = this_P
X[i] = this_adj_data[0]
Y[i] = this_adj_data[1]
Sens[i] = this_adj_data[2]
PsiRho[i] = this_adj_data[3]
I[i] = this_adj_data[4]
# next
i = i+1
#: for each point
# calculate arc length
S = np.sqrt( np.diff(X)**2 + np.diff(Y)**2 ) / chord_length
S = np.cumsum( np.hstack([ 0 , S ]) )
    # tail truncating, by arc length
I_clip_lo = S < S[0] + c_clip
I_clip_hi = S > S[-1] - c_clip
S_clip = S.copy()
Sens_clip = Sens.copy()
Sens_clip[I_clip_hi] = Sens_clip[I_clip_hi][0]
Sens_clip[I_clip_lo] = Sens_clip[I_clip_lo][-1]
# some edge length statistics
dS_clip = np.diff(S_clip)
min_dS = np.min ( dS_clip )
mean_dS = np.mean( dS_clip )
max_dS = np.max ( dS_clip )
#print 'min_dS = %.4e ; mean_dS = %.4e ; max_dS = %.4e' % ( min_dS , mean_dS , max_dS )
# --------------------------------------------
# APPLY FILTER
if filter_type == 'FOURIER':
Freq_notch = [ 1/max_dS, np.inf ] # the notch frequencies
Sens_filter,Frequency,Power = fft_filter( S_clip,Sens_clip, Freq_notch, fft_copy )
#Sens_filter = smooth(S_clip,Sens_filter, 0.03,'blackman') # post smoothing
elif filter_type == 'WINDOW':
Sens_filter = window( S_clip, Sens_clip, smth_len, 'blackman' )
elif filter_type == 'LAPLACE':
Sens_filter = laplace( S_clip, Sens_clip, lapl_len )
elif filter_type == 'SHARPEN':
Sens_smooth = smooth( S_clip, Sens_clip , smth_len/5, 'blackman' ) # pre smoothing
Sens_smoother = smooth( S_clip, Sens_smooth, smth_len , 'blackman' )
Sens_filter = Sens_smooth + (Sens_smooth - Sens_smoother) # sharpener
else:
        raise Exception('unknown filter type')
# --------------------------------------------
# PLOTTING
if pylab_imported:
# start plot
fig = plt.figure(gradient)
plt.clf()
#if not fig.axes: # for comparing two filter calls
#plt.subplot(1,1,1)
#ax = fig.axes[0]
#if len(ax.lines) == 4:
#ax.lines.pop(0)
#ax.lines.pop(0)
# SENSITIVITY
plt.plot(S ,Sens ,color='b') # original
plt.plot(S_clip,Sens_filter,color='r') # filtered
plt.xlim(-0.1,2.1)
plt.ylim(-5,5)
plt.xlabel('Arc Length')
plt.ylabel('Surface Sensitivity')
#if len(ax.lines) == 4:
#seq = [2, 2, 7, 2]
#ax.lines[0].set_dashes(seq)
#ax.lines[1].set_dashes(seq)
plot_
amidvidy/mongo-orchestration | mongo_orchestration/sharded_clusters.py | Python | apache-2.0 | 18,384 | 0.000925
#!/usr/bin/python
# coding=utf-8
# Copyright 2012-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import tempfile
from uuid import uuid4
from mongo_orchestration.common import (
BaseModel, DEFAULT_SUBJECT, DEFAULT_CLIENT_CERT)
from mongo_orchestration.container import Container
from mongo_orchestration.errors import ShardedClusterError
from mongo_orchestration.servers import Servers
from mongo_orchestration.replica_sets import ReplicaSets
from mongo_orchestration.singleton import Singleton
from pymongo import MongoClient
logger = logging.getLogger(__name__)
class ShardedCluster(BaseModel):
"""class represents Sharding configuration"""
def __init__(self, params):
"""init configuration acording params"""
self.id = params.get('id', None) or str(uuid4())
self.admin_added = False
self.login = params.get('login', '')
self.password = params.get('password', '')
self.auth_key = params.get('auth_key', None)
self.auth_source = params.get('authSource', 'admin')
self._version = params.get('version')
self._configsvrs = []
self._routers = []
self._shards = {}
self.tags = {}
self.sslParams = params.get('sslParams', {})
self.kwargs = {}
self.restart_required = self.login or self.auth_key
self.x509_extra_user = False
if self.sslParams:
self.kwargs['ssl'] = True
self.kwargs['ssl_certfile'] = DEFAULT_CLIENT_CERT
configsvr_configs = params.get('configsvrs', [{}])
self.__init_configsvr(configsvr_configs)
for r in params.get('routers', [{}]):
self.router_add(r)
for cfg in params.get('shards', []):
shard_params = cfg.get('shardParams', {})
shard_tags = shard_params.pop('tags', None)
info = self.member_add(cfg.get('id', None), shard_params)
if shard_tags:
self.tags[info['id']] = shard_tags
if self.tags:
for sh_id in self.tags:
logger.debug('Add tags %r to %s' % (self.tags[sh_id], sh_id))
self.connection().config.shards.update(
{'_id': sh_id},
{'$addToSet': {'$each': self.tags[sh_id]}})
if self.login:
# Do we need to add an extra x509 user?
def only_x509(config):
set_params = config.get('setParameter', {})
auth_mechs = set_params.get('authenticationMechanisms', '')
auth_mechs = auth_mechs.split(',')
if len(auth_mechs) == 1 and auth_mechs[0] == 'MONGODB-X509':
return True
return False
any_only_x509 = lambda l: any(map(only_x509, l))
shard_configs = [s.get('shardParams', {}).get('procParams', {})
for s in params.get('shards', [])]
rs_shard_configs = [
m.get('procParams', {})
for s in params.get('shards', [])
for m in s.get('shardParams', {}).get('members', [])
]
router_configs = params.get('routers', [])
self.x509_extra_user = (any_only_x509(configsvr_configs) or
any_only_x509(shard_configs) or
any_only_x509(rs_shard_configs) or
any_only_x509(router_configs))
self._add_users(self.connection()[self.auth_source])
# Secondary user given from request.
secondary_login = {
'name': self.login,
'roles': self._user_roles
}
if self.password:
secondary_login['password'] = self.password
# Do the same for the shards.
for shard_id, config in zip(self._shards, shard_configs):
shard = self._shards[shard_id]
instance_id = shard['_id']
if shard.get('isServer'):
client = Servers()._storage[instance_id].connection
elif shard.get('isReplicaSet'):
client = ReplicaSets()._storage[instance_id].connection()
db = client[self.auth_source]
if self.x509_extra_user:
db.add_user(DEFAULT_SUBJECT, roles=self._user_roles)
if self.login:
db.add_user(**secondary_login)
if self.restart_required:
# Do we need to add clusterAuthMode back?
cluster_auth_mode = None
for cfg in shard_configs:
cam = cfg.get('clusterAuthMode')
if cam:
cluster_auth_mode = cam
break
def restart_with_auth(server_or_rs):
server_or_rs.x509_extra_user = self.x509_extra_user
server_or_rs.auth_source = self.auth_source
server_or_rs.ssl_params = self.sslParams
server_or_rs.login = self.login
server_or_rs.password = self.password
server_or_rs.auth_key = self.auth_key
def add_auth(cfg):
if self.auth_key:
cfg['keyFile'] = self.key_file
# Add clusterAuthMode back in.
if cluster_auth_mode:
                    cfg['clusterAuthMode'] = cluster_auth_mode
return cfg
server_or_rs.restart(config_callback=add_auth)
for server_id in self._configsvrs:
server = Servers()._storage[server_id]
restart_with_auth(server)
for server_id in self._routers:
server = Servers()._storage[server_id]
restart_with_auth(server)
for shard_id in self._shards:
shard = self._shards[shard_id]
instance_id = shard['_id']
klass = ReplicaSets if shard.get('isReplicaSet') else Servers
instance = klass()._storage[instance_id]
restart_with_auth(instance)
self.restart_required = False
def __init_configsvr(self, params):
"""create and start config servers"""
self._configsvrs = []
for cfg in params:
# Remove flags that turn on auth.
cfg = self._strip_auth(cfg)
server_id = cfg.pop('server_id', None)
cfg.update({'configsvr': True})
self._configsvrs.append(Servers().create(
'mongod', cfg, sslParams=self.sslParams, autostart=True,
version=self._version, server_id=server_id))
def __len__(self):
return len(self._shards)
@property
def configsvrs(self):
"""return list of config servers"""
return [{'id': h_id, 'hostname': Servers().hostname(h_id)} for h_id in self._configsvrs]
@property
def routers(self):
"""return list of routers"""
return [{'id': h_id, 'hostname': Servers().hostname(h_id)} for h_id in self._routers]
@property
def members(self):
"""return list of members"""
# return [{'id': shard, 'hostname': Servers().hostname(info['_id'])} for shard, info in self._shards.items()]
return [self.member_info(item) for item in self._shards]
@property
def router(self):
"""return first available router"""
for server in self._routers:
info = Servers().info(server)
if info['procInfo'].get('alive', False):
return {'id': server, 'hostname': Servers().hostname(server)}
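# Hedged sketch of the 'params' dict consumed by ShardedCluster.__init__
# (all values are placeholders; the keys mirror the lookups above):
#
#   params = {
#       'id': 'demo-cluster',
#       'login': 'admin', 'password': 'secret', 'authSource': 'admin',
#       'configsvrs': [{}],          # one config server with defaults
#       'routers': [{}, {}],         # two mongos routers
#       'shards': [{'id': 'sh01', 'shardParams': {'tags': ['fast']}}],
#   }
#   cluster = ShardedCluster(params)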
codingforentrepreneurs/digital-marketplace | src/billing/models.py | Python | mit | 589 | 0.027165
from django.conf import settings
from django.db import models
# Create your models here.
from products.models import Product
class Transaction(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
    product = models.ForeignKey(Product)
price = models.DecimalField(max_digits=100, decimal_places=2, default=9.99, null=True,)
timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
success = models.BooleanField(default=True)
    # transaction_id_payment_system = Braintree / Stripe
# payment_method
# last_four
def __unicode__(self):
return "%s" %(self.id)
vlegoff/tsunami | src/secondaires/calendrier/evenement.py | Python | bsd-3-clause | 4,012 | 0.006263
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 AYDIN Ali-Kémal
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Ce fichier contient la classe Evenement, détaillée plus bas."""
import datetime
from abstraits.obase import BaseObj
from primaires.format.description import Description
from primaires.format.date import get_date
from .commentaire import Commentaire
class Evenement(BaseObj):
"""Classe définissant un évènement pour le calendrier.
Un évènement est défini par un ID, qui est unique, un titre,
une description courte, ainsi qu'une description longue, son
créateur et ses responsables et enfin les commentaires lui
correspondant.
"""
enregistrer = True
id_actuel = 1
def __init__(self, createur):
"""Constructeur d'un évènement"""
BaseObj.__init__(self)
self.id = type(self).id_actuel
type(self).id_actuel += 1
self.date = datetime.date.today()
self.responsables = [createur]
self.titre = "Sans Titre"
self.description = Description(parent = self)
self.commentaires = []
def __getnewargs__(self):
return (None, )
def __repr__(self):
return "<évènement {}>".format(self.id)
def __str__(self):
return str(self.id)
@property
def str_detail(self):
"""Renvoie une présentation détaillée de l'évènement."""
info_generale = \
"Id : {id}\n" \
"Titre : {titre}\n" \
"Date : Pour {date}\n" \
"Description :\n {desc}\n" \
"Responsables : {resp}\n" \
nom_responsables = ", ".join([resp.nom for resp in self.responsables])
info_generale = info_generale.format(id=self.id,
titre=self.titre, date=self.str_date, desc=self.description,
resp=nom_responsables)
commentaires = ""
if self.commentaires:
commentaires = "Commentaires :\n"
for comm in self.commentaires:
commentaires += "\n{}".format(comm)
info_generale += "\n\n" + commentaires
return info_generale
@property
def str_date(self):
"""Retourne la date en format français."""
return get_date(self.date.timetuple())
def ajouter_commentaire(self, personnage, commentaire):
"""Ajoute un commentaire"""
self.commentaires.append(Commentaire(self, personnage, commentaire))
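# Hedged usage sketch ('createur' and 'personnage' stand in for character
# objects provided by the surrounding MUD engine):
#
#   evt = Evenement(createur)
#   evt.titre = "Tournoi d'été"
#   evt.ajouter_commentaire(personnage, "Je serai présent.")
#   print(evt.str_detail)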
bt3gl/Numerical-Methods-for-Physics | homework5_elliptic_PDES/main.py | Python | apache-2.0 | 2,009 | 0.019413
"""
Any vector field U can be decomposed into a divergence-free term
Ud and the gradient of a scalar phi:
    U = Ud + grad(phi)
This program recovers a divergence-free field on a 2-d grid.
Marina von Steinkirch, based on M. Zingale's codes, spring 2013
"""
from part_a import doPartA
from part_b import doPartB, error
from part_c import doPartC
import os
import numpy
def main():
"""
Do you want to make plots???
"""
DO_PLOTS = 1
""" create folder for plots """
try:
os.makedirs("plots/")
except OSError:
if not os.path.isdir("plots/"):
raise
numpy.seterr(divide='ignore')
"
|
"" [0,1]x[0,1]"""
xmin = 0.0
xmax = 1.0
ymin = 0.0
ymax = 1.0
""" setting the number of cells """
nx = [32,64]
ny = [32,64]
ng = 1
for i in range(len(nx)):
print "Calculating for [%d,%d] cells..." %(nx[i], ny[i])
""" set the limits, grid limits """
ilo = ng
ihix = ng + nx[i] - 1
ihiy = ng + ny[i] - 1
""" coordinates of centers """
dx = (xmax - xmin)/nx[i]
dy = (ymax - ymin)/ny[i]
"""
a) Constructing the vector field
"""
Ustar, Ustar_final, phi_true, Ud = doPartA(nx[i], ny[i], ng, dx, dy, DO_PLOTS)
"""
b) Solving discrete Poisson equation using Gauss-Seidel relaxation
"""
phi_num = doPartB(Ustar, phi_true, nx[i], ny[i], ng, dx, dy, DO_PLOTS)
"""
c) Recovering the original and calculating the error
"""
doPartC(Ustar_final, phi_num, Ud, nx[i], ny[i], xmin, xmax, ymin, ymax, DO_PLOTS)
phi_true_final = phi_true[ilo:ihiy+1]
print "The error is %.8f.\n" %(error(ilo, ihix, ihiy, dx, phi_num - phi_true_final))
print "\nDone!"
if __name__ == "__main__":
main()
Connexions/cnx-user | cnxuser/_sqlalchemy.py | Python | agpl-3.0 | 1,342 | 0
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2013, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
import uuid
from sqlalchemy.types import TypeDecorator, CHAR
from sqlalchemy.dialects.postgresql import UUID
# Derived from:
# http://docs.sqlalchemy.org/en/latest/core/types.html#backend-agnostic-guid-type
class GUID(TypeDecorator):
"""Plat
|
form-independent GUID type.
Uses Postgresql's UUID type, otherwise uses
CHAR(32), storing as stringified hex values.
"""
impl = CHAR
def load_dialect_impl(self, dialect):
if dialect.name == 'postgresql':
return dialect.type_descriptor(UUID())
else:
return dialect.type_descriptor(CHAR(32))
def process_bind_param(self, value, dialect):
if value is None:
return value
elif dialect.name == 'postgresql':
return str(value)
else:
if not isinstance(value, uuid.UUID):
return "%.32x" % uuid.UUID(value)
else:
# hexstring
return "%.32x" % value
def process_result_value(self, value, dialect):
if value is None:
return value
else:
return uuid.UUID(value)
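# Hedged usage sketch (table and column names are placeholders), following
# the SQLAlchemy backend-agnostic GUID recipe referenced above:
#
#   from sqlalchemy import Column
#   from sqlalchemy.ext.declarative import declarative_base
#   Base = declarative_base()
#   class User(Base):
#       __tablename__ = 'users'
#       id = Column(GUID(), primary_key=True, default=uuid.uuid4)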
fapable/pygameproject2 | Input.py | Python | apache-2.0 | 2,779 | 0.031306
import pygame
import sys
pygame.init()
class Input:
def __init__(self):
self.shift = False
self.white = (255,255,255)
self.red = (255,10,10)
self.black = (0,0,0)
def get_key(self):
while True:
event = pygame.event.poll()
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
elif event.type == pygame.KEYDOWN:
                #print(event.key)
if event.key in [pygame.K_LSHIFT, pygame.K_RSHIFT]:
self.shift = True
continue
if self.shift:
#return ascii code
                    if event.key >= 97 and event.key <= 122:
return event.key - 32
elif event.key == 50:
return 64 #return @
elif event.key == 32:
return 32 #return space even if shifted
elif not self.shift:
return event.key
elif event.type == pygame.KEYUP:
if event.key in [pygame.K_LSHIFT, pygame.K_RSHIFT]:
self.shift = False
else:
pass
def display_box(self, screen, message):
fontobject = pygame.font.Font(None, 40)
pygame.draw.rect(screen, self.white,
((screen.get_width()) - 0, (screen.get_height() / 2) - 0, 0,0)) #if border add 1 for transp
if len(message) != 0:
screen.blit(fontobject.render(message, 1 , self.black),
((screen.get_width() / 4) - 100, (screen.get_height() / 4) - 10))
pygame.display.update()
def ask(self, screen, question):
current_string = []
self.display_box(screen, question + ': ' + ''.join(current_string))
while True:
inkey = self.get_key()
if inkey == pygame.K_BACKSPACE:
current_string = current_string[0:-1]
elif inkey == pygame.K_RETURN:
break
else:
current_string.append(chr(inkey))
self.display_box(screen, question + ': ' + ''.join(current_string))
return ''.join(current_string)
def name(player: int):
input_box = Input()
white = (255,255,255)
screen = pygame.display.set_mode((1700,1000))
x = 0
y = 0
if player == 1:
redSquare = pygame.image.load('player1.png')
elif player == 2:
redSquare = pygame.image.load('player2.png')
elif player == 3:
redSquare = pygame.image.load('player3.png')
elif player == 4:
redSquare = pygame.image.load('player4.png')
elif player == 5:
redSquare = pygame.image.load('player5.png')
elif player == 6:
redSquare = pygame.image.load('player6.png')
screen.blit(redSquare, (x, y))
pygame.display.flip()
var = input_box.ask(screen, 'Name')
print(var + ' was entered')
return var
tapple/nsize-web | nsize/body_detector/serializers.py | Python | agpl-3.0 | 1,173 | 0.00341
from rest_framework import serializers
class AttachmentSerializer(serializers.Serializer):
pass
"""
//*
// As of 2017-06-13, up to 38 attachments can be worn per agent
// max of 328 bytes per attach point (if name and description are maxed out).
// x38 = 12464 bytes
// without descriptions: 188 * 38 = 7144 bytes top
// I measured a real outfit with 38 attachments; its json encoding was 6877 bytes
string avatarAttachmentsJson(key id) {
list ans = [];
list attachments = llGetAttachedList(id);
debug((string)llGetListLength(attachments) + " attachments");
integer i;
    for (i = llGetListLength(attachments) - 1; i >= 0; i--) {
key attachment = llList2Key(attachments, i);
list details = llGetObjectDetails(attachment, [OBJECT_NAME, OBJECT_DESC, OBJECT_CREATOR, OBJECT_ATTACHED_POINT]);
ans = [llList2Json(JSON_OBJECT, [
"id", attachment, // 6+2+36 = 44 bytes
"name", llList2String(details, 0), // 6+4+64 = 74 bytes
"desc", llList2String(details, 1), // 6+4+128 = 140 bytes
"creator", llList2String(details, 2), // 6+7+36 = 49 bytes
"attachPoint", llList2String(details, 3) // 6+11+2 = 19 bytes
])] + ans;
}
return llList2Json(JSON_ARRAY, ans);
}
"""
jupyter/dockerspawner | tests/test_deprecations.py | Python | bsd-3-clause | 972 | 0.001029
import logging
import pytest
from traitlets.config import Config
from dockerspawner import DockerSpawner
def test_deprecated_config(caplog):
cfg = Config()
cfg.DockerSpawner.image_whitelist = {"1.0": "jupyterhub/singleuser:1.0"}
log = logging.getLogger("testlog")
spawner = DockerSpawner(config=cfg, log=log)
assert caplog.record_tuples == [
(
log.name,
logging.WARNING,
'DockerSpawner.image_whitelist is deprecated in DockerSpawner 12.0, use '
'DockerSpawner.allowed_images instead',
)
]
    assert spawner.allowed_images == {"1.0": "jupyterhub/singleuser:1.0"}
async def test_deprecated_methods():
cfg = Config()
cfg.DockerSpawner.image_whitelist = {"1.0": "jupyterhub/singleuser:1.0"}
spawner = DockerSpawner(config=cfg)
assert await spawner.check_allowed("1.0")
with pytest.deprecated_call():
assert await spawner.check_image_whitelist("1.0")
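# The aliasing tested above is commonly implemented with a traitlets
# observer that forwards the deprecated name to the new one; a hedged
# sketch (not DockerSpawner's exact code):
#
#   from traitlets import observe
#
#   class MySpawner(DockerSpawner):
#       @observe('image_whitelist')
#       def _forward_deprecated(self, change):
#           self.log.warning("image_whitelist is deprecated; "
#                            "use allowed_images")
#           self.allowed_images = change.new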
mancoast/CPythonPyc_test | cpython/223_test_minidom.py | Python | gpl-3.0 | 19,230 | 0.005668
# test for xml.dom.minidom
from xml.dom.minidom import parse, Node, Document, parseString
from xml.dom import HierarchyRequestErr
import xml.parsers.expat
import os
import sys
import traceback
from test_support import verbose
if __name__ == "__mai
|
n__":
base = sys.argv[0]
else:
base = __file__
tstfile = os.path.join(os.path.dirname(base), "test"+os.extsep+"xml")
del base
def confirm(test, testname = "Test"):
if not test:
print "Failed " + testname
raise Exception
Node._debug = 1
def testParseFromFile():
from StringIO import StringIO
dom = parse(StringIO(open(tstfile).read()))
dom.unlink()
confirm(isinstance(dom,Document))
def testGetElementsByTagName():
dom = parse(tstfile)
confirm(dom.getElementsByTagName("LI") == \
dom.documentElement.getElementsByTagName("LI"))
dom.unlink()
def testInsertBefore():
dom = parseString("<doc><foo/></doc>")
root = dom.documentElement
elem = root.childNodes[0]
nelem = dom.createElement("element")
root.insertBefore(nelem, elem)
confirm(len(root.childNodes) == 2
and root.childNodes.length == 2
and root.childNodes[0] is nelem
and root.childNodes.item(0) is nelem
and root.childNodes[1] is elem
and root.childNodes.item(1) is elem
and root.firstChild is nelem
and root.lastChild is elem
and root.toxml() == "<doc><element/><foo/></doc>"
, "testInsertBefore -- node properly placed in tree")
nelem = dom.createElement("element")
root.insertBefore(nelem, None)
confirm(len(root.childNodes) == 3
and root.childNodes.length == 3
and root.childNodes[1] is elem
and root.childNodes.item(1) is elem
and root.childNodes[2] is nelem
and root.childNodes.item(2) is nelem
and root.lastChild is nelem
and nelem.previousSibling is elem
and root.toxml() == "<doc><element/><foo/><element/></doc>"
, "testInsertBefore -- node properly placed in tree")
nelem2 = dom.createElement("bar")
root.insertBefore(nelem2, nelem)
confirm(len(root.childNodes) == 4
and root.childNodes.length == 4
and root.childNodes[2] is nelem2
and root.childNodes.item(2) is nelem2
and root.childNodes[3] is nelem
and root.childNodes.item(3) is nelem
and nelem2.nextSibling is nelem
and nelem.previousSibling is nelem2
and root.toxml() == "<doc><element/><foo/><bar/><element/></doc>"
, "testInsertBefore -- node properly placed in tree")
dom.unlink()
def _create_fragment_test_nodes():
dom = parseString("<doc/>")
orig = dom.createTextNode("original")
c1 = dom.createTextNode("foo")
c2 = dom.createTextNode("bar")
c3 = dom.createTextNode("bat")
dom.documentElement.appendChild(orig)
frag = dom.createDocumentFragment()
frag.appendChild(c1)
frag.appendChild(c2)
frag.appendChild(c3)
return dom, orig, c1, c2, c3, frag
def testInsertBeforeFragment():
dom, orig, c1, c2, c3, frag = _create_fragment_test_nodes()
dom.documentElement.insertBefore(frag, None)
confirm(tuple(dom.documentElement.childNodes) == (orig, c1, c2, c3),
"insertBefore(<fragment>, None)")
frag.unlink()
dom.unlink()
#
dom, orig, c1, c2, c3, frag = _create_fragment_test_nodes()
dom.documentElement.insertBefore(frag, orig)
confirm(tuple(dom.documentElement.childNodes) == (c1, c2, c3, orig),
"insertBefore(<fragment>, orig)")
frag.unlink()
dom.unlink()
def testAppendChild():
dom = parse(tstfile)
dom.documentElement.appendChild(dom.createComment(u"Hello"))
confirm(dom.documentElement.childNodes[-1].nodeName == "#comment")
confirm(dom.documentElement.childNodes[-1].data == "Hello")
dom.unlink()
def testAppendChildFragment():
dom, orig, c1, c2, c3, frag = _create_fragment_test_nodes()
dom.documentElement.appendChild(frag)
confirm(tuple(dom.documentElement.childNodes) == (orig, c1, c2, c3),
"appendChild(<fragment>)")
frag.unlink()
dom.unlink()
def testReplaceChildFragment():
dom, orig, c1, c2, c3, frag = _create_fragment_test_nodes()
dom.documentElement.replaceChild(frag, orig)
orig.unlink()
confirm(tuple(dom.documentElement.childNodes) == (c1, c2, c3),
"replaceChild(<fragment>)")
frag.unlink()
dom.unlink()
def testLegalChildren():
dom = Document()
elem = dom.createElement('element')
text = dom.createTextNode('text')
try: dom.appendChild(text)
except HierarchyRequestErr: pass
else:
print "dom.appendChild didn't raise HierarchyRequestErr"
dom.appendChild(elem)
try: dom.insertBefore(text, elem)
except HierarchyRequestErr: pass
else:
print "dom.appendChild didn't raise HierarchyRequestErr"
try: dom.replaceChild(text, elem)
except HierarchyRequestErr: pass
else:
print "dom.appendChild didn't raise HierarchyRequestErr"
nodemap = elem.attributes
try: nodemap.setNamedItem(text)
except HierarchyRequestErr: pass
else:
print "NamedNodeMap.setNamedItem didn't raise HierarchyRequestErr"
try: nodemap.setNamedItemNS(text)
except HierarchyRequestErr: pass
else:
print "NamedNodeMap.setNamedItemNS didn't raise HierarchyRequestErr"
elem.appendChild(text)
dom.unlink()
def testNamedNodeMapSetItem():
dom = Document()
elem = dom.createElement('element')
attrs = elem.attributes
attrs["foo"] = "bar"
a = attrs.item(0)
confirm(a.ownerDocument is dom,
"NamedNodeMap.__setitem__() sets ownerDocument")
confirm(a.ownerElement is elem,
"NamedNodeMap.__setitem__() sets ownerElement")
confirm(a.value == "bar",
"NamedNodeMap.__setitem__() sets value")
confirm(a.nodeValue == "bar",
"NamedNodeMap.__setitem__() sets nodeValue")
elem.unlink()
dom.unlink()
def testNonZero():
dom = parse(tstfile)
confirm(dom)# should not be zero
dom.appendChild(dom.createComment("foo"))
confirm(not dom.childNodes[-1].childNodes)
dom.unlink()
def testUnlink():
dom = parse(tstfile)
dom.unlink()
def testElement():
dom = Document()
dom.appendChild(dom.createElement("abc"))
confirm(dom.documentElement)
dom.unlink()
def testAAA():
dom = parseString("<abc/>")
el = dom.documentElement
el.setAttribute("spam", "jam2")
confirm(el.toxml() == '<abc spam="jam2"/>', "testAAA")
a = el.getAttributeNode("spam")
confirm(a.ownerDocument is dom,
"setAttribute() sets ownerDocument")
confirm(a.ownerElement is dom.documentElement,
"setAttribute() sets ownerElement")
dom.unlink()
def testAAB():
dom = parseString("<abc/>")
el = dom.documentElement
el.setAttribute("spam", "jam")
el.setAttribute("spam", "jam2")
confirm(el.toxml() == '<abc spam="jam2"/>', "testAAB")
dom.unlink()
def testAddAttr():
dom = Document()
child = dom.appendChild(dom.createElement("abc"))
child.setAttribute("def", "ghi")
confirm(child.getAttribute("def") == "ghi")
confirm(child.attributes["def"].value == "ghi")
child.setAttribute("jkl", "mno")
confirm(child.getAttribute("jkl") == "mno")
confirm(child.attributes["jkl"].value == "mno")
confirm(len(child.attributes) == 2)
child.setAttribute("def", "newval")
confirm(child.getAttribute("def") == "newval")
confirm(child.attributes["def"].value == "newval")
confirm(len(child.attributes) == 2)
dom.unlink()
def testDeleteAttr():
dom = Document()
child = dom.appendChild(dom.createElement("abc"))
confirm(len(child.attributes) == 0)
child.setAttribute("def", "ghi")
confirm(len(child.attributes) == 1)
del child.attributes["def"]
confirm(len(child.attributes) == 0)
dom.unlink()
def testRemoveAttr():
dom = Document()
child = dom.appendChild(dom.createElement("abc"))
child.setAttribute("de
lcoandrade/DsgTools | gui/CustomWidgets/OrderedPropertyWidgets/orderedTableWidget.py | Python | gpl-2.0 | 24,412 | 0.001352
# -*- coding: utf-8 -*-
"""
/***************************************************************************
DsgTools
A QGIS plugin
Brazilian Army Cartographic Production Tools
-------------------
begin : 2019-09-03
git sha : $Format:%H$
copyright : (C) 2019 by João P. Esperidião - Cartographic Engineer @ Brazilian Army
        email                : esperidiao.joao@eb.mil.br
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import os, json
from datetime import datetime
from qgis.PyQt import uic
from qgis.utils import iface
from qgis.PyQt.QtCore import Qt, pyqtSlot, pyqtSignal
from qgis.PyQt.QtWidgets import (QWidget,
QFileDialog,
QHeaderView,
QMessageBox,
QTableWidgetItem,
QAbstractItemView)
FORM_CLASS, _ = uic.loadUiType(
os.path.join(os.path.dirname(__file__), 'orderedTableWidget.ui')
)
class OrderedTableWidget(QWidget, FORM_CLASS):
rowAdded = pyqtSignal(int)
rowRemoved = pyqtSignal(int)
# rowModified = pyqtSignal(int)
# ordering modes
ORDER_MODE_COUNT = 2
ASC_ORDER, DESC_ORDER = range(ORDER_MODE_COUNT)
def __init__(self, parent=None, headerMap=None, showButtons=False,
fileType=None, extension=None):
"""
Class constructor.
:param headerMap: (dict) a map from each header to be shown and type of
cell content (e.g. widget or item).
:param parent: (QtWidgets.*) any widget parent to current instance.
:param showButtons: (bool) whether buttons are visible.
:param fileType: (str) ex/import file type extension name (e.g. JSON
file).
        :param extension: (str) ex/import file type extension (e.g. .json).
"""
super(OrderedTableWidget, self).__init__(parent)
self.parent = parent
self.setupUi(self)
self.fileType = fileType or "JSON file"
self.extension = extension or ".json"
self.showSaveLoadButtons(showButtons)
self.setHeaders(headerMap or {})
self.setHeaderDoubleClickBehaviour()
self.tableWidget.setSelectionMode(QAbstractItemView.ExtendedSelection)
def showSaveLoadButtons(self, showButtons=False):
"""
Sets save and load buttons visibility.
:param showButtons: (bool) whether buttons are visible.
"""
getattr(self.savePushButton, "show" if showButtons else "hide")()
getattr(self.loadPushButton, "show" if showButtons else "hide")()
def setHeaders(self, headerMap):
"""
Sets headers to table and prepare each row for their contents.
"""
#######################################################################
# 'headers' attribute is a map that describes each column on table. #
# it has a mandatory set of attributes and some are optional (depends #
# on the cell contents type). It is composed as: #
# { #
# col (int) : { #
# "header" : "Header for current column as exposed on table", #
# "type" : "item" or "widget", #
# "editable" or "widget" : bool or callable object to a Widget#
# "getter" : method for value retrieval or None, if not given #
# "setter" : method for value definition or None, if not given#
# } #
# } #
#######################################################################
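        # Hedged example of a valid headerMap following the spec above
        # (labels and callables are placeholders):
        #   {
        #       0: {"header": "Name", "type": "item", "editable": True,
        #           "getter": None, "setter": None},
        #       1: {"header": "Choice", "type": "widget",
        #           "widget": comboBoxFactory,
        #           "getter": readComboBox, "setter": fillComboBox},
        #   }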
self.clear()
self.headers = headerMap
self.tableWidget.setColumnCount(len(self.headers))
self.tableWidget.setHorizontalHeaderLabels([
p["header"] for p in self.headers.values()
])
def replicateColumnValue(self, col):
"""
        Replicates the value from the first cell of a column to the rest of
        the rows in that column.
:param col: (int) column to have its first value replicated to the
other rows.
"""
prop = self.headers[col]
if "editable" in prop and not prop["editable"]:
            # ignores non-editable columns
return
for row in range(self.rowCount()):
if row == 0:
value = self.getValue(row, col)
else:
self.setValue(row, col, value)
def orderColumn(self, col):
"""
        Orders rows based on the values filled in a column.
:param col: (int) column to be ordered.
"""
if not hasattr(self, "currentRowOrder"):
self.currentRowOrder = dict()
if col not in self.currentRowOrder:
self.currentRowOrder[col] = self.ASC_ORDER
else:
# get next mode
self.currentRowOrder[col] = (self.currentRowOrder[col] + 1) % \
self.ORDER_MODE_COUNT
contents = []
for row in range(self.rowCount()):
contents.append(self.row(row))
self.clear()
rev = self.currentRowOrder[col] == self.DESC_ORDER
for content in sorted(contents, key = lambda i: i[col], reverse=rev):
self.addRow(content)
def setHeaderDoubleClickBehaviour(self, mode=None, cols=None):
"""
Connects header double click signal to the selected callback.
:param mode: (str) pre-set callback mode (e.g. what will be applied to
each column).
:param cols: (list-of-int) list of columns to which callback behaviour
is applied.
"""
self.unsetHeaderDoubleClickBehaviour()
self.headerDoubleClicked = {
"replicate" : self.replicateColumnValue,
"order" : self.orderColumn,
"none" : lambda col : None
}[mode or "none"]
self.horizontalHeader().sectionDoubleClicked.connect(
self.headerDoubleClicked
)
def unsetHeaderDoubleClickBehaviour(self):
"""
Disconnects header double click signal to the selected callback.
:return: (bool) whether behaviour was disconnected.
"""
try:
self.horizontalHeader().sectionDoubleClicked.disconnect(
self.headerDoubleClicked
)
return True
except:
return False
def clear(self):
"""
Resets table to initial state.
"""
for row in range(self.rowCount()):
self.tableWidget.removeRow(row)
self.tableWidget.setRowCount(0)
def getValue(self, row, column):
"""
Gets the value from a table cell. It uses column definitions from
headers attribute.
:param row: (int) target cell's row.
:param column: (int) target cell's column.
:return: (*) cell's contents. This might be any of widget's readable
inputs (int, float, str, dict, etc) - Depends on defined input
widget.
"""
if self.headers[column]["type"] == "item":
retu
joshcai/utdmathclub | math-club/settings.py | Python | mit | 5,912 | 0.002368
# Django settings for math-club project.
import dj_database_url
import os
from os import environ
from urlparse import urlparse
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
if 'DATABASE_URL' in environ:
DEBUG = False
DATABASES = {
'default': dj_database_url.config(default=os.getenv('DATABASE_URL'))
}
else:
import secrets
DEBUG = True
DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.postgresql_psycopg2',
# 'NAME': 'math-club',
# 'USER': 'django_login',
# 'PASSWORD': secrets.db_password,
# 'HOST': 'localhost',
# 'PORT':'',
# }
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'math_club',
'USER': 'root',
'PASSWORD': secrets.db_password,
'HOST': '',
'PORT':'',
}
}
TEMPLATE_DEBUG = DEBUG
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['*']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(BASE_DIR, '../blog')
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(PROJECT_DIR,'static/'),
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '9kc6*9+nr=8phoink1rpkn(*2d484@6k!dak&@gge#n!xp46f1'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'math-club.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'math-club.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'south',
'gunicorn',
'blog',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
zsjohny/jumpserver | apps/users/signals_handler.py | Python | gpl-2.0 | 2,429 | 0.001249
# -*- coding: utf-8 -*-
#
from django.dispatch import receiver
from django.db.models.signals import m2m_changed
from django_auth_ldap.backend import populate_user
from django.conf import settings
from django_cas_ng.signals import cas_user_authenticated
from jms_oidc_rp.signals import openid_create_or_update_user
from common.utils import get_logger
from .signals import post_user_create
from .models import User
logger = get_logger(__file__)
@receiver(post_user_create)
def on_user_create(sender, user=None, **kwargs):
logger.debug("Receive user `{}` create signal".format(user.name))
from .utils import send_user_created_mail
logger.info(" - Sending welcome mail ...".format(user.name))
if user.can_send_created_mail():
send_user_created_mail(user)
@receiver(m2m_changed, sender=User.groups.through)
def on_user_groups_change(sender, instance=None, action='', **kwargs):
"""
    Refresh nodes when asset nodes change
"""
if action.startswith('post'):
logger.debug("User group member change signal recv: {}".format(instance))
from perms.utils import AssetPermissionUtil
AssetPermissionUtil.expire_all_user_tree_cache()
@receiver(cas_user_authenticated)
def on_cas_user_authenticated(sender, user, created, **kwargs):
if created:
user.source = user.SOURCE_CAS
user.save()
@receiver(populate_user)
def on_ldap_create_user(sender, user, ldap_user, **kwargs):
if user and user.username not in ['admin']:
exists = User.objects.filter(username=user.username).exists()
if not exists:
            user.source = user.SOURCE_LDAP
            user.save()
@receiver(openid_create_or_update_user)
def on_openid_create_or_update_user(sender, request, user, created, name, username, email, **kwargs):
if created:
logger.debug(
"Receive OpenID user created signal: {}, "
"Set user source is: {}".format(user, User.SOURCE_OPENID)
)
user.source = User.SOURCE_OPENID
user.save()
elif not created and settings.AUTH_OPENID_ALWAYS_UPDATE_USER:
logger.debug(
"Receive OpenID user updated signal: {}, "
"Update user info: {}"
"".format(user, "name: {}|username: {}|email: {}".format(name, username, email))
)
user.name = name
user.username = username
user.email = email
user.save()
kevinrue/RNAfastqDeconvolute | src/__init__.py | Python | gpl-2.0 | 160 | 0
__author__ = 'David Magee, Carolina Correia, and Kevin Rue-Albrecht'
__copyright__ = "Copyright 2014, GPLv2"
from . import RNAseqIO
from . import SeqDataTypes
OCA/purchase-workflow | purchase_order_approval_block/__init__.py | Python | agpl-3.0 | 113 | 0
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from . import models
from . import wizard
plotly/python-api | packages/python/plotly/plotly/validators/bar/insidetextfont/_sizesrc.py | Python | mit | 463 | 0
import _plotly_utils.basevalidators
class SizesrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="sizesrc", parent_name="bar.insidetextfont", **kwargs
):
super(SizesrcValidator, self).__init__(
            plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)
FederatedAI/FATE | python/fate_test/fate_test/_client.py | Python | apache-2.0 | 3,346 | 0.002989
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sshtunnel
from fate_test._flow_client import FLOWClient
from fate_test._io import LOGGER
from fate_test._parser import Config
class Clients(object):
def __init__(self, config: Config):
self._flow_clients = {}
self._tunnel_id_to_flow_clients = {}
self._role_str_to_service_id = {}
self._tunnel_id_to_tunnel = config.tunnel_id_to_tunnel
for service_id, service in config.service_id_to_service.items():
if isinstance(service, Config.service):
self._flow_clients[service_id] = FLOWClient(service.address, config.data_base_dir, config.cache_directory)
elif isinstance(service, Config.tunnel_service):
self._flow_clients[service_id] = FLOWClient(None, config.data_base_dir, config.cache_directory)
self._tunnel_id_to_flow_clients.setdefault(service.tunnel_id, []).append(
(service.index, self._flow_clients[service_id]))
for party, service_id in config.party_to_service_id.items():
for role_str in config.parties.party_to_role_string(party):
self._role_str_to_service_id[role_str] = service_id
def __getitem__(self, role_str: str) -> 'FLOWClient':
if role_str not in self._role_str_to_service_id:
raise RuntimeError(f"no flow client found binding to {role_str}")
return self._flow_clients[self._role_str_to_service_id[role_str]]
def __enter__(self):
# open ssh tunnels and create flow clients for remote
self._tunnels = []
for tunnel_id, tunnel_conf in self._tunnel_id_to_tunnel.items():
tunnel = sshtunnel.SSHTunnelForwarder(ssh_address_or_host=tunnel_conf.ssh_address,
ssh_username=tunnel_conf.ssh_username,
ssh_password=tunnel_conf.ssh_password,
ssh_pkey=tunnel_conf.ssh_priv_key,
remote_bind_addresses=tunnel_conf.services_address)
tunnel.start()
self._tunnels.append(tunnel)
for index, flow_client in self._tunnel_id_to_flow_clients[tunnel_id]:
flow_client.set_address(f"127.0.0.1:{tunnel.local_bind_ports[index]}")
return self
def __exit__(self, exc_type, exc_val, exc_tb):
for tunnel in self._tunnels:
try:
tunnel.stop()
except Exception as e:
LOGGER.exception(e)
def contains(self, role_str):
return role_str in self._role_str_to_service_id
def all_roles(self):
return sorted(self._role_str_to_service_id.keys())
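# Hedged usage sketch ('config' is a parsed fate_test Config object; the
# role string is a placeholder):
#
#   with Clients(config) as clients:     # opens any configured ssh tunnels
#       if clients.contains("guest_0"):
#           flow_client = clients["guest_0"]   # FLOWClient bound to role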
materialsproject/MPContribs | mpcontribs-api/mpcontribs/api/contributions/generate_formulae.py | Python | mit | 431 | 0
# -*- coding: utf-8 -*-
import os
import json
from pymatgen.ext.matproj import MPRester
data = {}
with MPRester() as mpr:
for i, d in enumerate(
mpr.query(criteria={}, properties=["task_ids", "pretty_formula"])
):
for task_id in d["task_ids"]:
data[task_id] = d["pretty_formula"]
out = os.path.join(os.path.dirname(__file__), "formulae.json")
with open(out, "w") as f:
json.dump(data, f)
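A companion sketch for consuming the file the script writes; the task id shown is illustrative.
# Companion sketch: read back the task-id -> formula map written above.
# "mp-149" is an illustrative Materials Project task id.
import json
import os

with open(os.path.join(os.path.dirname(__file__), "formulae.json")) as f:
    formulae = json.load(f)
print(formulae.get("mp-149"))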
|
mujin/jhbuild
|
jhbuild/commands/autobuild.py
|
Python
|
gpl-2.0
| 3,732
| 0.001876
|
# jhbuild - a tool to ease building collections of source packages
# Copyright (C) 2001-2004 James Henstridge
#
# autobuild.py: non-interactive build that generates a report
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from optparse import make_option
from jhbuild.errors import UsageError, FatalError
from jhbuild.commands.base import Command, register_command
import jhbuild.moduleset
import jhbuild.frontends
class cmd_autobuild(Command):
doc = N_('Build modules non-interactively and upload results to JhAutobuild')
name = 'autobuild'
def __init__(self):
Command.__init__(self, [
make_option('-a', '--autogen',
action='store_true', dest='autogen', default=False,
help=_('always run autogen.sh')),
make_option('-c', '--clean',
action='store_true', dest='clean', default=False,
help=_('run make clean before make')),
make_option('--distcheck',
action='store_true', dest='distcheck', default=False,
help=_('run make distcheck after building')),
make_option('-s', '--skip', metavar='MODULES',
action='append', dest='skip', default=[],
help=_('treat the given modules as up to date')),
make_option('-t', '--start-at', metavar='MODULE',
action='store', dest='startat', default=None,
help=_('start building at the given module')),
make_option('-r', '--report-url',
action='store', dest='reporturl', default=None,
                        help=_('jhautobuild report URL')),
make_option('-v', '--verbose',
action='store_true', dest='verbose', default=False,
                        help=_('verbose mode')),
            ])
def run(self, config, options, args, help=None):
config.set_from_cmdline_options(options)
config.buildscript = 'autobuild'
config.autobuild_report_url = None
config.verbose = False
config.interact = False
if options.reporturl is not None:
config.autobuild_report_url = options.reporturl
if options.verbose:
config.verbose = True
if not config.autobuild_report_url:
raise UsageError(_('report URL for autobuild not specified'))
module_set = jhbuild.moduleset.load(config)
module_list = module_set.get_module_list(args or config.modules,
config.skip)
# remove modules up to startat
if options.startat:
while module_list and module_list[0].name != options.startat:
del module_list[0]
if not module_list:
raise FatalError(_('%s not in module list') % options.startat)
build = jhbuild.frontends.get_buildscript(config, module_list, module_set=module_set)
return build.build()
register_command(cmd_autobuild)
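A hedged sketch of exercising the command's option parsing outside jhbuild's own CLI; it assumes the base Command class keeps the make_option list on self.options, and a real run would additionally need a loaded jhbuild Config.
# Hedged sketch: drive the option parsing directly. Assumes jhbuild's base
# Command stores the option list on self.options; a real invocation goes
# through jhbuild's CLI and passes a loaded Config to run().
import optparse

cmd = cmd_autobuild()
parser = optparse.OptionParser(option_list=cmd.options)
options, args = parser.parse_args(
    ['--report-url=https://build.example.org/report', '-v', 'glib'])
# cmd.run(config, options, args) would then start the non-interactive build.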
|
TRox1972/youtube-dl
|
youtube_dl/extractor/orf.py
|
Python
|
unlicense
| 11,297
| 0.001151
|
# coding: utf-8
from __future__ import unicode_literals
import re
import calendar
import datetime
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
HEADRequest,
unified_strdate,
strip_jsonp,
int_or_none,
float_or_none,
determine_ext,
remove_end,
unescapeHTML,
)
class ORFTVthekIE(InfoExtractor):
IE_NAME = 'orf:tvthek'
IE_DESC = 'ORF TVthek'
_VALID_URL = r'https?://tvthek\.orf\.at/(?:[^/]+/)+(?P<id>\d+)'
_TESTS = [{
'url': 'http://tvthek.orf.at/program/Aufgetischt/2745173/Aufgetischt-Mit-der-Steirischen-Tafelrunde/8891389',
'playlist': [{
'md5': '2942210346ed779588f428a92db88712',
'info_dict': {
'id': '8896777',
'ext': 'mp4',
'title': 'Aufgetischt: Mit der Steirischen Tafelrunde',
'description': 'md5:c1272f0245537812d4e36419c207b67d',
'duration': 2668,
'upload_date': '20141208',
},
}],
'skip': 'Blocked outside of Austria / Germany',
}, {
'url': 'http://tvthek.orf.at/topic/Im-Wandel-der-Zeit/8002126/Best-of-Ingrid-Thurnher/7982256',
'info_dict': {
'id': '7982259',
'ext': 'mp4',
'title': 'Best of Ingrid Thurnher',
'upload_date': '20140527',
'description': 'Viele Jahre war Ingrid Thurnher das "Gesicht" der ZIB 2. Vor ihrem Wechsel zur ZIB 2 im Jahr 1995 moderierte sie unter anderem "Land und Leute", "Österreich-Bild" und "Niederösterreich heute".',
},
'params': {
'skip_download': True, # rtsp downloads
},
'_skip': 'Blocked outside of Austria / Germany',
}, {
'url': 'http://tvthek.orf.at/topic/Fluechtlingskrise/10463081/Heimat-Fremde-Heimat/13879132/Senioren-betreuen-Migrantenkinder/13879141',
'skip_download': True,
}, {
'url': 'http://tvthek.orf.at/profile/Universum/35429',
'skip_download': True,
}]
def _real_extract(self, url):
playlist_id = self._match_id(url)
webpage = self._download_webpage(url, playlist_id)
data_jsb = self._parse_json(
self._search_regex(
r'<div[^>]+class=(["\']).*?VideoPlaylist.*?\1[^>]+data-jsb=(["\'])(?P<json>.+?)\2',
webpage, 'playlist', group='json'),
playlist_id, transform_source=unescapeHTML)['playlist']['videos']
def quality_to_int(s):
m = re.search('([0-9]+)', s)
if m is None:
return -1
return int(m.group(1))
entries = []
for sd in data_jsb:
video_id, title = sd.get('id'), sd.get('title')
if not video_id or not title:
continue
video_id = compat_str(video_id)
formats = [{
'preference': -10 if fd['delivery'] == 'hls' else None,
'format_id': '%s-%s-%s' % (
fd['delivery'], fd['quality'], fd['quality_string']),
'url': fd['src'],
'protocol': fd['protocol'],
'quality': quality_to_int(fd['quality']),
} for fd in sd['sources']]
            # Check for geoblocking.
            # There is an is_geoprotection property, but it is always false.
geo_str = sd.get('geoprotection_string')
if geo_str:
try:
http_url = next(
f['url']
for f in formats
if re.match(r'^https?://.*\.mp4$', f['url']))
except StopIteration:
pass
else:
req = HEADRequest(http_url)
self._request_webpage(
req, video_id,
note='Testing for geoblocking',
errnote=((
'This video seems to be blocked outside of %s. '
'You may want to try the streaming-* formats.')
% geo_str),
fatal=False)
self._check_formats(formats, video_id)
self._sort_formats(formats)
subtitles = {}
for sub in sd.get('subtitles', []):
sub_src = sub.get('src')
if not sub_src:
continue
subtitles.setdefault(sub.get('lang', 'de-AT'), []).append({
'url': sub_src,
})
upload_date = unified_strdate(sd.get('created_date'))
entries.append({
'_type': 'video',
'id': video_id,
'title': title,
'formats': formats,
'subtitles': subtitles,
'description': sd.get('description'),
'duration': int_or_none(sd.get('duration_in_seconds')),
'upload_date': upload_date,
'thumbnail': sd.get('image_full_url'),
})
return {
'_type': 'playlist',
'entries': entries,
'id': playlist_id,
}
class ORFOE1IE(InfoExtractor):
IE_NAME = 'orf:oe1'
IE_DESC = 'Radio Österreich 1'
_VALID_URL = r'https?://oe1\.orf\.at/(?:programm/|konsole\?.*?\btrack_id=)(?P<id>[0-9]+)'
    # Audio on ORF radio is only available for 7 days, so we can't add tests.
_TESTS = [{
'url': 'http://oe1.orf.at/konsole?show=on_demand#?track_id=394211',
        'only_matching': True,
}, {
'url': 'http://oe1.orf.at/konsole?show=ondemand&track_id=443608&load_day=/programm/konsole/tag/20160726',
'only_matching': True,
}]
def _real_extract(self, url):
show_id = self._match_id(url)
        data = self._download_json(
'http://oe1.orf.at/programm/%s/konsole' % show_id,
show_id
)
timestamp = datetime.datetime.strptime('%s %s' % (
data['item']['day_label'],
data['item']['time']
), '%d.%m.%Y %H:%M')
unix_timestamp = calendar.timegm(timestamp.utctimetuple())
return {
'id': show_id,
'title': data['item']['title'],
'url': data['item']['url_stream'],
'ext': 'mp3',
'description': data['item'].get('info'),
'timestamp': unix_timestamp
}
class ORFFM4IE(InfoExtractor):
IE_NAME = 'orf:fm4'
IE_DESC = 'radio FM4'
_VALID_URL = r'https?://fm4\.orf\.at/(?:7tage/?#|player/)(?P<date>[0-9]+)/(?P<show>\w+)'
_TEST = {
'url': 'http://fm4.orf.at/player/20160110/IS/',
'md5': '01e736e8f1cef7e13246e880a59ad298',
'info_dict': {
'id': '2016-01-10_2100_tl_54_7DaysSun13_11244',
'ext': 'mp3',
'title': 'Im Sumpf',
'description': 'md5:384c543f866c4e422a55f66a62d669cd',
'duration': 7173,
'timestamp': 1452456073,
'upload_date': '20160110',
},
'skip': 'Live streams on FM4 got deleted soon',
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
show_date = mobj.group('date')
show_id = mobj.group('show')
data = self._download_json(
'http://audioapi.orf.at/fm4/json/2.0/broadcasts/%s/4%s' % (show_date, show_id),
show_id
)
def extract_entry_dict(info, title, subtitle):
return {
'id': info['loopStreamId'].replace('.mp3', ''),
'url': 'http://loopstream01.apa.at/?channel=fm4&id=%s' % info['loopStreamId'],
'title': title,
'description': subtitle,
'duration': (info['end'] - info['start']) / 1000,
'timestamp': info['start'] / 1000,
'ext': 'mp3'
}
entries = [extract_entry_dict(t, data['title'], data['subtitle']) for t in data['streams']]
return {
'_type': 'playlist',
'id': show_id,
'title': data['title'],
            'description': data['subtitle'],
            'entries': entries
        }
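To close, a sketch of exercising one of these extractors through youtube-dl's public API; the URL is taken from the FM4 _TEST above and the options shown are illustrative.
# Sketch: exercise the FM4 extractor via youtube-dl's public API.
from youtube_dl import YoutubeDL

with YoutubeDL({'quiet': True, 'skip_download': True}) as ydl:
    info = ydl.extract_info('http://fm4.orf.at/player/20160110/IS/',
                            download=False)
    print(info.get('id'), info.get('title'))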
|