repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
rev2004/android2cloud.app-engine | google_appengine/google/appengine/ext/db/__init__.py | Python | mit | 101,475 | 0.005932 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Simple, schema-based database abstraction layer for the datastore.
Modeled after Django's abstraction layer on top of SQL databases,
http://www.djangoproject.com/documentation/mode_api/. Ours is a little simpler
and a lot less code because the datastore is so much simpler than SQL
databases.
The programming model is to declare Python subclasses of the Model class,
declaring datastore properties as class members of that class. So if you want to
publish a story with title, body, and created date, you would do it like this:
class Story(db.Model):
title = db.StringProperty()
body = db.TextProperty()
created = db.DateTimeProperty(auto_now_add=True)
You can create a new Story in the datastore with this usage pattern:
story = Story(title='My title')
story.body = 'My body'
story.put()
You query for Story entities using built in query interfaces that map directly
to the syntax and semantics of the datastore:
stories = Story.all().filter('date >=', yesterday).order('-date')
for story in stories:
print story.title
The Property declarations enforce types by performing validation on assignment.
For example, the DateTimeProperty enforces that you assign valid datetime
objects, and if you supply the "required" option for a property, you will not
be able to assign None to that property.
We also support references between models, so if a story has comments, you
would represent it like this:
class Comment(db.Model):
story = db.ReferenceProperty(Story)
body = db.TextProperty()
When you get a story out of the datastore, the story reference is resolved
automatically the first time it is referenced, which makes it easy to use
model instances without performing additional queries by hand:
comment = Comment.get(key)
print comment.story.title
Likewise, you can access the set of comments that refer to each story through
this property through a reverse reference called comment_set, which is a Query
preconfigured to return all matching comments:
story = Story.get(key)
for comment in story.comment_set:
print comment.body
"""
import base64
import copy
import datetime
import logging
import re
import time
import urlparse
import warnings
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import users
from google.appengine.datastore import datastore_pb
Error = datastore_errors.Error
BadValueError = datastore_errors.BadValueError
BadPropertyError = datastore_errors.BadPropertyError
BadRequestError = datastore_errors.BadRequestError
EntityNotFoundError = datastore_errors.EntityNotFoundError
BadArgumentError = datastore_errors.BadArgumentError
QueryNotFoundError = datastore_errors.QueryNotFoundError
TransactionNotFoundError = datastore_errors.TransactionNotFoundError
Rollback = datastore_errors.Rollback
TransactionFailedError = datastore_errors.TransactionFailedError
BadFilterError = datastore_errors.BadFilterError
BadQueryError = datastore_errors.BadQueryError
BadKeyError = datastore_errors.BadKeyError
InternalError = datastore_errors.InternalError
NeedIndexError = datastore_errors.NeedIndexError
Timeout = datastore_errors.Timeout
CommittedButStillApplying = datastore_errors.CommittedButStillApplying
ValidationError = BadValueError
Key = datastore_types.Key
Category = datastore_types.Category
Link = datastore_types.Link
Email = datastore_types.Email
GeoPt = datastore_types.GeoPt
IM = datastore_types.IM
PhoneNumber = datastore_types.PhoneNumber
PostalAddress = datastore_types.PostalAddress
Rating = datastore_types.Rating
Text = datastore_types.Text
Blob = datastore_types.Blob
ByteString = datastore_types.ByteString
BlobKey = datastore_types.BlobKey
READ_CAPABILITY = datastore.READ_CAPABILITY
WRITE_CAPABILITY = datastore.WRITE_CAPABILITY
STRONG_CONSISTENCY = datastore.STRONG_CONSISTENCY
EVENTUAL_CONSISTENCY = datastore.EVENTUAL_CONSISTENCY
_kind_map = {}
_SELF_REFERENCE = object()
_RESERVED_WORDS = set(['key_name'])
class NotSavedError(Error):
"""Raised when a saved-object action is performed on a non-saved object."""
class KindError(BadValueError):
"""Raised when an entity is used with incorrect Model."""
class PropertyError(Error):
"""Raised when non-existent property is referenced."""
class DuplicatePropertyError(Error):
"""Raised when a property is duplicated in a model definition."""
class ConfigurationError(Error):
"""Raised when a property or model is improperly configured."""
class ReservedWordError(Error):
"""Raised when a property is defined for a reserved word."""
class DerivedPropertyError(Error):
"""Raised when attempting to assign a value to a derived property."""
_ALLOWED_PROPERTY_TYPES = set([
basestring,
str,
unicode,
bool,
int,
long,
float,
Key,
datetime.datetime,
datetime.date,
datetime.time,
Blob,
ByteString,
Text,
users.User,
Category,
Link,
Email,
GeoPt,
IM,
PhoneNumber,
PostalAddress,
Rating,
BlobKey,
])
_ALLOWED_EXPANDO_PROPERTY_TYPES = set(_ALLOWED_PROPERTY_TYPES)
_ALLOWED_EXPANDO_PROPERTY_TYPES.update((list, tuple, type(None)))
_OPERATORS = ['<', '<=', '>', '>=', '=', '==', '!=', 'in']
_FILTER_REGEX = re.compile(
'^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(_OPERATORS),
re.IGNORECASE | re.UNICODE)
def class_for_kind(kind):
"""Return base-class responsible for implementing kind.
Necessary to recover the class responsible for implement | ing provided
kind.
Args:
kind: Entity kind string.
Returns:
Cl | ass implementation for kind.
Raises:
KindError when there is no implementation for kind.
"""
try:
return _kind_map[kind]
except KeyError:
raise KindError('No implementation for kind \'%s\'' % kind)
def check_reserved_word(attr_name):
"""Raise an exception if attribute name is a reserved word.
Args:
attr_name: Name to check to see if it is a reserved word.
Raises:
ReservedWordError when attr_name is determined to be a reserved word.
"""
if datastore_types.RESERVED_PROPERTY_NAME.match(attr_name):
raise ReservedWordError(
"Cannot define property. All names both beginning and "
"ending with '__' are reserved.")
if attr_name in _RESERVED_WORDS or attr_name in dir(Model):
raise ReservedWordError(
"Cannot define property using reserved word '%(attr_name)s'. "
"If you would like to use this name in the datastore consider "
"using a different name like %(attr_name)s_ and adding "
"name='%(attr_name)s' to the parameter list of the property "
"definition." % locals())
def query_descendants(model_instance):
"""Returns a query for all the descendants of a model instance.
Args:
model_instance: Model instance to find the descendants of.
Returns:
Query that will retrieve all entities that have the given model instance
as an ancestor. Unlike normal ancestor queries, this does not include the
ancestor itself.
"""
result = Query().ancestor(model_instance);
result.filter(datastore_types._KEY_SPECIAL_PROPERTY + ' >',
model_instance.key());
return result;
def model_to_protobuf(model_instance, _entity_class=datastore.Entity):
"""Encodes a model instance as a protocol buffer.
Args:
model_instance: Model instance to encode.
Returns:
entity_pb.EntityProto representation of the model instance
"""
return model_instanc |
spektom/incubator-airflow | tests/providers/snowflake/operators/test_s3_to_snowflake.py | Python | apache-2.0 | 3,634 | 0.001376 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from unittest import mock
from airflow.providers.snowflake.operators.s3_to_snowflake import S3ToSnowflakeTransfer
from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces
class TestS3ToSnowflakeTransfer(unittest.TestCase):
@mock.patch("airflow.providers.snowflake.hooks.snowflake.SnowflakeHook.run")
def test_execute(self, mock_run):
s3_keys = ['1.csv', '2.csv']
table = 'table'
stage = 'stage'
file_format = 'file_format'
schema = 'schema'
S3ToSnowflakeTransfer(
s3_keys=s3_keys,
table=table,
stage=stage,
file_format=file_format,
schema=schema,
columns_array=None,
task_id="task_id",
dag=None
).execute(None)
files = str(s3_keys)
files = files.replace('[', '(')
files = files.replace(']', ')')
base_sql = """
FROM @{stage}/
files={files}
file_format={file_format}
""".format(
stage=stage,
files=files,
file_format=file_format
)
copy_query = """
COPY INTO {schema}.{table} {base_sql}
""".format(
schema=schema,
table=table,
base_sql=base_sql
)
assert mock_run.call_count == 1
asse | rt_equal_ignore_multiple_spaces(self, mock_run.call_args[0][0], copy_query)
@mock.patch("airflow.providers.snowflake.hooks.snowflake.SnowflakeHook.run")
de | f test_execute_with_columns(self, mock_run):
s3_keys = ['1.csv', '2.csv']
table = 'table'
stage = 'stage'
file_format = 'file_format'
schema = 'schema'
columns_array = ['col1', 'col2', 'col3']
S3ToSnowflakeTransfer(
s3_keys=s3_keys,
table=table,
stage=stage,
file_format=file_format,
schema=schema,
columns_array=columns_array,
task_id="task_id",
dag=None
).execute(None)
files = str(s3_keys)
files = files.replace('[', '(')
files = files.replace(']', ')')
base_sql = """
FROM @{stage}/
files={files}
file_format={file_format}
""".format(
stage=stage,
files=files,
file_format=file_format
)
copy_query = """
COPY INTO {schema}.{table}({columns}) {base_sql}
""".format(
schema=schema,
table=table,
columns=",".join(columns_array),
base_sql=base_sql
)
assert mock_run.call_count == 1
assert_equal_ignore_multiple_spaces(self, mock_run.call_args[0][0], copy_query)
|
lefthandedroo/Cosmo-models | zprev versions/Models_py_backup/setup.py | Python | mit | 243 | 0.012346 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Ju | l 23 13:23:20 2018
@author: BallBlueMeercat
"""
from distutils.core import setup
from Cython.Build import cythonize
setup(ext_m | odules = cythonize('firstderivs_cython.pyx')) |
ctlewitt/Invisible-Keyboard | find_intersecting_words.py | Python | mit | 417 | 0.002398 | with open("dictionary.txt", "r") as one_gram_f:
with open("old_dictionary.txt", "r") as internet_list_f:
with open("intersecting_words.txt", "w") as intersecting_words_f:
one_grams = one_gram_f.readlines()
internet_words = internet_list_f | .readlines()
for word in one_grams:
if word in internet_words:
intersecting_words_f.write(word) | |
pawelad/BLM | Players/admin.py | Python | mit | 851 | 0.0047 | from django.contrib import ad | min
from Players.models import Player
@admin.register(Player)
class PlayerAdmin(admin.ModelAdmin):
view_on_site = True
| list_display = ('pk', 'first_name', 'last_name', 'number', 'team', 'position', 'age', 'height', 'weight')
list_filter = ['team', 'position']
search_fields = ['first_name', 'last_name']
# Disable delete when team has 5 players
def has_delete_permission(self, request, obj=None):
try:
return False if Player.objects.filter(team=obj.team).count() == 5 else True
except AttributeError:
pass
# Disable delete action form the list; not ideal, disables delete for all players
def get_actions(self, request):
actions = super(PlayerAdmin, self).get_actions(request)
del actions['delete_selected']
return actions |
dinoperovic/django-shop-wspay | shop_wspay/forms.py | Python | bsd-3-clause | 536 | 0 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
class WSPayForm(forms.Form):
ShopID = forms.CharField(widget=forms.HiddenInput)
ShoppingCartID = forms.CharField(widget=forms.HiddenInput)
TotalAmount = forms.CharField(widget=forms.HiddenInput)
Signature = forms.CharField(widget=forms.HiddenInput)
ReturnURL = forms.CharField(widget=for | ms.HiddenInput)
CancelURL = forms.CharField(widget=forms.HiddenInput)
ReturnErrorURL = forms.CharField(widg | et=forms.HiddenInput)
|
Ezopt/Fear-Shadows | cst.py | Python | gpl-3.0 | 796 | 0.003783 | # -*- coding: utf-8 -*-
"""
Created on Wed Apr 20 16:01:21 2016
@author: KB
"""
#Listes des images du jeu
import pygame
dossier = "./Img_FS/"
extension1 = ".png"
extension2 = ".jpg"
image_accueil = dossier + "accueil" + extension1
image_fond = dossier + "back1" + extension2
im | age_mur = dossier + "mur | " + extension1
image_persos = dossier + "persos" + extension1
image_monstres = dossier + "monstres" + extension1
image_fin = dossier + "fin" + extension1
image_pause = dossier + "pause" + extension1
image_tache = dossier + "tache" + extension1
image_credit = dossier + "credit" + extension1
#Paramètres de la fenêtre
nombre_sprite_cote = 25
taille_sprite = 42
cote_fenetre = nombre_sprite_cote * taille_sprite
#Personnalisation de la fenêtre
titre_fenetre = "Fear Shadows"
|
Smart-Torvy/torvy-home-assistant | homeassistant/components/zone.py | Python | mit | 4,669 | 0 | """
Support for the definition of zones.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/zone/
"""
import logging
import voluptuous as vol
from homeassistant.const import (
ATTR_HIDDEN, ATTR_LATITUDE, ATTR_LONGITUDE, CONF_NAME, CONF_LATITUDE,
CONF_LONGITUDE, CONF_ICON)
from homeassistant.helpers import config_per_platform
from homeassistant.helpers.entity import Entity, generate_entity_id
from homeassistant.util.location import distance
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
ATTR_PASSIVE = 'passive'
ATTR_RADIUS = 'radius'
CONF_PASSIVE = 'passive'
CONF_RADIUS = 'radius'
DEFAULT_NAME = 'Unnamed zone'
DEFAULT_PASSIVE = False
DEFAULT_RA | DIUS = 100
DOMAIN = 'zone'
ENTITY_ID_FORMAT = 'zone.{}'
ENTITY_ID_HOME = ENTITY_ID_FORMAT.format('home')
ICON_HOME = 'mdi:home'
ICON_IMPORT = 'mdi:import'
STATE = 'zoning'
# The config that zone accepts is the same as if it has platforms.
PLATFORM_SCHEMA = vol.Schema({
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(CONF_LATITUDE): cv.latitude,
vol.Requi | red(CONF_LONGITUDE): cv.longitude,
vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS): vol.Coerce(float),
vol.Optional(CONF_PASSIVE, default=DEFAULT_PASSIVE): cv.boolean,
vol.Optional(CONF_ICON): cv.icon,
})
def active_zone(hass, latitude, longitude, radius=0):
"""Find the active zone for given latitude, longitude."""
# Sort entity IDs so that we are deterministic if equal distance to 2 zones
zones = (hass.states.get(entity_id) for entity_id
in sorted(hass.states.entity_ids(DOMAIN)))
min_dist = None
closest = None
for zone in zones:
if zone.attributes.get(ATTR_PASSIVE):
continue
zone_dist = distance(
latitude, longitude,
zone.attributes[ATTR_LATITUDE], zone.attributes[ATTR_LONGITUDE])
within_zone = zone_dist - radius < zone.attributes[ATTR_RADIUS]
closer_zone = closest is None or zone_dist < min_dist
smaller_zone = (zone_dist == min_dist and
zone.attributes[ATTR_RADIUS] <
closest.attributes[ATTR_RADIUS])
if within_zone and (closer_zone or smaller_zone):
min_dist = zone_dist
closest = zone
return closest
def in_zone(zone, latitude, longitude, radius=0):
"""Test if given latitude, longitude is in given zone."""
zone_dist = distance(
latitude, longitude,
zone.attributes[ATTR_LATITUDE], zone.attributes[ATTR_LONGITUDE])
return zone_dist - radius < zone.attributes[ATTR_RADIUS]
def setup(hass, config):
"""Setup zone."""
entities = set()
for _, entry in config_per_platform(config, DOMAIN):
name = entry.get(CONF_NAME)
zone = Zone(hass, name, entry[CONF_LATITUDE], entry[CONF_LONGITUDE],
entry.get(CONF_RADIUS), entry.get(CONF_ICON),
entry.get(CONF_PASSIVE))
zone.entity_id = generate_entity_id(ENTITY_ID_FORMAT, name, entities)
zone.update_ha_state()
entities.add(zone.entity_id)
if ENTITY_ID_HOME not in entities:
zone = Zone(hass, hass.config.location_name,
hass.config.latitude, hass.config.longitude,
DEFAULT_RADIUS, ICON_HOME, False)
zone.entity_id = ENTITY_ID_HOME
zone.update_ha_state()
return True
class Zone(Entity):
"""Representation of a Zone."""
# pylint: disable=too-many-arguments, too-many-instance-attributes
def __init__(self, hass, name, latitude, longitude, radius, icon, passive):
"""Initialize the zone."""
self.hass = hass
self._name = name
self._latitude = latitude
self._longitude = longitude
self._radius = radius
self._icon = icon
self._passive = passive
@property
def name(self):
"""Return the name of the zone."""
return self._name
@property
def state(self):
"""Return the state property really does nothing for a zone."""
return STATE
@property
def icon(self):
"""Return the icon if any."""
return self._icon
@property
def state_attributes(self):
"""Return the state attributes of the zone."""
data = {
ATTR_HIDDEN: True,
ATTR_LATITUDE: self._latitude,
ATTR_LONGITUDE: self._longitude,
ATTR_RADIUS: self._radius,
}
if self._passive:
data[ATTR_PASSIVE] = self._passive
return data
|
appsembler/mayan_appsembler | apps/registration/models.py | Python | gpl-3.0 | 2,647 | 0.002267 | from __future__ import absolute_import
import requests
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.simplejson import dumps, loads
from common.models import Singleton
from lock_manager import Lock, LockError
from .literals import FORM_SUBMIT_URL, FORM_KEY, FORM_RECEIVER_FIELD, TIMEOUT
from .exceptions import AlreadyRegistered
class RegistrationSingleton(Singleton):
_cached_name = None
_registered = None
registered = models.BooleanField(default=False, verbose_name=_('registered'))
registration_data = models.TextField(verbose_name=_(u'registration data'), blank=True)
@classmethod
def registration_state(cls):
if cls._registered:
return cls._registered
else:
instance = cls.objects.get()
if instance.is_registered:
cls._registered = instance.is_registered
| return instance.is_registered
@classmethod
def registered_name(cls):
if cls._cached_name:
return cls._cached_name
else | :
instance = cls.objects.get()
try:
dictionary = loads(instance.registration_data)
except ValueError:
dictionary = {}
name_value = dictionary.get('company') or dictionary.get('name')
if name_value:
cls._cached_name = name_value
return name_value or _(u'No name')
@property
def is_registered(self):
return self.registered
def register(self, form):
from installation.models import Installation
if self.is_registered:
raise AlreadyRegistered
installation = Installation.objects.get()
dictionary = {}
dictionary.update(form.cleaned_data)
dictionary.update({
'uuid': installation.uuid
})
self.registration_data = dumps(dictionary)
self.save()
self.submit()
def submit(self):
try:
lock = Lock.acquire_lock('upload_registration')
except LockError:
pass
else:
try:
requests.post(FORM_SUBMIT_URL, data={'formkey': FORM_KEY, FORM_RECEIVER_FIELD: self.registration_data}, timeout=TIMEOUT)
except (requests.exceptions.Timeout, requests.exceptions.ConnectionError):
pass
else:
self.registered = True
self.save()
finally:
lock.release()
class Meta:
verbose_name = verbose_name_plural = _(u'registration properties')
|
MDAnalysis/mdanalysis | testsuite/MDAnalysisTests/core/test_fragments.py | Python | gpl-2.0 | 7,995 | 0.000125 | # -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8
#
# MDAnalysis --- https://www.mdanalysis.org
# Copyright (c) 2006-2017 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
# doi: 10.25080/majora-629e541a-00e
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
import numpy as np
from numpy.testing import (
assert_equal,
)
import pytest
import MDAnalysis as mda
from MDAnalysis.core.topologyattrs import Bonds
from MDAnalysis.core import groups
from MDAnalysis import NoDataError
from MDAnalysisTests import make_Universe
from MDAnalysisTests.datafiles import TPR, XTC
# Also used in topology/test_guessers
def make_starshape():
u = make_Universe()
bonds = []
for seg in range(5):
segbase = seg * 25
for res in range(5):
# offset for atoms in this res
base = segbase + 5 * res
bonds.append((0 + base, 1 + base))
bonds.append((1 + base, 2 + base))
bonds.append((1 + base, 3 + base))
bonds.append((1 + base, 4 + base))
if not res == 4: # last res doesn't link onwards
bonds.append((4 + base, 5 + base))
u.add_TopologyAttr(Bonds(bonds))
return u
def case1():
return make_starshape()
def case2():
u = make_Universe()
bonds = []
for seg in range(5):
segbase = seg * 25
for res in range(5):
# offset for atoms in this res
base = segbase + 5 * res
bonds.append((0 + base, 1 + base))
bonds.append((1 + base, 2 + base))
bonds.append((2 + base, 3 + base))
bonds.append((3 + base, 4 + base))
bonds.append((1 + base, 4 + base))
if not res == 4: # last res doesn't link onwards
bonds.append((0 + base, 5 + base))
u.add_TopologyAttr(Bonds(bonds))
return u
class TestFragments(object):
r"""Use 125 atom test Universe
5 segments of 5 residues of 5 atoms
Case1
-----
Star shapes to try and test the branching prediction
o | o | o
| | | | |
o-o-o-|-o-o-o-|-o-o-o
| | | | |
o | o |x3 o
Case2
-----
4-ring pendants to test cyclic conditions
o------o------o
| | |
o o o
/ \ / \ / \
o o o o o o
\ / \ / \ /
o o o
Test ring molecules?
"""
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_total_frags(self, u):
fragments = u.atoms.fragments
fragindices = u.atoms.fragindices
# should be 5 fragments of 25 atoms
assert len(fragments) == 5
for frag in fragments:
assert len(frag) == 25
# number of fragindices must correspond to number of atoms:
| assert len(fragindices) == len(u.atoms)
# number of unique fragindices must correspond to number of fragments | :
assert len(np.unique(fragindices)) == len(fragments)
# check fragindices dtype:
assert fragindices.dtype == np.intp
#check n_fragments
assert u.atoms.n_fragments == len(fragments)
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_frag_external_ordering(self, u):
# check fragments and fragindices are sorted correctly:
for i, frag in enumerate(u.atoms.fragments):
assert frag[0].index == i * 25
assert np.unique(frag.fragindices)[0] == i
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_frag_internal_ordering(self, u):
# check atoms are sorted within fragments and have the same fragindex:
for i, frag in enumerate(u.atoms.fragments):
assert_equal(frag.ix, np.arange(25) + i * 25)
assert len(np.unique(frag.fragindices)) == 1
assert frag.n_fragments == 1
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_atom_access(self, u):
# check atom can access fragment and fragindex:
for at in (u.atoms[0], u.atoms[76], u.atoms[111]):
frag = at.fragment
assert isinstance(frag, groups.AtomGroup)
assert len(frag) == 25
assert at in frag
fragindex = at.fragindex
assert isinstance(fragindex, int)
with pytest.raises(AttributeError):
x = at.n_fragments
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_atomgroup_access(self, u):
# check atomgroup can access fragments
# first 60 atoms have 3 fragments, given as tuple
# each fragment should still be 25 atoms
ag = u.atoms[:60]
frags = ag.fragments
assert len(frags) == 3
assert isinstance(frags, tuple)
for frag in frags:
assert len(frag) == 25
# same for fragindices:
fragindices = ag.fragindices
assert len(fragindices) == 60
assert len(np.unique(fragindices)) == 3
assert ag.n_fragments == 3
def test_empty_atomgroup_access(self):
ag = mda.AtomGroup([], case1())
assert ag.fragments == tuple()
assert_equal(ag.fragindices, np.array([], dtype=np.int64))
assert ag.n_fragments == 0
def test_atomgroup_fragments_nobonds_NDE(self):
# should raise NDE
u = make_Universe()
ag = u.atoms[:10]
with pytest.raises(NoDataError):
getattr(ag, 'fragments')
with pytest.raises(NoDataError):
getattr(ag, 'fragindices')
with pytest.raises(NoDataError):
getattr(ag, 'n_fragments')
def test_atom_fragment_nobonds_NDE(self):
# should raise NDE
u = make_Universe()
with pytest.raises(NoDataError):
getattr(u.atoms[10], 'fragment')
with pytest.raises(NoDataError):
getattr(u.atoms[10], 'fragindex')
def test_atomgroup_fragment_cache_invalidation_bond_making(self):
u = case1()
fgs = u.atoms.fragments
assert fgs is u.atoms._cache['fragments']
assert u.atoms._cache_key in u._cache['_valid']['fragments']
u.add_bonds((fgs[0][-1] + fgs[1][0],)) # should trigger invalidation
assert 'fragments' not in u._cache['_valid']
assert len(fgs) > len(u.atoms.fragments) # recomputed
def test_atomgroup_fragment_cache_invalidation_bond_breaking(self):
u = case1()
fgs = u.atoms.fragments
assert fgs is u.atoms._cache['fragments']
assert u.atoms._cache_key in u._cache['_valid']['fragments']
u.delete_bonds((u.atoms.bonds[3],)) # should trigger invalidation
assert 'fragments' not in u._cache['_valid']
assert len(fgs) < len(u.atoms.fragments) # recomputed
def test_tpr_fragments():
ag = mda.Universe(TPR, XTC).atoms
frags = ag.fragments
fragindices = ag.fragindices
assert len(frags[0]) == 3341
assert len(fragindices) == len(ag)
assert len(np.unique(fragindices)) == len(frags)
assert ag.n_fragments == len(frags)
|
luosch/leetcode | python/Word Break.py | Python | mit | 333 | 0 | class Solution(object):
def wordBreak(self, s, wordDict):
n = len(s)
dp = [False] * (n + 1)
dp[0] = True
for i in xrange(1, n + 1):
for j in xrange(0, i):
if dp[j] and s[j:i] in wor | dDict:
dp[i] = True
| break
return dp[-1]
|
stormi/tsunami | src/primaires/salle/commandes/chercherbois/__init__.py | Python | bsd-3-clause | 5,680 | 0.000883 | # -*-coding:Utf-8 -*
# Copyright (c) 2012 NOEL-BARON Léo
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | PROCUREMEN | T
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant la commande 'chercherbois'."""
from random import random, randint, choice
from math import sqrt
from primaires.interpreteur.commande.commande import Commande
from primaires.perso.exceptions.stat import DepassementStat
class CmdChercherBois(Commande):
"""Commande 'chercherbois'"""
def __init__(self):
"""Constructeur de la commande"""
Commande.__init__(self, "chercherbois", "gatherwood")
self.nom_categorie = "objets"
self.aide_courte = "permet de chercher du bois"
self.aide_longue = \
"Cette commande permet de chercher du combustible dans la salle " \
"où vous vous trouvez."
def interpreter(self, personnage, dic_masques):
"""Méthode d'interprétation de commande"""
salle = personnage.salle
if salle.interieur:
personnage << "|err|Vous ne pouvez chercher du combustible " \
"ici.|ff|"
return
personnage.agir("chercherbois")
prototypes = importeur.objet.prototypes.values()
prototypes = [p for p in prototypes if p.est_de_type("combustible")]
combustibles = []
choix = None
for proto in prototypes:
if personnage.salle.terrain.nom in proto.terrains:
combustibles.append((proto.rarete, proto))
combustibles = sorted(combustibles, key=lambda combu: combu[0])
if not combustibles:
personnage << "|err|Il n'y a rien qui puisse brûler par ici.|ff|"
else:
niveau = sqrt(personnage.get_talent("collecte_bois") / 100)
if not niveau:
niveau = 0.1
proba_trouver = round(random(), 1)
if proba_trouver <= niveau: # on trouve du bois
possibles = []
for proba, combustible in combustibles:
if 2 * proba_trouver >= (proba - 1) / 10:
for i in range(int(10 / proba)):
possibles.append(combustible)
nb_obj = randint(int(proba_trouver * 10), int(niveau * 10)) + 1
if possibles:
choix = choice(possibles)
somme_qualites = 0
end = int(choix.poids_unitaire * nb_obj / 2)
try:
personnage.stats.endurance -= end
except DepassementStat:
personnage << "|err|Vous êtes trop fatigué pour " \
"cela.|ff|"
return
try:
personnage.stats.endurance -= 3
except DepassementStat:
personnage << "|err|Vous êtes trop fatigué pour cela.|ff|"
return
# On cherche le bois
personnage.etats.ajouter("collecte_bois")
personnage << "Vous vous penchez et commencez à chercher du bois."
personnage.salle.envoyer(
"{} se met à chercher quelque chose par terre.",
personnage)
yield 5
if "collecte_bois" not in personnage.etats:
return
if choix:
for i in range(nb_obj):
objet = importeur.objet.creer_objet(choix)
personnage.salle.objets_sol.ajouter(objet)
somme_qualites += objet.qualite
personnage << "Vous trouvez {} " \
"et vous relevez.".format(choix.get_nom(nb_obj))
personnage.salle.envoyer("{} se relève, l'air satisfait.",
personnage)
personnage.pratiquer_talent("collecte_bois")
personnage.gagner_xp("survie", somme_qualites * 2)
else:
personnage << "Vous vous redressez sans avoir rien trouvé."
personnage.salle.envoyer("{} se relève, l'air dépité.",
personnage)
personnage.pratiquer_talent("collecte_bois", 4)
personnage.etats.retirer("collecte_bois")
|
tyagow/FacebookBot | src/bot/tests_bot/test_view.py | Python | mit | 870 | 0.001149 | from unittest import skip
from django.shortcuts import resolve_url as r
from django.test import TestCase
@skip
class BotViewTest(TestCase):
def setUp(self):
fake_message = {
'data': [{
| 'entry': [
{
'messaging': {
'message': {
'text': 'Texto Mensagem'
},
'sender': {
'id': 123,
},
}
}
]
| }]
}
import json
data = json.dumps(str(fake_message))
print(data)
self.response = self.client.post(r('bot:main'), fake_message)
def test_get(self):
self.assertEqual(200, self.response.status_code)
|
SebastienPeillet/PisteCreator | gui/option_Dock.py | Python | gpl-3.0 | 8,879 | 0.001126 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
PisteCreatorDockWidget_OptionDock
Option dock for Qgis plugins
Option dock initialize
-------------------
begin : 2017-07-25
last : 2017-10-20
copyright : (C) 2017 by Peillet Sebastien
email : peillet.seb@gmail.com
***************************************************************************/
/***************************************************************************
 *                                                                         *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
 *   (at your option) any later version.                                   *
* *
***************************************************************************/
"""
from future import standard_library
standard_library.install_aliases()
from builtins import str
from builtins import range
import os
from qgis.PyQt.QtGui import QColor
from qgis.PyQt.QtWidgets import QDialog
from qgis.PyQt import uic
from qgis.PyQt.QtCore import pyqtSignal, QSettings
from qgis.gui import QgsColorButton
def hex_to_rgb(value):
    """Convert a hex color string such as "#00d003" to a list [r, g, b].

    A leading "#" is optional.  The string is split into three equal-width
    fields, so 6-char ("rrggbb") and 3-char ("rgb") forms both work.
    """
    value = value.lstrip("#")
    lv = len(value)
    # Integer division is required: on Python 3 `lv / 3` is a float, and
    # both range() and string slicing raise TypeError on float arguments.
    step = lv // 3
    return [int(value[i : i + step], 16) for i in range(0, lv, step)]
class OptionDock(QDialog):
closingPlugin = pyqtSignal()
    def __init__(self, plugin, graph_widget, canvas, parent=None):
        """Build the option dock: load its .ui layout and restore settings."""
        super(OptionDock, self).__init__(parent)
        # loadUi creates the widget attributes referenced below
        # (sideDistSpinBox, color buttons, saveButton, ...).
        uic.loadUi(os.path.join(os.path.dirname(__file__), "Option_dock.ui"), self)
        self.settings = QSettings()
        # Populate the freshly created widgets from persisted QSettings.
        self.initConfig()
        self.graph_widget = graph_widget
        self.PisteCreatorTool = plugin.PisteCreatorTool
        self.canvas = canvas
        self.plugin = plugin
        # "OK" button persists the current widget values back to QSettings.
        self.saveButton.clicked.connect(self.saveconfig)
    def initConfig(self):
        """Load persisted PisteCreator settings (with defaults) into the dock widgets."""
        # Numeric parameters: keep the raw QSettings value on self and push
        # an int-converted copy into the corresponding spin box.
        self.sideDistInt = self.settings.value(
            "PisteCreator/calculation_variable/side_distance", 6
        )
        self.sideDistSpinBox.setValue(int(self.sideDistInt))
        self.aslopeInt = self.settings.value(
            "PisteCreator/graphical_visualisation/tolerated_a_slope", 10
        )
        self.toleratedASlopeSpinBox.setValue(int(self.aslopeInt))
        self.cslopeInt = self.settings.value(
            "PisteCreator/graphical_visualisation/tolerated_c_slope", 4
        )
        self.toleratedCSlopeSpinBox.setValue(int(self.cslopeInt))
        self.lengthInt = self.settings.value(
            "PisteCreator/graphical_visualisation/max_length", 50
        )
        self.maxLengthSpinBox.setValue(int(self.lengthInt))
        # NOTE(review): QSettings may return stored booleans as the strings
        # "true"/"false" depending on backend, and bool("false") is True —
        # consider settings.value(key, False, type=bool); confirm behavior.
        self.lengthBool = self.settings.value(
            "PisteCreator/graphical_visualisation/max_length_hold", False
        )
        self.maxLengthCheckBox.setChecked(bool(self.lengthBool))
        self.swathInt = self.settings.value(
            "PisteCreator/graphical_visualisation/swath_distance", 30
        )
        self.swathDistSpinBox.setValue(int(self.swathInt))
        self.swathBool = self.settings.value(
            "PisteCreator/graphical_visualisation/swath_display", True
        )
        self.swathDistCheckBox.setChecked(bool(self.swathBool))
        self.interpolBool = self.settings.value(
            "PisteCreator/calculation_variable/interpolate_act", True
        )
        self.interpolCheckBox.setChecked(bool(self.interpolBool))
        # Color parameters: stored as "#rrggbb" strings, wrapped in QColor.
        self.t_color = QColor(
            self.settings.value(
                "PisteCreator/graphical_visualisation/t_color", "#00d003"
            )
        )
        self.f_color = QColor(
            self.settings.value(
                "PisteCreator/graphical_visualisation/f_color", "#ff0000"
            )
        )
        self.tl_color = QColor(
            self.settings.value(
                "PisteCreator/graphical_visualisation/tl_color", "#236433"
            )
        )
        self.fl_color = QColor(
            self.settings.value(
                "PisteCreator/graphical_visualisation/fl_color", "#b80000"
            )
        )
        self.b_color = QColor(
            self.settings.value(
                "PisteCreator/graphical_visualisation/b_color", "#0fff33"
            )
        )
        self.a_color = QColor(
            self.settings.value(
                "PisteCreator/graphical_visualisation/a_color", "#48b0d2"
            )
        )
        # Reflect the loaded colors on their picker buttons.
        self.T_ColorButton.setColor(self.t_color)
        self.F_ColorButton.setColor(self.f_color)
        self.TL_ColorButton.setColor(self.tl_color)
        self.FL_ColorButton.setColor(self.fl_color)
        self.B_ColorButton.setColor(self.b_color)
        self.A_ColorButton.setColor(self.a_color)
def saveconfig(self):
# self.checkChanges()
self.sideDistInt = self.sideDistSpinBox.value()
self.aslopeInt = self.toleratedASlopeSpinBox.value()
self.cslopeInt = self.toleratedCSlopeSpinBox.value()
self.lengthInt = self.maxLengthSpinBox.value()
self.lengthBool = self.maxLengthCheckBox.isChecked()
self.swathInt = self.swathDistSpinBox.value()
self.swathBool = self.swathDistCheckBox.isChecked()
self.interpolBool = self.interpolCheckBox.isChecked()
self.t_color = self.T_ColorButton.color().name()
self.f_color = self.F_ColorButton.color().name()
self.tl_color = self.TL_ColorButton.color().name()
self.fl_color = self.FL_ColorButton.color().name()
self.a_color = self.A_ColorButton.color().name()
self.b_color = self.B_ColorButton.color().name()
self.settings.setValue(
"PisteCreator/calculation_variable/side_distance",
self.sideDistSpinBox.value(),
)
self.settings.setValue(
"PisteCreator/graphical_visualisation/tolerated_a_slope",
self.toleratedASlopeSpinBox.value(),
)
self.settings.setValue(
"PisteCreator/graphical_visualisation/tolerated_c_slope",
self.toleratedCSlopeSpinBox.value(),
)
self.settings.setValue(
"PisteCreator/graphical_visualisation/max_length",
self.maxLengthSpinBox.value(),
)
self.settings.setValue(
"PisteCreator/graphical_visualisation/max_length_hold",
self.maxLengthCheckBox.isChecked(),
)
self.settings.setValue(
"PisteCreator/graphical_visualisation/swath_distance",
self.swathDistSpinBox.value(),
)
self.settings.setValue(
"PisteCreator/graphical_visualisation/swath_display",
self.swathDistCheckBox.isChecked(),
)
self.settings.setValue(
"PisteCreator/calculation_variable/interpolate_act",
self.interpolCheckBox.isChecked(),
)
self.settings.setValue(
"PisteCreator/graphical_visualisation/t_color",
self.T_ColorButton.color().name(),
)
self.settings.setValue(
"PisteCreator/graphical_visualisation/f_color",
self.F_ColorButton.color().name(),
)
self.settings.setValue(
"PisteCreator/graphical_visualisation/tl_color",
self.TL_ColorButton.color().name(),
)
self.settings.setValue(
"PisteCreator/graphical_visualisation/fl_color",
self.FL_ColorButton.color().name(),
)
self.settings.setValue(
"PisteCreator/graphical_visualisation/b_color",
self.B_ColorButton.color().name(),
)
self.settings.setValue(
"PisteCreator/graphical_visualisation/a_color",
self.A_ColorButton.color().name(),
)
try:
if self.canvas.mapTool().map_tool_name == "SlopeMapTool":
self.plugin.PisteCrea |
tboyce021/home-assistant | homeassistant/components/cloudflare/config_flow.py | Python | apache-2.0 | 5,495 | 0.000546 | """Config flow for Cloudflare integration."""
import logging
from typing import Dict, List, Optional
from pycfdns import CloudflareUpdater
from pycfdns.exceptions import (
CloudflareAuthenticationException,
CloudflareConnectionException,
CloudflareZoneException,
)
import voluptuous as vol
from homeassistant.components import persistent_notification
from homeassistant.config_entries import CONN_CLASS_CLOUD_PUSH, ConfigFlow
from homeassistant.const import CONF_API_TOKEN, CONF_ZONE
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import CONF_RECORDS
from .const import DOMAIN # pylint:disable=unused-import
_LOGGER = logging.getLogger(__name__)
DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_API_TOKEN): str,
}
)
def _zone_schema(zones: Optional[List] = None):
    """Build the voluptuous schema for picking one of the account's zones."""
    available = zones if zones is not None else []
    return vol.Schema({vol.Required(CONF_ZONE): vol.In(available)})
def _records_schema(records: Optional[List] = None):
    """Build the multi-select schema for choosing the zone's A records."""
    choices = {name: name for name in records} if records else {}
    return vol.Schema({vol.Required(CONF_RECORDS): cv.multi_select(choices)})
async def validate_input(hass: HomeAssistant, data: Dict):
    """Check that *data* (keys from DATA_SCHEMA) can reach Cloudflare.

    Returns ``{"zones": [...], "records": [...] or None}``; records are
    only fetched once a zone has been selected.  Raises CannotConnect,
    InvalidAuth or InvalidZone on the corresponding API failures.
    """
    selected_zone = data.get(CONF_ZONE)
    zone_records = None
    updater = CloudflareUpdater(
        async_get_clientsession(hass),
        data[CONF_API_TOKEN],
        selected_zone,
        [],
    )
    try:
        zones = await updater.get_zones()
        if selected_zone:
            zone_id = await updater.get_zone_id()
            zone_records = await updater.get_zone_records(zone_id, "A")
    except CloudflareConnectionException as error:
        raise CannotConnect from error
    except CloudflareAuthenticationException as error:
        raise InvalidAuth from error
    except CloudflareZoneException as error:
        raise InvalidZone from error
    return {"zones": zones, "records": zone_records}
class CloudflareConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Cloudflare."""
VERSION = 1
CONNECTION_CLASS = CONN_CLASS_CLOUD_PUSH
    def __init__(self):
        """Initialize the Cloudflare config flow."""
        # Accumulates the user's answers across the user/zone/records steps.
        self.cloudflare_config = {}
        # Zone names fetched once the API token validates.
        self.zones = None
        # A-record names fetched for the selected zone.
        self.records = None
    async def async_step_user(self, user_input: Optional[Dict] = None):
        """Handle a flow initiated by the user."""
        # Only one Cloudflare entry is supported per Home Assistant instance.
        if self._async_current_entries():
            return self.async_abort(reason="single_instance_allowed")
        assert self.hass
        # Clear any leftover "set up Cloudflare" notification.
        persistent_notification.async_dismiss(self.hass, "cloudflare_setup")
        errors = {}
        if user_input is not None:
            # Validate the API token; on success cache it plus the zone list
            # and advance to zone selection.
            info, errors = await self._async_validate_or_error(user_input)
            if not errors:
                self.cloudflare_config.update(user_input)
                self.zones = info["zones"]
                return await self.async_step_zone()
        # First visit, or validation failed: (re)show the token form.
        return self.async_show_form(
            step_id="user", data_schema=DATA_SCHEMA, errors=errors
        )
async def async_step_zone(self, user_input: Optional[Dict] = None):
"""Handle the picking the zone."""
errors = {}
if user_input is not None:
self.cloudflare_config.update(user_input)
info, errors = await self._async_validate_or_error(self.cloudflare_config)
if not errors:
await self.async_set_unique_id(user_input[CONF_ZONE])
self.records = info["records"]
return await self.async_step_records()
return s | elf.async_show_form(
step_id="zone",
data_schema=_zone_schema(self.zones),
errors=errors,
)
async def async_step_records(self, user_input: Optional[Dict] = None):
"""Handle the picking the zone records."""
errors = {}
if user_input is not None:
self.cloudflare_config.update(user_input)
title = self.cloudflare_config[CONF_ZONE]
| return self.async_create_entry(title=title, data=self.cloudflare_config)
return self.async_show_form(
step_id="records",
data_schema=_records_schema(self.records),
errors=errors,
)
    async def _async_validate_or_error(self, config):
        """Run validate_input, translating exceptions into form error codes.

        Returns ``(info, errors)``: on success ``info`` holds the zones and
        records from validate_input and ``errors`` is empty; on failure
        ``info`` is ``{}`` and ``errors["base"]`` carries the translation
        key the frontend displays.
        """
        errors = {}
        info = {}
        try:
            info = await validate_input(self.hass, config)
        except CannotConnect:
            errors["base"] = "cannot_connect"
        except InvalidAuth:
            errors["base"] = "invalid_auth"
        except InvalidZone:
            errors["base"] = "invalid_zone"
        except Exception:  # pylint: disable=broad-except
            # Last-resort guard: surface unexpected API errors as a generic
            # form error instead of crashing the config flow.
            _LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
        return info, errors
class CannotConnect(HomeAssistantError):
    """Raised when the Cloudflare API cannot be reached."""
class InvalidAuth(HomeAssistantError):
    """Raised when the supplied API token fails authentication."""
class InvalidZone(HomeAssistantError):
    """Raised when the requested zone does not exist in the account."""
|
iksteen/jaspyx | jaspyx/visitor/return_.py | Python | mit | 299 | 0 | from jaspyx.visitor import BaseVisitor
clas | s Return(BaseVisitor):
def visit_Return(self, node):
self.indent()
if node.value is not None:
self.output('return ')
self.visit(node.value)
else:
self.output('return')
| self.finish()
|
zordsdavini/qtile | test/test_config.py | Python | mit | 3,631 | 0.001377 | # Copyright (c) 2011 Florian Mounier
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 Tycho Andersen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import pytest
from libqtile import config, confreader, utils
from libqtile.backend.x11 import xcore
tests_dir = os.path.dirname(os.path.realpath(__file__))
def test_validate():
xc = xcore.XCore()
f = confreader.Config.from_file(xc, os.path.join(tests_dir, "configs", "basic.py"))
f.validate(xc)
f.keys[0].key = "nonexistent"
with pytest.raises(confreader.ConfigError):
f.validate(xc)
f.keys[0].key = "x"
f = confreader.Config.from_file(xc, os.pat | h.join(tests_dir, "confi | gs", "basic.py"))
f.keys[0].modifiers = ["nonexistent"]
with pytest.raises(confreader.ConfigError):
f.validate(xc)
f.keys[0].modifiers = ["shift"]
def test_syntaxerr():
    """A config file containing a syntax error must raise ConfigError."""
    core = xcore.XCore()
    bad_config = os.path.join(tests_dir, "configs", "syntaxerr.py")
    with pytest.raises(confreader.ConfigError):
        confreader.Config.from_file(core, bad_config)
def test_basic():
    """Loading the basic sample config yields key bindings."""
    core = xcore.XCore()
    cfg = confreader.Config.from_file(
        core, os.path.join(tests_dir, "configs", "basic.py")
    )
    assert cfg.keys
def test_falls_back():
    """Attributes absent from the config file fall back to defaults."""
    core = xcore.XCore()
    cfg = confreader.Config.from_file(
        core, os.path.join(tests_dir, "configs", "basic.py")
    )
    # Only assert the default exists — its value is deliberately not
    # pinned so a changed upstream default does not break this test.
    assert hasattr(cfg, "follow_mouse_focus")
def test_ezkey():
    """EzKey spec strings expand into modifier lists, key name and commands."""
    def noop(x):
        return None
    modkey = config.EzConfig.modifier_keys['M']
    altkey = config.EzConfig.modifier_keys['A']
    key = config.EzKey('M-A-S-a', noop, noop)
    assert key.modifiers == [modkey, altkey, 'shift']
    assert key.key == 'a'
    assert key.commands == (noop, noop)
    key = config.EzKey('M-<Tab>', noop)
    assert key.modifiers == [modkey]
    assert key.key == 'Tab'
    assert key.commands == (noop,)
    # Malformed specs must all be rejected with QtileError.
    for bad_spec in ('M--', 'Z-Z-z', 'asdf', 'M-a-A'):
        with pytest.raises(utils.QtileError):
            config.EzKey(bad_spec, noop)
def test_ezclick_ezdrag():
    """EzClick/EzDrag specs expand into button names plus modifier lists."""
    def noop(x):
        return None
    click = config.EzClick('M-1', noop)
    assert click.button == 'Button1'
    assert click.modifiers == [config.EzClick.modifier_keys['M']]
    drag = config.EzDrag('A-2', noop)
    assert drag.button == 'Button2'
    assert drag.modifiers == [config.EzClick.modifier_keys['A']]
|
micromagnetics/magnum.fe | examples/current_wall_motion/run.py | Python | lgpl-3.0 | 2,142 | 0.022409 | """
Current driven domain-wall motion with constant current and spin accumulation.
"""
# Copyright (C) 2011-2015 Claas Abert
#
# This file is part of magnum.fe.
#
# magnum.fe is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# magnum.fe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with magnum.fe. If not, see <http://www.gnu.org/licenses/>.
#
# Last modified by Claas Abert, 2015-02-16
from magnumfe import *
#######################################
#### DEFINE MESH, STATE AND MATERIAL
#######################################
mesh = BoxMesh(-600.0/2, -100.0/2, -10.0/2, 600.0/2, 100.0/2, 10.0/2, 120, 20, 1)
state = State(mesh, scale = 1e-9,
material = Material(
alpha = 0.1,
ms = 8e5,
Aex = 1.3e-11,
D0 | = 1e-3,
beta = 0.9,
beta_prime = 0.8,
lambda_sf = 10e-9,
lambda_j = 4e-9,
c = 3.125e-3
),
m = Expression(('1.0 - 2*(x[0] < 0.0)', 'x[0] > -10.0 && x[0] < 10.0', '0.0')),
s = Constant((0.0, 0.0, 0.0)),
j = Constant((0.0, 0.0, 0.0))
)
# normalize since initial configuration is not normalized
state.m.normalize()
# setup integrators
llg = LLGAlougesProject([
ExchangeField(),
Dema | gField("FK"),
SpinTorque()
])
spindiff = SpinDiffusion()
# relax
for j in range(200): state.step(llg, 1e-12)
# apply constant current
state.j = Constant((3e12, 0, 0))
state.t = 0.0
# prepare log files
mfile = File("data/m.pvd")
sfile = File("data/s.pvd")
for j in range(1000):
# save fields every 10th step
if j % 10 == 0:
mfile << (state.m, state.t)
sfile << (state.s, state.t)
# calculate next step
state.step([llg, spindiff], 1e-12)
|
baverman/vial-pytest | vial-plugin/vial_pytest/plugin.py | Python | mit | 3,367 | 0.001188 | import sys
import time
import os.path
from collections import Counter
from vial import vfunc, vim, dref
from vial.utils import redraw, focus_window
from vial.widgets import make_scratch
collector = None
def get_collector():
global collector
if not collector:
collector = ResultCollector()
return collector
def run_test(project_dir, executable=None, match=None, files=None, env=None):
from subprocess import Popen
f | rom multiprocessing.connection import Client, arbitrary_address
addr = arbitrary_address('AF_UNIX')
filename = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'pt.py | ')
executable = executable or sys.executable
args = [executable, filename, addr, '-q']
if match:
args.append('-k %s' % match)
environ = None
if env:
environ = os.environ.copy()
environ.update(env)
log = open('/tmp/vial-pytest.log', 'w')
if files:
args.extend(files)
proc = Popen(args, cwd=project_dir, env=environ, stdout=log, stderr=log, close_fds=True)
start = time.time()
while not os.path.exists(addr):
if time.time() - start > 5:
raise Exception('py.test launching timeout exceed')
time.sleep(0.01)
conn = Client(addr)
return proc, conn
def indent(width, lines):
    """Return a copy of *lines* with each entry prefixed by *width* spaces."""
    pad = ' ' * width
    return [pad + line for line in lines]
@dref
def goto_file():
    # Jump to the "filename:line" reference under the cursor in the results
    # buffer.  '<cWORD>' is the whitespace-delimited WORD at the cursor;
    # only the first two colon-separated fields are used, so any trailing
    # ":column" or message text is ignored.
    filename, line = vfunc.expand('<cWORD>').split(':')[:2]
    # Move focus to the last listed (regular) window so the file opens
    # there instead of inside the scratch results window.
    for win in vim.windows:
        if vfunc.buflisted(win.buffer.number):
            focus_window(win)
    vim.command('e +{} {}'.format(line, filename))
class ResultCollector(object):
    """Receives py.test result messages over a multiprocessing connection
    and renders them into a vim scratch buffer."""

    def init(self, win, buf):
        # Called by make_scratch once the scratch window exists: enable
        # result highlighting and map `gf` to jump to a failure location.
        vim.command('setlocal syntax=vialpytest')
        vim.command('nnoremap <buffer> gf :python {}()<cr>'.format(goto_file.ref))

    def reset(self):
        """(Re)create and clear the scratch buffer, then restore focus."""
        cwin = vim.current.window
        _, self.buf = make_scratch('__vial_pytest__', self.init, 'pytest')
        vim.command('normal! ggdG')
        focus_window(cwin)
        redraw()

    def add_test_result(self, rtype, name, result):
        """Append one outcome: header line, captured output sections, trace."""
        self.counts[rtype] += 1
        lines = ['{} {}'.format(name, rtype)]
        trace, out = result
        for k, v in out:
            lines.append(' ----======= {} =======----'.format(k))
            lines.extend(indent(1, v.splitlines()))
            lines.append('')
        if trace:
            lines.extend(indent(1, trace.splitlines()))
            lines.append('')
        lines.append('')
        # Replace the trailing buffer line so results stay packed together.
        buflen = len(self.buf)
        self.buf[buflen-1:] = lines
        redraw()

    def collect(self, conn):
        """Consume messages from *conn* until END, updating the buffer."""
        self.tests = []
        self.counts = Counter()
        self.reset()
        while True:
            msg = conn.recv()
            cmd = msg[0]
            if cmd == 'END':
                return
            elif cmd == 'COLLECTED_TESTS':
                # Bug fix: the payload is msg[1]; the old code read cmd[1],
                # i.e. the second character of the command string.
                self.tests[:] = msg[1]
            elif cmd in ('PASS', 'ERROR', 'FAIL', 'SKIP', 'FAILED_COLLECT'):
                self.add_test_result(*msg)
def run(*args):
    """Launch py.test for the current project and stream results into vim.

    Positional args are vim file patterns; when omitted the whole
    project is collected.
    """
    project_dir = os.getcwd()
    targets = [vfunc.expand(pattern) for pattern in args] if args else None
    try:
        get_executable = vfunc.VialPythonGetExecutable
    except vim.error:
        python = None
    else:
        python = get_executable()
    _proc, conn = run_test(project_dir, files=targets, executable=python)
    get_collector().collect(conn)
|
Yelp/paasta | paasta_tools/monitoring/check_mesos_active_frameworks.py | Python | apache-2.0 | 1,748 | 0 | #!/usr/bin/env python
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import | sys
from a_sync import block
from paasta_tools.mesos.exceptions import MasterNotAvailableException
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.metrics.metastatus_lib import assert_frameworks_exist
def parse_args():
    """Build and evaluate the command line for this check.

    Returns an argparse Namespace with a single ``expected`` attribute
    (comma-separated framework names, defaulting to the empty string).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--expected",
        "-e",
        dest="expected",
        type=str,
        default="",
        help="Comma separated list of frameworks to expect.\n"
        "Will fail if any of these are not found",
    )
    return parser.parse_args()
def check_mesos_active_frameworks() -> None:
    """Nagios-style check for expected mesos frameworks.

    Exits 0 (OK) when every expected framework is registered, 2
    (CRITICAL) otherwise or when the mesos master is unreachable.
    """
    expected = parse_args().expected.split(",")
    master = get_mesos_master()
    try:
        state = block(master.state)
    except MasterNotAvailableException as e:
        print("CRITICAL: %s" % e.args[0])
        sys.exit(2)
    result = assert_frameworks_exist(state, expected)
    if not result.healthy:
        print(result.message)
        sys.exit(2)
    print("OK: " + result.message)
    sys.exit(0)
if __name__ == "__main__":
check_mesos_active_frameworks()
|
JonnyWong16/plexpy | lib/websocket/tests/test_url.py | Python | gpl-3.0 | 14,537 | 0.002958 | # -*- coding: utf-8 -*-
#
"""
test_url.py
websocket - WebSocket client library for Python
Copyright 2021 engn33r
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import os
import unittest
sys.path[0:0] = [""]
from websocket._url import get_proxy_info, parse_url, _is_address_in_network, _is_no_proxy_host
class UrlTest(unittest.TestCase):
def test_address_in_network(self):
self.assertTrue(_is_address_in_network('127.0.0.1', '127.0.0.0/8'))
self.assertTrue(_is_address_in_network('127.1.0.1', '127.0.0.0/8'))
self.assertFalse(_is_address_in_network('127.1.0.1', '127.0.0.0/24'))
def testParseUrl(self):
p = parse_url("ws://www.example.com/r")
self.assertEqual(p[0], "www.example.com")
self.assertEqual(p[1], 80)
self.assertEqual(p[2], "/r")
self.assertEqual(p[3], False)
p = parse_url("ws://www.example.com/r/")
self.assertEqual(p[0], "www.example.com")
self.assertEqual(p[1], 80)
self.assertEqual(p[2], "/r/")
self.assertEqual(p[3], False)
p = parse_url("ws://www.example.com/")
self.assertEqual(p[0], "www.example.com")
self.assertEqual(p[1], 80)
self.assertEqual(p[2], "/")
self.assertEqual(p[3], False)
p = parse_url("ws://www.example.com")
self.assertEqual(p[0], "www.example.com")
self.assertEqual(p[1], 80)
self.assertEqual(p[2], "/")
self.assertEqual(p[3], False)
p = parse_url("ws://www.example.com:8080/r")
self.assertEqual(p[0], "www.example.com")
| self.assertEqual(p[1], 8080)
self.assertEqual(p[2], "/r")
self.assertEqual(p[3], False)
| p = parse_url("ws://www.example.com:8080/")
self.assertEqual(p[0], "www.example.com")
self.assertEqual(p[1], 8080)
self.assertEqual(p[2], "/")
self.assertEqual(p[3], False)
p = parse_url("ws://www.example.com:8080")
self.assertEqual(p[0], "www.example.com")
self.assertEqual(p[1], 8080)
self.assertEqual(p[2], "/")
self.assertEqual(p[3], False)
p = parse_url("wss://www.example.com:8080/r")
self.assertEqual(p[0], "www.example.com")
self.assertEqual(p[1], 8080)
self.assertEqual(p[2], "/r")
self.assertEqual(p[3], True)
p = parse_url("wss://www.example.com:8080/r?key=value")
self.assertEqual(p[0], "www.example.com")
self.assertEqual(p[1], 8080)
self.assertEqual(p[2], "/r?key=value")
self.assertEqual(p[3], True)
self.assertRaises(ValueError, parse_url, "http://www.example.com/r")
p = parse_url("ws://[2a03:4000:123:83::3]/r")
self.assertEqual(p[0], "2a03:4000:123:83::3")
self.assertEqual(p[1], 80)
self.assertEqual(p[2], "/r")
self.assertEqual(p[3], False)
p = parse_url("ws://[2a03:4000:123:83::3]:8080/r")
self.assertEqual(p[0], "2a03:4000:123:83::3")
self.assertEqual(p[1], 8080)
self.assertEqual(p[2], "/r")
self.assertEqual(p[3], False)
p = parse_url("wss://[2a03:4000:123:83::3]/r")
self.assertEqual(p[0], "2a03:4000:123:83::3")
self.assertEqual(p[1], 443)
self.assertEqual(p[2], "/r")
self.assertEqual(p[3], True)
p = parse_url("wss://[2a03:4000:123:83::3]:8080/r")
self.assertEqual(p[0], "2a03:4000:123:83::3")
self.assertEqual(p[1], 8080)
self.assertEqual(p[2], "/r")
self.assertEqual(p[3], True)
class IsNoProxyHostTest(unittest.TestCase):
    """Tests for _is_no_proxy_host, both with an explicit no-proxy list
    argument and with the list taken from the ``no_proxy`` env var
    (passing None makes the function read the environment)."""
    def setUp(self):
        # Save then clear any ambient no_proxy so each test is hermetic.
        self.no_proxy = os.environ.get("no_proxy", None)
        if "no_proxy" in os.environ:
            del os.environ["no_proxy"]
    def tearDown(self):
        # Restore the pre-test environment exactly as it was.
        if self.no_proxy:
            os.environ["no_proxy"] = self.no_proxy
        elif "no_proxy" in os.environ:
            del os.environ["no_proxy"]
    def testMatchAll(self):
        # A bare "*" bypasses the proxy for every host, alone or in a list.
        self.assertTrue(_is_no_proxy_host("any.websocket.org", ['*']))
        self.assertTrue(_is_no_proxy_host("192.168.0.1", ['*']))
        self.assertTrue(_is_no_proxy_host("any.websocket.org", ['other.websocket.org', '*']))
        os.environ['no_proxy'] = '*'
        self.assertTrue(_is_no_proxy_host("any.websocket.org", None))
        self.assertTrue(_is_no_proxy_host("192.168.0.1", None))
        os.environ['no_proxy'] = 'other.websocket.org, *'
        self.assertTrue(_is_no_proxy_host("any.websocket.org", None))
    def testIpAddress(self):
        # Exact IP match only; 127.0.0.2 is not covered by 127.0.0.1.
        self.assertTrue(_is_no_proxy_host("127.0.0.1", ['127.0.0.1']))
        self.assertFalse(_is_no_proxy_host("127.0.0.2", ['127.0.0.1']))
        self.assertTrue(_is_no_proxy_host("127.0.0.1", ['other.websocket.org', '127.0.0.1']))
        self.assertFalse(_is_no_proxy_host("127.0.0.2", ['other.websocket.org', '127.0.0.1']))
        os.environ['no_proxy'] = '127.0.0.1'
        self.assertTrue(_is_no_proxy_host("127.0.0.1", None))
        self.assertFalse(_is_no_proxy_host("127.0.0.2", None))
        os.environ['no_proxy'] = 'other.websocket.org, 127.0.0.1'
        self.assertTrue(_is_no_proxy_host("127.0.0.1", None))
        self.assertFalse(_is_no_proxy_host("127.0.0.2", None))
    def testIpAddressInRange(self):
        # CIDR entries match any address in the network.
        self.assertTrue(_is_no_proxy_host("127.0.0.1", ['127.0.0.0/8']))
        self.assertTrue(_is_no_proxy_host("127.0.0.2", ['127.0.0.0/8']))
        self.assertFalse(_is_no_proxy_host("127.1.0.1", ['127.0.0.0/24']))
        os.environ['no_proxy'] = '127.0.0.0/8'
        self.assertTrue(_is_no_proxy_host("127.0.0.1", None))
        self.assertTrue(_is_no_proxy_host("127.0.0.2", None))
        os.environ['no_proxy'] = '127.0.0.0/24'
        self.assertFalse(_is_no_proxy_host("127.1.0.1", None))
    def testHostnameMatch(self):
        # Plain hostnames match exactly, not as substrings.
        self.assertTrue(_is_no_proxy_host("my.websocket.org", ['my.websocket.org']))
        self.assertTrue(_is_no_proxy_host("my.websocket.org", ['other.websocket.org', 'my.websocket.org']))
        self.assertFalse(_is_no_proxy_host("my.websocket.org", ['other.websocket.org']))
        os.environ['no_proxy'] = 'my.websocket.org'
        self.assertTrue(_is_no_proxy_host("my.websocket.org", None))
        self.assertFalse(_is_no_proxy_host("other.websocket.org", None))
        os.environ['no_proxy'] = 'other.websocket.org, my.websocket.org'
        self.assertTrue(_is_no_proxy_host("my.websocket.org", None))
    def testHostnameMatchDomain(self):
        # A leading-dot entry matches every subdomain of that domain.
        self.assertTrue(_is_no_proxy_host("any.websocket.org", ['.websocket.org']))
        self.assertTrue(_is_no_proxy_host("my.other.websocket.org", ['.websocket.org']))
        self.assertTrue(_is_no_proxy_host("any.websocket.org", ['my.websocket.org', '.websocket.org']))
        self.assertFalse(_is_no_proxy_host("any.websocket.com", ['.websocket.org']))
        os.environ['no_proxy'] = '.websocket.org'
        self.assertTrue(_is_no_proxy_host("any.websocket.org", None))
        self.assertTrue(_is_no_proxy_host("my.other.websocket.org", None))
        self.assertFalse(_is_no_proxy_host("any.websocket.com", None))
        os.environ['no_proxy'] = 'my.websocket.org, .websocket.org'
        self.assertTrue(_is_no_proxy_host("any.websocket.org", None))
class ProxyInfoTest(unittest.TestCase):
def setUp(self):
self.http_proxy = os.environ.get("http_proxy", None)
self.https_proxy = os.environ.get("https_proxy", None)
self.no_proxy = os.environ.get("no_proxy", None)
if "http_proxy" in os.environ:
del os.environ["http_proxy"]
if "https_proxy" in os.environ:
del os.environ["https_proxy"]
if "no_proxy" in os.environ:
del os.environ["no_proxy"]
def t |
open-synergy/opnsynid-stock-reporting | opnsynid_stock_inventory_result_aeroo_report/__openerp__.py | Python | agpl-3.0 | 629 | 0.00159 | # -*- cod | ing: utf-8 -*-
# Copyright 2016 OpenSynergy Indonesia
# Copyright 2022 PT. Simetri Sinergi Indonesia
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Stock Inventory Result Aeroo Report",
"version": "8.0.1.0.0",
"summary": "Adds Stock Inve | ntory Result Report",
"website": "https://simetri-sinergi.id",
"author": "OpenSynergy Indonesia, PT. Simetri Sinergi Indonesia",
"category": "Stock",
"depends": ["stock", "report_aeroo"],
"data": ["reports/stock_inventory_result.xml", "views/stock_inventory_view.xml"],
"license": "AGPL-3",
"installable": True,
}
|
centrifugal/examples | python_django_chat_tutorial/mysite/chat/urls.py | Python | mit | 391 | 0 | from django.urls import path, re_path
from . import views
urlpatterns = [
path('', views.index, name='index'),
re_path('room/(?P<room_name>[A-z0-9_-]+)/', views.room, name='room'),
path('centrifugo/connect/', views.connect, name='connect'),
path( | 'centrifugo/subscribe/', views.subscribe, name='subscribe'),
path('centrifugo/publish/', views.publ | ish, name='publish'),
]
|
beckastar/django | tests/generic_views/test_list.py | Python | bsd-3-clause | 10,510 | 0.001142 | from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, override_settings
from django.views.generic.base import View
from django.utils.encoding import force_str
from .models import Author, Artist
class ListViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'generic_views.urls'
def test_items(self):
res = self.client.get('/list/dict/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/list.html')
self.assertEqual(res.context['object_list'][0]['first'], 'John')
def test_queryset(self):
res = self.client.get('/list/authors/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/author_list.html')
self.assertEqual(list(res.context['object_list']), list(Author.objects.all()))
self.assertIsInstance(res.context[ | 'view'], View)
self.assertIs(res.context['author_list'], res.context['object_list'])
self.assertIsNone(res.context['paginator'])
self.assertIsNone(res.context['page_obj'])
self.assertFalse(res.context['is_paginated'])
def test_paginated_queryset(self):
self._make_authors(100)
res = self.client.get('/list/authors/paginated | /')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/author_list.html')
self.assertEqual(len(res.context['object_list']), 30)
self.assertIs(res.context['author_list'], res.context['object_list'])
self.assertTrue(res.context['is_paginated'])
self.assertEqual(res.context['page_obj'].number, 1)
self.assertEqual(res.context['paginator'].num_pages, 4)
self.assertEqual(res.context['author_list'][0].name, 'Author 00')
self.assertEqual(list(res.context['author_list'])[-1].name, 'Author 29')
    def test_paginated_queryset_shortdata(self):
        """A dataset smaller than one page still gets a paginator (1 page),
        but is_paginated is False."""
        # Test that short datasets ALSO result in a paginated view.
        res = self.client.get('/list/authors/paginated/')
        self.assertEqual(res.status_code, 200)
        self.assertTemplateUsed(res, 'generic_views/author_list.html')
        self.assertEqual(list(res.context['object_list']), list(Author.objects.all()))
        self.assertIs(res.context['author_list'], res.context['object_list'])
        self.assertEqual(res.context['page_obj'].number, 1)
        self.assertEqual(res.context['paginator'].num_pages, 1)
        self.assertFalse(res.context['is_paginated'])
    def test_paginated_get_page_by_query_string(self):
        """?page=2 selects the second page (authors 30-59)."""
        self._make_authors(100)
        res = self.client.get('/list/authors/paginated/', {'page': '2'})
        self.assertEqual(res.status_code, 200)
        self.assertTemplateUsed(res, 'generic_views/author_list.html')
        self.assertEqual(len(res.context['object_list']), 30)
        self.assertIs(res.context['author_list'], res.context['object_list'])
        self.assertEqual(res.context['author_list'][0].name, 'Author 30')
        self.assertEqual(res.context['page_obj'].number, 2)
    def test_paginated_get_last_page_by_query_string(self):
        """?page=last selects page 4, which holds the trailing 10 authors."""
        self._make_authors(100)
        res = self.client.get('/list/authors/paginated/', {'page': 'last'})
        self.assertEqual(res.status_code, 200)
        self.assertEqual(len(res.context['object_list']), 10)
        self.assertIs(res.context['author_list'], res.context['object_list'])
        self.assertEqual(res.context['author_list'][0].name, 'Author 90')
        self.assertEqual(res.context['page_obj'].number, 4)
    def test_paginated_get_page_by_urlvar(self):
        """A /<page>/ URL variable selects the page (page 3 => authors 60-89)."""
        self._make_authors(100)
        res = self.client.get('/list/authors/paginated/3/')
        self.assertEqual(res.status_code, 200)
        self.assertTemplateUsed(res, 'generic_views/author_list.html')
        self.assertEqual(len(res.context['object_list']), 30)
        self.assertIs(res.context['author_list'], res.context['object_list'])
        self.assertEqual(res.context['author_list'][0].name, 'Author 60')
        self.assertEqual(res.context['page_obj'].number, 3)
    def test_paginated_page_out_of_range(self):
        """Requesting a page past the end yields 404, not an empty page."""
        self._make_authors(100)
        res = self.client.get('/list/authors/paginated/42/')
        self.assertEqual(res.status_code, 404)
    def test_paginated_invalid_page(self):
        """A non-numeric page value (other than 'last') yields 404."""
        self._make_authors(100)
        res = self.client.get('/list/authors/paginated/?page=frog')
        self.assertEqual(res.status_code, 404)
    def test_paginated_custom_paginator_class(self):
        """A custom paginator_class may fold orphans into one page: 7 items,
        page size 5, 2 orphans => a single page of 7."""
        self._make_authors(7)
        res = self.client.get('/list/authors/paginated/custom_class/')
        self.assertEqual(res.status_code, 200)
        self.assertEqual(res.context['paginator'].num_pages, 1)
        # Custom pagination allows for 2 orphans on a page size of 5
        self.assertEqual(len(res.context['object_list']), 7)
    def test_paginated_custom_page_kwarg(self):
        """page_kwarg lets the view read the page number from a custom query
        parameter ('pagina' instead of 'page')."""
        self._make_authors(100)
        res = self.client.get('/list/authors/paginated/custom_page_kwarg/', {'pagina': '2'})
        self.assertEqual(res.status_code, 200)
        self.assertTemplateUsed(res, 'generic_views/author_list.html')
        self.assertEqual(len(res.context['object_list']), 30)
        self.assertIs(res.context['author_list'], res.context['object_list'])
        self.assertEqual(res.context['author_list'][0].name, 'Author 30')
        self.assertEqual(res.context['page_obj'].number, 2)
    def test_paginated_custom_paginator_constructor(self):
        """Overriding get_paginator() to pass custom constructor args also
        folds the 2 orphans into a single 7-item page."""
        self._make_authors(7)
        res = self.client.get('/list/authors/paginated/custom_constructor/')
        self.assertEqual(res.status_code, 200)
        # Custom pagination allows for 2 orphans on a page size of 5
        self.assertEqual(len(res.context['object_list']), 7)
    def test_paginated_orphaned_queryset(self):
        """With orphans configured, the last valid page absorbs the remainder:
        92 authors => 3 pages; page 4 does not exist (404)."""
        self._make_authors(92)
        res = self.client.get('/list/authors/paginated-orphaned/')
        self.assertEqual(res.status_code, 200)
        self.assertEqual(res.context['page_obj'].number, 1)
        res = self.client.get(
            '/list/authors/paginated-orphaned/', {'page': 'last'})
        self.assertEqual(res.status_code, 200)
        self.assertEqual(res.context['page_obj'].number, 3)
        res = self.client.get(
            '/list/authors/paginated-orphaned/', {'page': '3'})
        self.assertEqual(res.status_code, 200)
        self.assertEqual(res.context['page_obj'].number, 3)
        res = self.client.get(
            '/list/authors/paginated-orphaned/', {'page': '4'})
        self.assertEqual(res.status_code, 404)
    def test_paginated_non_queryset(self):
        """Pagination also works over a plain list (non-queryset) of items."""
        res = self.client.get('/list/dict/paginated/')
        self.assertEqual(res.status_code, 200)
        self.assertEqual(len(res.context['object_list']), 1)
    def test_verbose_name(self):
        """The context alias is derived from the model name ('artist_list')."""
        res = self.client.get('/list/artists/')
        self.assertEqual(res.status_code, 200)
        self.assertTemplateUsed(res, 'generic_views/list.html')
        self.assertEqual(list(res.context['object_list']), list(Artist.objects.all()))
        self.assertIs(res.context['artist_list'], res.context['object_list'])
        self.assertIsNone(res.context['paginator'])
        self.assertIsNone(res.context['page_obj'])
        self.assertFalse(res.context['is_paginated'])
    def test_allow_empty_false(self):
        """allow_empty=False turns an empty list into a 404 instead of 200."""
        res = self.client.get('/list/authors/notempty/')
        self.assertEqual(res.status_code, 200)
        Author.objects.all().delete()
        res = self.client.get('/list/authors/notempty/')
        self.assertEqual(res.status_code, 404)
    def test_template_name(self):
        """An explicit template_name overrides the model-derived default."""
        res = self.client.get('/list/authors/template_name/')
        self.assertEqual(res.status_code, 200)
        self.assertEqual(list(res.context['object_list']), list(Author.objects.all()))
        self.assertIs(res.context['author_list'], res.context['object_list'])
        self.assertTemplateUsed(res, 'generic_views/list.html')
def test_template_name_suffix(self):
res = self.client.get('/list/authors/template_name_suffix/')
self.assertEqual(res.status_code, 200)
|
JorjMcKie/PyMuPDF-Utilities | animations/quad-show2.py | Python | gpl-3.0 | 4,283 | 0.000467 | """
Created on Thu Jan 3 07:05:17 2019
@author: Jorj
@copyright: (c) 2019 Jorj X. McKie
@license: GNU GPL 3.0
Purpose
--------
Visualize function "drawOval" by using a quadrilateral (tetrapod) as parameter.
For demonstration purposes, oval creation is placed in a function, which
accepts an integer parameter. This value controls the position of some
edges of the quad.
It then creates a dummy temporary PDF with one page, containing stuff we want
to show and returns an image of it.
The function is called by the main program in an "endless" loop, passing in
the parameter. The image is displayed using PySimpleGUI.
Notes
------
* Changed generated page image format to "PPM", which is very much faster than
"PNG" both, in terms of creation and reading by tkinter. It also makes us
independent from the tkinter version used.
* We are not slowing down the speed of showing new images (= "frames per
second"). The statistics displayed at end of program can hence be used as a
performance indicator.
"""
import math
import os
import time
import fitz
import PySimpleGUI as sg
mytime = time.time
if not list(map(int, fitz.VersionBind.split("."))) >= [1, 14, 5]:
raise SystemExit("need PyMuPDF v1.14.5 for this script")
print(fitz.__doc__)
# ------------------------------------------------------------------------------
# make one page
# ------------------------------------------------------------------------------
def make_oval(i):
    """Make a PDF page and draw an oval inside a Quad.

    The lower two quad points and the fill color are subject to the passed-in
    integer parameter *i* (expected in [-100, 100]). Effectively, they exchange
    their position, thus causing changes to the drawn shape.

    The resulting page picture is returned as PPM image data and the temporary
    PDF is discarded again.
    """
    doc = fitz.open() # dummy, in-memory PDF
    red = (1, 0, 0)
    blue = (0, 0, 1)
    page = doc.newPage(width=400, height=300) # page dimensions as you like
    r = page.rect + (+4, +4, -4, -4) # leave a border of 4 pix
    q = r.quad # full page rect as a quad
    f = i / 100.0
    # f >= 0 shifts the lower edge inward, f < 0 shifts the upper edge.
    if f >= 0:
        u = f
        o = 0
    else:
        u = 0
        o = -f
    q1 = fitz.Quad(
        q.ul + (q.ur - q.ul) * o,
        q.ul + (q.ur - q.ul) * (1 - o),
        q.ll + (q.lr - q.ll) * u,
        q.ll + (q.lr - q.ll) * (1 - u),
    )
    # make an entertaining fill color that varies with the deformation
    c1 = min(1, max(o, u))
    c3 = min(1, max(1 - u, 1 - o))
    fill = (c1, 0, c3)
    img = page.newShape()
    img.drawOval(q1)
    img.finish(
        color=blue, # blue border
        fill=fill, # variable fill color
        width=0.3, # border width
    )
    # mark the quad corners: lower-left/upper-left red, upper/lower-right blue
    img.drawCircle(q1.ll, 4)
    img.drawCircle(q1.ul, 4)
    img.finish(fill=red)
    img.drawCircle(q1.ur, 4)
    img.drawCircle(q1.lr, 4)
    img.finish(fill=blue)
    img.commit()
    pix = page.getPixmap(alpha=False) # make pixmap, no alpha
    doc.close() # discard PDF again
    image = pix.getImageData("ppm")
    del pix
    del page
    del img
    return image # return a PPM image of the page
# ------------------------------------------------------------------------------
# main program: show successive make_oval() frames until the window is closed
# ------------------------------------------------------------------------------
png = make_oval(0.0) # create first picture
img = sg.Image(data=png) # define form image element
layout = [[img]] # minimal layout
form = sg.FlexForm(
    "drawOval: left-right points exchange", layout, finalize=True
) # define form
loop_count = 1 # count the number of loops
t0 = mytime() # start a timer
i = 0
add = 1
while True: # loop until window closure
    event, values = form.Read(timeout=0)
    if event is None:
        break
    png = make_oval(i) # make next picture
    try: # guard against form closure while updating
        img.Update(data=png) # put in new picture
        form.Refresh() # show updated
    # was a bare 'except:': keep the best-effort close, but do not swallow
    # KeyboardInterrupt / SystemExit.
    except Exception:
        form.Close()
        break # user is fed up seeing this
    loop_count += 1 # tally the loops
    i += add # update the parameter
    if i >= 100: # loop backwards from here
        add = -1
        continue
    if i <= -100: # loop forward again
        add = +1
        i = -100
t1 = mytime()
fps = round(loop_count / (t1 - t0), 1)
script = os.path.basename(__file__)
print("'%s' was shown with %g frames per second." % (script, fps))
|
tobspr/panda3d | direct/src/leveleditor/LevelEditor.py | Python | bsd-3-clause | 1,521 | 0.000657 | """
This is just a sample code.
LevelEditor, ObjectHandler, ObjectPalette should be rewritten
to be game specific.
"""
from .LevelEditorUI import *
from .LevelEditorBase import *
from .ObjectMgr impo | rt *
from .AnimMgr import *
from .ObjectHandler import *
from .ObjectPalette import *
from .ProtoPalette import *
class LevelEditor(LevelEditorBase):
    """Sample Panda3D LevelEditor.

    Wires together the object/anim managers, the palettes, and the wx-based UI.
    Intended as a template: subclass LevelEditorBase with your own palette and
    handler classes for a real game.
    """
    def __init__(self):
        LevelEditorBase.__init__(self)
        # define your own config file similar to this
        self.settingsFile = os.path.dirname(__file__) + '/LevelEditor.cfg'
        # If you have your own ObjectPalette and ObjectHandler
        # connect them in your own LevelEditor class
        self.objectMgr = ObjectMgr(self)
        self.animMgr = AnimMgr(self)
        self.objectPalette = ObjectPalette()
        self.objectHandler = ObjectHandler(self)
        self.protoPalette = ProtoPalette()
        # Populating underlying data-structures (busy cursor while loading)
        self.ui = LevelEditorUI(self)
        self.ui.SetCursor(wx.StockCursor(wx.CURSOR_WAIT))
        self.objectPalette.populate()
        self.protoPalette.populate()
        # Updating UI-panels based on the above data
        self.ui.objectPaletteUI.populate()
        self.ui.protoPaletteUI.populate()
        # When you define your own LevelEditor class inheriting LevelEditorBase
        # you should call self.initialize() at the end of __init__() function
        self.initialize()
        self.ui.SetCursor(wx.StockCursor(wx.CURSOR_ARROW))
|
prymitive/upaas-common | upaas/builder/exceptions.py | Python | gpl-3.0 | 890 | 0 | # -*- coding: utf-8 -*-
"""
:copyright: Copyright 2013-2014 by Łukasz Mierzwa
:contact: l.mierzwa@gmail.com
"""
class InvalidConfiguration(Exception):
    """Raised when the upaas-builder configuration fails validation."""
class BuildError(Exception):
    """Base class used to catch any error raised during a package build."""
class OSBootstrapError(BuildError):
    """Raised when bootstrapping the OS image fails."""
class PackageSystemError(BuildError):
    """Raised on system-level errors during a package build.

    Covers failures independent of the package itself (an OS bootstrap error,
    for example) — not errors caused by the package configuration or by any
    commands the package executes.
    """
class PackageUserError(BuildError):
    """Raised when executing package-specific (user-supplied) actions fails."""
|
palashahuja/myhdl | myhdl/test/conversion/general/test_interfaces1.py | Python | lgpl-2.1 | 2,907 | 0.020984 |
import sys
from myhdl import *
from myhdl import ConversionError
from myhdl.conversion._misc import _error
from myhdl.conversion import analyze, verify
class MyIntf(object):
    """Interface bundle: two unsigned signals, x in [0, 16) and y in [0, 18)."""
    def __init__(self):
        self.x = Signal(intbv(2,min=0,max=16))
        self.y = Signal(intbv(3,min=0,max=18))
def m_one_level(clock,reset,ia,ib):
    """Single-level module: on each clock, register ib.x+1 / ib.y+1 into ia."""
    @always_seq(clock.posedge,reset=reset)
    def rtl():
        ia.x.next = ib.x + 1
        ia.y.next = ib.y + 1
    return rtl
def m_two_level(clock,reset,ia,ib):
    """Two-level module: embeds an m_one_level instance (ie -> ic) and
    registers ib + ic into ia on each clock."""
    ic,ie = (MyIntf(),MyIntf(),)
    g_one = m_one_level(clock,reset,ic,ie)
    @always_seq(clock.posedge,reset=reset)
    def rtl():
        ia.x.next = ib.x + ic.x
        ia.y.next = ib.y + ic.y
    return g_one, rtl
def c_testbench_on | e():
clock = Signal(bool(0))
reset = ResetSignal(0,active=0,async=True)
ia = MyIntf()
ib = MyIntf()
tb_dut = m_one_level(clock,reset,ia,ib)
@instance
def tb_clk():
clock.next = False
yield delay(10)
while True:
clock.next = not clock
yield delay(10)
@instance
def | tb_stim():
reset.next = False
yield delay(17)
reset.next = True
yield delay(17)
for ii in range(7):
yield clock.posedge
assert ia.x == 3
assert ia.y == 4
print("%d %d %d %d"%(ia.x,ia.y,ib.x,ib.y))
raise StopSimulation
return tb_dut, tb_clk, tb_stim
def c_testbench_two():
    """Testbench for m_two_level: after reset, expects ia.x == 5 and ia.y == 7
    (ib defaults plus the inner module's incremented values)."""
    clock = Signal(bool(0))
    reset = ResetSignal(0,active=0,async=True)
    ia = MyIntf()
    ib = MyIntf()
    tb_dut = m_two_level(clock,reset,ia,ib)
    @instance
    def tb_clk():
        # free-running clock, 10 time-unit half period
        clock.next = False
        yield delay(10)
        while True:
            clock.next = not clock
            yield delay(10)
    @instance
    def tb_stim():
        # apply reset, release it, then check outputs after 7 clock edges
        reset.next = False
        yield delay(17)
        reset.next = True
        yield delay(17)
        for ii in range(7):
            yield clock.posedge
        assert ia.x == 5
        assert ia.y == 7
        print("%d %d %d %d"%(ia.x,ia.y,ib.x,ib.y))
        raise StopSimulation
    return tb_dut, tb_clk, tb_stim
def test_one_level_analyze():
    """Conversion analysis of the single-level module must succeed."""
    clock = Signal(bool(0))
    reset = ResetSignal(0,active=0,async=True)
    ia = MyIntf()
    ib = MyIntf()
    analyze(m_one_level,clock,reset,ia,ib)
def test_one_level_verify():
    """Co-simulation of the converted single-level testbench must return 0."""
    assert verify(c_testbench_one) == 0
def test_two_level_analyze():
    """Conversion analysis of the two-level module must succeed."""
    clock = Signal(bool(0))
    reset = ResetSignal(0,active=0,async=True)
    ia = MyIntf()
    ib = MyIntf()
    analyze(m_two_level,clock,reset,ia,ib)
def test_two_level_verify():
    """Co-simulation of the converted two-level testbench must return 0."""
    assert verify(c_testbench_two) == 0
if __name__ == '__main__':
    # sys.argv[1] names the HDL simulator to use for analyze/verify.
    print(sys.argv[1])
    verify.simulator = analyze.simulator = sys.argv[1]
    Simulation(c_testbench_one()).run()
    Simulation(c_testbench_two()).run()
    print(verify(c_testbench_one))
    print(verify(c_testbench_two))
|
trevorstephens/gplearn | gplearn/tests/test_examples.py | Python | bsd-3-clause | 10,059 | 0 | """Testing the examples from the documentation."""
# Author: Trevor Stephens <trevorstephens.com>
#
# License: BSD 3 clause
import numpy as np
from sklearn.datasets import load_boston, load_breast_cancer
from sklearn.datasets import m | ake_moons, make_circles, make_classification
from sklearn.linear_model import Ridge
from sklearn.metrics import roc_auc_score
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import Stan | dardScaler
from sklearn.utils._testing import assert_equal, assert_almost_equal
from sklearn.utils.validation import check_random_state
from gplearn.genetic import SymbolicClassifier, SymbolicRegressor
from gplearn.genetic import SymbolicTransformer
from gplearn.functions import make_function
def test_symbolic_regressor():
    """Check that SymbolicRegressor example works.

    Pins exact evolution results (program string, graphviz export, parent
    bookkeeping) for random_state=0 — any change in the evolution code or in
    the RNG stream will break these literals by design.
    """
    rng = check_random_state(0)
    # Target function: x0^2 - x1^2 + x1 - 1 on U(-1, 1) samples.
    X_train = rng.uniform(-1, 1, 100).reshape(50, 2)
    y_train = X_train[:, 0] ** 2 - X_train[:, 1] ** 2 + X_train[:, 1] - 1
    X_test = rng.uniform(-1, 1, 100).reshape(50, 2)
    y_test = X_test[:, 0] ** 2 - X_test[:, 1] ** 2 + X_test[:, 1] - 1
    est_gp = SymbolicRegressor(population_size=5000, generations=20,
                               stopping_criteria=0.01, p_crossover=0.7,
                               p_subtree_mutation=0.1, p_hoist_mutation=0.05,
                               p_point_mutation=0.1, max_samples=0.9,
                               parsimony_coefficient=0.01, random_state=0)
    est_gp.fit(X_train, y_train)
    # Early stopping: convergence is expected after 7 generations.
    assert_equal(len(est_gp._programs), 7)
    expected = 'sub(add(-0.999, X1), mul(sub(X1, X0), add(X0, X1)))'
    assert_equal(est_gp.__str__(), expected)
    assert_almost_equal(est_gp.score(X_test, y_test), 0.99999, decimal=5)
    dot_data = est_gp._program.export_graphviz()
    expected = ('digraph program {\nnode [style=filled]\n0 [label="sub", '
                'fillcolor="#136ed4"] ;\n1 [label="add", fillcolor="#136ed4"] '
                ';\n2 [label="-0.999", fillcolor="#60a6f6"] ;\n3 [label="X1", '
                'fillcolor="#60a6f6"] ;\n1 -> 3 ;\n1 -> 2 ;\n4 [label="mul", '
                'fillcolor="#136ed4"] ;\n5 [label="sub", fillcolor="#136ed4"] '
                ';\n6 [label="X1", fillcolor="#60a6f6"] ;\n7 [label="X0", '
                'fillcolor="#60a6f6"] ;\n5 -> 7 ;\n5 -> 6 ;\n8 [label="add", '
                'fillcolor="#136ed4"] ;\n9 [label="X0", fillcolor="#60a6f6"] '
                ';\n10 [label="X1", fillcolor="#60a6f6"] ;\n8 -> 10 ;\n8 -> 9 '
                ';\n4 -> 8 ;\n4 -> 5 ;\n0 -> 4 ;\n0 -> 1 ;\n}')
    assert_equal(dot_data, expected)
    assert_equal(est_gp._program.parents, {'method': 'Crossover',
                                           'parent_idx': 1555,
                                           'parent_nodes': range(1, 4),
                                           'donor_idx': 78,
                                           'donor_nodes': []})
    # Check the crossover donor program and its faded graphviz export.
    idx = est_gp._program.parents['donor_idx']
    fade_nodes = est_gp._program.parents['donor_nodes']
    assert_equal(est_gp._programs[-2][idx].__str__(), 'add(-0.999, X1)')
    assert_almost_equal(est_gp._programs[-2][idx].fitness_, 0.351803319075)
    dot_data = est_gp._programs[-2][idx].export_graphviz(fade_nodes=fade_nodes)
    expected = ('digraph program {\nnode [style=filled]\n0 [label="add", '
                'fillcolor="#136ed4"] ;\n1 [label="-0.999", '
                'fillcolor="#60a6f6"] ;\n2 [label="X1", fillcolor="#60a6f6"] '
                ';\n0 -> 2 ;\n0 -> 1 ;\n}')
    assert_equal(dot_data, expected)
    # Check the crossover parent program; its replaced nodes are faded.
    idx = est_gp._program.parents['parent_idx']
    fade_nodes = est_gp._program.parents['parent_nodes']
    assert_equal(est_gp._programs[-2][idx].__str__(),
                 'sub(sub(X1, 0.939), mul(sub(X1, X0), add(X0, X1)))')
    assert_almost_equal(est_gp._programs[-2][idx].fitness_, 0.17080204042)
    dot_data = est_gp._programs[-2][idx].export_graphviz(fade_nodes=fade_nodes)
    expected = ('digraph program {\nnode [style=filled]\n0 [label="sub", '
                'fillcolor="#136ed4"] ;\n1 [label="sub", fillcolor="#cecece"] '
                ';\n2 [label="X1", fillcolor="#cecece"] ;\n3 [label="0.939", '
                'fillcolor="#cecece"] ;\n1 -> 3 ;\n1 -> 2 ;\n4 [label="mul", '
                'fillcolor="#136ed4"] ;\n5 [label="sub", fillcolor="#136ed4"] '
                ';\n6 [label="X1", fillcolor="#60a6f6"] ;\n7 [label="X0", '
                'fillcolor="#60a6f6"] ;\n5 -> 7 ;\n5 -> 6 ;\n8 [label="add", '
                'fillcolor="#136ed4"] ;\n9 [label="X0", fillcolor="#60a6f6"] '
                ';\n10 [label="X1", fillcolor="#60a6f6"] ;\n8 -> 10 ;\n8 -> 9 '
                ';\n4 -> 8 ;\n4 -> 5 ;\n0 -> 4 ;\n0 -> 1 ;\n}')
    assert_equal(dot_data, expected)
def test_symbolic_transformer():
    """Check that SymbolicTransformer example works.

    Appending the 10 evolved features to the Boston data must raise the Ridge
    test-set R^2 from ~0.759 to ~0.842 (exact values pinned for random_state=0).
    NOTE(review): load_boston was removed from scikit-learn >= 1.2 — this test
    only runs against older sklearn versions; verify before upgrading.
    """
    rng = check_random_state(0)
    boston = load_boston()
    perm = rng.permutation(boston.target.size)
    boston.data = boston.data[perm]
    boston.target = boston.target[perm]
    # Baseline: Ridge on the raw features.
    est = Ridge()
    est.fit(boston.data[:300, :], boston.target[:300])
    assert_almost_equal(est.score(boston.data[300:, :], boston.target[300:]),
                        0.759319453049884)
    function_set = ['add', 'sub', 'mul', 'div', 'sqrt', 'log',
                    'abs', 'neg', 'inv', 'max', 'min']
    gp = SymbolicTransformer(generations=20, population_size=2000,
                             hall_of_fame=100, n_components=10,
                             function_set=function_set,
                             parsimony_coefficient=0.0005,
                             max_samples=0.9,
                             random_state=0)
    gp.fit(boston.data[:300, :], boston.target[:300])
    gp_features = gp.transform(boston.data)
    new_boston = np.hstack((boston.data, gp_features))
    # With the evolved features appended, the score must improve.
    est = Ridge()
    est.fit(new_boston[:300, :], boston.target[:300])
    assert_almost_equal(est.score(new_boston[300:, :], boston.target[300:]),
                        0.8418372105182055)
def test_custom_functions():
    """Test the custom programs example works.

    Registers a 4-arity 'logical' function via make_function and pins the
    program string / graphviz export of one evolved individual
    (random_state=0).
    """
    rng = check_random_state(0)
    boston = load_boston()
    perm = rng.permutation(boston.target.size)
    boston.data = boston.data[perm]
    boston.target = boston.target[perm]
    def logic(x1, x2, x3, x4):
        # element-wise: x3 where x1 > x2, else x4
        return np.where(x1 > x2, x3, x4)
    logical = make_function(function=logic,
                            name='logical',
                            arity=4)
    function_set = ['add', 'sub', 'mul', 'div', logical]
    gp = SymbolicTransformer(generations=2, population_size=2000,
                             hall_of_fame=100, n_components=10,
                             function_set=function_set,
                             parsimony_coefficient=0.0005,
                             max_samples=0.9, random_state=0)
    gp.fit(boston.data[:300, :], boston.target[:300])
    assert_equal(gp._programs[0][906].__str__(),
                 'sub(logical(X6, add(X11, 0.898), X10, X2), X5)')
    dot_data = gp._programs[0][906].export_graphviz()
    expected = ('digraph program {\nnode [style=filled]\n0 [label="sub", '
                'fillcolor="#136ed4"] ;\n1 [label="logical", '
                'fillcolor="#136ed4"] ;\n2 [label="X6", fillcolor="#60a6f6"] '
                ';\n3 [label="add", fillcolor="#136ed4"] ;\n4 [label="X11", '
                'fillcolor="#60a6f6"] ;\n5 [label="0.898", '
                'fillcolor="#60a6f6"] ;\n3 -> 5 ;\n3 -> 4 ;\n6 [label="X10", '
                'fillcolor="#60a6f6"] ;\n7 [label="X2", fillcolor="#60a6f6"] '
                ';\n1 -> 7 ;\n1 -> 6 ;\n1 -> 3 ;\n1 -> 2 ;\n8 [label="X5", '
                'fillcolor="#60a6f6"] ;\n0 -> 8 ;\n0 -> 1 ;\n}')
    assert_equal(dot_data, expected)
def test_classifier_comparison():
"""Test the classifier comparison example works"""
X, y = make_classification(n_features=2, n_redundant=0, n_informative=2,
random_state=1, n_clusters_per_class=1)
rng = np.random.RandomState(2)
X += 2 * rng.uniform(size=X.shape)
linearly_separable = (X, y)
datasets = [make_moons(noise=0.3, random_state=0),
|
refweak/refweak_scripts | ncbifetch/tests/test_basic.py | Python | gpl-3.0 | 451 | 0.006652 | # -*- coding: utf-8 -*-
import sys
sys.path.append('..')
from ncbifetch import main
import uni | ttest
class BasicTestSuite(unittest.TestCase):
    """Basic test cases for the ncbifetch 'main' module."""
    def test_absolute_truth_and_meaning(self):
        # Smoke test: proves the test harness itself runs.
        assert True
    def test_test(self):
        # main.x() is expected to return the literal 'ping'.
        assert main.x() == 'ping'
    def test_sal_wgs(self):
        # NOTE(review): fetch_wgs presumably queries NCBI (network-dependent —
        # confirm); the assert only checks the call completed without raising.
        main.fetch_wgs(organism='Salmonella enterica')
        assert True
if __name__ == '__main__':
    # Run the test suite when executed directly (restored garbled line).
    unittest.main()
akelm/YAMS | yams/em_int.py | Python | gpl-3.0 | 715 | 0.06014 | import numpy as np
def em_int(wspE, wspM, k, RB, xp2inv, d_el, d_m, Dd_el, cc4, cc3, Cepsilon):
    """Return the (perp, para) real-valued integral components.

    wspE, wspM: pairs of coefficient arrays for the electric/magnetic modes.
    k: wavenumber-like scalar divisor.
    RB: 4-element sequence of radial basis arrays; indices (2, 0) build the
        mode profiles and (3, 1) their derivatives.
    xp2inv, d_el, d_m, Dd_el: per-mode weight arrays; cc4, cc3: quadrature
        coefficient vectors contracted via np.matmul.
    Cepsilon: complex permittivity-like scalar (numpy complex — .conj() is
        called on it).

    NaN/inf products (e.g. from vanishing denominators upstream) are zeroed
    with np.nan_to_num before the contraction, preserving the original
    best-effort behaviour.
    """
    # Mode profiles and their radial derivatives, scaled by 1/k.
    fi_el = (wspE[0]*RB[2] + wspE[1]*RB[0])/k
    Dfi_el = (wspE[0]*RB[3] + wspE[1]*RB[1])/k
    fi_m = (wspM[0]*RB[2] + wspM[1]*RB[0])/k
    Dfi_m = (wspM[0]*RB[3] + wspM[1]*RB[1])/k
    int_perp = xp2inv**2 * np.matmul(np.nan_to_num(-Dfi_el*fi_el.conj()*d_el), cc4)
    int_para = xp2inv*np.matmul(np.nan_to_num(-Dfi_el*fi_el.conj()*Dd_el + fi_m*Dfi_m.conj()*d_m), cc3)
    # Restored garbled 'Cepsilon.conj()' call on the para component.
    perp = -(1j*np.sqrt(Cepsilon.conj())*int_perp).real
    para = -0.5*(1j*np.sqrt(Cepsilon.conj())*int_para).real
    return (perp, para)
def wsp(M, T, RB):
    """Return |w|^2 where w = (T0*RB0 + T1*RB1) / (T0*M0 - T1*M1).

    M, T: ndarrays of rank >= 3 whose axis 2 (size >= 2) holds the two
    components; that axis is rolled to the front so T0 = T[..., 0] etc.
    RB: sequence of two arrays broadcastable against those components.
    Zero denominators produce nan/inf, which np.nan_to_num maps to 0 /
    large finite values (original best-effort behaviour, kept).
    """
    T = list(np.rollaxis(T, 2, 0))
    M = list(np.rollaxis(M, 2, 0))
    w = (T[0]*RB[0] + T[1]*RB[1]) / (T[0]*M[0] - T[1]*M[1])
    # Restored garbled 'w.conj()' call: |w|^2 = conj(w) * w.
    return np.nan_to_num(w.conj()*w)
|
piotrek-golda/CivilHubIndependantCopy | places_core/admin.py | Python | gpl-3.0 | 596 | 0 | # -*- coding: utf-8 -*-
from django.contrib import | admin
from django.utils.translation import ugettext as _
from .models import AbuseReport, SearchTermRecord
admin.site.register(AbuseReport)
class SearchTermAdmin(admin.ModelAdmin):
    """Admin list for recorded search terms, searchable by term text."""
    list_display = ('__unicode__', 'ip_address', 'get_user_full_name', )
    search_fields = ('term', )
    def get_user_full_name(self, obj):
        # Column callback: the searcher's full name, or a translated
        # "(None)" placeholder for anonymous searches.
        user = obj.user
        if user is not None:
            return user.get_full_name()
        return "(%s)" % _(u"None")
    get_user_full_name.short_description = "user"
admin.site.register(SearchTermRecord, SearchTermAdmin)
|
eneldoserrata/marcos_openerp | addons/account_periodical_invoicing/periodical_invoicing.py | Python | agpl-3.0 | 23,307 | 0.006608 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2012 Serv. Tecnol. Avanzados (http://www.serviciosbaeza.com) All Rights Reserved.
# Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
from openerp.tools.translate import _
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
import openerp.addons.decimal_precision as dp
class agreement(orm.Model):
_name = 'account.periodical_invoicing.agreement'
_inherit = 'mail.thread'
_description = "Periodical invoicing agreement"
    def __get_next_term_date(self, date, unit, interval):
        """
        Get the date that results on incrementing given date an interval of time in time unit.
        @param date: Original date (datetime/date object).
        @param unit: Interval time unit: 'days', 'weeks', 'months' or 'years'.
        @param interval: Quantity of the time unit.
        @rtype: date
        @return: The date incremented in 'interval' units of 'unit'.
            NOTE: implicitly returns None for any other unit value.
        """
        # days/weeks use timedelta; months/years need relativedelta for
        # calendar-aware arithmetic.
        if unit == 'days':
            return date + timedelta(days = interval)
        elif unit == 'weeks':
            return date + timedelta(weeks=interval)
        elif unit == 'months':
            return date + relativedelta(months=interval)
        elif unit == 'years':
            return date + relativedelta(years=interval)
def __get | _previous_term_date(self, date, unit, interval):
"""
Get the date that results on decrementing given date an interval of time in time unit.
@param date: Original date.
@param unit: Interval time unit.
@param interval: Quantity of the time unit.
@rtype: date
@return: The date decremented in 'interval' units of 'unit'.
"""
if unit == 'days':
return date - timedelta(days = interval)
elif unit == 'weeks':
return date - timedelta(weeks=interval)
elif unit == 'months':
return date - relativedelta(months=interval)
elif unit == 'years':
return date - relativedelta(years=interval)
    def __get_next_expiration_date(self, cr, uid, ids, field_name, arg, context=None):
        """
        Get next expiration date of the agreement. For unlimited agreements, get max date.

        Functional-field getter: returns {agreement_id: date}.
        - 'fixed': the stored end_date.
        - 'unlimited': first term boundary after now, stepping from start_date.
        - otherwise (renewable fixed term): one term after the last renovation
          (or start) date.
        """
        if not ids: return {}
        res = {}
        for agreement in self.browse(cr, uid, ids):
            if agreement.prolong == 'fixed':
                res[agreement.id] = agreement.end_date
            elif agreement.prolong == 'unlimited':
                now = datetime.now()
                # Step forward term by term until we pass the current moment.
                date = self.__get_next_term_date(datetime.strptime(agreement.start_date, "%Y-%m-%d"), agreement.prolong_unit, agreement.prolong_interval)
                while (date < now):
                    date = self.__get_next_term_date(date, agreement.prolong_unit, agreement.prolong_interval)
                res[agreement.id] = date
            else:
                # for renewable fixed term
                res[agreement.id] = self.__get_next_term_date(datetime.strptime( \
                    agreement.last_renovation_date if agreement.last_renovation_date else agreement.start_date, "%Y-%m-%d"), \
                    agreement.prolong_unit, agreement.prolong_interval)
        return res
_columns = {
'name': fields.char('Name', size=100, select=1, required=True, help='Name that helps to identify the agreement'),
'number': fields.char('Agreement number', select=1, size=32, help="Number of agreement. Keep empty to get the number assigned by a sequence."),
'active': fields.boolean('Active', help='Unchecking this field, quotas are not generated'),
'partner_id': fields.many2one('res.partner', 'Customer', select=1, change_default=True, required=True, help="Customer you are making the agreement with"),
'company_id': fields.many2one('res.company', 'Company', required=True, help="Company that signs the agreement"),
'start_date': fields.date('Start date', select=1, help="Beginning of the agreement. Keep empty to use the current date"),
'prolong': fields.selection([('recurrent','Renewable fixed term'),('unlimited','Unlimited term'),('fixed','Fixed term')], 'Prolongation', help="Sets the term of the agreement. 'Renewable fixed term': It sets a fixed term, but with possibility of manual renew; 'Unlimited term': Renew is made automatically; 'Fixed term': The term is fixed and there is no possibility to renew."),
'end_date': fields.date('End date', help="End date of the agreement"),
'prolong_interval': fields.integer('Interval', help="Interval in time units to prolong the agreement until new renewall (that is automatic for unlimited term, manual for renewable fixed term)."),
'prolong_unit': fields.selection([('days','days'),('weeks','weeks'),('months','months'),('years','years')], 'Interval unit', help='Time unit for the prolongation interval'),
'agreement_line': fields.one2many('account.periodical_invoicing.agreement.line', 'agreement_id', 'Agreement lines'),
'invoice_line': fields.one2many('account.periodical_invoicing.agreement.invoice', 'agreement_id', 'Invoice lines', readonly=True),
'renewal_line': fields.one2many('account.periodical_invoicing.agreement.renewal', 'agreement_id', 'Renewal lines', readonly=True),
'last_renovation_date': fields.date('Last renovation date', help="Last date when agreement was renewed (same as start date if not renewed)"),
'next_expiration_date': fields.function(__get_next_expiration_date, string='Next expiration date', type='date', method=True, store=True),
'period_type': fields.selection([('pre-paid', 'Pre-paid'), ('post-paid', 'Post-paid')], "Period type", required=True, help="Period type for invoicing. 'Pre-paid': Invoices are generated for the upcoming period. 'Post-paid': Invoices are generated for the consumed period."),
#TODO: Añadir posibilidad de seguir cuando se genera una factura con _track = {}
'state': fields.selection([('empty', 'Without invoices'), ('invoices', 'With invoices')], 'State', readonly=True),
'renewal_state': fields.selection([('not_renewed', 'Agreement not renewed'), ('renewed', 'Agreement renewed')], 'Renewal state', readonly=True),
'notes': fields.text('Notes'),
}
_defaults = {
'active': True,
'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get(cr, uid, 'account', context=c),
'period_type': 'pre-paid',
'prolong': 'unlimited',
'prolong_interval': 1,
'prolong_unit': 'years',
'state': 'empty',
'renewal_state': 'not_renewed',
}
def _check_dates(self, cr, uid, ids, context=None):
"""
Check correct dates. When prolongation is unlimited or renewal, end_date is False, so doesn't apply
@rtype: boolean
@return: True if dates are correct or don't apply, False otherwise
"""
if context == None: context = {}
agreements = self.browse(cr, uid, ids, context=context)
val = True
for agreement in agreements:
if agreement.end_date: val = val and agreement.end_date > agreement.start_d |
pqtoan/mathics | mathics/core/definitions.py | Python | gpl-3.0 | 25,568 | 0.001252 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
import six
import six.moves.cPickle as pickle
import os
import base64
import re
import bisect
from collections import defaultdict
from mathics.core.expression import Expression, Symbol, String, fully_qualified_symbol_name, strip_context
from mathics.core.characters import letters, letterlikes
names_wildcards = "@*"
base_names_pattern = r'((?![0-9])([0-9${0}{1}{2}])+)'.format(letters, letterlikes, names_wildcards)
full_names_pattern = r'(`?{0}(`{0})*)'.format(base_names_pattern)
def get_file_time(file):
    """Return the modification time of *file*, or 0 if it cannot be stat'ed."""
    try:
        mtime = os.stat(file).st_mtime
    except OSError:
        return 0
    return mtime
def valuesname(name):
    """Return the lowercase short form of a 'System`...Values' name.

    Example: 'System`NValues' -> 'n'. 'System`Messages' is special-cased
    to 'messages'.
    """
    assert name.startswith('System`'), name
    if name == 'System`Messages':
        return 'messages'
    # Strip the 'System`' prefix and the trailing 'Values' suffix.
    core = name[len('System`'):-len('Values')]
    return core.lower()
class Definitions(object):
def __init__(self, add_builtin=False, builtin_filename=None):
super(Definitions, self).__init__()
self.builtin = {}
self.user = {}
self.definitions_cache = {}
self.lookup_cache = {}
self.proxy = defaultdict(set)
self.now = 0 # increments whenever something is updated
if add_builtin:
from mathics.builtin import modules, contribute
from mathics.core.evaluation import Evaluation
from mathics.settings import ROOT_DIR
loaded = False
if builtin_filename is not None:
builtin_dates = [get_file_time(module.__file__)
for module in modules]
builtin_time = max(builtin_dates)
if get_file_time(builtin_filename) > builtin_time:
builtin_file = open(builtin_filename, 'rb')
self.builtin = pickle.load(builtin_file)
loaded = True
if not loaded:
contribute(self)
if builtin_filename is not None:
builtin_file = open(builtin_filename, 'wb')
pickle.dump(self.builtin, builtin_file, -1)
for root, dirs, files in os.walk(os.path.join(ROOT_DIR, 'autoload')):
for path in [os.path.join(root, f) for f in files if f.endswith('.m')]:
Expression('Get', String(path)).evaluate(Evaluation(self))
# Move any user definitions created by autoloaded files to
# builtins, and clear out the user definitions list. This
# means that any autoloaded definitions become shared
# between users and no longer disappear after a Quit[].
#
# Autoloads that accidentally define a name in Global`
# could cause confusion, so check for this.
#
for name in self.user:
if name.startswith('Global`'):
raise ValueError("autoload defined %s." % name | )
self.builtin.update(self.user)
self.user = {}
self.clear_cache()
def clear_cache(self, name=None):
# the definitions cache (self.definitions_cache) caches (incomplete and complete) names -> Definition(),
# e.g. "xy" -> d and "MyContext`xy" -> d. we need to clear this cache if a Definition() changes (which
# would happen if a Definition is | combined from a builtin and a user definition and some content in the
# user definition is updated) or if the lookup rules change and we could end up at a completely different
# Definition.
# the lookup cache (self.lookup_cache) caches what lookup_name() does. we only need to update this if some
# change happens that might change the result lookup_name() calculates. we do not need to change it if a
# Definition() changes.
# self.proxy keeps track of all the names we cache. if we need to clear the caches for only one name, e.g.
# 'MySymbol', then we need to be able to look up all the entries that might be related to it, e.g. 'MySymbol',
# 'A`MySymbol', 'C`A`MySymbol', and so on. proxy identifies symbols using their stripped name and thus might
# give us symbols in other contexts that are actually not affected. still, this is a safe solution.
if name is None:
self.definitions_cache = {}
self.lookup_cache = {}
self.proxy = defaultdict(set)
else:
definitions_cache = self.definitions_cache
lookup_cache = self.lookup_cache
tail = strip_context(name)
for k in self.proxy.pop(tail, []):
definitions_cache.pop(k, None)
lookup_cache.pop(k, None)
def clear_definitions_cache(self, name):
definitions_cache = self.definitions_cache
tail = strip_context(name)
for k in self.proxy.pop(tail, []):
definitions_cache.pop(k, None)
def last_changed(self, expr):
# timestamp for the most recently changed part of a given expression.
if isinstance(expr, Symbol):
symb = self.get_definition(expr.get_name(), only_if_exists=True)
if symb is None:
# symbol doesn't exist so it was never changed
return 0
try:
return symb.changed
except AttributeError:
# must be system symbol
symb.changed = 0
return 0
result = 0
head = expr.get_head()
head_changed = self.last_changed(head)
result = max(result, head_changed)
for leaf in expr.get_leaves():
leaf_changed = self.last_changed(leaf)
result = max(result, leaf_changed)
return result
    def get_current_context(self):
        """Return the current context, i.e. the string value of $Context."""
        # It's crucial to specify System` in this get_ownvalue() call,
        # otherwise we'll end up back in this function and trigger
        # infinite recursion.
        context_rule = self.get_ownvalue('System`$Context')
        context = context_rule.replace.get_string_value()
        assert context is not None, "$Context somehow set to an invalid value"
        return context
def get_context_path(self):
context_path_rule = self.get_ownvalue('System`$ContextPath')
context_path = context_path_rule.replace
assert context_path.has_form('System`List', None)
context_path = [c.get_string_value() for c in context_path.leaves]
assert not any([c is None for c in context_path])
return context_path
    def set_current_context(self, context):
        """Set $Context to *context* and invalidate the name caches."""
        assert isinstance(context, six.string_types)
        self.set_ownvalue('System`$Context', String(context))
        self.clear_cache()
def set_context_path(self, context_path):
assert isinstance(context_path, list)
assert all([isinstance(c, six.string_types) for c in context_path])
self.set_ownvalue('System`$ContextPath',
Expression('System`List',
*[String(c) for c in context_path]))
self.clear_cache()
    def get_builtin_names(self):
        """Return the set of all builtin symbol names."""
        return set(self.builtin)
    def get_user_names(self):
        """Return the set of all user-defined symbol names."""
        return set(self.user)
    def get_names(self):
        """Return the set of all known symbol names (builtin and user)."""
        return self.get_builtin_names() | self.get_user_names()
def get_accessible_contexts(self):
"Return the contexts reachable though $Context or $ContextPath."
accessible_ctxts = set(self.get_context_path())
accessible_ctxts.add(self.get_current_context())
return accessible_ctxts
def get_matching_names(self, pattern):
"""
Return a list of the symbol names matching a string pattern.
A pattern containing a context mark (of the form
"ctx_pattern`short_pattern") matches symbols whose context and
short name individually match the two patterns. A pattern
without a context mark matches symbols accessible through
$Context and $ContextPath whose short names match the pattern.
'*' matches any sequence of symbol characters or an empty
|
frankk00/realtor | scripts/batch/live_interactive.py | Python | bsd-3-clause | 828 | 0 | #!/usr/bin/python2.5
#
# Copyright 2009 Roman Nurik
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is dist | ributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permission | s and
# limitations under the License.
"""Local interactive console for a remote app, using the remote API."""
__author__ = 'api.roman.public@gmail.com (Roman Nurik)'
import code
import remote_client
code.interact('App Engine interactive console', None, locals())
|
xmnlab/minilab | labtrans/daq/MultiChannelAnalogInput.py | Python | gpl-3.0 | 2,739 | 0.013874 | # -*- coding: utf-8 -*-
"""
Created on Tue Oct 8 16:11:48 2013
@author: ivan
"""
import numpy
import time
from PyDAQmx.DAQmxFunctions import *
from PyDAQmx.DAQmxConstants import *
# Physical analog-input channel names per DAQ device:
# Dev1 exposes ai0-ai1, Dev2 exposes ai0-ai15.
terminals = {}
terminals['Dev1'] = ['Dev1/ai%s' % line for line in range(0, 2)]
terminals['Dev2'] = ['Dev2/ai%s' % line for line in range(0, 16)]
class MultiChannelAnalogInput():
    """Multi-channel analog input built on NI-DAQmx.

    Usage: AI = MultiChannelAnalogInput(physicalChannel)
        physicalChannel: a channel name string or a list of such strings

    Optional parameters:
        limit: a (low, high) voltage tuple applied to all channels, or a
               list with one tuple per channel; defaults to (-5.0, 5.0)
        reset: boolean, reset the device before use

    Methods:
        read(name): return (value, timestamp) for the named input
        readAll(): return a dict {name: (value, timestamp)}
    """
    def __init__(self, physicalChannel, limit=None, reset=False):
        # Accept a single channel name or a list of names.
        if isinstance(physicalChannel, str):
            self.physicalChannel = [physicalChannel]
        else:
            self.physicalChannel = physicalChannel
        # Count channels after normalisation: the original counted the
        # characters of the string when a single channel name was passed.
        self.numberOfChannel = len(self.physicalChannel)
        # Voltage limits: default, one tuple for all, or one tuple each.
        if limit is None:
            self.limit = dict([(name, (-5.0, 5.0)) for name in self.physicalChannel])
        elif type(limit) == tuple:
            self.limit = dict([(name, limit) for name in self.physicalChannel])
        else:
            self.limit = dict([(name, limit[i]) for i, name in enumerate(self.physicalChannel)])
        if reset:
            # Device name is the part before the '/' of the first channel.
            DAQmxResetDevice(self.physicalChannel[0].split('/')[0])
    def configure(self):
        """Create one DAQmx task per channel and configure its voltage range."""
        taskHandles = dict([(name, TaskHandle(0)) for name in self.physicalChannel])
        for name in self.physicalChannel:
            DAQmxCreateTask("", byref(taskHandles[name]))
            DAQmxCreateAIVoltageChan(taskHandles[name], name, "", DAQmx_Val_RSE,
                                     self.limit[name][0], self.limit[name][1],
                                     DAQmx_Val_Volts, None)
        self.taskHandles = taskHandles
    def readAll(self):
        """Read every configured channel once; return {name: (value, timestamp)}."""
        return dict([(name, self.read(name)) for name in self.physicalChannel])
    def read(self, name=None):
        """Read one sample from channel *name* (default: first channel).

        Returns a (voltage, unix_timestamp) tuple.
        """
        if name is None:
            name = self.physicalChannel[0]
        taskHandle = self.taskHandles[name]
        DAQmxStartTask(taskHandle)
        data = numpy.zeros((1,), dtype=numpy.float64)
        read = int32()
        DAQmxReadAnalogF64(taskHandle, 1, 10.0, DAQmx_Val_GroupByChannel,
                           data, 1, byref(read), None)
        DAQmxStopTask(taskHandle)
        return data[0], time.time()
if __name__ == '__main__':
    # Demo: continuously poll all Dev1 analog inputs.
    multipleAI = MultiChannelAnalogInput(terminals['Dev1'])
    multipleAI.configure()
    while True:
        # Parenthesized single-argument print works under Python 2 and 3.
        print(multipleAI.readAll())
        time.sleep(0.0002)
infothrill/flask-socketio-dbus-demo | sensors/tdbus_upower.py | Python | mit | 7,190 | 0.002643 | # -*- coding: utf-8 -*-
'''
This module presents a little code to deal with battery status using DBUS and
UPower on Linux
@author: pkremer
'''
import sys
import logging
from six.moves import filter
from functools import partial
import six
import tdbus
# D-Bus well-known bus names, object paths and interface names for UPower,
# plus the generic org.freedesktop.DBus.Properties interface.
UPOWER_NAME = 'org.freedesktop.UPower'
UPOWER_DEVICE_IFACE = 'org.freedesktop.UPower.Device'
UPOWER_PATH = '/org/freedesktop/UPower'
UPOWER_IFACE = 'org.freedesktop.UPower'
DBUS_PROP_NAME = 'org.freedesktop.DBus.Properties'
log = logging.getLogger(__name__)
def convert_DBUS_to_python(val):
    '''
    quick hack to convert DBUS types to python types
    '''
    if isinstance(val, (str, six.text_type,)):
        return str(val)
    if isinstance(val, int):
        return int(val)
    if isinstance(val, dict):
        return convert_DBUSDictionary_to_dict(val)
    if isinstance(val, list):
        return convert_DBUSArray_to_tuple(val)
    if isinstance(val, tuple):
        # D-Bus variants arrive as (signature, value) pairs; keep the value.
        return val[1]
    if isinstance(val, float):
        return float(val)
    raise TypeError("Unknown type '%s': '%r'!" % (str(type(val)),
                                                  repr(val)))
def convert_DBUSArray_to_tuple(dbusarray):
    """Recursively convert a D-Bus array into a tuple of Python values."""
    # The previous version returned a 1-tuple wrapping a generator (a
    # trailing-comma slip); callers expect the converted elements themselves.
    return tuple(convert_DBUS_to_python(val) for val in dbusarray)
def convert_DBUSDictionary_to_dict(dbusdict):
    """Recursively convert a D-Bus dictionary into a plain Python dict."""
    result = {}
    for key in dbusdict:
        result[convert_DBUS_to_python(key)] = convert_DBUS_to_python(dbusdict[key])
    return result
def uPowerEnumerateDevices(conn):
    """Yield the object path of every power device known to UPower."""
    reply = conn.call_method(UPOWER_PATH, member='EnumerateDevices',
                             interface=UPOWER_IFACE,
                             destination=UPOWER_NAME)
    for device in reply.get_args()[0]:
        yield device
def uPowerDeviceGetAll(conn, device):
    '''
    Fetch all org.freedesktop.UPower.Device properties of *device* over the
    given DBUS connection and return them as plain Python data.

    :param conn: DBUS connection
    :param device: the device object path
    '''
    log.debug("uPowerDeviceGetAll %s", device)
    reply = conn.call_method(device,
                             member='GetAll',
                             interface=DBUS_PROP_NAME,
                             destination=UPOWER_NAME,
                             format='s',
                             args=(UPOWER_DEVICE_IFACE,))
    return convert_DBUS_to_python(reply.get_args()[0])
def uPowerDeviceGet(conn, device, attribute):
    """Fetch a single org.freedesktop.UPower.Device property of *device*
    and return it as a plain Python value."""
    log.debug("uPowerDeviceGet %s.%s", device, attribute)
    return convert_DBUS_to_python(
        conn.call_method(device,
                         member='Get',
                         interface=DBUS_PROP_NAME,
                         destination=UPOWER_NAME,
                         format='ss',
                         args=(UPOWER_DEVICE_IFACE, attribute)
                         ).get_args()[0]
    )
class UPowerDeviceHandler(tdbus.DBusHandler):
    """Signal handler for org.freedesktop.UPower.Device 'Changed' events
    that re-reads the device properties and fans them out to observers."""

    def __init__(self, connect, devices):
        '''
        A DBUS signal handler class for the org.freedesktop.UPower.Device
        'Changed' event. To re-read the device data, a DBUS connection is
        required. This is established when an event is fired using the provided
        connect method.

        Essentially, this is a cluttered workaround for a bizarre object design
        and use of decorators in the tdbus library.

        :param connect: a DBUS system bus connection factory
        :param devices: the device object paths to watch
        '''
        self.connect = connect
        self.device_paths = devices
        log.debug('Installing signal handler for devices: %r', devices)
        # observer callable -> tuple/list of device paths (None = all devices)
        self._observers = {}
        super(UPowerDeviceHandler, self).__init__()

    def register_observer(self, observer, devices=None):
        """
        register a listener function

        Parameters
        -----------
        observer : external listener function
        devices : tuple or list of relevant device paths (default=None,
            meaning all devices)
        """
        if devices is not None and type(devices) not in (tuple, list):
            devices = (devices,)
        if observer in self._observers:
            log.warning("Observer '%r' already registered, overwriting for "
                        "devices %r", observer, devices)
        self._observers[observer] = devices

    def unregister_observer(self, observer):
        """Remove *observer*; silently ignore observers never registered.

        notify_observers() relied on this method but it was missing, which
        turned any observer failure into an AttributeError.
        """
        self._observers.pop(observer, None)

    def notify_observers(self, device=None, attributes=None):
        """Dispatch *attributes* of *device* to all interested observers.

        An observer that raises is unregistered so one faulty listener
        cannot break dispatch for the others.
        """
        log.debug("%s %r", device, attributes)
        for observer, devices in list(self._observers.items()):
            if devices is None or device is None or device in devices:
                try:
                    observer(self, device, attributes)
                except (Exception,) as ex:  # pylint: disable=W0703
                    self.unregister_observer(observer)
                    # Format the whole message: previously .format() bound
                    # only to the second string literal, leaving '{0}'
                    # unformatted in the logged output.
                    errmsg = ("Exception in message dispatch: Handler '{0}'"
                              " unregistered for device '{1}' ").format(
                                  observer.__class__.__name__, device)
                    log.error(errmsg, exc_info=ex)

    @tdbus.signal_handler(member='Changed', interface=UPOWER_DEVICE_IFACE)
    def Changed(self, message):
        """Re-read all properties of a watched device and notify observers."""
        device = message.get_path()
        if device in self.device_paths:
            log.debug('signal received: %s, args = %r', message.get_member(),
                      message.get_args())
            conn = self.connect()
            self.notify_observers(device, uPowerDeviceGetAll(conn, device))
            conn.close()
def connect_dbus_system():
    '''
    Factory for DBUS system bus connections.

    :return: a new tdbus.SimpleDBusConnection attached to the system bus;
        the caller is responsible for closing it.
    '''
    return tdbus.SimpleDBusConnection(tdbus.DBUS_BUS_SYSTEM)
def upower_present(connect):
    """Return True if the UPower service is registered on the system bus."""
    conn = connect()
    reply = conn.call_method(tdbus.DBUS_PATH_DBUS, 'ListNames',
                             tdbus.DBUS_INTERFACE_DBUS,
                             destination=tdbus.DBUS_SERVICE_DBUS)
    conn.close()
    # Unique connection names start with ':'; only well-known names matter.
    well_known = [name for name in reply.get_args()[0]
                  if not name.startswith(':')]
    return UPOWER_NAME in well_known
def ibatteries(conn):
    '''
    Lazily yield the object paths of all rechargeable power devices.

    :param conn: DBUS system bus connection
    '''
    for device in uPowerEnumerateDevices(conn):
        log.debug("testing IsRechargeable for '%s'", device)
        if uPowerDeviceGet(conn, device, 'IsRechargeable'):
            yield device
def main():
    """Install a 'Changed' handler for all rechargeable devices and block
    forever dispatching DBUS events."""
    logging.basicConfig(level=logging.DEBUG)
    if not upower_present(connect_dbus_system):
        raise EnvironmentError("DBUS connection to UPower impossible")
    conn = connect_dbus_system()
    conn.add_handler(UPowerDeviceHandler(connect_dbus_system,
                                         set(ibatteries(conn))))
    conn.subscribe_to_signals()
    # basic select() loop, i.e. we assume there is no event loop
    conn.dispatch()
if __name__ == '__main__':
    sys.exit(main())
|
bundgus/python-playground | matplotlib-playground/examples/pylab_examples/tricontour_demo.py | Python | mit | 4,613 | 0.004552 | """
Contour plots of unstructured triangular grids.
"""
import matplotlib.pyplot as plt
import matplotlib.tri as tri
import numpy as np
import math
# Creating a Triangulation without specifying the triangles results in the
# Delaunay triangulation of the points.
# First create the x and y coordinates of the points.
n_angles = 48
n_radii = 8
min_radius = 0.25
radii = np.linspace(min_radius, 0.95, n_radii)
angles = np.linspace(0, 2*math.pi, n_angles, endpoint=False)
angles = np.repeat(angles[..., np.newaxis], n_radii, axis=1)
angles[:, 1::2] += math.pi/n_angles
x = (radii*np.cos(angles)).flatten()
y = (radii*np.sin(angles)).flatten()
z = (np.cos(radii)*np.cos(angles*3.0)).flatten()
# Create the Triangulation; no triangles so Delaunay triangulation created.
triang = tri.Triangulation(x, y)
# Mask off unwanted triangles.
xmid = x[triang.triangles].mean(axis=1)
ymid = y[triang.triangles].mean(axis=1)
mask = np.where(xmid*xmid + ymid*ymid < min_radius*min_radius, 1, 0)
triang.set_mask(mask)
# pcolor plot.
plt.figure()
plt.gca().set_aspect('equal')
plt.tricontourf(triang, z)
plt.colorbar()
plt.tricontour(triang, z, colors='k')
plt.title('Contour plot of Delaunay triangulation')
# You can specify your own triangulation rather than perform a Delaunay
# triangulation of the points, where each triangle is given by the indices of
# the three points that make up the triangle, ordered in either a clockwise or
# anticlockwise manner.
xy = np.asarray([
    [-0.101, 0.872], [-0.080, 0.883], [-0.069, 0.888], [-0.054, 0.890],
    [-0.045, 0.897], [-0.057, 0.895], [-0.073, 0.900], [-0.087, 0.898],
    [-0.090, 0.904], [-0.069, 0.907], [-0.069, 0.921], [-0.080, 0.919],
    [-0.073, 0.928], [-0.052, 0.930], [-0.048, 0.942], [-0.062, 0.949],
    [-0.054, 0.958], [-0.069, 0.954], [-0.087, 0.952], [-0.087, 0.959],
    [-0.080, 0.966], [-0.085, 0.973], [-0.087, 0.965], [-0.097, 0.965],
    [-0.097, 0.975], [-0.092, 0.984], [-0.101, 0.980], [-0.108, 0.980],
    [-0.104, 0.987], [-0.102, 0.993], [-0.115, 1.001], [-0.099, 0.996],
    [-0.101, 1.007], [-0.090, 1.010], [-0.087, 1.021], [-0.069, 1.021],
    [-0.052, 1.022], [-0.052, 1.017], [-0.069, 1.010], [-0.064, 1.005],
    [-0.048, 1.005], [-0.031, 1.005], [-0.031, 0.996], [-0.040, 0.987],
    [-0.045, 0.980], [-0.052, 0.975], [-0.040, 0.973], [-0.026, 0.968],
    [-0.020, 0.954], [-0.006, 0.947], [ 0.003, 0.935], [ 0.006, 0.926],
    [ 0.005, 0.921], [ 0.022, 0.923], [ 0.033, 0.912], [ 0.029, 0.905],
    [ 0.017, 0.900], [ 0.012, 0.895], [ 0.027, 0.893], [ 0.019, 0.886],
    [ 0.001, 0.883], [-0.012, 0.884], [-0.029, 0.883], [-0.038, 0.879],
    [-0.057, 0.881], [-0.062, 0.876], [-0.078, 0.876], [-0.087, 0.872],
    [-0.030, 0.907], [-0.007, 0.905], [-0.057, 0.916], [-0.025, 0.933],
    [-0.077, 0.990], [-0.059, 0.993]])
x = np.degrees(xy[:, 0])
y = np.degrees(xy[:, 1])
x0 = -5
y0 = 52
z = np.exp(-0.01*((x - x0)*(x - x0) + (y - y0)*(y - y0)))
# Each row lists the indices of a triangle's three vertices in xy.
# (Two entries below, [44, 73, 45] and [39, 73, 40], were garbled by stray
# text in the previous revision and have been restored.)
triangles = np.asarray([
    [67, 66, 1], [65, 2, 66], [ 1, 66, 2], [64, 2, 65], [63, 3, 64],
    [60, 59, 57], [ 2, 64, 3], [ 3, 63, 4], [ 0, 67, 1], [62, 4, 63],
    [57, 59, 56], [59, 58, 56], [61, 60, 69], [57, 69, 60], [ 4, 62, 68],
    [ 6, 5, 9], [61, 68, 62], [69, 68, 61], [ 9, 5, 70], [ 6, 8, 7],
    [ 4, 70, 5], [ 8, 6, 9], [56, 69, 57], [69, 56, 52], [70, 10, 9],
    [54, 53, 55], [56, 55, 53], [68, 70, 4], [52, 56, 53], [11, 10, 12],
    [69, 71, 68], [68, 13, 70], [10, 70, 13], [51, 50, 52], [13, 68, 71],
    [52, 71, 69], [12, 10, 13], [71, 52, 50], [71, 14, 13], [50, 49, 71],
    [49, 48, 71], [14, 16, 15], [14, 71, 48], [17, 19, 18], [17, 20, 19],
    [48, 16, 14], [48, 47, 16], [47, 46, 16], [16, 46, 45], [23, 22, 24],
    [21, 24, 22], [17, 16, 45], [20, 17, 45], [21, 25, 24], [27, 26, 28],
    [20, 72, 21], [25, 21, 72], [45, 72, 20], [25, 28, 26], [44, 73, 45],
    [72, 45, 73], [28, 25, 29], [29, 25, 31], [43, 73, 44], [73, 43, 40],
    [72, 73, 39], [72, 31, 25], [42, 40, 43], [31, 30, 29], [39, 73, 40],
    [42, 41, 40], [72, 33, 31], [32, 31, 33], [39, 38, 72], [33, 72, 38],
    [33, 38, 34], [37, 35, 38], [34, 38, 35], [35, 37, 36]])
# Rather than create a Triangulation object, can simply pass x, y and triangles
# arrays to tripcolor directly. It would be better to use a Triangulation
# object if the same triangulation was to be used more than once to save
# duplicated calculations.
plt.figure()
plt.gca().set_aspect('equal')
plt.tricontourf(x, y, triangles, z)
plt.colorbar()
plt.title('Contour plot of user-specified triangulation')
plt.xlabel('Longitude (degrees)')
plt.ylabel('Latitude (degrees)')
plt.show()
|
philipn/sycamore | Sycamore/i18n/fi.py | Python | gpl-2.0 | 2,372 | 0.038786 | # -*- coding: iso-8859-1 -*-
# Text translations for Suomi (fi).
# Automatically generated - DO NOT EDIT, edit fi.po instead!
meta = {
'language': 'Suomi',
'maintainer': '***vacant***',
'encoding': 'iso-8859-1',
'direction': 'ltr',
}
text = {
'''Create this page''':
'''Luo tämä sivu''',
'''Edit "%(pagename)s"''':
'''Muokkaa "%(pagename)s"''',
'''Reduce editor size''':
'''Pienennä editointi ikkunan kokoa''',
'''Describe %s here.''':
'''Kuvaile %s tässä.''',
'''Check Spelling''':
'''Oikolue''',
'''Save Changes''':
'''Talleta muutokset''',
'''Cancel''':
'''Peruuta''',
'''Preview''':
'''Esikatsele''',
'''Edit was cancelled.''':
'''Muokkaus peruttu.''',
'''Edit''':
'''Muokkaa''',
'''Default''':
'''Oletusarvo''',
'''Name''':
'''Nimi''',
'''Password''':
'''Tunnussana''',
'''Email''':
'''Sähköposti''',
'''Editor size''':
'''Editointikentän koko''',
'''Time zone''':
'''Aikavyöhyke''',
'''Your time is''':
'''Aikasi on''',
'''Server time is''':
'''Palvelimen aika on''',
'''Date format''':
'''Päivämäärän muoto''',
'''General options''':
'''Yleiset Asetukset''',
'''General Information''':
'''Yleiset Tied | ot''',
'''Revision History''':
'''Versiohistoria''',
'''Date''':
'''Päivämäärä''',
'''Size''':
'''Koko''',
'''Editor''':
'''Editori''',
'''Comment''':
'''Huomautus''',
'''view''':
'''näytä''',
'''revert''':
'''palauta''',
'''Show "%(title)s"''':
'''Näytä "%(title)s"''',
'''You are not allowed to revert this page!''':
'''Sinulla ei ole oikeutta palauttaa tätä sivua!''',
'''Python Version''':
'''Python Ve | rsio''',
'''Sycamore Version''':
'''Sycamore Versio''',
'''4Suite Version''':
'''4Suite Versio''',
'''del''':
'''poista''',
'''get''':
'''hae''',
'''edit''':
'''muokkaa''',
'''No attachments stored for %(pagename)s''':
'''Sivulla %(pagename)s ei ole liitteitä''',
'''attachment:%(filename)s of %(pagename)s''':
'''liite:%(filename)s / %(pagename)s''',
'''Page "%s" was successfully deleted!''':
'''Sivu "%s" poistettiin onnistuneesti!''',
'''Really delete this page?''':
'''Haluatko varmasti poistaa tämän sivun?''',
'''Drawing \'%(filename)s\' saved.''':
'''Piirrustus \'%(filename)s\' talletettu.''',
'''Create new drawing "%(filename)s"''':
'''Luo uusi piirrustus "%(filename)s"''',
'''date''':
'''päivämäärä''',
'''Others''':
'''Muut''',
'''Clear message''':
'''Tyhjennä viesti''',
'''Mail sent OK''':
'''Sähköposti lähetetty onnistuneesti''',
}
|
pandas-dev/pandas | pandas/tests/indexes/categorical/test_category.py | Python | bsd-3-clause | 14,639 | 0.000615 | import numpy as np
import pytest
from pandas._libs import index as libindex
from pandas._libs.arrays import NDArrayBacked
import pandas as pd
from pandas import (
Categorical,
CategoricalDtype,
)
import pandas._testing as tm
from pandas.core.indexes.api import (
CategoricalIndex,
Index,
)
from pandas.tests.indexes.common import Base
class TestCategoricalIndex(Base):
_index_cls = CategoricalIndex
    @pytest.fixture
    def simple_index(self) -> CategoricalIndex:
        # Small fixed index: values 'aabbca' over categories 'cab', unordered.
        return self._index_cls(list("aabbca"), categories=list("cab"), ordered=False)
    @pytest.fixture
    def index(self):
        # Random 100-element CategoricalIndex from pandas' test helpers.
        return tm.makeCategoricalIndex(100)
    def create_index(self, *, categories=None, ordered=False):
        # Build the canonical 'aabbca' CategoricalIndex; categories default
        # to 'cab' so the category order differs from the data order.
        if categories is None:
            categories = list("cab")
        return CategoricalIndex(list("aabbca"), categories=categories, ordered=ordered)
    def test_can_hold_identifiers(self):
        # The first element of the index should be usable as an identifier key.
        idx = self.create_index(categories=list("abcd"))
        key = idx[0]
        assert idx._can_hold_identifiers_and_holds_name(key) is True
    def test_pickle_compat_construction(self):
        # Constructing a CategoricalIndex with no data is deprecated.
        # Once the deprecation is enforced, we can use the parent class's test
        with tm.assert_produces_warning(FutureWarning, match="without passing data"):
            self._index_cls()
    def test_insert(self, simple_index):
        # Insert at the front, a negative position, into an empty index,
        # with an out-of-category value, and with missing values.
        ci = simple_index
        categories = ci.categories
        # test 0th element
        result = ci.insert(0, "a")
        expected = CategoricalIndex(list("aaabbca"), categories=categories)
        tm.assert_index_equal(result, expected, exact=True)
        # test Nth element that follows Python list behavior
        result = ci.insert(-1, "a")
        expected = CategoricalIndex(list("aabbcaa"), categories=categories)
        tm.assert_index_equal(result, expected, exact=True)
        # test empty
        result = CategoricalIndex([], categories=categories).insert(0, "a")
        expected = CategoricalIndex(["a"], categories=categories)
        tm.assert_index_equal(result, expected, exact=True)
        # invalid -> cast to object
        expected = ci.astype(object).insert(0, "d")
        result = ci.insert(0, "d")
        tm.assert_index_equal(result, expected, exact=True)
        # GH 18295 (test missing)
        expected = CategoricalIndex(["a", np.nan, "a", "b", "c", "b"])
        for na in (np.nan, pd.NaT, None):
            result = CategoricalIndex(list("aabcb")).insert(1, na)
            tm.assert_index_equal(result, expected)
    def test_insert_na_mismatched_dtype(self):
        # Inserting NaT into an integer CategoricalIndex falls back to object.
        ci = CategoricalIndex([0, 1, 1])
        result = ci.insert(0, pd.NaT)
        expected = Index([pd.NaT, 0, 1, 1], dtype=object)
        tm.assert_index_equal(result, expected)
    def test_delete(self, simple_index):
        # Delete by first index, last index, and an out-of-bounds index.
        ci = simple_index
        categories = ci.categories
        result = ci.delete(0)
        expected = CategoricalIndex(list("abbca"), categories=categories)
        tm.assert_index_equal(result, expected, exact=True)
        result = ci.delete(-1)
        expected = CategoricalIndex(list("aabbc"), categories=categories)
        tm.assert_index_equal(result, expected, exact=True)
        with tm.external_error_raised((IndexError, ValueError)):
            # Either depending on NumPy version
            ci.delete(10)
    @pytest.mark.parametrize(
        "data, non_lexsorted_data",
        [[[1, 2, 3], [9, 0, 1, 2, 3]], [list("abc"), list("fabcd")]],
    )
    def test_is_monotonic(self, data, non_lexsorted_data):
        # Monotonicity follows category order, not lexical value order.
        c = CategoricalIndex(data)
        assert c.is_monotonic_increasing is True
        assert c.is_monotonic_decreasing is False
        c = CategoricalIndex(data, ordered=True)
        assert c.is_monotonic_increasing is True
        assert c.is_monotonic_decreasing is False
        c = CategoricalIndex(data, categories=reversed(data))
        assert c.is_monotonic_increasing is False
        assert c.is_monotonic_decreasing is True
        c = CategoricalIndex(data, categories=reversed(data), ordered=True)
        assert c.is_monotonic_increasing is False
        assert c.is_monotonic_decreasing is True
        # test when data is neither monotonic increasing nor decreasing
        reordered_data = [data[0], data[2], data[1]]
        c = CategoricalIndex(reordered_data, categories=reversed(data))
        assert c.is_monotonic_increasing is False
        assert c.is_monotonic_decreasing is False
        # non lexsorted categories
        categories = non_lexsorted_data
        c = CategoricalIndex(categories[:2], categories=categories)
        assert c.is_monotonic_increasing is True
        assert c.is_monotonic_decreasing is False
        c = CategoricalIndex(categories[1:3], categories=categories)
        assert c.is_monotonic_increasing is True
        assert c.is_monotonic_decreasing is False
    def test_has_duplicates(self):
        # Duplicates are judged on the codes, including unused categories.
        idx = CategoricalIndex([0, 0, 0], name="foo")
        assert idx.is_unique is False
        assert idx.has_duplicates is True
        idx = CategoricalIndex([0, 1], categories=[2, 3], name="foo")
        assert idx.is_unique is False
        assert idx.has_duplicates is True
        idx = CategoricalIndex([0, 1, 2, 3], categories=[1, 2, 3], name="foo")
        assert idx.is_unique is True
        assert idx.has_duplicates is False
@pytest.mark.parametrize(
"data, categories, expected",
[
(
[1, 1, 1],
[1, 2, 3],
{
"first": np.array([False, True, True]),
"last": np.array([True, True, False]),
False: np.array([True, True, True]),
},
),
(
[1, 1, 1],
list("abc"),
{
"first": np.array([False, True, True]),
"last": np.array([True, True, False]),
False: np.array([True, True, True]),
},
),
(
[2, "a", "b"],
list("abc"),
{
"first": np.zeros(shape=(3), dtype=np.bool_),
"last" | : np.zeros(shape=(3), dtype=np.bool_),
False: np.zeros(shape=(3), dtype=np.bool_),
| },
),
(
list("abb"),
list("abc"),
{
"first": np.array([False, False, True]),
"last": np.array([False, True, False]),
False: np.array([False, True, True]),
},
),
],
)
def test_drop_duplicates(self, data, categories, expected):
idx = CategoricalIndex(data, categories=categories, name="foo")
for keep, e in expected.items():
tm.assert_numpy_array_equal(idx.duplicated(keep=keep), e)
e = idx[~e]
result = idx.drop_duplicates(keep=keep)
tm.assert_index_equal(result, e)
    @pytest.mark.parametrize(
        "data, categories, expected_data",
        [
            ([1, 1, 1], [1, 2, 3], [1]),
            ([1, 1, 1], list("abc"), [np.nan]),
            ([1, 2, "a"], [1, 2, 3], [1, 2, np.nan]),
            ([2, "a", "b"], list("abc"), [np.nan, "a", "b"]),
        ],
    )
    def test_unique(self, data, categories, expected_data, ordered):
        # unique() keeps the full dtype; out-of-category values become NaN.
        dtype = CategoricalDtype(categories, ordered=ordered)
        idx = CategoricalIndex(data, dtype=dtype)
        expected = CategoricalIndex(expected_data, dtype=dtype)
        tm.assert_index_equal(idx.unique(), expected)
    def test_repr_roundtrip(self):
        # eval(repr(ci)) must reconstruct an equal index for small inputs.
        ci = CategoricalIndex(["a", "b"], categories=["a", "b"], ordered=True)
        str(ci)
        tm.assert_index_equal(eval(repr(ci)), ci, exact=True)
        # formatting
        str(ci)
        # long format
        # this is not reprable
        ci = CategoricalIndex(np.random.randint(0, 5, size=100))
        str(ci)
def test_isin(self):
ci = CategoricalIndex(list("aabca") + [np.nan], categories=["c", "a", "b"])
tm.assert_numpy_array_equal(
ci.isin(["c"] |
RNAer/Calour | calour/tests/test_amplicon_experiment.py | Python | bsd-3-clause | 7,382 | 0.001761 | # ----------------------------------------------------------------------------
# Copyright (c) 2016--, Calour development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from unittest import main
from copy import deepcopy
import pandas.testing as pdt
import numpy as np
import numpy.testing as npt
from calour._testing import Tests
import calour as ca
class ExperimentTests(Tests):
def setUp(self):
super().setUp()
self.test1 = ca.read_amplicon(self.test1_biom, self.test1_ | samp,
min_reads=1000, normalize=10000)
def test_filter_by_taxonomy(self):
# default - substring and keep matching
exp = self.test1.filter_by_taxonomy('proteobacteria')
self.assertEqual(exp.shape[1], 2)
self.assertEqual(set(exp.feature_metadata.index), set(self.test1.feature_metadata.index[[2, 3]]))
# check we didn't change the samples
pdt.assert_frame_equal(exp.sample_metadata, self.t | est1.sample_metadata)
# test with list of values and negate
exp = self.test1.filter_by_taxonomy(['Firmicutes', 'proteobacteria'], negate=True)
# should have all these sequences
fids = ['AA', 'AT', 'TT', 'TG', 'GG', 'badfeature']
self.assertListEqual(fids, exp.feature_metadata.index.tolist())
def test_filter_by_fasta(self):
# test keeping the sequences from fasta
exp = self.test1.filter_by_fasta(self.seqs1_fasta)
# test we get only 1 sequence and the correct one
self.assertEqual(len(exp.feature_metadata), 1)
self.assertEqual(exp.shape[1], 1)
self.assertEqual(exp.feature_metadata.index[0], self.test1.feature_metadata.index[5])
# and same number of samples
self.assertEqual(exp.shape[0], self.test1.shape[0])
# and data is ok
data = exp.get_data(sparse=False)
orig_data = self.test1.get_data(sparse=False)
npt.assert_array_equal(data[:, 0], orig_data[:, 5])
# and is not inplace
self.assertIsNot(exp, self.test1)
def test_filter_by_fasta_negate(self):
# test removing sequences from fasta and inplace
orig_exp = deepcopy(self.test1)
exp = self.test1.filter_by_fasta(self.seqs1_fasta, negate=True, inplace=True)
# test we remove only 1 sequence and the correct one
self.assertEqual(len(exp.feature_metadata), orig_exp.shape[1] - 1)
self.assertEqual(exp.shape[1], orig_exp.shape[1] - 1)
self.assertNotIn(orig_exp.feature_metadata.index[5], exp.feature_metadata.index)
# and same number of samples
self.assertEqual(exp.shape[0], orig_exp.shape[0])
# and data is ok
data = exp.get_data(sparse=False)
orig_data = orig_exp.get_data(sparse=False)
okseqs = np.hstack([np.arange(5), np.arange(6, 12)])
npt.assert_array_equal(data, orig_data[:, okseqs])
# and is inplace
self.assertIs(exp, self.test1)
def test_sort_by_taxonomy(self):
obs = self.test1.sort_by_taxonomy()
self.assertListEqual(
['GG', 'badfeature', 'TG', 'AA', 'TT', 'GT', 'TA', 'TC', 'GA', 'AC', 'AG', 'AT'],
obs.feature_metadata.index.tolist())
self.assertListEqual(self.test1.sample_metadata.index.tolist(),
obs.sample_metadata.index.tolist())
def test_filter_orig_reads(self):
obs = self.test1.filter_orig_reads(2900)
self.assertEqual(obs.shape[0], 2)
self.assertIn('S19', obs.sample_metadata.index)
self.assertIn('S20', obs.sample_metadata.index)
self.assertEqual(obs.shape[1], self.test1.shape[1])
def test_collapse_taxonomy_kingdom(self):
res = self.test1.collapse_taxonomy(level=0)
self.assertListEqual(res.feature_metadata['taxonomy'].tolist(),
['k__Bacteria', 'Unknown', 'bad_bacteria'])
# test we did't lose any reads when grouping
npt.assert_array_almost_equal(res.get_data(sparse=False).sum(axis=1), self.test1.get_data(sparse=False).sum(axis=1))
# and all samples are there
npt.assert_equal(res.shape[0], self.test1.shape[0])
def test_collapse_taxonomy_phylum(self):
    """collapse_taxonomy(level='phylum') groups the features at the phylum level."""
    res = self.test1.collapse_taxonomy(level='phylum')
    expected = ['k__Bacteria; p__Actinobacteria', 'k__Bacteria; p__Firmicutes',
                'k__Bacteria; p__Proteobacteria', 'k__Bacteria; p__Tenericutes',
                'k__Bacteria; p__Bacteroidetes', 'Unknown;other', 'bad_bacteria;other']
    self.assertCountEqual(res.feature_metadata['taxonomy'].values, expected)
    # Grouping must not lose any reads...
    npt.assert_array_almost_equal(
        res.get_data(sparse=False).sum(axis=1),
        self.test1.get_data(sparse=False).sum(axis=1))
    # ...and every sample must still be present.
    npt.assert_equal(res.shape[0], self.test1.shape[0])
def test_get_lowest_taxonomy(self):
    """get_lowest_taxonomy() writes the most specific rank into the new column."""
    self.test1.get_lowest_taxonomy(new_field='foo')
    expected = ['g__Prevotella', 'g__Mycoplasma', 's__lividum', 's__paradoxus',
                'g__Veillonella', 'g__Capnocytophaga', 'g__Actinomyces',
                'f__[Mogibacteriaceae]', 's__moorei', 'o__Clostridiales',
                'Unknown', 'bad_bacteria']
    self.assertListEqual(expected, self.test1.feature_metadata['foo'].tolist())
def test_split_taxonomy(self):
    """split_taxonomy() adds one feature-metadata column per taxonomic rank."""
    self.test1.split_taxonomy(sep='; ')
    # Expected per-rank columns, in the same order the original test
    # asserted them (kingdom, phylum, species, genus).
    expected = {
        'kingdom': ['k__Bacteria'] * 10 + ['Unknown', 'bad_bacteria'],
        'phylum': ['p__Bacteroidetes', 'p__Tenericutes', 'p__Proteobacteria',
                   'p__Proteobacteria', 'p__Firmicutes', 'p__Bacteroidetes',
                   'p__Actinobacteria', 'p__Firmicutes', 'p__Firmicutes',
                   'p__Firmicutes', None, None],
        'species': ['s__', 's__', 's__lividum', 's__paradoxus',
                    's__', 's__', 's__', 's__', 's__moorei',
                    None, None, None],
        'genus': ['g__Prevotella', 'g__Mycoplasma', 'g__Janthinobacterium',
                  'g__Variovorax', 'g__Veillonella', 'g__Capnocytophaga',
                  'g__Actinomyces', 'g__', 'g__Bulleidia',
                  None, None, None],
    }
    for column, values in expected.items():
        self.assertListEqual(values,
                             self.test1.feature_metadata[column].tolist())
# Allow running this test module directly; main() is the test runner's
# entry point imported at the top of the file.
if __name__ == "__main__":
    main()
|
niwinz/djorm-ext-pgjson | testing/pg_json_fields/forms.py | Python | bsd-3-clause | 169 | 0 | # -*- coding: utf-8 -*-
from django.forms.models import ModelForm
| from .models import IntModel
class IntArrayForm(ModelForm):
class Meta:
model = In | tModel
|
iulian787/spack | var/spack/repos/builtin.mock/packages/dtbuild3/package.py | Python | lgpl-2.1 | 455 | 0 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project D | evelopers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Dtbuild3(Package):
    """Simple package which acts as a build dependency"""

    # Mock package metadata used by Spack's test suite.
    homepage = "http://www.example.com"
    url = "http://www.example.com/dtbuild3-1.0.tar.gz"

    # Single mock version; the md5 value is a placeholder checksum.
    # NOTE(review): the class line was extraction-garbled ("Packa | ge");
    # reconstructed as spack.Package, the conventional base class.
    version('1.0', '0123456789abcdef0123456789abcdef')
|
imincik/pkg-qgis-1.8 | python/plugins/db_manager/db_plugins/plugin.py | Python | gpl-2.0 | 32,055 | 0.035314 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : DB Manager
Description : Database manager plugin for QuantumGIS
Date : May 23, 2011
copyright : (C) 2011 by Giuseppe Sucameli
email : brush.tyler@gmail.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
*   (at your option) any later version.                                   *
* *
***************************************************************************/
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *

# NOTE(review): this import line was extraction-garbled
# ("createD | bPlugin"); reconstructed from its use in
# DBPlugin.connections() below.
from ..db_plugins import createDbPlugin
from .html_elems import HtmlParagraph, HtmlTable
class BaseError(Exception):
    """Base class for exceptions in the plugin."""

    def __init__(self, e):
        # Accept either a message-like object (str/unicode/QString) or
        # another exception, in which case its .message attribute is used.
        # NOTE(review): Python 2 / PyQt4 era code (unicode, QString,
        # Exception.message) -- not valid on Python 3.
        msg = e if isinstance(e, (str,unicode,QString)) else e.message
        try:
            msg = unicode( msg )
        except UnicodeDecodeError:
            # Byte string that is not ASCII-decodable: assume UTF-8.
            msg = unicode( msg, 'utf-8' )
        Exception.__init__(self, msg)

    def __unicode__(self):
        return self.message

    def __str__(self):
        # Route str() through __unicode__ so non-ASCII text never raises.
        return unicode(self).encode('utf-8')
class InvalidDataException(BaseError):
    # Raised for user-supplied data that fails validation (name-based;
    # raise sites are outside this view).
    pass

class ConnectionError(BaseError):
    # NOTE(review): shadows the Python 3 builtin ConnectionError; fine on
    # the Python 2 this file targets, but worth renaming on a port.
    pass
class DbError(BaseError):
    """Database error, optionally carrying the SQL query that caused it."""

    def __init__(self, e, query=None):
        BaseError.__init__(self, e)
        # Keep the offending query text (if any) for display in __unicode__.
        self.query = unicode( query ) if query != None else None

    def __unicode__(self):
        if self.query == None:
            return BaseError.__unicode__(self)

        # With a query attached, render both the error and the SQL.
        msg = u"Error:\n%s" % BaseError.__unicode__(self)
        if self.query:
            msg += u"\n\nQuery:\n%s" % self.query
        return msg
class DBPlugin(QObject):
    """One named database connection of a given backend type.

    Concrete backends override the classmethod metadata accessors
    (typeName, providerName, connectionSettingsKey, ...) and
    databasesFactory().
    """

    def __init__(self, conn_name, parent=None):
        QObject.__init__(self, parent)
        self.connName = conn_name
        # Database object; populated by connectToUri(), None until then.
        self.db = None

    def __del__(self):
        pass #print "DBPlugin.__del__", self.connName

    def connectionName(self):
        return self.connName

    def database(self):
        return self.db

    def info(self):
        # Imported lazily to avoid a circular import at module load time
        # (presumably -- TODO confirm against info_model).
        from .info_model import DatabaseInfo
        return DatabaseInfo(None)

    def connectToUri(self, uri):
        # Delegate creation to the backend factory; report success only when
        # a Database object was actually produced.
        self.db = self.databasesFactory( self, uri )
        if self.db:
            return True
        return False

    def reconnect(self):
        # If already connected, tear down the old Database and reconnect to
        # the same URI; otherwise fall back to the backend's connect().
        if self.db is not None:
            uri = self.db.uri()
            self.db.deleteLater()
            self.db = None
            return self.connectToUri( uri )
        return self.connect( self.parent() )

    @classmethod
    def icon(self):
        return None

    @classmethod
    def typeName(self):
        # return the db typename (e.g. 'postgis')
        pass

    @classmethod
    def typeNameString(self):
        # return the db typename string (e.g. 'PostGIS')
        pass

    @classmethod
    def providerName(self):
        # return the provider's name (e.g. 'postgres')
        pass

    @classmethod
    def connectionSettingsKey(self):
        # return the key used to store the connections in settings
        pass

    @classmethod
    def connections(self):
        # get the list of connections: one plugin instance per child group
        # stored under this backend's QSettings key
        conn_list = []
        settings = QSettings()
        settings.beginGroup( self.connectionSettingsKey() )
        for name in settings.childGroups():
            conn_list.append( createDbPlugin(self.typeName(), name) )
        settings.endGroup()
        return conn_list

    def databasesFactory(self, connection, uri):
        return None
class DbItemObject(QObject):
    """Base class for items shown in the DB tree; emits change signals."""

    def __init__(self, parent=None):
        QObject.__init__(self, parent)

    def database(self):
        return None

    def refresh(self):
        self.emit( SIGNAL('changed') ) # refresh the item data reading them from the db

    def aboutToChange(self):
        # Emitted before a mutating operation so views can prepare.
        self.emit( SIGNAL('aboutToChange') )

    def info(self):
        pass

    def runAction(self):
        pass

    def registerActions(self, mainWindow):
        pass
class Database(DbItemObject):
def __init__(self, dbplugin, uri):
DbItemObject.__init__(self, dbplugin)
self.connector = self.connectorsFactory( uri )
def connectorsFactory(self, uri):
return None
def __del__(self):
self.connector = None
pass #print "Database.__del__", self
def connection(self):
return self.parent()
def dbplugin(self):
return self.parent()
def database(self):
return self
def uri(self):
return self.connector.uri()
def publicUri(self):
return self.connector.publicUri()
def info(self):
from .info_model import DatabaseInfo
return DatabaseInfo(self)
def sqlResultModel(self, sql, parent):
from .data_model import SqlResultModel
return SqlResultModel(self, sql, parent)
def toSqlLayer(self, sql, geomCol, uniqueCol, layerName="QueryLayer", layerType=None):
from qgis.core import QgsMapLayer, QgsVectorLayer, QgsRasterLayer
uri = self.uri()
uri.setDataSource("", u"(%s\n)" % sql, geomCol, QString(), uniqueCol)
provider = self.dbplugin().providerName()
if layerType == QgsMapLayer.RasterLayer:
return QgsRasterLayer(uri.uri(), layerName, provider)
return QgsVectorLayer(uri.uri(), layerName, provider)
def registerAllActions(self, mainWindow):
self.registerDatabaseActions(mainWindow)
self.registerSubPluginActions(mainWindow)
def registerSubPluginActions(self, mainWindow):
# load plugins!
try:
exec( u"from .%s.plugins import load" % self.dbplugin().typeName() )
except ImportError:
pass
else:
load(self, mainWindow)
def registerDatabaseActions(self, mainWindow):
action = QAction("&Re-connect", self)
mainWindow.registerAction( action, "&Database", self.reconnectActionSlot )
if self.schemas() != None:
action = QAction("&Create schema", self)
mainWindow.registerAction( action, "&Schema", self.createSchemaActionSlot )
action = QAction("&Delete (empty) schema", self)
mainWindow.registerAction( action, "&Schema", self.deleteSchemaActionSlot )
action = QAction("Delete selected item", self)
mainWindow.registerAction( action, None, self.deleteActionSlot )
action.setShortcuts(QKeySequence.Delete)
action = QAction(QIcon(":/db_manager/actions/create_table"), "&Create table", self)
mainWindow.registerAction( action, "&Table", self.createTableActionSlot )
action = QAction(QIcon(":/db_manager/actions/edit_table"), "&Edit table", self)
mainWindow.registerAction( action, "&Table", self.editTableActionSlot )
action = QAction(QIcon(":/db_manager/actions/del_table"), "&Delete table/view", self)
mainWindow.registerAction( action, "&Table", self.deleteTableActionSlot )
action = QAction("&Empty table", self)
mainWindow.registerAction( action, "&Table", self.emptyTableActionSlot )
if self.schemas() != None:
action = QAction("&Move to schema", self)
action.setMenu( QMenu(mainWindow) )
invoke_callback = lambda: mainWindow.invokeCallback(self.prepareMenuMoveTableToSchemaActionSlot)
QObject.connect( action.menu(), SIGNAL("aboutToShow()"), invoke_callback )
mainWindow.registerAction( action, "&Table" )
def reconnectActionSlot(self, item, action, parent):
db = item.database()
db.connection().reconnect()
db.refresh()
def deleteActionSlot(self, item, action, parent):
if isinstance(item, Schema):
self.deleteSchemaActionSlot(item, action, parent)
elif isinstance(item, Table):
self.deleteTableActionSlot(item, action, parent)
else:
QApplication.restoreOverrideCursor()
QMessageBox.information(parent, "Sorry", "Cannot delete the selected item.")
QApplication.setOverrideCursor(Qt.WaitCursor)
def createSchemaActionSlot(self, item, action, parent):
QApplication.restoreOverrideCursor()
try:
if not isinstance(item, (DBPlugin, Schema, Table)) or item.database() == None:
QMessageBox.information(parent, "Sorry", "No database selected or you are not connected to it.")
return
(schema, ok) = QInputDialog.getText(parent, "New schema", "Enter new schema name")
if not ok:
return
finally:
QApplication.setOverrideCursor(Qt.WaitCursor)
self.createSchema(schema)
def deleteSchemaActionSlot(self, item, action, parent):
QApplication.restoreOverrideCursor()
try:
if not isins |
mjtamlyn/django | tests/template_tests/filter_tests/test_escapejs.py | Python | bsd-3-clause | 2,409 | 0.002906 | from django.template.defaultfilters import escapejs_filter
from django.test import SimpleTestCase
from django.utils.functional import lazy
from ..utils import setup
class EscapejsTests(SimpleTestCase):
    """Template-engine integration tests for the ``escapejs`` filter.

    NOTE(review): two extraction-garbled tokens ("ou | tput" and
    "endautoesca | pe") were reconstructed; no intended behavior changed.
    """

    @setup({'escapejs01': '{{ a|escapejs }}'})
    def test_escapejs01(self):
        output = self.engine.render_to_string('escapejs01', {'a': 'testing\r\njavascript \'string" <b>escaping</b>'})
        self.assertEqual(output, 'testing\\u000D\\u000Ajavascript '
                                 '\\u0027string\\u0022 \\u003Cb\\u003E'
                                 'escaping\\u003C/b\\u003E')

    @setup({'escapejs02': '{% autoescape off %}{{ a|escapejs }}{% endautoescape %}'})
    def test_escapejs02(self):
        # escapejs must apply identically with autoescaping disabled.
        output = self.engine.render_to_string('escapejs02', {'a': 'testing\r\njavascript \'string" <b>escaping</b>'})
        self.assertEqual(output, 'testing\\u000D\\u000Ajavascript '
                                 '\\u0027string\\u0022 \\u003Cb\\u003E'
                                 'escaping\\u003C/b\\u003E')
class FunctionTests(SimpleTestCase):
    """Direct unit tests of escapejs_filter() (no template engine involved)."""

    def test_quotes(self):
        # Both quote styles are escaped so output is safe in either context.
        self.assertEqual(
            escapejs_filter('"double quotes" and \'single quotes\''),
            '\\u0022double quotes\\u0022 and \\u0027single quotes\\u0027',
        )

    def test_backslashes(self):
        self.assertEqual(escapejs_filter(r'\ : backslashes, too'), '\\u005C : backslashes, too')

    def test_whitespace(self):
        self.assertEqual(
            escapejs_filter('and lots of whitespace: \r\n\t\v\f\b'),
            'and lots of whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008',
        )

    def test_script(self):
        # Angle brackets are escaped, preventing a </script> breakout.
        self.assertEqual(
            escapejs_filter(r'<script>and this</script>'),
            '\\u003Cscript\\u003Eand this\\u003C/script\\u003E',
        )

    def test_paragraph_separator(self):
        # U+2028/U+2029 are line terminators in JavaScript source text.
        self.assertEqual(
            escapejs_filter('paragraph separator:\u2029and line separator:\u2028'),
            'paragraph separator:\\u2029and line separator:\\u2028',
        )

    def test_lazy_string(self):
        # The filter must also handle lazily-evaluated strings.
        append_script = lazy(lambda string: r'<script>this</script>' + string, str)
        self.assertEqual(
            escapejs_filter(append_script('whitespace: \r\n\t\v\f\b')),
            '\\u003Cscript\\u003Ethis\\u003C/script\\u003E'
            'whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008'
        )
|
JoaoRodrigues/pdb-tools | tests/test_pdb_chain.py | Python | apache-2.0 | 4,623 | 0.000649 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2018 João Pedro Rodrigues
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit Tests for `pdb_chain`.
"""
import os
import sys
import unittest
from config import data_dir
from utils import OutputCapture
class TestTool(unittest.TestCase):
    """
    Generic class for testing tools.

    NOTE(review): two extraction-garbled lines in test_default
    ("data_dir | ," and "se | lf") were reconstructed; no logic changed.
    """

    def setUp(self):
        # Dynamically import the module under test.
        name = 'pdbtools.pdb_chain'
        self.module = __import__(name, fromlist=[''])

    def exec_module(self):
        """
        Execs module, capturing stdout/stderr and the SystemExit code.
        """
        with OutputCapture() as output:
            try:
                self.module.main()
            except SystemExit as e:
                self.retcode = e.code

        self.stdout = output.stdout
        self.stderr = output.stderr
        return

    def test_default(self):
        """$ pdb_chain data/dummy.pdb"""
        # Simulate input
        sys.argv = ['', os.path.join(data_dir, 'dummy.pdb')]

        # Execute the script
        self.exec_module()

        # Validate results
        self.assertEqual(self.retcode, 0)  # ensure the program exited OK.
        self.assertEqual(len(self.stdout), 204)  # no lines deleted
        self.assertEqual(len(self.stderr), 0)  # no errors

        records = ('ATOM', 'HETATM')
        chain_ids = [l[21] for l in self.stdout if l.startswith(records)]
        unique_chain_ids = list(set(chain_ids))
        # Without options the chain identifier column is blanked.
        self.assertEqual(unique_chain_ids, [' '])

    def test_two_options(self):
        """$ pdb_chain -X data/dummy.pdb"""
        sys.argv = ['', '-X', os.path.join(data_dir, 'dummy.pdb')]
        self.exec_module()

        self.assertEqual(self.retcode, 0)
        self.assertEqual(len(self.stdout), 204)
        self.assertEqual(len(self.stderr), 0)

        records = ('ATOM', 'HETATM')
        chain_ids = [l[21] for l in self.stdout if l.startswith(records)]
        unique_chain_ids = list(set(chain_ids))
        self.assertEqual(unique_chain_ids, ['X'])

    def test_file_not_found(self):
        """$ pdb_chain -A not_existing.pdb"""
        afile = os.path.join(data_dir, 'not_existing.pdb')
        sys.argv = ['', '-A', afile]
        self.exec_module()

        self.assertEqual(self.retcode, 1)  # exit code is 1 (error)
        self.assertEqual(len(self.stdout), 0)  # nothing written to stdout
        self.assertEqual(self.stderr[0][:22],
                         "ERROR!! File not found")  # proper error message

    def test_file_missing(self):
        """$ pdb_chain -A"""
        sys.argv = ['', '-A']
        self.exec_module()

        self.assertEqual(self.retcode, 1)
        self.assertEqual(len(self.stdout), 0)  # no output
        self.assertEqual(self.stderr[0],
                         "ERROR!! No data to process!")

    def test_helptext(self):
        """$ pdb_chain"""
        sys.argv = ['']
        self.exec_module()

        self.assertEqual(self.retcode, 1)  # ensure the program exited gracefully.
        self.assertEqual(len(self.stdout), 0)  # no output
        self.assertEqual(self.stderr, self.module.__doc__.split("\n")[:-1])

    def test_invalid_option(self):
        """$ pdb_chain -AH data/dummy.pdb"""
        sys.argv = ['', '-AH', os.path.join(data_dir, 'dummy.pdb')]
        self.exec_module()

        self.assertEqual(self.retcode, 1)
        self.assertEqual(len(self.stdout), 0)
        self.assertEqual(self.stderr[0][:47],
                         "ERROR!! Chain identifiers must be a single char")

    def test_not_an_option(self):
        """$ pdb_chain A data/dummy.pdb"""
        sys.argv = ['', 'A', os.path.join(data_dir, 'dummy.pdb')]
        self.exec_module()

        self.assertEqual(self.retcode, 1)
        self.assertEqual(len(self.stdout), 0)
        self.assertEqual(self.stderr[0],
                         "ERROR! First argument is not an option: 'A'")
# Running this test file directly: put the project root on sys.path first
# so the in-tree package is imported ahead of any installed copy.
if __name__ == '__main__':
    from config import test_dir

    mpath = os.path.abspath(os.path.join(test_dir, '..'))
    sys.path.insert(0, mpath)  # so we load dev files before any installation
    unittest.main()
|
crossroadchurch/paul | openlp/plugins/alerts/lib/alertsmanager.py | Python | gpl-2.0 | 4,307 | 0.002322 | # -*- coding: utf-8 -*-
# vim: autoindent shiftwidth=4 expandtab textwidth=120 tabstop=4 softtabstop=4
###############################################################################
# OpenLP - Open Source Lyrics Projection                                      #
# --------------------------------------------------------------------------- #
# Copyright (c) 2008-2015 OpenLP Developers #
# --------------------------------------------------------------------------- #
# This program is free software; you can redistribute it and/or modify it #
# under the terms of the GNU General Public License as published by the Free #
# Software Foundation; version 2 of the License. #
# #
# This program is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for #
# more details. #
# #
# You should have received a copy of the GNU General Public License along #
# with this program; if not, write to the Free Software Foundation, Inc., 59 #
# Temple Place, Suite 330, Boston, MA 02111-1307 USA #
###############################################################################
"""
The :mod:`~openlp.plugins.alerts.lib.alertsmanager` module contains the part of the plugin which manages storing and
displaying of alerts.
"""
from PyQt4 import QtCore
from openlp.core.common import OpenLPMixin, RegistryMixin, Registry, RegistryProperties, Settings, translate
class AlertsManager(OpenLPMixin, RegistryMixin, QtCore.QObject, RegistryProperties):
    """
    AlertsManager manages the settings of Alerts.
    """
    def __init__(self, parent):
        super(AlertsManager, self).__init__(parent)
        # timer_id == 0 means "no alert timer currently running".
        self.timer_id = 0
        # Queue of alert texts waiting to be displayed one at a time.
        self.alert_list = []
        Registry().register_function('live_display_active', self.generate_alert)
        Registry().register_function('alerts_text', self.alert_text)
        QtCore.QObject.connect(self, QtCore.SIGNAL('alerts_text'), self.alert_text)

    def alert_text(self, message):
        """
        Called via a alerts_text event. Message is single element array containing text.

        :param message: The message text to be displayed
        """
        if message:
            self.display_alert(message[0])

    def display_alert(self, text=''):
        """
        Called from the Alert Tab to display an alert.

        :param text: The text to display
        """
        self.log_debug('display alert called %s' % text)
        if text:
            self.alert_list.append(text)
            if self.timer_id != 0:
                # A previous alert is still on screen: leave this one queued;
                # timerEvent() will pick it up when the timer fires.
                self.main_window.show_status_message(
                    translate('AlertsPlugin.AlertsManager', 'Alert message created and displayed.'))
                return
            self.main_window.show_status_message('')
            self.generate_alert()

    def generate_alert(self):
        """
        Format and request the Alert and start the timer.
        """
        # Nothing queued, or only one screen without "display on monitor"
        # enabled: nothing to show.
        if not self.alert_list or (self.live_controller.display.screens.display_count == 1 and
                                   not Settings().value('core/display on monitor')):
            return

        text = self.alert_list.pop(0)
        alert_tab = self.parent().settings_tab
        self.live_controller.display.alert(text, alert_tab.location)
        # Check to see if we have a timer running.
        if self.timer_id == 0:
            # Timeout is configured in seconds; startTimer() wants ms.
            self.timer_id = self.startTimer(int(alert_tab.timeout) * 1000)

    def timerEvent(self, event):
        """
        Time has finished so if our time then request the next Alert if there is one and reset the timer.

        :param event: the QT event that has been triggered.
        """
        if event.timerId() == self.timer_id:
            alert_tab = self.parent().settings_tab
            # Clear the on-screen alert, stop the timer, then show the next
            # queued alert (if any).
            self.live_controller.display.alert('', alert_tab.location)
            self.killTimer(self.timer_id)
            self.timer_id = 0
            self.generate_alert()
|
JoeLaMartina/aima-python | submissions/Karman/vacuum2Runner.py | Python | mit | 6,342 | 0.00678 | import agents as ag
import envgui as gui
# change this line ONLY to refer to your project
import submissions.Karman.vacuum2 as v2
# ______________________________________________________________________________
# Vacuum environment
class Dirt(ag.Thing):
    # Marker Thing: a patch of dirt the vacuum agent can suck up.
    pass
class VacuumEnvironment(ag.XYEnvironment):
    """The environment of [Ex. 2.12]. Agent perceives dirty or clean,
    and bump (into obstacle) or not; 2D discrete world of unknown size;
    performance measure is 100 for each dirt cleaned, and -1 for
    each turn taken."""

    def __init__(self, width=4, height=3):
        super(VacuumEnvironment, self).__init__(width, height)
        # add_walls() presumably rings the grid with Wall things, leaving a
        # (width-2) x (height-2) interior -- see XYEnvironment to confirm.
        self.add_walls()

    def thing_classes(self):
        # Thing types that may appear in this environment.
        return [ag.Wall, Dirt,
                # ReflexVacuumAgent, RandomVacuumAgent,
                # TableDrivenVacuumAgent, ModelBasedVacuumAgent
                ]

    def percept(self, agent):
        """The percept is a tuple of ('Dirty' or 'Clean', 'Bump' or 'None').
        Unlike the TrivialVacuumEnvironment, location is NOT perceived."""
        # NOTE(review): despite the docstring's ordering, the returned
        # tuple is actually (bump, status).
        status = ('Dirty' if self.some_things_at(
            agent.location, Dirt) else 'Clean')
        bump = ('Bump' if agent.bump else'None')
        return (bump, status)

    def execute_action(self, agent, action):
        # 'Suck' removes one Dirt at the agent's cell for +100; any other
        # action is delegated to XYEnvironment.  Every action except 'NoOp'
        # costs one performance point.
        if action == 'Suck':
            dirt_list = self.list_things_at(agent.location, Dirt)
            if dirt_list != []:
                dirt = dirt_list[0]
                agent.performance += 100
                self.delete_thing(dirt)
        else:
            super(VacuumEnvironment, self).execute_action(agent, action)

        if action != 'NoOp':
            agent.performance -= 1
# # Launch a Text-Based Environment
# print('Two Cells, Agent on Left:')
# v = VacuumEnvironment(4, 3)
# v.add_thing(Dirt(), (1, 1))
# v.add_thing(Dirt(), (2, 1))
# a = v2.HW2Agent()
# a = ag.TraceAgent(a)
# v.add_thing(a, (1, 1))
# t = gui.EnvTUI(v)
# t.mapImageNames({
# ag.Wall: '#',
# Dirt: '@',
# ag.Agent: 'V',
# })
# t.step(0)
# t.list_things(Dirt)
# t.step(4)
# if len(t.env.get_things(Dirt)) > 0:
# t.list_things(Dirt)
# else:
# print('All clean!')
#
# # Check to continue
# if input('Do you want to continue [y/N]? ') != 'y':
# exit(0)
# else:
# print('----------------------------------------')
#
# # Repeat, but put Agent on the Right
# print('Two Cells, Agent on Right:')
# v = VacuumEnvironment(4, 3)
# v.add_thing(Dirt(), (1, 1))
# v.add_thing(Dirt(), (2, 1))
# a = v2.HW2Agent()
# a = ag.TraceAgent(a)
# v.add_thing(a, (2, 1))
# t = gui.EnvTUI(v)
# t.mapImageNames({
# ag.Wall: '#',
# Dirt: '@',
# ag.Agent: 'V',
# })
# t.step(0)
# t.list_things(Dirt)
# t.step(4)
# if len(t.env.get_things(Dirt)) > 0:
# t.list_things(Dirt)
# else:
# print('All clean!')
#
# # Check to continue
# if input('Do you want to continue [y/N]? ') != 'y':
# exit(0)
# else:
# print('----------------------------------------')
#
# # Repeat, but put Agent on the Right
# print('Two Cells, Agent on Top:')
# v = VacuumEnvironment(3, 4)
# v.add_thing(Dirt(), (1, 1))
# v.add_thing(Dirt(), (1, 2))
# a = v2.HW2Agent()
# a = ag.TraceAgent(a)
# v.add_thing(a, (1, 1))
# t = gui.EnvTUI(v)
# t.mapImageNames({
# ag.Wall: '#',
# Dirt: '@',
# ag.Agent: 'V',
# })
# t.step(0)
# t.list_things(Dirt)
# t.step(4)
# if len(t.env.get_things(Dirt)) > 0:
# t.list_things(Dirt)
# else:
# print('All clean!')
#
# # Check to continue
# if input('Do you want to continue [y/N]? ') != 'y':
# exit(0)
# else:
# print('----------------------------------------')
#
# # Repeat, but put Agent on the Right
# print('Two Cells, Agent on Bottom:')
# v = VacuumEnvironment(3, 4)
# v.add_thing(Dirt(), (1, 1))
# v.add_thing(Dirt(), (1, 2))
# a = v2.HW2Agent()
# a = ag.TraceAgent(a)
# v.add_thing(a, (1, 2))
# t = gui.EnvTUI(v)
# t.mapImageNames({
# ag.Wall: '#',
# Dirt: '@',
# ag.Agent: 'V',
# })
# t.step(0)
# t.list_things(Dirt)
# t.step(4)
# if len(t.env.get_things(Dirt)) > 0:
# t.list_things(Dirt)
# else:
# print('All clean!')
#
# # Check to continue
# if input('Do you want to continue [y/N]? ') != 'y':
# exit(0)
# else:
# print('----------------------------------------')
def testVacuum(label, w=4, h=3,
               dloc=None,
               vloc=(1, 1),
               limit=6):
    """Run one scripted text-mode vacuum scenario and report leftover Dirt.

    :param label: heading printed before the run
    :param w: environment width (outer walls included)
    :param h: environment height (outer walls included)
    :param dloc: cells that start dirty; defaults to [(1, 1), (2, 1)]
    :param vloc: agent start cell
    :param limit: number of simulation steps to run

    Prompts the user between scenarios and calls exit(0) on 'n'.
    """
    # BUG FIX: the default for dloc was a mutable list shared across calls;
    # use the None-sentinel idiom instead.
    if dloc is None:
        dloc = [(1, 1), (2, 1)]
    print(label)
    v = VacuumEnvironment(w, h)
    for loc in dloc:
        v.add_thing(Dirt(), loc)
    a = v2.HW2Agent()
    a = ag.TraceAgent(a)
    v.add_thing(a, vloc)
    t = gui.EnvTUI(v)
    t.mapImageNames({
        ag.Wall: '#',
        Dirt: '@',
        ag.Agent: 'V',
    })
    t.step(0)
    t.list_things(Dirt)
    t.step(limit)
    if len(t.env.get_things(Dirt)) > 0:
        t.list_things(Dirt)
    else:
        print('All clean!')

    # Check to continue
    if input('Do you want to continue [Y/n]? ') == 'n':
        exit(0)
    else:
        # NOTE(review): this separator string was extraction-garbled
        # ("'---- | ----..."); reconstructed from the commented-out copies
        # of this scenario earlier in the file.
        print('----------------------------------------')
# Scripted text-mode scenarios.
# NOTE(review): the 'Two Cells, Agent on Bottom' call had an
# extraction-garbled continuation line; reconstructed from its siblings.
testVacuum('Two Cells, Agent on Left:')
testVacuum('Two Cells, Agent on Right:', vloc=(2, 1))
testVacuum('Two Cells, Agent on Top:', w=3, h=4,
           dloc=[(1, 1), (1, 2)], vloc=(1, 1))
testVacuum('Two Cells, Agent on Bottom:', w=3, h=4,
           dloc=[(1, 1), (1, 2)], vloc=(1, 2))
testVacuum('Five Cells, Agent on Left:', w=7, h=3,
           dloc=[(2, 1), (4, 1)], vloc=(1, 1), limit=12)
testVacuum('Five Cells, Agent near Right:', w=7, h=3,
           dloc=[(2, 1), (3, 1)], vloc=(4, 1), limit=12)
testVacuum('Five Cells, Agent on Top:', w=3, h=7,
           dloc=[(1, 2), (1, 4)], vloc=(1, 1), limit=12)
testVacuum('Five Cells, Agent Near Bottom:', w=3, h=7,
           dloc=[(1, 2), (1, 3)], vloc=(1, 4), limit=12)
testVacuum('5x4 Grid, Agent in Top Left:', w=7, h=6,
           dloc=[(1, 4), (2, 2), (3, 3), (4, 1), (5, 2)],
           vloc=(1, 1), limit=46)
testVacuum('5x4 Grid, Agent near Bottom Right:', w=7, h=6,
           dloc=[(1, 3), (2, 2), (3, 4), (4, 1), (5, 2)],
           vloc=(4, 3), limit=46)

# Final scenario: random agent placement and scattered dirt, in a GUI window.
v = VacuumEnvironment(6, 3)
a = v2.HW2Agent()
a = ag.TraceAgent(a)
loc = v.random_location_inbounds()
v.add_thing(a, location=loc)
v.scatter_things(Dirt)
g = gui.EnvGUI(v, 'Vaccuum')  # (sic) window title kept byte-identical
c = g.getCanvas()
c.mapImageNames({
    ag.Wall: 'images/wall.jpg',
    # Floor: 'images/floor.png',
    Dirt: 'images/dirt.png',
    ag.Agent: 'images/vacuum.png',
})
c.update()
g.mainloop()
google/clusterfuzz | src/clusterfuzz/_internal/bot/untrusted_runner/remote_process.py | Python | apache-2.0 | 3,341 | 0.010177 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Remote process implementation."""
from clusterfuzz._internal.metrics import logs
from clusterfuzz._internal.protos import untrusted_runner_pb2
from clusterfuzz._internal.system import new_process
from clusterfuzz._internal.system import process_handler
from . import protobuf_utils
def process_result_to_proto(process_result):
  """Convert new_process.ProcessResult to its protobuf representation."""
  proto = untrusted_runner_pb2.ProcessResult(
      return_code=process_result.return_code,
      output=process_result.output,
      time_executed=process_result.time_executed,
      timed_out=process_result.timed_out)

  # Copy the command line into the repeated 'command' field.
  for part in process_result.command:
    proto.command.append(part)

  return proto
def run_and_wait(request, _):
  """Implementation of RunAndWait."""
  process_runner = new_process.ProcessRunner(request.executable_path,
                                             request.default_args)
  args = {}
  # Copy only the popen fields explicitly present on the request.
  protobuf_utils.get_protobuf_field(args, request.popen_args, 'bufsize')
  protobuf_utils.get_protobuf_field(args, request.popen_args, 'executable')
  protobuf_utils.get_protobuf_field(args, request.popen_args, 'shell')
  protobuf_utils.get_protobuf_field(args, request.popen_args, 'cwd')

  # env carries an explicit presence flag: an empty env map is different
  # from "inherit the parent environment" (None).
  if request.popen_args.env_is_set:
    args['env'] = request.popen_args.env
  else:
    args['env'] = None

  args['additional_args'] = request.additional_args
  protobuf_utils.get_protobuf_field(args, request, 'timeout')
  protobuf_utils.get_protobuf_field(args, request, 'terminate_before_kill')
  protobuf_utils.get_protobuf_field(args, request, 'terminate_wait_time')
  protobuf_utils.get_protobuf_field(args, request, 'input_data')
  protobuf_utils.get_protobuf_field(args, request, 'max_stdout_len')

  logs.log('Running command: %s' % process_runner.get_command())

  return untrusted_runner_pb2.RunAndWaitResponse(
      result=process_result_to_proto(process_runner.run_and_wait(**args)))
def run_process(request, _):
  """Implementation of RunProcess.

  NOTE(review): two extraction-garbled lines ("requ | est.env_copy" and
  "output | =output") were reconstructed; no logic changed.
  """
  args = {}
  # Copy only the fields explicitly present on the request.
  protobuf_utils.get_protobuf_field(args, request, 'cmdline')
  protobuf_utils.get_protobuf_field(args, request, 'current_working_directory')
  protobuf_utils.get_protobuf_field(args, request, 'timeout')
  protobuf_utils.get_protobuf_field(args, request, 'need_shell')

  if request.gestures:
    args['gestures'] = request.gestures

  if request.env_copy:
    args['env_copy'] = request.env_copy

  protobuf_utils.get_protobuf_field(args, request, 'testcase_run')
  protobuf_utils.get_protobuf_field(args, request, 'ignore_children')

  return_code, execution_time, output = process_handler.run_process(**args)

  response = untrusted_runner_pb2.RunProcessResponse(
      return_code=return_code, execution_time=execution_time, output=output)
  return response
|
plotly/python-api | packages/python/plotly/plotly/graph_objs/layout/yaxis/title/_font.py | Python | mit | 8,542 | 0.000585 | from plotly.basedatatypes import BaseLayoutHierarchyType as _BaseLayoutHierarchyType
import copy as _copy
class Font(_BaseLayoutHierarchyType):
# class properties
# --------------------
_parent_path_str = "layout.yaxis.title"
_path_str = "layout.yaxis.title.font"
_valid_props = {"color", "family", "size"}
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which | it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The Chart Studio Cl | oud (at https://chart-
studio.plotly.com or on-premise) generates images on a server,
where only a select number of fonts are installed and
supported. These include "Arial", "Balto", "Courier New",
"Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
Returns
-------
str
"""
return self["family"]
    @family.setter
    def family(self, val):
        # Delegates to __setitem__, which presumably validates the
        # non-empty-string constraint — confirm in BasePlotlyType.
        self["family"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
Returns
-------
int|float
"""
return self["size"]
    @size.setter
    def size(self, val):
        # Delegates to __setitem__, which presumably enforces the
        # [1, inf] range — confirm in BasePlotlyType.
        self["size"] = val
# Self properties description
# ---------------------------
    @property
    def _prop_descriptions(self):
        # Human-readable property summary surfaced in validation error
        # messages. The returned string is runtime data emitted to
        # users, so it is kept verbatim (including the historical
        # '"Droid Sans",,' double comma from the upstream generator).
        return """\
        color

        family
            HTML font family - the typeface that will be applied by
            the web browser. The web browser will only be able to
            apply a font if it is available on the system which it
            operates. Provide multiple font families, separated by
            commas, to indicate the preference in which to apply
            fonts if they aren't available on the system. The Chart
            Studio Cloud (at https://chart-studio.plotly.com or on-
            premise) generates images on a server, where only a
            select number of fonts are installed and supported.
            These include "Arial", "Balto", "Courier New", "Droid
            Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
            One", "Old Standard TT", "Open Sans", "Overpass", "PT
            Sans Narrow", "Raleway", "Times New Roman".
        size
        """
def __init__(self, arg=None, color=None, family=None, size=None, **kwargs):
"""
Construct a new Font object
Sets this axis' title font. Note that the title's font used to
be customized by the now deprecated `titlefont` attribute.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.layout.yaxis.title.Font`
color
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
size
Returns
-------
Font
"""
super(Font, self).__init__("font")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.layout.yaxis.title.Font
constructor must be a dict or
an instance of :class:`plotly.graph_objs.layout.yaxis.title.Font`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
_v = arg.pop("family", None)
_v = family if family is not None else _v
if _v is not None:
self["fam |
SINGROUP/pycp2k | pycp2k/classes/_qm_non_adaptive1.py | Python | lgpl-3.0 | 686 | 0.001458 | from pycp2k.inputsection import InputSection
from ._qm_kind1 import _qm_kind1
class _qm_non_adaptive1(InputSection):
    """CP2K input section 'QM_NON_ADAPTIVE' holding repeatable QM_KIND
    subsections (this class is auto-generated by pycp2k)."""

    def __init__(self):
        InputSection.__init__(self)
        self.QM_KIND_list = []
        self._name = "QM_NON_ADAPTIVE"
        self._repeated_subsections = {'QM_KIND': '_qm_kind1'}
        self._attributes = ['QM_KIND_list']

    def QM_KIND_add(self, section_parameters=None):
        """Append a new QM_KIND subsection and return it.

        The committed signature was corrupted (`section_pa | rameters`);
        reconstructed from the hasattr check in the body.

        Parameters
        ----------
        section_parameters : optional
            If given (and the subsection supports it), stored on the new
            subsection's ``Section_parameters`` attribute.
        """
        new_section = _qm_kind1()
        if section_parameters is not None:
            if hasattr(new_section, 'Section_parameters'):
                new_section.Section_parameters = section_parameters
        self.QM_KIND_list.append(new_section)
        return new_section
|
hpcloud-mon/monasca-perf | influx_test/influxparawrite.py | Python | apache-2.0 | 1,308 | 0.017584 | from multiprocessing import Process
import time
import sys
# NOTE: count must be a multiple of env.numThreads, else tests will likely
# fail with the wrong count(s).
count = 1000
actionWaitTime = 5


class InfluxParaWrite(object):
    """Drive a batch of writes against one influx node while a disruptive
    action (e.g. a node restart) is applied to another node part-way
    through the run.
    """

    def __init__(self, env):
        self.env = env
        self.count = count
        # The write load is presumably split evenly across
        # env.numThreads workers (confirm in env.sendMultipleMetrics),
        # so an uneven split would make the count checks meaningless.
        if self.count % self.env.numThreads != 0:
            print("influxparawrite.count is not a multiple of env.numThreads")
            sys.exit(1)
        self.actionWaitTime = actionWaitTime

    def start(self, write_node, action_node, action, tsname):
        """Run the writes and the action in parallel processes and wait
        for both to complete."""
        self.write_node = write_node
        self.action_node = action_node
        self.action = action
        self.tsname = tsname
        p_write = Process(target=self.doWrites, args=(write_node, tsname))
        p_write.start()
        p_action = Process(target=self.doAction, args=(action_node, action))
        p_action.start()
        p_action.join()
        p_write.join()

    def doAction(self, node, action):
        """After `actionWaitTime` seconds, invoke the env method named
        `action` on `node`. An empty action name is a no-op.

        Raises
        ------
        Exception
            If the environment does not implement `action`.
        """
        if len(action) == 0:
            return
        time.sleep(self.actionWaitTime)
        # Use a getattr default: without it, a missing handler raised
        # AttributeError before the "not implemented" guard could fire.
        method = getattr(self.env, action, None)
        if not method:
            raise Exception("Method %s not implemented" % action)
        method(node)

    def doWrites(self, node, tsname):
        """Send the full batch of `count` metrics to `node`."""
        self.env.sendMultipleMetrics(node, tsname, self.count)
|
dana-i2cat/felix | optin_manager/src/python/openflow/common/permissions/tests/views.py | Python | apache-2.0 | 3,108 | 0.002574 | '''
Created on Jun 8, 2010
Contains views for permissions tests
@author: jnaous
'''
from django.shortcuts import get_object_or_404
from models import PermissionTestClass
from ..decorators import require_objs_permissions_for_view
from ..utils import get_user_from_req, get_queryset
from django.http import HttpResponse, HttpResponseRedirect
from django.views.generic import create_update
from django.core.urlresolvers import reverse
from expedient.common.permissions.utils import give_permission_to,\
get_queryset_from_class
from django.contrib.csrf.middleware import csrf_exempt
@csrf_exempt
@require_objs_permissions_for_view(
    ["can_get_x2", "can_read_val"],
    get_user_from_req,
    get_queryset(PermissionTestClass, "obj_id"),
)
def test_view_x2(request, obj_id=None):
    """Render get_val_x2 of the PermissionTestClass row named by obj_id.

    The decorator requires the requesting user to hold both the
    "can_get_x2" and "can_read_val" permissions on that object; 404s
    if the object does not exist.
    """
    obj = get_object_or_404(PermissionTestClass, pk=obj_id)
    return HttpResponse("%s" % obj.get_val_x2(user_kw=request.user))
@require_objs_permissions_for_view(
    ["can_add"],
    get_user_from_req,
    get_queryset_from_class(PermissionTestClass),
    ["POST"],
)
def test_view_create(request):
    """Generic create view for PermissionTestClass.

    Only POST requests are permission-checked ("can_add" on the class
    queryset); GETs render the empty form template.
    """
    return create_update.create_object(
        request, PermissionTestClass,
        template_name="permissions/empty.html",
        post_save_redirect=reverse("test_view_crud"),
    )
def test_protected_url(request):
    # Minimal view returning a fixed body; presumably the target of
    # URL-level permission tests (reaching it at all is the assertion).
    return HttpResponse("Worked")
@require_objs_permissions_for_view(
    ["can_set_val"],
    get_user_from_req,
    get_queryset(PermissionTestClass, 'obj_id'),
    ["POST"],
)
@require_objs_permissions_for_view(
    ["can_read_val"],
    get_user_from_req,
    get_queryset(PermissionTestClass, 'obj_id'),
    ["GET"],
)
def test_view_update(request, obj_id=None):
    """Generic update view for PermissionTestClass.

    Stacked decorators split the check by method: POST requires
    "can_set_val", GET requires "can_read_val", both against the object
    named by obj_id.
    """
    return create_update.update_object(
        request, PermissionTestClass,
        object_id=obj_id,
        template_name="permissions/empty.html",
        post_save_redirect=reverse("test_view_update",
                                   kwargs=dict(obj_id=obj_id)),
    )
@csrf_exempt
def add_perms_view(request, permission, user, target, redirect_to=None):
    """On POST, grant `permission` on `target` to `user` and redirect
    (to `redirect_to`, defaulting to the CRUD test view); otherwise
    render a yes/no confirmation form.
    """
    if request.method == "POST":
        give_permission_to(user, permission, target)
        redirect_to = redirect_to or reverse("test_view_crud")
        return HttpResponseRedirect(redirect_to)
    else:
        return HttpResponse(
            """
            Do you want to get permissions to create PermissionTestClass instances?
            <form action="" method="POST">
            <input type="submit" value="Yes" />
            <input type="button" value="No" onclick="document.location='%s'" />
            </form>
            """ % reverse("test_view_crud"))
@csrf_exempt
def other_perms_view(request, permission, user, target, redirect_to=None):
    """On POST, grant `permission` on `target` to `user` and redirect
    (to `redirect_to`, defaulting to the CRUD test view); otherwise
    render a confirmation form naming the permission and target.
    """
    # The committed source had a corrupted method check ("POS | T");
    # reconstructed to the intended POST comparison.
    if request.method == "POST":
        give_permission_to(user, permission, target)
        redirect_to = redirect_to or reverse("test_view_crud")
        return HttpResponseRedirect(redirect_to)
    else:
        return HttpResponse(
            """
            Do you want to get %s permission for obj %s?
            <form action="" method="POST">
            <input type="submit" value="Yes" />
            <input type="button" value="No" onclick="document.location='%s'" />
            </form>
            """ % (permission.name, target, reverse("test_view_crud")))
|
rentalita/django-layoutdemo | src/python/layoutdemo/default/urls.py | Python | mit | 276 | 0.003623 | # -*- coding: utf-8 -*-
from django.conf.urls.defaults import patterns, url

from . import tasks
from . import views

# Route table for the layoutdemo 'default' app; only the index view is
# exposed. (The committed import and url() lines were corrupted by
# stray '|' markers; reconstructed here.)
urlpatterns = patterns('',
    url(r'^$', views.index, name='layoutdemo_index'),
)

# Local Variables:
# indent-tabs-mode: nil
# End:
# vim: ai et sw=4 ts=4
|
unicef/un-partner-portal | backend/unpp_api/apps/externals/urls.py | Python | apache-2.0 | 707 | 0.002829 | from django.conf.urls import url
from externals.views import (
PartnerVendorNumberAPIView,
PartnerExternalDetailsAPIView,
PartnerBasicInfoAPIView,
)
urlpatterns = [
url(r'^vendor-number/partner/$', PartnerVendorNumberAPIView.as_view(), name="vendor-number-create"),
url(r'^vendor-number/partner/(?P<pk>\d+)/$', PartnerVendorNumberAPIView.as_view(), name="vendor-number-details"),
url(
| r'^partner-details/(?P<agency_id>\d+)/(?P<partner_id>\d+)/$',
PartnerExternalDetailsAPIView.as_view(),
| name="partner-external-details"
),
url(
r'^partner-basic-info/$',
PartnerBasicInfoAPIView.as_view(),
name="partner-basic-info"
),
]
|
rushter/MLAlgorithms | examples/nnet_rnn_binary_add.py | Python | mit | 2,518 | 0.001589 | import logging
from itertools import combinations, islice
import numpy as np
try:
from sklearn.model_selection import train_test_split
except ImportError:
from sklearn.cross_validation import train_test_split
from mla.metrics import accuracy
from mla.neuralnet import NeuralNet
from mla.neuralnet.layers import Activation, TimeDistributedDense
from mla.neuralnet.layers.recurrent import LSTM
from mla.neuralnet.optimizers import Adam
logging.basicConfig(level=logging.DEBUG)
def addition_dataset(dim=10, n_samples=10000, batch_size=64):
    """Generate a binary addition dataset.

    http://devankuleindiren.com/Projects/rnn_arithmetic.php

    Parameters
    ----------
    dim : int
        Number of bits per operand and per target sum.
    n_samples : int
        Maximum number of (a, b) pairs to generate.
    batch_size : int
        Each split is truncated to a multiple of this value.

    Returns
    -------
    X_train, X_test, y_train, y_test : np.ndarray
        Inputs shaped (n, dim, 2) and targets shaped (n, dim, 1);
        bits are stored least-significant first (note the reversed()).
    """
    binary_format = "{:0" + str(dim) + "b}"

    # All pairs of numbers small enough that their sum fits in dim bits.
    combs = list(islice(combinations(range(2 ** (dim - 1)), 2), n_samples))

    X = np.zeros((len(combs), dim, 2), dtype=np.uint8)
    y = np.zeros((len(combs), dim, 1), dtype=np.uint8)

    for i, (a, b) in enumerate(combs):
        # Convert numbers to binary format, least-significant bit first.
        X[i, :, 0] = list(reversed([int(x) for x in binary_format.format(a)]))
        X[i, :, 1] = list(reversed([int(x) for x in binary_format.format(b)]))
        # Target variable is the bit pattern of (a + b).
        y[i, :, 0] = list(reversed([int(x) for x in binary_format.format(a + b)]))

    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=1111)

    # Round number of examples down to a multiple of batch_size so the
    # model can consume full batches only.
    train_b = (X_train.shape[0] // batch_size) * batch_size
    test_b = (X_test.shape[0] // batch_size) * batch_size
    X_train = X_train[0:train_b]
    y_train = y_train[0:train_b]
    X_test = X_test[0:test_b]
    y_test = y_test[0:test_b]

    return X_train, X_test, y_train, y_test
def addition_problem(ReccurentLayer):
    """Train the given recurrent layer on 8-bit binary addition and
    print the packed-bit test accuracy.

    Parameters
    ----------
    ReccurentLayer : layer instance
        The recurrent layer (e.g. RNN or LSTM) placed first in the net.
    """
    X_train, X_test, y_train, y_test = addition_dataset(8, 5000)

    print(X_train.shape, X_test.shape)

    model = NeuralNet(
        layers=[ReccurentLayer, TimeDistributedDense(1), Activation("sigmoid")],
        loss="mse",
        optimizer=Adam(),
        metric="mse",
        batch_size=64,
        max_epochs=15,
    )
    model.fit(X_train, y_train)

    # Threshold the sigmoid outputs to bits, then pack 8 bits per byte
    # so accuracy is measured per predicted sum, not per bit.
    predictions = np.round(model.predict(X_test))
    predictions = np.packbits(predictions.astype(np.uint8))
    # np.int was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin int is the documented replacement.
    y_test = np.packbits(y_test.astype(int))
    print(accuracy(y_test, predictions))
# RNN
# addition_problem(RNN(16, parameters=Parameters(constraints={'W': SmallNorm(), 'U': SmallNorm()})))
# LSTM
addition_problem(LSTM(16))
|
hypernicon/pyec | pyec/tests/test_history.py | Python | mit | 1,736 | 0.011521 | """
Copyright (C) 2012 Alan J Lockett
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import numpy as np
from pyec.config import Config
import unittest
class TestHistory(unittest.TestCase):
    """Smoke test for the default state of a freshly constructed History."""

    def test_base(self):
        cfg = Config()
        # NOTE(review): History is never imported in this module (only
        # Config is), so this raises NameError as written; presumably an
        # import such as `from pyec.history import History` is missing —
        # confirm against the pyec package layout.
        history = History(cfg)
        self.assertEqual(history.evals, 0)
        # NOTE(review): the trailing 'L' is a Python 2 long literal, so
        # this file is Python-2 only as written.
        self.assertEqual(history.printEvery, 1000000000000L)
        self.assertEqual(history.updates, 0)
        self.assertEqual(history.minScore, np.inf)
        self.assertTrue(history.minSolution is None)
        self.assertEqual(history.maxScore, -np.inf)
        self.assertTrue(history.maxSolution is None)
        self.assertTrue(history.empty())
        self.assertTrue(history.config is cfg)
        self.assertTrue(history.cache is not None)
|
mibitzi/pwm | test/test_workspaces.py | Python | mit | 8,632 | 0 | # Copyright (c) 2013 Michael Bitzi
# Licensed under the MIT license http://opensource.org/licenses/MIT
import unittest
from unittest.mock import create_autospec, patch
from pwm.ffi.xcb import xcb
import pwm.bar
import pwm.workspaces
import pwm.windows
from pwm.config import config
import test.util as util
class TestWorkspace(unittest.TestCase):
    """Unit tests for a single pwm workspace.

    setUp replaces the workspace's tiling/floating/fullscreen layout
    objects with autospec mocks, so these tests assert how the
    workspace *delegates* to its layouts rather than exercising real
    layout logic.
    """

    def setUp(self):
        util.setup()
        self.workspace = pwm.workspaces.current()
        # Swap in autospec mocks for the three layout collaborators.
        self.workspace.tiling = create_autospec(self.workspace.tiling)
        self.workspace.floating = create_autospec(self.workspace.floating)
        self.workspace.fullscreen = create_autospec(self.workspace.fullscreen)

        self.tiling = self.workspace.tiling
        # Default path lookup: column 0, row 0.
        self.tiling.path.return_value = (0, 0)
        self.floating = self.workspace.floating
        self.fullscreen = self.workspace.fullscreen

    def tearDown(self):
        util.tear_down()

    def test_geometry(self):
        # Workspace occupies the screen minus the primary bar.
        self.assertEqual(self.workspace.x, 0)
        self.assertEqual(self.workspace.y, pwm.bar.primary.height)
        self.assertEqual(self.workspace.width, xcb.screen.width_in_pixels)
        self.assertEqual(
            self.workspace.height,
            xcb.screen.height_in_pixels - pwm.bar.primary.height)

    def test_add_window_fullscreen(self):
        wid = util.create_window(manage=False, fullscreen=True)
        self.workspace.add_window(wid)
        self.fullscreen.add_window.assert_called_once_with(wid)

    def test_add_window_floating(self):
        wid = util.create_window(floating=True)
        self.floating.add_window.assert_called_once_with(wid)

    def test_add_window_tiling_empty(self):
        window = util.create_window()
        self.tiling.add_window.assert_called_once_with(window, 0, -1)

    def test_add_window_tiling_below_focus(self):
        wid = util.create_window()
        util.create_window(floating=True)
        self.tiling.add_window.reset_mock()
        self.tiling.windows = [wid]

        with patch.object(self.tiling, "path", return_value=(1, 2)) as path:
            window = util.create_window()
            path.assert_called_once_with(wid)
            # New window goes into the focused column, one row below.
            self.tiling.add_window.assert_called_once_with(window, 1, 3)

    def test_add_window_added(self):
        window = util.create_window()
        self.assertIn(window, self.workspace.windows)

    def test_add_window_map_if_current(self):
        window = util.create_window()
        self.assertTrue(pwm.windows.is_mapped(window))

    def test_add_window_dont_map_if_not_current(self):
        window = util.create_window(manage=False)
        pwm.workspaces.switch(1)
        self.workspace.add_window(window)
        self.assertFalse(pwm.windows.is_mapped(window))

    def test_remove_window(self):
        win = util.create_window(manage=False)
        self.workspace.add_window(win)
        self.workspace.remove_window(win)
        self.assertNotIn(win, self.workspace.windows)

    def test_show(self):
        wid = util.create_window()
        self.workspace.hide()
        self.workspace.show()
        self.assertTrue(pwm.windows.is_mapped(wid))

    def test_hide(self):
        wid = util.create_window()
        self.workspace.hide()
        self.assertFalse(pwm.windows.is_mapped(wid))

    def test_hide_ignore_unmaps(self):
        wid = util.create_window()
        self.workspace.hide()
        self.assertEqual(pwm.windows.managed[wid].ignore_unmaps, 1)

    def test_top_focus_priority(self):
        wid1 = util.create_window()
        wid2 = util.create_window()
        wid3 = util.create_window()
        self.workspace.handle_focus(wid1)
        self.workspace.handle_focus(wid2)
        self.workspace.handle_focus(wid3)
        self.assertEqual(self.workspace.top_focus_priority(), wid3)
        self.workspace.handle_focus(wid2)
        self.assertEqual(self.workspace.top_focus_priority(), wid2)

    def test_handle_focus(self):
        # Most recently focused window moves to the end of the list.
        wid1 = util.create_window()
        wid2 = util.create_window()
        wid3 = util.create_window()
        self.workspace.handle_focus(wid1)
        self.assertEqual(self.workspace.windows, [wid2, wid3, wid1])
        self.workspace.handle_focus(wid3)
        self.assertEqual(self.workspace.windows, [wid2, wid1, wid3])

    def test_toggle_floating_floating(self):
        wid = util.create_window(floating=True)
        self.workspace.toggle_floating(wid)
        self.tiling.add_window.assert_called_once_with(wid)

    def test_toggle_floating_tiling(self):
        wid = util.create_window()
        self.workspace.toggle_floating(wid)
        self.floating.add_window.assert_called_once_with(wid)

    def test_toggle_focus_layer(self):
        wid_float = util.create_window(floating=True)
        util.create_window()
        with patch.object(pwm.windows, "focus") as focus:
            self.workspace.toggle_focus_layer()
            focus.assert_called_once_with(wid_float)

    def test_toggle_fullscreen_add(self):
        wid = util.create_window()
        with patch.object(self.workspace, "add_fullscreen") as add:
            self.workspace.toggle_fullscreen(wid)
            add.assert_called_once_with(wid)

    def test_toggle_fullscreen_remove(self):
        wid = util.create_window()
        pwm.windows.managed[wid].fullscreen = True
        with patch.object(self.workspace, "remove_fullscreen") as rem:
            self.workspace.toggle_fullscreen(wid)
            rem.assert_called_once_with(wid)

    def test_add_fullscreen(self):
        wid = util.create_window()
        self.workspace.add_fullscreen(wid)
        self.fullscreen.add_window.assert_called_once_with(wid)

    def test_add_fullscreen_remove(self):
        wid = util.create_window(floating=True)
        with patch.object(self.workspace, "_proxy_layout") as proxy:
            self.workspace.add_fullscreen(wid)
            proxy.assert_called_once_with("remove_window", wid)

    def test_remove_fullscreen(self):
        wid = util.create_window(fullscreen=True)
        self.workspace.remove_fullscreen(wid)
        self.fullscreen.remove_window.assert_called_once_with(wid)

    def test_remove_fullscreen_add_tiling(self):
        wid = util.create_window(fullscreen=True)
        self.tiling.reset_mock()
        self.workspace.remove_fullscreen(wid)
        self.tiling.add_window.assert_called_once_with(wid)

    def test_remove_fullscreen_add_floating(self):
        wid = util.create_window(floating=True, fullscreen=True)
        self.floating.reset_mock()
        self.workspace.remove_fullscreen(wid)
        self.floating.add_window.assert_called_once_with(wid)

    def test_is_urgent(self):
        wid = util.create_window()
        pwm.windows.managed[wid].urgent = True
        self.assertTrue(self.workspace.is_urgent())
class TestWorkspaces(unittest.TestCase):
    """Tests for module-level workspace management in pwm.workspaces.

    Two assertion lines in the committed source were corrupted by stray
    '|' markers (in test_destroy and test_switch); reconstructed here.
    """

    def setUp(self):
        util.setup()

    def tearDown(self):
        util.tear_down()

    def test_setup(self):
        # setup() was already called in setUp
        self.assertEqual(len(pwm.workspaces.workspaces), config.workspaces)
        self.assertEqual(pwm.workspaces.current(),
                         pwm.workspaces.workspaces[0])

    def test_destroy(self):
        pwm.workspaces.destroy()
        self.assertEqual(len(pwm.workspaces.workspaces), 0)

    def test_switch(self):
        pwm.workspaces.switch(1)
        self.assertEqual(pwm.workspaces.current(),
                         pwm.workspaces.workspaces[1])

    def test_switch_focus(self):
        util.create_window()
        pwm.workspaces.switch(1)
        self.assertEqual(pwm.windows.focused, None)

    def test_opened(self):
        # Create window on current workspace (idx=0)
        util.create_window()
        pwm.workspaces.switch(5)

        active = [i for i in pwm.workspaces.opened()]
        self.assertEqual(len(active), 2)
        self.assertEqual(active[0][0], 0)
        self.assertEqual(active[1][0], 5)

    def test_send_window_to(self):
        wid = util.create_window()
        pwm.workspaces.send_window_to(wid, 1)
        self.assertIn(wid, pwm.workspaces.workspaces[1].windows)
def test_send_window_to_ignore_unmap(self):
wid = util.create_window()
|
vgonisanz/piperoScripts | python2/usage/usage-withconf-sample.py | Python | mit | 803 | 0.011208 | from configur | ation import *
import argparse
def print_configuration_vars():
    """Print the configuration variables star-imported from the
    ``configuration`` module (var_foo, var_var, var_home)."""
    print "This prints configuration variables:"
    print "-------------------------------------"
    # var_foo/var_var/var_home come from `from configuration import *`;
    # presumably all are strings (they are concatenated with +) — confirm
    # in configuration.py.
    print "var_foo value is: " + var_foo
    print "var_var value is: " + var_var
    print "var_home value is: " + var_home
    return
# Build a CLI with mutually exclusive -v/--verbose and -q/--quiet flags.
parser = argparse.ArgumentParser(description="Load easy configuration")
group = parser.add_mutually_exclusive_group()
group.add_argument("-v", "--verbose", action="store_true")
group.add_argument("-q", "--quiet", action="store_true")
args = parser.parse_args()

if args.quiet:
    # Quiet mode prints a single marker line and nothing else.
    print "quiet = nothing to say"
elif args.verbose:
    print "\nThis sample talk a lot....\nOk, lets go\n"
    print_configuration_vars()
else:
    # Default (no flags): just dump the configuration.
    print_configuration_vars()
|
drdoctr/doctr | doctr/_version.py | Python | mit | 15,755 | 0 |
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.15 (https://github.com/warner/python-versioneer)
import errno
import os
import re
import subprocess
import sys
def get_keywords():
    """Return the git-archive substitution keywords for this tree."""
    # These strings are replaced by git during 'git archive'.
    # setup.py/versioneer.py grep for the variable names, so each
    # assignment must stay on its own line under its original name.
    git_refnames = "$Format:%d$"
    git_full = "$Format:%H$"
    return {"refnames": git_refnames, "full": git_full}
class VersioneerConfig:
    """Plain attribute container for the parameters versioneer needs."""


def get_config():
    """Build the static configuration baked in by 'setup.py versioneer'."""
    cfg = VersioneerConfig()
    for key, value in [("VCS", "git"),
                       ("style", "pep440"),
                       ("tag_prefix", ""),
                       ("parentdir_prefix", ""),
                       ("versionfile_source", "doctr/_version.py"),
                       ("verbose", False)]:
        setattr(cfg, key, value)
    return cfg
class NotThisMethod(Exception):
    """Raised by a version-discovery strategy that does not apply here,
    so the caller can fall through to the next strategy."""
    pass
LONG_VERSION_PY = {}
HANDLERS = {}


def register_vcs_handler(vcs, method):  # decorator
    """Decorator factory: file the decorated function away under
    HANDLERS[vcs][method] and return it unchanged."""
    def decorate(f):
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    """Try each candidate executable in *commands* with *args*; return
    the stripped stdout of the first one that launches, or None if none
    could be launched or the process exited non-zero."""
    assert isinstance(commands, list)
    process = None
    displayed = None
    for candidate in commands:
        full_cmd = [candidate] + args
        displayed = str(full_cmd)
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen(full_cmd, cwd=cwd,
                                       stdout=subprocess.PIPE,
                                       stderr=(subprocess.PIPE if hide_stderr
                                               else None))
        except EnvironmentError:
            err = sys.exc_info()[1]
            if err.errno == errno.ENOENT:
                # Candidate not installed; try the next spelling.
                continue
            if verbose:
                print("unable to run %s" % displayed)
                print(err)
            return None
        break
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    output = process.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        output = output.decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % displayed)
        return None
    return output
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Extract a version from the name of *root*'s directory.

    Source tarballs conventionally unpack into '<prefix><version>'
    directories; raises NotThisMethod when the name doesn't match.
    """
    dirname = os.path.basename(root)
    if dirname.startswith(parentdir_prefix):
        return {"version": dirname[len(parentdir_prefix):],
                "full-revisionid": None,
                "dirty": False, "error": None}
    if verbose:
        print("guessing rootdir is '%s', but '%s' doesn't start with "
              "prefix '%s'" % (root, dirname, parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Grep the git_refnames/git_full keyword values out of a file.

    The code embedded in _version.py can just fetch the values of these
    keywords directly; when used from setup.py we don't want to import
    _version.py, so this greps with a regexp instead. Returns a dict
    with whatever keywords were found (possibly empty).
    """
    keywords = {}
    try:
        # 'with' guarantees the handle is closed even if a read fails
        # mid-loop; the original open()/close() pair leaked it on error.
        with open(versionfile_abs, "r") as f:
            for line in f.readlines():
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
    except EnvironmentError:
        # Missing/unreadable file: return whatever was collected; the
        # caller treats an incomplete dict as "no keywords".
        pass
    return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Derive a version dict from expanded git-archive keywords.

    Raises NotThisMethod when the keywords are absent or unexpanded
    (i.e. this is not a git-archive tarball). Falls back to
    "0+unknown" plus the full revision id when no tag matches
    tag_prefix.
    """
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = set([r.strip() for r in refnames.strip("()").split(",")])
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set([r for r in refs if re.search(r'\d', r)])
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs-tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None
                    }
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags"}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# this runs 'git' from the root of the source tree. This only gets called
# if the git-archive 'subst' keywords were *not* expanded, and
# _version.py hasn't already been rewritten with a short version string,
# meaning we're inside a checked out source tree.
if not os.path.exists(os.path.join(root, ".git")):
if verbose:
print("no .git in %s" % root)
raise NotThisMethod("no .git directory")
GITS = ["git"]
| if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
# if there is a tag, this yields TAG-NUM-gHEX[-dirty]
# if there are no tags, | this yields HEX[-dirty] (no NUM)
describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long"],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX |
youtube/cobalt | build/android/test_wrapper/logdog_wrapper.py | Python | bsd-3-clause | 4,633 | 0.00885 | #!/usr/bin/env vpython
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper for adding logdog streaming support to swarming tasks."""
import argparse
import contextlib
import logging
import os
import signal
import subprocess
import sys
_SRC_PATH = os.path.abspath(os.path.join(
os.path.dirname(__file__), '..', '..', '..'))
sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'devil'))
sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'catapult', 'common',
'py_utils'))
from devil.utils import signal_handler
from devil.utils import timeout_retry
from py_utils import tempfile_ext
PROJECT = 'chromium'
OUTPUT = 'logdog'
COORDINATOR_HOST = 'luci-logdog.appspot.com'
SERVICE_ACCOUNT_JSON = ('/creds/service_accounts'
'/service-account-luci-logdog-publisher.json')
LOGDOG_TERMINATION_TIMEOUT = 30
def CommandParser():
  """Build the wrapper's argument parser.

  --target and --script are mutually exclusive; --logdog-bin-cmd is
  required. Unrecognized arguments are the wrapped test command.
  """
  arg_parser = argparse.ArgumentParser()
  exclusive = arg_parser.add_mutually_exclusive_group()
  exclusive.add_argument(
      '--target',
      help='The test target to be run. If neither target nor script are set,'
           ' any extra args passed to this script are assumed to be the'
           ' full test command to run.')
  exclusive.add_argument(
      '--script',
      help='The script target to be run. If neither target nor script are set,'
           ' any extra args passed to this script are assumed to be the'
           ' full test command to run.')
  arg_parser.add_argument('--logdog-bin-cmd', required=True,
                          help='The logdog bin cmd.')
  return arg_parser
def CreateStopTestsMethod(proc):
  """Return a signal handler that forwards any received signal to proc."""
  def _forward(signum, _frame):
    logging.error('Forwarding signal %s to test process', str(signum))
    proc.send_signal(signum)
  return _forward
@contextlib.contextmanager
def NoLeakingProcesses(popen):
  """Yield *popen*; on exit, best-effort kill it if it is still running.

  A popen of None is accepted and yields None with no cleanup.
  """
  try:
    yield popen
  finally:
    if popen is not None:
      try:
        # poll() is None while the process is still alive.
        if popen.poll() is None:
          popen.kill()
      except OSError:
        logging.warning('Failed to kill %s. Process may be leaked.',
                        str(popen.pid))
def main():
  """Entry point: run the wrapped test command with logdog streaming.

  Two `extra_cmd_args` lines in the committed source were corrupted by
  stray '|' markers; reconstructed here.
  """
  parser = CommandParser()
  args, extra_cmd_args = parser.parse_known_args(sys.argv[1:])

  logging.basicConfig(level=logging.INFO)

  # Build the test command: a named target, a script, or the raw
  # pass-through arguments.
  if args.target:
    test_cmd = [os.path.join('bin', 'run_%s' % args.target), '-v']
    test_cmd += extra_cmd_args
  elif args.script:
    test_cmd = [args.script]
    test_cmd += extra_cmd_args
  else:
    test_cmd = extra_cmd_args
  test_env = dict(os.environ)
  logdog_cmd = []

  with tempfile_ext.NamedTemporaryDirectory(
      prefix='tmp_android_logdog_wrapper') as temp_directory:
    if not os.path.exists(args.logdog_bin_cmd):
      logging.error(
          'Logdog binary %s unavailable. Unable to create logdog client',
          args.logdog_bin_cmd)
    else:
      streamserver_uri = 'unix:%s' % os.path.join(temp_directory,
                                                  'butler.sock')
      prefix = os.path.join('android', 'swarming', 'logcats',
                            os.environ.get('SWARMING_TASK_ID'))

      logdog_cmd = [
          args.logdog_bin_cmd,
          '-project', PROJECT,
          '-output', OUTPUT,
          '-prefix', prefix,
          '--service-account-json', SERVICE_ACCOUNT_JSON,
          '-coordinator-host', COORDINATOR_HOST,
          'serve',
          '-streamserver-uri', streamserver_uri]
      test_env.update({
          'LOGDOG_STREAM_PROJECT': PROJECT,
          'LOGDOG_STREAM_PREFIX': prefix,
          'LOGDOG_STREAM_SERVER_PATH': streamserver_uri,
          'LOGDOG_COORDINATOR_HOST': COORDINATOR_HOST,
      })

    logdog_proc = None
    if logdog_cmd:
      logdog_proc = subprocess.Popen(logdog_cmd)

    with NoLeakingProcesses(logdog_proc):
      with NoLeakingProcesses(
          subprocess.Popen(test_cmd, env=test_env)) as test_proc:
        with signal_handler.SignalHandler(signal.SIGTERM,
                                          CreateStopTestsMethod(test_proc)):
          result = test_proc.wait()
          if logdog_proc:
            def logdog_stopped():
              return logdog_proc.poll() is not None

            # Ask logdog to shut down cleanly and wait for it.
            logdog_proc.terminate()
            timeout_retry.WaitFor(logdog_stopped, wait_period=1,
                                  max_tries=LOGDOG_TERMINATION_TIMEOUT)

            # If logdog_proc hasn't finished by this point, allow
            # NoLeakingProcesses to kill it.
  return result
if __name__ == '__main__':
sys.exit(main())
|
CospanDesign/nysa-gui | NysaGui/common/nysa_bus_view/wishbone_controller.py | Python | gpl-2.0 | 21,215 | 0.009522 | # -*- coding: utf-8 -*-
# Distributed under the MIT licesnse.
# Copyright (c) 2013 Dave McCoy (dave.mccoy@cospandesign.com)
#Permission is hereby granted, free of charge, to any person obtaining a copy of
#this software and associated documentation files (the "Software"), to deal in
#the Software without restriction, including without limitation the rights to
#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
#of the Software, and to permit persons to whom the Software is furnished to do
#so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
'''
Log
6/10/2013: Initial commit
'''
import sys
import os
import sys
import json
import copy
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import controller
from defines import PS_COLOR
from defines import MS_COLOR
from host_interface import HostInterface
from master import Master
from peripheral_bus import PeripheralBus
from memory_bus import MemoryBus
from peripheral_slave import PeripheralSlave
from memory_slave import MemorySlave
from nysa.ibuilder.lib import utils
from nysa.ibuilder.lib import verilog_utils as vutils
from nysa.ibuilder.lib import wishbone_utils
from nysa.ibuilder.lib import ibuilder_error
from NysaGui.common.graph.graph_manager import NodeType
from NysaGui.common.graph.graph_manager import SlaveType
from .wishbone_model import WishboneModel
class WishboneController (controller.Controller):
def __init__(self, config_dict, scene, status):
self.s = status
self.model = WishboneModel(config_dict)
self.scene = scene
super(WishboneController, self).__init__(self.model, status)
self.s.Debug( "Wishbone controller started")
self.bus = "wishbone"
if "INTERFACE" not in config_dict. | keys():
self.model.set_default_board_project(config_dict["board"])
else:
self.model.load | _config_dict(config_dict)
#self.model.initialize_graph(debug = True)
self.model.initialize_graph(debug = False)
#self.initialize_view()
def initialize_view(self):
self.s.Debug( "Add Master")
m = Master(scene = self.scene)
self.boxes["master"] = m
self.s.Debug( "Add Host Interface")
hi_name = self.model.get_host_interface_name()
hi = HostInterface(self.scene,
hi_name)
hi.link_master(m)
self.boxes["host_interface"] = hi
self.s.Debug( "Add Peripheral Bus")
pb = PeripheralBus(self.scene,
m)
m.link_peripheral_bus(pb)
self.boxes["peripheral_bus"] = pb
self.s.Debug( "Add Memory Bus")
mb = MemoryBus(self.scene,
m)
self.boxes["memory_bus"] = mb
m.link_memory_bus(mb)
self.editable = False
self.refresh_slaves()
#self.initialize_bindings()
def refresh_slaves(self, editable = None):
if editable is None:
editable = self.editable
else:
self.editable = editable
self.s.Debug("WBC: refresh_slaves")
#Create a list of slaves to send to the bus
slave_type = SlaveType.PERIPHERAL
nslaves = self.model.get_number_of_slaves(slave_type)
slave_list = []
paths = utils.get_local_verilog_paths()
for i in range(nslaves):
sitem = {}
sitem["instance_name"] = self.model.get_slave_name(slave_type, i)
#print "peripheral instance name: %s" % self.model.get_slave_name(slave_type, i)
#sitem["parameters"] = self.model.get_slave_parameters(slave_type, i)
slave_list.append(sitem)
pb = self.boxes["peripheral_bus"]
#update the bus
self.s.Debug("updating slave view")
pb.update_slaves(slave_list, editable)
slave_type = SlaveType.MEMORY
nslaves = self.model.get_number_of_slaves(slave_type)
slave_list = []
for i in range(nslaves):
sitem = {}
sitem["instance_name"] = self.model.get_slave_name(slave_type, i)
#print "memory instance name: %s" % self.model.get_slave_name(slave_type, i)
#sitem["parameters"] = self.model.get_slave_parameters(slave_type, i)
slave_list.append(sitem)
mb = self.boxes["memory_bus"]
#update the bus
self.s.Debug("WBC: updating slave view")
mb.update_slaves(slave_list, editable)
def enable_editing(self):
self.editable = True
self.refresh_slaves()
def disable_editing(self):
self.editable = False
self.refresh_slaves()
def drag_move(self, event):
#print "Drag movvve"
if event.mimeData().hasFormat("application/flowchart-data"):
event.accept()
else:
event.ignore()
def find_slave_position(self, drop_position):
self.s.Debug("Looking for slave position")
return drop_position
def drag_enter(self, event):
'''
An item has entered the canvas
'''
if event.mimeData().hasFormat("application/flowchart-data"):
self.s.Debug("Detect box")
event.accept()
else:
event.ignore()
def drag_leave(self, event):
#print "leave :("
event.ignore()
'''
if event.mimeData().hasFormat("application/flowchart-data"):
self.s.Debug("Detect box")
event.accept()
else:
event.ignore()
'''
def drag_move(self, event):
self.s.Debug("Drag Move Event")
if event.mimeData().hasFormat("application/flowchart-data"):
event.accept()
else:
event.ignore()
def drop_event(self, event):
#print "Drop Event: %s" % str(event)
#position = self.fd.position()
view = self.scene.get_view()
position = view.mapToScene(event.pos())
#position = QPoint(0, 0)
self.s.Debug("VC: drop_event()")
if event.mimeData().hasFormat("application/flowchart-data"):
data = event.mimeData().data("application/flowchart-data")
#position = self.fd.position()
#print "Data: %s" % str(data)
d = json.loads(str(data))
if event.dropAction() == Qt.MoveAction:
self.s.Debug("Moving Slave")
if "type" in d.keys():
self.s.Debug("\tSlave type: %s" % d["type"])
if d["type"] == "peripheral_slave":
pb = self.boxes["peripheral_bus"]
self.s.Debug("\tMoving within peripheral bus")
index = pb.find_index_from_position(position)
self.move_slave(bus=pb, slave_name = d["name"], to_index = index)
else:
self.s.Debug("\tMoving within memory bus")
mb = self.boxes["memory_bus"]
index = mb.find_index_from_position(position)
self.move_slave(bus=mb, slave_name = d["name"], to_index = index)
else:
if "type" in d.keys():
self.s.Debug("\ttype: %s" % d["type"])
if d["type"] == "memory_slave" or d["type"] == "peripheral_slave":
if d["type"] == "peripheral_slave":
self.s.Debug("\tSearching peripheral bus")
pb = self.boxes["peripheral_bus"]
index = pb.find_index_from_ |
vatslav/perfectnote | keepnote/gui/editor_richtext.py | Python | gpl-2.0 | 52,254 | 0.00733 | """
KeepNote
Editor widget in main window
"""
#
# KeepNote
# Copyright (c) 2008-2009 Matt Rasmussen
# Author: Matt Rasmussen <rasmus@mit.edu>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
#
# python imports
import gettext
import sys, os, re
# pygtk imports
import pygtk
pygtk.require('2.0')
from gtk import gdk
import gtk.glade
import gobject
# keepnote imports
import keepnote
from keepnote import \
KeepNoteError, is_url, unicode_gtk
from keepnote.notebook import \
NoteBookError, \
get_node_url, \
parse_node_url, \
is_node_url
from keepnote import notebook as notebooklib
from keepnote.gui import richtext
from keepnote.gui.richtext import \
RichTextView, RichTextBuffer, \
RichTextIO, RichTextError, RichTextImage
from keepnote.gui.richtext.richtext_tags import \
RichTextTagTable, RichTextLinkTag
from keepnote.gui.icons import \
get_node_icon, lookup_icon_filename
from keepnote.gui.font_selector import FontSelector
from keepnote.gui.colortool import FgColorTool, BgColorTool
from keepnote.gui.richtext.richtext_tags import color_tuple_to_string
from keepnote.gui.popupwindow import PopupWindow
from keepnote.gui.linkcomplete import LinkPickerPopup
from keepnote.gui.link_editor import LinkEditor
from keepnote.gui.editor import KeepNoteEditor
from keepnote.gui import \
CONTEXT_MENU_ACCEL_PATH, \
DEFAULT_FONT, \
FileChooserDialog, \
get_pixbuf, \
get_resource, \
get_resource_image, \
get_resource_pixbuf, \
Action, \
ToggleAction, \
add_actions, \
update_file_preview, \
dialog_find, \
dialog_image_resize
_ = keepnote.translate
def is_relative_file(filename):
"""Returns True if filename is relative"""
return (not re.match("[^:/]+://", filename) and
not os.path.isabs(filename))
def is_local_file(filename):
return filename and ("/" not in | filename) and ("\\" not in filename)
class NodeIO (RichTextIO):
"""Read/Writes the contents of a RichTextBuffer to disk"""
def __init__(self):
RichTextIO.__init__(self)
self._node = None
self._image_files = set()
self._saved_image_files = set()
def set_node(self, node):
self._node = node
def save(self, textbuffer, filename, title=None):
"""Save buffer contents to file"""
RichTextIO.save(self, textbuffer | , filename, title)
def load(self, textview, textbuffer, filename):
RichTextIO.load(self, textview, textbuffer, filename)
def _load_images(self, textbuffer, html_filename):
"""Load images present in textbuffer"""
self._image_files.clear()
RichTextIO._load_images(self, textbuffer, html_filename)
def _save_images(self, textbuffer, html_filename):
"""Save images present in text buffer"""
# reset saved image set
self._saved_image_files.clear()
# don't allow the html file to be deleted
self._saved_image_files.add(os.path.basename(html_filename))
RichTextIO._save_images(self, textbuffer, html_filename)
#print "saved", self._saved_image_files
# delete images not part of the saved set
self._delete_images(html_filename,
self._image_files - self._saved_image_files)
self._image_files = set(self._saved_image_files)
def _delete_images(self, html_filename, image_files):
for image_file in image_files:
#print image_file, is_local_file(image_file)
# only delete an image file if it is local
if is_local_file(image_file):
try:
os.remove(self._get_filename(html_filename, image_file))
except:
pass
def _load_image(self, textbuffer, image, html_filename):
image.set_from_file(
self._get_filename(html_filename, image.get_filename()))
# record loaded images
self._image_files.add(image.get_filename())
def _save_image(self, textbuffer, image, html_filename):
filename = self._get_filename(html_filename, image.get_filename())
# write image if it is modified or its file does not exist
if image.save_needed() or not os.path.exists(filename):
image.write(filename)
# mark image as saved
self._saved_image_files.add(image.get_filename())
def _get_filename(self, html_filename, filename):
path = os.path.dirname(html_filename)
if is_relative_file(filename):
return os.path.join(path, filename)
return filename
class RichTextEditor (KeepNoteEditor):
def __init__(self, app):
KeepNoteEditor.__init__(self, app)
self._app = app
self._notebook = None
self._link_picker = None
self._maxlinks = 10 # maximum number of links to show in link picker
# state
self._page = None # current NoteBookPage
self._page_scrolls = {} # remember scroll in each page
self._page_cursors = {}
self._textview_io = NodeIO()
# editor
self.connect("make-link", self._on_make_link)
# textview and its callbacks
self._textview = RichTextView(RichTextBuffer(
self._app.get_richtext_tag_table())) # textview
self._textview.disable()
self._textview.connect("font-change", self._on_font_callback)
self._textview.connect("modified", self._on_modified_callback)
self._textview.connect("child-activated", self._on_child_activated)
self._textview.connect("visit-url", self._on_visit_url)
self._textview.get_buffer().connect("ending-user-action",
self._on_text_changed)
self._textview.connect("key-press-event", self._on_key_press_event)
# scrollbars
self._sw = gtk.ScrolledWindow()
self._sw.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
self._sw.set_shadow_type(gtk.SHADOW_IN)
self._sw.add(self._textview)
self.pack_start(self._sw)
# link editor
self._link_editor = LinkEditor()
self._link_editor.set_textview(self._textview)
self._link_editor.set_search_nodes(self._search_nodes)
self.connect("font-change", self._link_editor.on_font_change)
self.pack_start(self._link_editor, False, True, 0)
self.make_image_menu(self._textview.get_image_menu())
# menus
self.editor_menus = EditorMenus(self._app, self)
self.connect("font-change", self.editor_menus.on_font_change)
# find dialog
self.find_dialog = dialog_find.KeepNoteFindDialog(self)
self.show_all()
def set_notebook(self, notebook):
"""Set notebook for editor"""
# set new notebook
self._notebook = notebook
if self._notebook:
self.load_notebook_preferences()
else:
# no new notebook, clear the view
self.clear_view()
def load_preferences(self, app_pref, first_open=False):
"""Load application preferences"""
self.editor_menus.enable_spell_check(
app_pref.get("editors", "general", "spell_check",
default=True))
self.load_notebook_preferences()
def save_preferences(self, app_pref):
"""Save application preferences" |
meta-it/misc-addons | account_invoice_dates/__openerp__.py | Python | lgpl-3.0 | 601 | 0 | # -*- coding: utf-8 -*-
{
"name": """Start-End dates in invoice and analytic lines""",
"summary": """Technical core for other modules""",
"category": "Accounting",
"images": [],
"version": "1.0.0",
"author": "IT-Projects LLC, Ivan Yelizariev",
| "website": "https://it-projects.info",
"license": "LGPL-3",
# "price": | 9.00,
# "currency": "EUR",
"depends": [
"account",
],
"external_dependencies": {"python": [], "bin": []},
"data": [
"views.xml",
],
"demo": [
],
"installable": False,
"auto_install": False,
}
|
cbeing/remoteSlideShow | third_party/pypoppler-qt4/configure.py | Python | mit | 7,528 | 0.009033 | import os, sys
import commands
import optparse
import shutil
INSTALL_DIR = ""
BASE_DIR = os.path.dirname(__file__)
SIP_FILE = "poppler-qt4.sip"
BUILD_DIR = "build"
SBF_FILE = "QtPoppler.sbf"
def _cleanup_path(path):
"""
Cleans the path:
- Removes traling / or \
"""
path = path.rstrip('/')
path = path.rstrip('\\')
return path
def pkgconfig(package):
'''
Calls pkg-config for the given package
Returns: - None if the package is not found.
- {'inc_dirs': [List of -L Paths]
'lib_dirs' : [List of -I Paths]
'libs ' : [List of -l libs]
}
'''
code, msg = commands.getstatusoutput("pkg-config --exists %s" % package)
if code != 0:
return None
tokens = commands.getoutput("pkg-config --libs --cflags %s" % package).split()
return {
'inc_dirs': [ token[2:] for token in tokens if token[:2] == '-I'],
'lib_dirs': [ token[2:] for token in tokens if token[:2] == '-L'],
'libs': [ token[2:] for token in tokens if token[:2] == '-l'],
}
def create_optparser(sipcfg):
'''Comandline parser'''
def store_abspath(option, opt_str, value, parser):
setattr(parser.values, option.dest, os.path.abspath(value))
def get_default_moddir():
default = sipcfg.default_mod_dir
default = os.path.join(default, INSTALL_DIR)
return default
p = optparse.OptionParser(usage="%prog [options]")
default_moddir = get_default_moddir()
p.add_option("-d", "--destdir", action="callback",
default=default_moddir, type="string",
metavar="DIR",
dest="moddir", callback=store_abspath,
help="Where to install PyPoppler-Qt4 python modules."
"[default: %default]")
p.add_option("-s", "--sipdir", action="callback",
default=os.path.join(sipcfg.default_sip_dir, INSTALL_DIR),
metavar="DIR", dest="sipdir", callback=store_abspath,
type="string", help="Where the .sip files will be installed "
"[default: %default]")
p.add_option("", "--popplerqt-includes-dir", action="callback",
default=None,
metavar="DIR", dest="popplerqt_inc_dirs", callback=store_abspath,
type="string", help="PopplerQt include paths"
"[default: Auto-detected with pkg-config]")
p.add_option("", "--popplerqt-libs-dir", action="callback",
default=None,
metavar="DIR", dest="popplerqt_lib_dirs", callback=store_abspath,
type="string", help="PopplerQt libraries paths"
"[default: Auto-detected with pkg-config]")
return p
def get_pyqt4_config():
try:
import PyQt4.pyqtconfig
return PyQt4.pyqtconfig.Configuration()
except ImportError, e:
print >> sys.stderr, "ERROR: PyQt4 not found."
sys.exit(1)
def get_sip_config():
try:
import sipconfig
return sipconfig.Configuration()
except ImportError, e:
print >> sys.stderr, "ERROR: SIP (sipconfig) not found."
sys.exit(1)
def get_popplerqt_config(opts):
config = pkgconfig('poppler-qt4')
if config is not None:
found_pkgconfig = True
else:
found_pkgconfig = False
config = {'libs': ['poppler-qt4', 'poppler'],
'inc_dirs': None,
'lib_dirs': None}
if opts.popplerqt_inc_dirs is not None:
config['inc_dirs'] = opts.popplerqt_inc_dirs.split(" ")
if opts.popplerqt_lib_dirs is not None:
config['lib_dirs'] = opts.popplerqt_lib_dirs.split(" ")
if config['lib_dirs'] is None or config['inc_dirs'] is None:
print >> sys.stderr, "ERROR: poppler-qt4 not found."
print "Try to define PKG_CONFIG_PATH "
print "or use --popplerqt-libs-dir and --popplerqt-includes-dir options"
sys.exit(1)
config['inc_dirs'] = map(_cleanup_path, config['inc_dirs'])
config['lib_dirs'] = map(_cleanup_path, config['lib_dirs'])
config['sip_dir'] = _cleanup_path(opts.sipdir)
config['mod_dir'] = _cleanup_path(opts.moddir)
print "Using PopplerQt include paths: %s" % config['inc_dirs']
print "Using PopplerQt libraries paths: %s" % config['lib_dirs']
print "Configured to install SIP in %s" % config['sip_dir']
print "Configured to install binaries in %s" % config['mod_dir']
return config
def create_build_dir():
dir = | os.path.join(BASE_DIR, BUILD_DIR)
if os.path.exists(dir):
return
try:
os.mkdir(dir)
except:
print >> sys.stderr, "ERROR: Unable to create the build directory (%s)" % dir
sys.exit(1)
def run_sip(pyqtcfg):
create_build_dir()
cmd = [pyqtcfg.sip_bin,
| "-c", os.path.join(BASE_DIR, BUILD_DIR),
"-b", os.path.join(BUILD_DIR, SBF_FILE),
"-I", pyqtcfg.pyqt_sip_dir,
pyqtcfg.pyqt_sip_flags,
os.path.join(BASE_DIR, SIP_FILE)]
os.system( " ".join(cmd) )
def generate_makefiles(pyqtcfg, popplerqtcfg, opts):
from PyQt4 import pyqtconfig
import sipconfig
pypopplerqt4config_file = os.path.join(BASE_DIR, "pypopplerqt4config.py")
# Creeates the Makefiles objects for the build directory
makefile_build = pyqtconfig.sipconfig.ModuleMakefile(
configuration=pyqtcfg,
build_file=SBF_FILE,
dir=BUILD_DIR,
install_dir=popplerqtcfg['mod_dir'],
warnings=1,
qt=['QtCore', 'QtGui', 'QtXml']
)
# Add extras dependencies for the compiler and the linker
# Libraries names don't include any platform specific prefixes
# or extensions (e.g. the "lib" prefix on UNIX, or the ".dll" extension on Windows)
makefile_build.extra_lib_dirs = popplerqtcfg['lib_dirs']
makefile_build.extra_libs = popplerqtcfg['libs']
makefile_build.extra_include_dirs = popplerqtcfg['inc_dirs']
# Generates build Makefile
makefile_build.generate()
# Generates root Makefile
installs_root = []
installs_root.append( (os.path.join(BASE_DIR, SIP_FILE), popplerqtcfg['sip_dir']) )
installs_root.append( (pypopplerqt4config_file, popplerqtcfg['mod_dir']) )
sipconfig.ParentMakefile(
configuration=pyqtcfg,
subdirs=[_cleanup_path(BUILD_DIR)],
installs=installs_root
).generate()
def generate_configuration_module(pyqtcfg, popplerqtcfg, opts):
import sipconfig
content = {
"pypopplerqt4_sip_dir": popplerqtcfg['sip_dir'],
"pypopplerqt4_sip_flags": pyqtcfg.pyqt_sip_flags,
"pypopplerqt4_mod_dir": popplerqtcfg['mod_dir'],
"pypopplerqt4_modules": 'PopplerQt',
"popplerqt4_inc_dirs": popplerqtcfg['inc_dirs'],
"popplerqt4_lib_dirs": popplerqtcfg['lib_dirs'],
}
# This creates the pypopplerqt4config.py module from the pypopplerqt4config.py.in
# template and the dictionary.
sipconfig.create_config_module(
os.path.join(BASE_DIR, "pypopplerqt4config.py"),
os.path.join(BASE_DIR, "pypopplerqt4config.py.in"),
content)
def main():
sipcfg = get_sip_config()
pyqtcfg = get_pyqt4_config()
parser = create_optparser(sipcfg)
opts, args = parser.parse_args()
popplerqtcfg = get_popplerqt_config(opts)
run_sip(pyqtcfg)
generate_makefiles(pyqtcfg, popplerqtcfg, opts)
generate_configuration_module(pyqtcfg, popplerqtcfg, opts)
if __name__ == "__main__":
main()
|
Kaggle/docker-python | tests/test_tensorflow_addons.py | Python | apache-2.0 | 665 | 0.003008 | import unittest
import tensorflow as tf
import tensorflow_addons as tfa
class TestTensorflowAddons(unittest.TestCase):
def test_tfa_image(self):
img_raw = tf.io.read_file('/input/tests/data/dot.png')
img = tf.io.decode_image(img_raw)
img = tf.image.convert_image_dtype(img, tf.float32)
mean = tfa.image.mean_filter2d(img, filter_shape=1)
| self.assertEqual([1, 1, 3], mean.shape)
# This test exercises TFA Custom Op. See: b/145555176
def test_gelu(self):
x = tf.constant([[0.5, 1.2, -0.3]])
layer = tfa.layers.G | ELU()
result = layer(x)
self.assertEqual((1, 3), result.shape) |
hryamzik/ansible | test/units/conftest.py | Python | gpl-3.0 | 783 | 0.001277 | """Monkey patch os._exit when running under coverage so we don't lose co | verage data in forks, such as with `pytest - | -boxed`."""
import gc
import os
try:
import coverage
except ImportError:
coverage = None
try:
test = coverage.Coverage
except AttributeError:
coverage = None
def pytest_configure():
if not coverage:
return
coverage_instances = []
for obj in gc.get_objects():
if isinstance(obj, coverage.Coverage):
coverage_instances.append(obj)
if not coverage_instances:
return
os_exit = os._exit
def coverage_exit(*args, **kwargs):
for instance in coverage_instances:
instance.stop()
instance.save()
os_exit(*args, **kwargs)
os._exit = coverage_exit
|
lavalamp-/ws-backend-community | wselasticsearch/query/dns/enumeration.py | Python | gpl-3.0 | 1,098 | 0.002732 | # -*- coding: utf-8 -*-
from __ | future__ import absolute_import
from .base import BaseDomainNameScanQuery
class SubdomainEnumerationQuery(BaseDomainNameScanQuery):
"""
This is an Elasticsearch query class for querying SubdomainEnumerationModel objects.
"""
@classmethod
def get_queried_class(cl | s):
from wselasticsearch.models import SubdomainEnumerationModel
return SubdomainEnumerationModel
def filter_by_enumeration_method(self, method):
"""
Apply a filter to this query that restricts results to only those results found by
the given method.
:param method: The method to filter on.
:return: None
"""
self.must_by_term(key="enumeration_method", value=method)
def filter_by_parent_domain(self, parent_domain):
"""
Apply a filter to this query that restricts results to only those results for the
given parent domain.
:param parent_domain: The parent domain.
:return: None
"""
self.must_by_term(key="parent_domain", value=parent_domain)
|
samuelmaudo/yepes | tests/datamigrations/tests_fields.py | Python | bsd-3-clause | 89,595 | 0.001184 | # -*- coding:utf-8 -*-
from __future__ import unicode_literals
import csv
from datetime import date, datetime, time
from decimal import Decimal
from io import open
import os
from unittest import expectedFailure
import warnings
from django import test
from django.test.utils import override_settings
from django.utils import six
from django.utils._os import upath
from django.utils.six.moves import zip
from django.utils.timezone import utc as UTC
from yepes.contrib.datamigrations.importation_plans.direct import DirectPlan
from yepes.contrib.datamigrations.importation_plans.replace_all import ReplaceAllPlan
from yepes.contrib.datamigrations.serializers.csv import CsvSerializer
from yepes.contrib.datamigrations.serializers.json import JsonSerializer
from yepes.contrib.datamigrations.serializers.tsv import TsvSerializer
from yepes.contrib.datamigrations.serializers.xls import XlsSerializer
from yepes.contrib.datamigrations.serializers.xlsx import XlsxSerializer
from yepes.contrib.datamigrations.serializers.yaml import YamlSerializer
from yepes.test_mixins import TempDirMixin
from .data_migrations import (
BooleanMigration,
DateTimeMigration,
DateTimeEdgeMigration,
FileMigration,
NumericMigration,
TextMigration,
)
from .models import (
BooleanModel,
DateTimeModel,
FileModel,
NumericModel,
TextModel,
)
MODULE_DIR = os.path.abspath(os.path.dirname(upath(__file__)))
MIGRATIONS_DIR = os.path.join(MODULE_DIR, 'data_migrations')
class BooleanFieldsTests(TempDirMixin, test.TestCase):
expectedResults = [
(True, True),
(True, True),
(True, True),
(True, True),
(False, False),
(False, False),
(False, False),
(False, False),
(True, None),
(False, None),
]
maxDiff = None
tempDirPrefix = 'test_data_migrations_'
def test_data_migration(self):
migration = BooleanMigration(BooleanModel)
self.assertEqual(
[repr(fld) for fld in migration.fields],
['<yepes.contrib.datamigrations.fields.BooleanField: boolean>',
'<yepes.contrib.datamigrations.fields.BooleanField: boolean_as_string>',
'<yepes.contrib.datamigrations.fields.BooleanField: null_boolean>',
'<yepes.contrib.datamigrations.fields.BooleanField: null_boolean_as_string>'],
)
self.assertEqual(
[repr(fld) for fld in migration.fields_to_import],
['<yepes.contrib.datamigrations.fields.BooleanField: boolean>',
'<yepes.contrib.datamigrations.fields.BooleanField: boolean_as_string>',
'<yepes.contrib.datamigrations.fields.BooleanField: null_boolean>',
'<yepes.contrib.datamigrations.fields.BooleanField: null_boolean_as_string>'],
)
self.assertIsNone(migration.primary_key)
self.assertIsNone(migration.natural_foreign_keys)
self.assertTrue(migration.can_create)
self.assertFalse(migration.can_update)
self.assertFalse(migration.requires_model_instances)
def test_csv_serializer(self):
migration = BooleanMigration(BooleanModel)
import_serializer = CsvSerializer()
export_serializer = CsvSerializer(quoting=csv.QUOTE_NONNUMERIC)
source_path = os.path.join(MIGRATIONS_DIR, 'boolean_source.csv')
expected_path = os.path.join(MIGRATIONS_DIR, 'boolean_result.csv')
result_path = os.path.join(self.temp_dir, 'boolean_result.csv')
# Import data from a string.
with import_serializer.open_to_load(source_path) as source_file:
migration.import_data(source_file.read(), import_serializer, DirectPlan)
objs = list(BooleanModel.objects.all())
self.assertEqual(len(objs), len(self.expectedResults))
for obj, result in zip(objs, self.expectedResults):
self.assertEqual(obj.boolean, result[0])
self.assertEqual(obj.boolean_as_string, result[0])
self.assertEqual(obj.null_boolean, result[1])
self.assertEqual(obj.null_boolean_as_string, result[1])
# Export data to a string.
result = migration.export_data(serializer=export_serializer)
with import_serializer.open_to_load(expected_path) as expected_file:
self.assertEqual(
result.splitlines(),
expected_file.read().splitlines())
# Import data from a file.
with import_serializer.open_to_load(source_path) as source_file:
migration.import_data(source_file, import_serializer, ReplaceAllPlan)
objs = list(Boolean | Model.objects.all())
self.assertEqual(len(objs), len(self.expectedResults))
for obj, result in zip(objs, s | elf.expectedResults):
self.assertEqual(obj.boolean, result[0])
self.assertEqual(obj.boolean_as_string, result[0])
self.assertEqual(obj.null_boolean, result[1])
self.assertEqual(obj.null_boolean_as_string, result[1])
# Export data to a file.
with export_serializer.open_to_dump(result_path) as result_file:
migration.export_data(result_file, export_serializer)
with open(expected_path, 'rt') as expected_file:
with open(result_path, 'rt') as result_file:
self.assertEqual(
result_file.read().splitlines(),
expected_file.read().splitlines())
def test_json_serializer(self):
migration = BooleanMigration(BooleanModel)
serializer = JsonSerializer()
source_path = os.path.join(MIGRATIONS_DIR, 'boolean_source.json')
expected_path = os.path.join(MIGRATIONS_DIR, 'boolean_result.json')
result_path = os.path.join(self.temp_dir, 'boolean_result.json')
# Import data from a string.
with serializer.open_to_load(source_path) as source_file:
migration.import_data(source_file.read(), serializer, DirectPlan)
objs = list(BooleanModel.objects.all())
self.assertEqual(len(objs), len(self.expectedResults))
for obj, result in zip(objs, self.expectedResults):
self.assertEqual(obj.boolean, result[0])
self.assertEqual(obj.boolean_as_string, result[0])
self.assertEqual(obj.null_boolean, result[1])
self.assertEqual(obj.null_boolean_as_string, result[1])
# Export data to a string.
result = migration.export_data(serializer=serializer)
with open(expected_path, 'rt') as expected_file:
self.assertEqual(
result.splitlines(),
expected_file.read().splitlines())
# Import data from a file.
with serializer.open_to_load(source_path) as source_file:
migration.import_data(source_file, serializer, ReplaceAllPlan)
objs = list(BooleanModel.objects.all())
self.assertEqual(len(objs), len(self.expectedResults))
for obj, result in zip(objs, self.expectedResults):
self.assertEqual(obj.boolean, result[0])
self.assertEqual(obj.boolean_as_string, result[0])
self.assertEqual(obj.null_boolean, result[1])
self.assertEqual(obj.null_boolean_as_string, result[1])
# Export data to a file.
with serializer.open_to_dump(result_path) as result_file:
migration.export_data(result_file, serializer)
with open(expected_path, 'rt') as expected_file:
with open(result_path, 'rt') as result_file:
self.assertEqual(
result_file.read().splitlines(),
expected_file.read().splitlines())
def test_tsv_serializer(self):
migration = BooleanMigration(BooleanModel)
import_serializer = TsvSerializer()
export_serializer = TsvSerializer(quoting=csv.QUOTE_NONNUMERIC)
source_path = os.path.join(MIGRATIONS_DIR, 'boolean_source.tsv')
expected_path = os.path.join(MIGRATIONS_DIR, 'boolean_result.tsv')
result_path = os.path.join(self.temp_dir, 'boolean_result.tsv')
# Import data from a string.
with i |
brandonw/personal-site | personal-site/blog/views.py | Python | bsd-3-clause | 1,941 | 0.003606 | from django.shortcuts import render
from django.views.generic.base import TemplateView
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.contrib.syndication.views import Feed
from django.utils.feedgenerator import Atom1Feed
from blog.models import Post
from taggit.models import Tag
class BlogHomeView(ListView):
template_name = 'blog/home.html'
context_object_name = 'posts'
paginate_by = 10
def get_queryset(self):
posts = Post.objects.order_by('-pub_date')
if self.request.user.is_superuser:
return posts
else:
return posts.filter(is_published=True)
class BlogPostView(DetailView):
context_object_name = 'post'
template_name = 'blog/post.html'
def get_queryset(self):
if self.request.user.is_superuser:
return Post.objects.all()
return Post.objects.filter(is_published=True)
class BlogTagView(TemplateView):
template_name = 'blog/tag.html'
def get_context_data(self, **kwargs):
context = super(BlogTagView, self).get_context_data(**kwargs)
tagslug = self.kwargs['slug']
tag = Tag.objects.get(slug=tagslug)
context['tag'] = tag.name
context['taggedposts'] = (Post.objects
.filter(is_published=True)
.filter(tags__name=tag.name)
.distinct())
context['published_tags'] = Post.objects.filter(is_published=True)
return context
class BlogRssFeed(Feed):
    """RSS feed of published blog posts, newest first.

    Fixes stray ``|`` separator characters that had broken the ``class``
    line and the ``item_description`` signature.
    """
    title = "Brandon Waskiewicz's blog"
    link = '/blog/'
    description = 'Inside my head'

    def items(self):
        # Only published posts are syndicated.
        return Post.objects.filter(is_published=True).order_by('-pub_date')

    def item_title(self, item):
        return item.name

    def item_description(self, item):
        # get_preview() is assumed to return a short excerpt -- defined
        # on the Post model elsewhere in the project.
        return item.get_preview()
class BlogAtomFeed(BlogRssFeed):
    """Atom flavour of the blog feed; reuses the RSS feed's queryset."""
    feed_type = Atom1Feed
    subtitle = BlogRssFeed.title
|
marcosfede/algorithms | linkedlist/reverse.py | Python | gpl-3.0 | 679 | 0 | """
Reverse a singly linked list.
"""
#
# Iterative solution
# T(n) - O(n)
#
def reverse_list(head):
    """Reverse a singly linked list iteratively.

    :type head: ListNode
    :rtype: ListNode -- the new head (the old tail), or ``head``
        unchanged for an empty or single-node list.

    Runs in O(n) time and O(1) extra space. Fixes a stray ``|``
    separator that had garbled ``prev = current``.
    """
    if not head or not head.next:
        return head
    prev = None
    while head:
        current = head       # node whose link is reversed this iteration
        head = head.next     # advance before overwriting .next
        current.next = prev  # point the node back at the reversed part
        prev = current
    return prev              # prev is the new head
#
# Recursive solution
# T(n)- O(n)
#
def reverse_list_recursive(head):
    """Reverse a singly linked list recursively.

    :type head: ListNode
    :rtype: ListNode -- the new head of the reversed list.

    O(n) time, O(n) call-stack space.
    """
    # Base case: an empty list or a single node is already reversed.
    if head is None or head.next is None:
        return head
    rest = head.next
    head.next = None  # detach head so it becomes the new tail
    new_head = reverse_list_recursive(rest)
    rest.next = head  # the old second node now points back at head
    return new_head
|
anarang/robottelo | robottelo/cli/base.py | Python | gpl-3.0 | 14,874 | 0 | # -*- encoding: utf-8 -*-
"""Generic base class for cli hammer commands."""
import logging
from robottelo import ssh
from robottelo.cli import hammer
from robottelo.config import settings
class CLIError(Exception):
    """Indicates that a CLI command could not be run.

    Raised before execution happens (e.g. a required option is missing),
    as opposed to :class:`CLIReturnCodeError`, which signals a non-zero
    exit status of a command that did run.
    """
class CLIReturnCodeError(Exception):
    """Indicates that a CLI command has finished with return code, different
    from zero.

    :param return_code: CLI command return code
    :param stderr: contents of the ``stderr``
    :param msg: explanation of the error

    (Restores the docstring terminator that a stray ``|`` separator had
    garbled.)
    """

    def __init__(self, return_code, stderr, msg):
        self.return_code = return_code
        self.stderr = stderr
        self.msg = msg

    def __str__(self):
        # The human-readable explanation is the string form of the error.
        return self.msg
class Base(object):
"""
@param command_base: base command of hammer.
Output of recent `hammer --help`::
activation-key Manipulate activation keys.
architecture Manipulate architectures.
auth Foreman connection login/logout.
auth-source Manipulate auth sources.
bootdisk Download boot disks
capsule Manipulate capsule
compute-resource Manipulate compute resources.
content-host Manipulate content hosts on the server
content-report View Content Reports
content-view Manipulate content views.
defaults Defaults management
docker Manipulate docker content
domain Manipulate domains.
environment Manipulate environments.
erratum Manipulate errata
fact Search facts.
filter Manage permission filters.
global-parameter Manipulate global parameters.
gpg Manipulate GPG Key actions on the server
host Manipulate hosts.
host-collection Manipulate host collections
hostgroup Manipulate hostgroups.
import Import data exported from a Red Hat Sat..
lifecycle-environment Manipulate lifecycle_environments
location Manipulate locations.
medium Manipulate installation media.
model Manipulate hardware models.
organization Manipulate organizations
os Manipulate operating system.
ostree-branch Manipulate ostree branches
package Manipulate packages.
package-group Manipulate package groups
partition-table Manipulate partition tables.
ping Get the status of the server
product Manipulate products.
proxy Manipulate smart proxies.
puppet-class Search puppet modules.
puppet-module View Puppet Module details.
report Browse and read reports.
repository Manipulate repositories
repository-set Manipulate repository sets on the server
role Manage user roles.
sc-param Manipulate smart class parameters.
settings Change server settings.
shell Interactive shell
smart-variable Manipulate smart variables.
subnet Manipulate subnets.
subscription Manipulate subscriptions.
sync-plan Manipulate sync plans
task Tasks related actions.
template Manipulate config templates.
user Manipulate users.
user-group Manage user groups.
@since: 27.Nov.2013
"""
command_base = None # each inherited instance should define this
command_sub = None # specific to instance, like: create, update, etc
command_requires_org = False # True when command requires organization-id
logger = logging.getLogger('robottelo')
    @classmethod
    def _handle_response(cls, response, ignore_stderr=None):
        """Verify ``return_code`` of the CLI command.

        Check for a non-zero return code or any stderr contents.

        :param response: a ``SSHCommandResult`` object, returned by
            :mod:`robottelo.ssh.command`.
        :param ignore_stderr: indicates whether to throw a warning in logs if
            ``stderr`` is not empty.
        :returns: contents of ``stdout``.
        :raises robottelo.cli.base.CLIReturnCodeError: If return code is
            different from zero.
        """
        if response.return_code != 0:
            raise CLIReturnCodeError(
                response.return_code,
                response.stderr,
                u'Command "{0} {1}" finished with return_code {2}\n'
                'stderr contains following message:\n{3}'
                .format(
                    cls.command_base,
                    cls.command_sub,
                    response.return_code,
                    response.stderr,
                )
            )
        # Non-fatal stderr output is surfaced as a warning unless the
        # caller explicitly asked to ignore it.
        if len(response.stderr) != 0 and not ignore_stderr:
            cls.logger.warning(
                u'stderr contains following message:\n{0}'
                .format(response.stderr)
            )
        return response.stdout
    @classmethod
    def add_operating_system(cls, options=None):
        """Run the ``add-operatingsystem`` subcommand to attach an OS to
        the record.

        :param options: dict of hammer command-line options.
        :returns: parsed stdout of the executed command.
        """
        cls.command_sub = 'add-operatingsystem'
        result = cls.execute(cls._construct_command(options))
        return result
    @classmethod
    def create(cls, options=None):
        """
        Creates a new record using the arguments passed via dictionary.

        If creation succeeds (an ``id`` is present in the CSV output),
        the freshly created object is fetched via ``info`` and returned;
        otherwise the raw command output is returned.

        :raises CLIError: when the entity requires ``organization-id``
            but it was not supplied in ``options``.
        """
        cls.command_sub = 'create'
        if options is None:
            options = {}
        result = cls.execute(
            cls._construct_command(options), output_format='csv')
        # Extract new object ID if it was successfully created
        if len(result) > 0 and 'id' in result[0]:
            obj_id = result[0]['id']
            # Fetch new object
            # Some Katello obj require the organization-id for subcommands
            info_options = {u'id': obj_id}
            if cls.command_requires_org:
                if 'organization-id' not in options:
                    raise CLIError(
                        'organization-id option is required for {0}.create'
                        .format(cls.__name__)
                    )
                info_options[u'organization-id'] = options[u'organization-id']
            new_obj = cls.info(info_options)
            # stdout should be a dictionary containing the object
            if len(new_obj) > 0:
                result = new_obj
        return result
    @classmethod
    def delete(cls, options=None):
        """Deletes existing record.

        stderr is ignored because hammer may print non-fatal chatter on
        successful deletion.
        """
        cls.command_sub = 'delete'
        return cls.execute(
            cls._construct_command(options),
            ignore_stderr=True,
        )
    @classmethod
    def delete_parameter(cls, options=None):
        """Run the ``delete-parameter`` subcommand to remove a parameter
        from the record.

        :returns: parsed stdout of the executed command.
        """
        cls.command_sub = 'delete-parameter'
        result = cls.execute(cls._construct_command(options))
        return result
    @classmethod
    def dump(cls, options=None):
        """
        Displays the content for existing partition table.

        :returns: parsed stdout of the executed ``dump`` subcommand.
        """
        cls.command_sub = 'dump'
        result = cls.execute(cls._construct_command(options))
        return result
@classmethod
def _get_username_password(cls, username=None, password=None):
"""Lookup for the username and passwo |
flegald/django-imager | imagersite/imager_profile/handler.py | Python | mit | 1,201 | 0 | # -*- coding: utf-8 -*-
"""signal handlers registered by the imager_profile app"""
from __future__ import unicode_literals
from django.conf import settings
from django.db.models.signals import post_save
from django.db.models.signals import pre_delete
from django.dispatch import receiver
from imager_profile.models import ImagerProfile
import logging
logger = logging.getLogger(__name__)
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def ensure_imager_profile(sender, **kwargs):
    """Create and save an ImagerProfile after every new User is created.

    Only fires on initial creation (``created`` flag), not on updates.
    """
    if kwargs.get('created', False):
        try:
            new_profile = ImagerProfile(user=kwargs['instance'])
            new_profile.save()
        except (KeyError, ValueError):
            # Best-effort: log rather than break the user save.
            logger.error('Unable to create ImagerProfile for User instance.')
@receiver(pre_delete, sender=settings.AUTH_USER_MODEL)
def remove_imager_profile(sender, **kwargs):
    """Delete the ImagerProfile tied to a User that is being deleted.

    Removes a stray ``|`` separator character that had been appended to
    the ``except`` clause. Missing instance/profile is logged, not
    raised, so user deletion is never blocked.
    """
    try:
        kwargs['instance'].profile.delete()
    except (KeyError, AttributeError):
        msg = (
            "ImagerProfile instance not deleted for {}. "
            "Perhaps it does not exist?"
        )
        logger.warn(msg.format(kwargs['instance']))
|
pviafore/rcfc | rcfc/input_methods.py | Python | mit | 1,204 | 0 | """ A collection of input methods that aren't necessarily buttons """
from rcfc.server import register_post_with_state, register_post_with_input
from rcfc.groups import convertGroup
class DIRECTIONAL:
    # String constants naming the two directions reported by
    # left/right arrow inputs.
    LEFT = "left"
    RIGHT = "right"
def slider(text, getter, input_range=(0, 100), group=None):
    """Decorator factory registering a slider input.

    ``getter`` supplies the current value; the decorated function is
    registered as the setter. ``input_range`` is an inclusive
    (min, max) pair.
    """
    low, high = input_range

    def wrapper(setter):
        descriptor = {
            "text": text,
            "type": "input.slider",
            "min": low,
            "max": high,
            "groups": convertGroup(group)
        }
        register_post_with_state(descriptor, getter, setter)
    return wrapper
def left_right_arrows(text, group=None):
    """Decorator factory registering a left/right arrow-pair input.

    The decorated function receives the pressed direction as input.
    """
    def wrapper(setter):
        descriptor = {
            "text": text,
            "type": "input.leftright",
            "groups": convertGroup(group)
        }
        register_post_with_input(descriptor, setter)
    return wrapper
def colorpicker(text, getter, group=None):
    """Decorator factory registering a colorpicker input.

    Repairs stray ``|`` separator characters that had garbled the
    signature (``gr | oup``) and the dict literal.
    """
    def wrapper(setter):
        colorpicker = {
            "text": text,
            "type": "input.colorpicker",
            "groups": convertGroup(group)
        }
        register_post_with_state(colorpicker, getter, setter)
    return wrapper
|
matpalm/malmomo | p/reward_freqs.py | Python | mit | 605 | 0.018182 | #!/usr/bin/env python
import json
import sys
import numpy as np
rewards = []
for line in sys.stdin:
if not line.startswith("REWARD"): continue
cols = line.strip().split("\t")
data = json.loads(cols[1])
rewards.append(data['reward'])
rmin, rmax = np.min(rewards), np.max(rewards)
rrange = rmax - rmin
num_bins = 20
histo = [0] * (num_bins+1)
for r in rewards:
n = (r-rmin)/rrange
rbin = int(n*num_bins)
histo[rbin] += | 1
for i, h in enumerate(histo):
print rmin + (i*(rrange/num_bins)), "\t", h
#grep ^REWARD $1 | cut -f2 | jq '.reward' | sort | uniq | -c | normalise.py | sort -k3 -nr
|
mbilalzafar/fair-classification | preferential_fairness/synthetic_data_demo/plot_synthetic_boundaries.py | Python | gpl-3.0 | 3,067 | 0.024128 | import matplotlib
import matplotlib.pyplot as plt # for plotting stuff
import os
import numpy as np
matplotlib.rcParams['text.usetex'] = True # for type-1 fonts
def get_line_coordinates(w, x1, x2):
    """Return the y coordinates at x1 and x2 of the decision line
    defined by weights w = [bias, w1, w2] (i.e. bias + w1*x + w2*y = 0).
    """
    def y_at(x):
        return (-w[0] - (w[1] * x)) / w[2]
    return y_at(x1), y_at(x2)
def plot_data(X, y, x_sensitive, w_arr, label_arr, lt_arr, fname, title, group=None):
    """Scatter-plot the last two features of a two-class dataset, split
    by the binary sensitive attribute, optionally overlaying one or more
    decision boundaries.

    :param X: feature matrix; only the last two columns are drawn.
    :param y: labels in {-1.0, 1.0}.
    :param x_sensitive: binary (0.0/1.0) sensitive attribute per row.
    :param w_arr: list of boundary weight vectors, or None to plot data only.
    :param label_arr: legend label per boundary in w_arr.
    :param lt_arr: matplotlib line style per boundary in w_arr.
    :param fname: path the figure is saved to.
    :param title: figure title (used only when boundaries are drawn).
    :param group: unused; kept for interface compatibility.

    Repairs stray ``|`` separator characters that had garbled two
    ``plt.scatter`` calls.
    """
    # print fp_fn_arr
    plt.figure()
    num_to_draw = 200  # we will only draw a small number of points to avoid clutter
    fs = 20  # font size for labels and legends
    x_draw = X[:num_to_draw]
    y_draw = y[:num_to_draw]
    x_sensitive_draw = x_sensitive[:num_to_draw]
    x_lim = [min(x_draw[:, -2]) - np.absolute(0.3 * min(x_draw[:, -2])), max(x_draw[:, -2]) + np.absolute(0.5 * max(x_draw[:, -2]))]
    y_lim = [min(x_draw[:, -1]) - np.absolute(0.3 * min(x_draw[:, -1])), max(x_draw[:, -1]) + np.absolute(0.7 * max(x_draw[:, -1]))]
    X_s_0 = x_draw[x_sensitive_draw == 0.0]
    X_s_1 = x_draw[x_sensitive_draw == 1.0]
    y_s_0 = y_draw[x_sensitive_draw == 0.0]
    y_s_1 = y_draw[x_sensitive_draw == 1.0]
    if w_arr is not None:  # we are plotting the boundaries of a trained classifier
        plt.scatter(X_s_0[y_s_0 == 1.0][:, -2], X_s_0[y_s_0 == 1.0][:, -1], color='green', marker='x', s=70, linewidth=2)
        plt.scatter(X_s_0[y_s_0 == -1.0][:, -2], X_s_0[y_s_0 == -1.0][:, -1], color='red', marker='x', s=70, linewidth=2)
        plt.scatter(X_s_1[y_s_1 == 1.0][:, -2], X_s_1[y_s_1 == 1.0][:, -1], color='green', marker='o', facecolors='none', s=70, linewidth=2)
        plt.scatter(X_s_1[y_s_1 == -1.0][:, -2], X_s_1[y_s_1 == -1.0][:, -1], color='red', marker='o', facecolors='none', s=70, linewidth=2)
        for i in range(0, len(w_arr)):
            w = w_arr[i]
            l = label_arr[i]
            lt = lt_arr[i]
            x1, x2 = min(x_draw[:, 1]), max(x_draw[:, 1])
            y1, y2 = get_line_coordinates(w, x1, x2)
            plt.plot([x1, x2], [y1, y2], lt, linewidth=3, label=l)
        plt.title(title, fontsize=fs)
    else:  # just plotting the data
        plt.scatter(X_s_0[y_s_0 == 1.0][:, -2], X_s_0[y_s_0 == 1.0][:, -1], color='green', marker='x', s=70, linewidth=2, label="group-0 +ve")
        plt.scatter(X_s_0[y_s_0 == -1.0][:, -2], X_s_0[y_s_0 == -1.0][:, -1], color='red', marker='x', s=70, linewidth=2, label="group-0 -ve")
        plt.scatter(X_s_1[y_s_1 == 1.0][:, -2], X_s_1[y_s_1 == 1.0][:, -1], color='green', marker='o', facecolors='none', s=70, linewidth=2, label="group-1 +ve")
        plt.scatter(X_s_1[y_s_1 == -1.0][:, -2], X_s_1[y_s_1 == -1.0][:, -1], color='red', marker='o', facecolors='none', s=70, linewidth=2, label="group-1 -ve")
    if True:  # turn the ticks on or off
        plt.tick_params(axis='x', which='both', bottom='off', top='off', labelbottom='off')  # dont need the ticks to see the data distribution
        plt.tick_params(axis='y', which='both', left='off', right='off', labelleft='off')
    plt.legend(loc=2, fontsize=fs)
    plt.xlim(x_lim)
    plt.ylim(y_lim)
    plt.savefig(fname)
    plt.show()
|
googleapis/python-api-common-protos | google/api/log_pb2.py | Python | apache-2.0 | 2,402 | 0.001665 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/api/log.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import label_pb2 as google_dot_api_dot_label__pb2
# Generated protobuf registration code -- only stray extraction-artifact
# ``|`` characters are repaired here; the generated content is otherwise
# untouched (file is marked DO NOT EDIT).
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
    b'\n\x14google/api/log.proto\x12\ngoogle.api\x1a\x16google/api/label.proto"u\n\rLogDescriptor\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x06labels\x18\x02 \x03(\x0b\x32\x1b.google.api.LabelDescriptor\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x04 \x01(\tBj\n\x0e\x63om.google.apiB\x08LogProtoP\x01ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\xa2\x02\x04GAPIb\x06proto3'
)
_LOGDESCRIPTOR = DESCRIPTOR.message_types_by_name["LogDescriptor"]
LogDescriptor = _reflection.GeneratedProtocolMessageType(
    "LogDescriptor",
    (_message.Message,),
    {
        "DESCRIPTOR": _LOGDESCRIPTOR,
        "__module__": "google.api.log_pb2"
        # @@protoc_insertion_point(class_scope:google.api.LogDescriptor)
    },
)
_sym_db.RegisterMessage(LogDescriptor)
if _descriptor._USE_C_DESCRIPTORS == False:
    DESCRIPTOR._options = None
    DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\010LogProtoP\001ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\242\002\004GAPI"
    _LOGDESCRIPTOR._serialized_start = 60
    _LOGDESCRIPTOR._serialized_end = 177
# @@protoc_insertion_point(module_scope)
|
gaborvecsei/Color-Tracker | color_tracker/utils/camera/base_camera.py | Python | mit | 3,526 | 0.001702 | import threading
import cv2
class Camera(object):
    """
    Base Camera object.

    Frames are grabbed on a background thread started by ``start_camera()``
    so callers can poll ``read()`` at a higher FPS. Subclasses must
    override ``_init_camera`` and ``_read_from_camera``.

    Repairs stray ``|`` separator characters that had garbled two lines
    in ``_undistort_image``.
    """

    def __init__(self):
        self._cam = None                       # backend handle, set by subclass
        self._frame = None                     # last grabbed frame
        self._frame_width = None
        self._frame_height = None
        self._ret = False                      # last grab's success flag
        self._auto_undistortion = False
        self._camera_matrix = None
        self._distortion_coefficients = None
        self._is_running = False

    def _init_camera(self):
        """
        This is the first for creating our camera
        We should override this!
        """
        pass

    def start_camera(self):
        """
        Start the running of the camera, without this we can't capture frames
        Camera runs on a separate thread so we can reach a higher FPS
        """
        self._init_camera()
        self._is_running = True
        threading.Thread(target=self._update_camera, args=()).start()

    def _read_from_camera(self):
        """
        This method is responsible for grabbing frames from the camera
        We should override this!
        """
        if self._cam is None:
            raise Exception("Camera is not started!")

    def _update_camera(self):
        """
        Grabs the frames from the camera (runs on the capture thread
        until release() flips _is_running).
        """
        while True:
            if self._is_running:
                self._ret, self._frame = self._read_from_camera()
            else:
                break

    def get_frame_width_and_height(self):
        """
        Returns the width and height of the grabbed images
        :return (int int): width and height
        """
        return self._frame_width, self._frame_height

    def read(self):
        """
        With this you can grab the last frame from the camera
        :return (boolean, np.array): return value and frame
        """
        if self._is_running:
            return self._ret, self._frame
        else:
            import warnings
            warnings.warn("Camera is not started, you should start it with start_camera()")
            return False, None

    def release(self):
        """
        Stop the camera (signals the capture thread to exit)
        """
        self._is_running = False

    def is_running(self):
        return self._is_running

    def set_calibration_matrices(self, camera_matrix, distortion_coefficients):
        self._camera_matrix = camera_matrix
        self._distortion_coefficients = distortion_coefficients

    def activate_auto_undistortion(self):
        self._auto_undistortion = True

    def deactivate_auto_undistortion(self):
        self._auto_undistortion = False

    def _undistort_image(self, image):
        # Without calibration data we cannot undistort; warn and pass through.
        if self._camera_matrix is None or self._distortion_coefficients is None:
            import warnings
            warnings.warn("Undistortion has no effect because <camera_matrix>/<distortion_coefficients> is None!")
            return image
        h, w = image.shape[:2]
        new_camera_matrix, roi = cv2.getOptimalNewCameraMatrix(self._camera_matrix,
                                                               self._distortion_coefficients, (w, h),
                                                               1,
                                                               (w, h))
        undistorted = cv2.undistort(image, self._camera_matrix, self._distortion_coefficients, None,
                                    new_camera_matrix)
        return undistorted

    def __enter__(self):
        self.start_camera()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.release()
|
iulian787/spack | lib/spack/spack/operating_systems/mac_os.py | Python | lgpl-2.1 | 2,263 | 0 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform as py_platform
from spack.architecture import OperatingSystem
from spack.version import Version
from spack.util.executable import Executable
# FIXME: store versions inside OperatingSystem as a Version instead of string
def macos_version():
    """Temporary workaround to return the host's macOS version
    (``platform.mac_ver()``) as a Version object.
    """
    return Version(py_platform.mac_ver()[0])
def macos_sdk_path():
    """Return the active macOS SDK path, as reported by
    ``xcrun --show-sdk-path`` (trailing newline stripped).

    Repairs a stray ``|`` separator that had garbled the function name.
    """
    xcrun = Executable('xcrun')
    return xcrun('--show-sdk-path', output=str, error=str).rstrip()
class MacOs(OperatingSystem):
    """This class represents the macOS operating system. This will be
    auto detected using the python platform.mac_ver. The macOS
    platform will be represented using the major version operating
    system name, i.e el capitan, yosemite...etc.
    """

    def __init__(self):
        """Autodetects the mac version from a dictionary.

        If the mac version is too old or too new for Spack to recognize,
        will use a generic "macos" version string until Spack is updated.
        """
        release_names = {
            '10.0': 'cheetah',
            '10.1': 'puma',
            '10.2': 'jaguar',
            '10.3': 'panther',
            '10.4': 'tiger',
            '10.5': 'leopard',
            '10.6': 'snowleopard',
            '10.7': 'lion',
            '10.8': 'mountainlion',
            '10.9': 'mavericks',
            '10.10': 'yosemite',
            '10.11': 'elcapitan',
            '10.12': 'sierra',
            '10.13': 'highsierra',
            '10.14': 'mojave',
            '10.15': 'catalina',
            '10.16': 'bigsur',
            '11': 'bigsur',
        }
        # Big Sur versions go 11.0, 11.0.1, 11.1 (vs. prior versions that
        # only used the minor component), so key on a single version
        # component from 11 onwards and on two components before that.
        num_parts = 1 if macos_version() >= Version('11') else 2
        short_version = str(macos_version().up_to(num_parts))
        release_name = release_names.get(short_version, "macos")
        super(MacOs, self).__init__(release_name, short_version)

    def __str__(self):
        return self.name
|
sanja7s/CI_urban_rural | CI_urban_rural/__main__.py | Python | mit | 1,723 | 0.006384 | '''
Created on June 09, 2014
@author: sscepano
'''
# This one serves for the starting point
import logging
import traceback
import multiprocessing
#####################################################
# imports distributor
#####################################################
from distribute import task_manager as D
#####################################################
_log = logging.getLogger(__name__)
def | test():
print 'cpu_count() = %d\n' % multiprocessing.cpu_count()
if __name__ == '__main__' | :
logging.basicConfig(level=logging.INFO, format='%(name)s: %(levelname)-8s %(message)s')
test()
# data1 will be the read in from all 10 parallel processes
# data2 will be processed & arranged from those
data1 = None
data2 = None
while True:
raw_input("Press enter to start a process cycle:\n")
try:
reload(D)
except NameError:
_log.error("Could not reload the module.")
try:
# THIS THE FUNCTION YOU ARE TESTING
####################################################
# this is for distributing the task
####################################################
print "Distribute task started."
# homework
# data1, data2 = D.distribute_task(data1, data2)
# commuting
data1, data2 = D.distribute_task_commuting(data1, data2)
print "Distribute task finished."
####################################################
except Exception as e:
_log.error("Caught exception from the process\n%s\n%s" % (e, traceback.format_exc()))
_log.info("Cycle ready.") |
tBaxter/tango-happenings | happenings/templatetags/event_tags.py | Python | mit | 3,870 | 0.00155 | import datetime
from django import template
register = template.Library()
today = datetime.date.today()
@register.simple_tag
def get_upcoming_events_count(days=14, featured=False):
    """
    Returns count of upcoming events for a given number of days, either
    featured or all.

    Usage:
    {% get_upcoming_events_count DAYS as events_count %}
    with DAYS being the number of days ahead to look (default 14).

    Events that started up to two days ago are still counted.
    """
    from happenings.models import Event
    # Hold events for two days after they start.
    start_period = today - datetime.timedelta(days=2)
    end_period = today + datetime.timedelta(days=days)
    if featured:
        return Event.objects.filter(
            featured=True,
            start_date__gte=start_period,
            start_date__lte=end_period
        ).count()
    return Event.objects.filter(start_date__gte=start_period, start_date__lte=end_period).count()
@register.simple_tag
def get_upcoming_events(num, days, featured=False):
    """
    Get upcoming events.

    Returns at most ``num`` events, kept for ``days`` days past their
    start date; pass ``featured`` to restrict to featured events.

    Usage:
    {% get_upcoming_events 5 14 featured as events %}
    """
    from happenings.models import Event
    cutoff = today - datetime.timedelta(days=days)
    queryset = Event.objects.filter(start_date__gt=cutoff).order_by('start_date')
    if featured:
        queryset = queryset.filter(featured=True)
    return queryset[:num]
@register.simple_tag
def get_events_by_date_range(days_out, days_hold, max_num=5, featured=False):
    """
    Get upcoming events for a given number of days (days out).
    Allows specifying number of days to hold events after they've started,
    the max number to show (defaults to 5),
    and whether they should be featured or not.

    Usage:
    {% get_events_by_date_range 14 3 3 'featured' as events %}
    Would return no more than 3 featured events,
    that fall within the next 14 days or have ended within the past 3.
    """
    from happenings.models import Event
    range_start = today - datetime.timedelta(days=days_hold)
    range_end = today + datetime.timedelta(days=days_out)
    events = Event.objects.filter(
        start_date__gte=range_start,
        start_date__lte=range_end
    ).order_by('start_date')
    if featured:
        events = events.filter(featured=True)
    events = events[:max_num]
    return events
@register.inclusion_tag('happenings/includes/event_subnav.html')
def load_event_subnav(event, user=None, use_domain=False):
    """Render the sub-navigation for an event.

    When ``use_domain`` is set, an absolute ``domain`` URL prefix for
    the current site is added to the template context.
    """
    context = {'event': event, 'user': user}
    if use_domain:
        # Imported lazily so the sites framework is touched only on demand.
        from django.contrib.sites.models import Site
        site = Site.objects.get_current()
        context['domain'] = 'http://{}'.format(site.domain)
    return context
@register.inclusion_tag('happenings/includes/past_events.html')
def load_past_events():
    """Render featured events that started more than two days ago."""
    cutoff = today - datetime.timedelta(days=2)
    return {'events': Event.objects.filter(start_date__lt=cutoff, featured=True)}
@register.inclusion_tag('includes/pagination/prev_next.html')
def paginate_update(update):
    """
    Attempts to get next and previous updates (by pub_time, within the
    same event) for pagination; either may be None at the ends.

    Fixes stray ``|`` separator characters that had garbled two lines,
    and narrows the bare ``except:`` clauses to ``IndexError`` (what
    ``[0]`` on an empty queryset raises) so real database errors are no
    longer silently swallowed.
    """
    from happenings.models import Update
    time = update.pub_time
    event = update.event
    try:
        next = Update.objects.filter(
            event=event,
            pub_time__gt=time
        ).order_by('pub_time').only('title')[0]
    except IndexError:
        next = None
    try:
        previous = Update.objects.filter(
            event=event,
            pub_time__lt=time
        ).order_by('-pub_time').only('title')[0]
    except IndexError:
        previous = None
    return {'next': next, 'previous': previous, 'event': event}
|
IT-SeanWANG/CodeJam | 2016_1st/Refer2_Q3.py | Python | apache-2.0 | 2,440 | 0.007377 | #!flask/bin/python
from flask import Flask, jsonify, abort
from flask import make_response
from flask import request
app = Flask(__name__)
# simulated data in list
vnfs = [
{
'vnf_id':1,
'vnf_name': u'vnf01',
'description': u'vnf03 description'
},
{
'vnf_id':2,
'vnf_name': u'vnf02',
'description': u'Test of nvf03 descritption'
},
{
'vnf_id':3,
'vnf_name': u'vnf03',
'description': u'this is a test for vnf03'
}
]
#friendly error feedback
@app.errorhandler(404)
def not_found(error):
    """Return a JSON 404 body instead of Flask's default HTML error page.

    Repairs a stray ``|`` separator that had garbled the return line.
    """
    return make_response(jsonify({'error': 'Sorry, Nothing Found'}), 404)
#GET, Get all the VNFs info
@app.route('/todo/api/v1.0/vnfs', methods=['GET'])
def get_vnfs():
    """GET: return every VNF record as JSON."""
    return jsonify({'vnfs': vnfs}), 200
#GET, Get a VNF info
@app.route('/todo/api/v1.0/vnfs/<int:vnf_id>', methods=['GET'])
def get_vnf(vnf_id):
    """GET: return a single VNF by id, or 404 if absent."""
    # Python 2 idiom: filter() returns a list here.
    vnf = filter(lambda t: t['vnf_id'] == vnf_id, vnfs)
    if len(vnf) == 0:
        abort(404)
    return jsonify({'vnf': vnf[0]}), 200
#POST, Create a VNF
@app.route('/todo/api/v1.0/vnfs', methods=['POST'])
def create_vnf():
    """POST: create a VNF from a JSON body; 'vnf_name' is required.

    The new id is derived from the last element, so the list is assumed
    to stay sorted by id.
    """
    if not request.json or not 'vnf_name' in request.json:
        abort(400)
    vnf = {
        'vnf_id': vnfs[-1]['vnf_id'] + 1,
        'vnf_name': request.json['vnf_name'],
        'description': request.json.get('description', "")
    }
    vnfs.append(vnf)
    return jsonify({'vnf': vnf}), 201
#PUT, update a VNF infor
@app.route('/todo/api/v1.0/vnfs/<int:vnf_id>', methods=['PUT'])
def update_vnf(vnf_id):
    """PUT: update a VNF's name and/or description from a JSON body.

    404 for unknown id, 400 for a missing body or non-string fields.
    """
    # Python 2: filter() returns a list; unicode is the py2 text type.
    vnf = filter(lambda t: t['vnf_id'] == vnf_id, vnfs)
    if len(vnf) == 0:
        abort(404)
    if not request.json:
        abort(400)
    if 'vnf_name' in request.json and type(request.json['vnf_name']) != unicode:
        abort(400)
    if 'description' in request.json and type(request.json['description']) is not unicode:
        abort(400)
    vnf[0]['vnf_name'] = request.json.get('vnf_name', vnf[0]['vnf_name'])
    vnf[0]['description'] = request.json.get('description', vnf[0]['description'])
    return jsonify({'vnf': vnf[0]})
#DELETE, del a VNF
@app.route('/todo/api/v1.0/vnfs/<int:vnf_id>', methods=['DELETE'])
def delete_vnf(vnf_id):
    """DELETE: remove a VNF by id; 404 if it does not exist."""
    vnf = filter(lambda t: t['vnf_id'] == vnf_id, vnfs)
    if len(vnf) == 0:
        abort(404)
    vnfs.remove(vnf[0])
    return jsonify({'result': True}), 204
if __name__== '__main__':
app.run(host='0.0.0.0',debug=True)
|
deadc0de6/cidr | setup.py | Python | gpl-3.0 | 337 | 0 | #!/usr/bin/env python
from distutils.core import setup
# Package metadata for the cidr helper.
# Repairs a stray ``|`` separator that had garbled the 'ipaddress' entry.
setup(
    name='cidr',
    version='0.1',
    description='CIDR ranges helper',
    license='GPLv3',
    author='deadc0de6',
    url='https://github.com/deadc0de6/cidr',
    py_modules=['cidr'],
    scripts=['cidr.py'],
    install_requires=['docopt', 'netaddr', 'ipaddress'],
)
| |
romeotestuser/lc5_form_customizations | sale_order.py | Python | gpl-2.0 | 28,437 | 0.008088 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp import netsvc
class lc5_sale_order_warranty(osv.osv):
    """Selectable warranty option for sale orders."""
    _name = 'lc5.sale.order.warranty'
    _description = "Warranty"
    _columns = {
        'name':fields.char('Warranty', size=64),
    }
lc5_sale_order_warranty()
class lc5_sale_order_dlt(osv.osv):
    """Selectable delivery lead time (in days) for sale orders."""
    _name = 'lc5.sale.order.dlt'
    _description = "Delivery Lead Time"
    _columns = {
        'name':fields.char('Lead Time(Days)', size=64),
    }
lc5_sale_order_dlt()
class lc5_sale_order_pm(osv.osv):
    """Selectable payment method for sale orders.

    Repairs stray ``|`` separator characters that had garbled the
    ``_columns`` opening brace and the instantiation call.
    """
    _name = 'lc5.sale.order.pm'
    _description = "Payment Method"
    _columns = {
        'name': fields.char('Payment Method', size=64),
    }
lc5_sale_order_pm()
class lc5_sale_order_top(osv.osv):
    """Selectable terms of payment for sale orders."""
    _name = 'lc5.sale.order.top'
    _description = "Terms of Payment"
    _columns = {
        'name':fields.char('Terms of Payment', size=64),
    }
lc5_sale_order_top()
class lc5_sale_order_sig(osv.osv):
    """Reusable message signature attachable to sale orders."""
    _name = 'lc5.sale.order.signature'
    _description = "Message Signatures"
    _columns = {
        'name':fields.char('Signature', size=64),
    }
lc5_sale_order_sig()
class lc5_stock_move(osv.osv):
    """Stock move extended with a parent/child hierarchy so that products
    with supply_method == 'bundle' explode into one child move per
    bundle component.

    Bug fixes versus the previous revision:
    * ``fetch_all_child`` recursed on the same ``parent_id`` with an
      extend-of-self pattern, which never terminated for non-leaf moves;
      it now recurses into each child once.
    * ``unlink`` referenced an undefined ``result`` variable and an
      undefined ``stock_move`` class in its ``super()`` call (both
      NameErrors at runtime).
    """
    _inherit = "stock.move"
    _columns = {
        'parent_id': fields.many2one('stock.move', 'Parent', required=False),
        'child_ids': fields.one2many('stock.move', 'parent_id', 'Childs', required=False),
    }

    def create(self, cr, uid, data, context=None):
        """Create the move; for 'bundle' products also create one child
        move per bundle component, quantity scaled by the component's
        qty_uom."""
        result = super(lc5_stock_move, self).create(cr, uid, data, context=context)
        move_obj = self.browse(cr, uid, result)
        product_supply_method = move_obj.product_id.supply_method
        if product_supply_method == 'bundle':
            for item in move_obj.product_id.item_ids:
                temp_val = data.copy()
                temp_val['product_id'] = item.item_id.id
                temp_val['product_qty'] = data['product_qty'] * item.qty_uom
                temp_val['parent_id'] = result
                # Recursive create handles nested bundles.
                self.create(cr, uid, temp_val, context=context)
        return result

    def fetch_all_child(self, cr, uid, parent_id, result=None):
        """Return the ids of all (transitive) descendants of ``parent_id``.

        ``result`` is an accumulator list; a fresh list is created when
        it is not supplied.
        """
        if result is None:
            result = []
        child_ids = self.search(cr, uid, [('parent_id', '=', parent_id)])
        result.extend(child_ids)
        for child_id in child_ids:
            # Recurse into each child (not the same parent) so the walk
            # terminates and grandchildren are collected exactly once.
            self.fetch_all_child(cr, uid, child_id, result)
        return result

    def unlink(self, cr, uid, ids, context=None):
        """Delete the given moves together with all their descendants."""
        temp_ids = ids if isinstance(ids, list) else [ids]
        target_ids = []
        for temp_id in temp_ids:
            self.fetch_all_child(cr, uid, temp_id, target_ids)
        # Delete descendants and the requested moves in a single call so
        # no record is unlinked twice.
        target_ids.extend(temp_ids)
        return super(lc5_stock_move, self).unlink(cr, uid, target_ids, context=context)
class lc5_sale_order(osv.osv):
_inherit = 'sale.order'
_name="sale.order"
_columns = {
'project':fields.char('Project', size=64, required=False),
#'purchase_order_reference':fields.char('PO Ref.', size=64, required=False),
'warranty_id':fields.many2one('lc5.sale.order.warranty','Warranty', required=False),
'delivery_lead_time_id':fields.many2one('lc5.sale.order.dlt','Delivery Lead Time', required=False),
'payment_method_id':fields.many2one('lc5.sale.order.pm','Payment Method', required=False),
'terms_of_payment_id':fields.many2one('lc5.sale.order.top','Terms of Payment', required=False),
'intro':fields.text('Introduction', placeholder="Type your introduction..", required=False),
'closing':fields.text('Closing Message', placeholder="Type your closing message..", required=False),
'signature':fields.many2one('lc5.sale.order.signature','Signature', required=False),
'prepared_by_id':fields.many2one('res.users', 'Prepared By', required=False),
'verified_by_id':fields.many2one('res.users', 'Verified By', required=False),
'approved_by_id':fields.many2one('res.users', 'Approved By', required=False),
}
def _create_pickings_and_procurements(self, cr, uid, order, order_lines, picking_id=False, context=None):
print "here".upper()
"""Create the required procurements to supply sales order lines, also connecting
the procurements to appropriate stock moves in order to bring the goods to the
sales order's requested location.
If ``picking_id`` is provided, the stock moves will be added to it, otherwise
a standard outgoing picking will be created to wrap the stock moves, as returned
by :meth:`~._prepare_order_picking`.
Modules that wish to customize the procurements or partition the stock moves over
multiple stock pickings may override this method and call ``super()`` with
different subsets of ``order_lines`` and/or preset ``picking_id`` values.
:param browse_record order: sales order to which the order lines belong
:param list(browse_record) order_lines: sales order line records to procure
:param int picking_id: optional ID of a stock picking to which the created stock moves
will be added. A new picking will be created if ommitted.
:return: True
"""
move_obj = self.pool.get('stock.move')
picking_obj = self.pool.get('stock.picking')
procurement_obj = self.pool.get('procurement.order')
proc_ids = []
location_id = order.shop_id.warehouse_id.lot_stock_id.id
outpu |
treehopper-electronics/treehopper-sdk | Python/treehopper/api/__init__.py | Python | mit | 976 | 0.003074 | """Base Treehopper API for Python.
This module provides digital and analog I/O, hardware and software PWM, I2C, SPI, and UART support.
"""
## @namespace treehopper.api
from treehopper.api.interfaces import *
from treehopper.api.pin import Pin, PinMode, ReferenceLevel
from treehopper.api.device_commands import DeviceCommands
from treehopper.api.pwm import HardwarePwm, HardwarePwmFrequency
from treehopper.api.treehopper_usb import TreehopperUsb
from treehopper.api.find_boards import find_boards
from treehopper.api.i2c import I2C
from treehopper.api.spi import HardwareSpi
from treehopper.api.i2c import HardwareI2C
from treehopper.api.uart import HardwareUart
# Public API surface of the treehopper.api package; names are re-exported
# from the submodule imports above. ('OneWire' and 'HardwarePwm' were
# garbled by stray separator characters in the original.)
__all__ = ['find_boards', 'TreehopperUsb',
           'Pin', 'PinMode', 'ReferenceLevel',
           'HardwareSpi', 'Spi', 'ChipSelectMode', 'SpiMode', 'SpiBurstMode',
           'HardwareI2C', 'I2C',
           'HardwareUart', 'Uart', 'OneWire',
           'HardwarePwm', 'HardwarePwmFrequency',
           ]
|
strycore/megascops | video/tasks.py | Python | agpl-3.0 | 1,094 | 0 | # -*- coding: utf8 -*-
from __future__ import absolute_import
import os
import logging
from celery import task, current_task
from django.conf import settings
from .models import Video
from .quvi import Quvi
from .utils import download_thumbnail, celery_download, encode_videos
LOGGER = logging.getLogger(__name__)
@task
def fetch_video(quvi_dump, video_id):
    """Celery task: download a video stream into MEDIA_ROOT.

    :param quvi_dump: serialized quvi state used to rebuild stream/thumbnail
                      URLs (the original docstring wrongly described a
                      ``video`` parameter).
    :param video_id: primary key of the ``Video`` row to download
    """
    quvi = Quvi(dump=quvi_dump)
    video = Video.objects.get(pk=video_id)
    if quvi.thumbnail_url:
        video.thumbnail = download_thumbnail(quvi.thumbnail_url)
    # Fixed garbled ``settings.ME | DIA_ROOT`` reference.
    dest_path = os.path.join(settings.MEDIA_ROOT, video.path)
    celery_download(quvi.stream.url, dest_path, current_task)
    video.state = "READY"
    video.save()
@task
def encode_task(video_id):
    """Celery task: encode the video identified by ``video_id`` with avconv."""
    try:
        video = Video.objects.get(pk=video_id)
    except Video.DoesNotExist:
        # Report through the module logger instead of a bare print statement.
        LOGGER.error("The requested video does not exist")
        return
    # Fixed garbled ``encode_videos | (video)`` call.
    encode_videos(video)
|
openstack/cinder | cinder/volume/drivers/huawei/huawei_driver.py | Python | apache-2.0 | 23,610 | 0 | # Copyright (c) 2016 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from oslo_log import log as logging
from oslo_utils import strutils
from cinder import coordination
from cinder import exception
from cinder.i18n import _
from cinder import interface
from cinder.volume import driver
from cinder.volume.drivers.huawei import common
from cinder.volume.drivers.huawei import constants
from cinder.volume.drivers.huawei import fc_zone_helper
from cinder.volume.drivers.huawei import huawei_utils
from cinder.volume.drivers.huawei import hypermetro
from cinder.zonemanager import utils as fczm_utils
LOG = logging.getLogger(__name__)
@interface.volumedriver
class HuaweiISCSIDriver(common.HuaweiBaseDriver, driver.ISCSIDriver):
"""ISCSI driver for Huawei storage arrays.
Version history:
.. code-block:: none
1.0.0 - Initial driver
1.1.0 - Provide Huawei OceanStor storage 18000 driver
1.1.1 - Code refactor
CHAP support
Multiple pools support
ISCSI multipath support
SmartX support
Volume migration support
Volume retype support
2.0.0 - Rename to HuaweiISCSIDriver
2.0.1 - Manage/unmanage volume support
2.0.2 - Refactor HuaweiISCSIDriver
2.0.3 - Manage/unmanage snapshot support
2.0.5 - Replication V2 support
2.0.6 - Support iSCSI configuration in Replication
2.0.7 - Hypermetro support
Hypermetro consistency group support
Consistency group support
Cgsnapshot support
2.0.8 - Backup snapshot optimal path support
2.0.9 - Support reporting disk type of pool
"""
VERSION = "2.0.9"
    def __init__(self, *args, **kwargs):
        """Initialize the iSCSI driver via the shared Huawei base driver."""
        super(HuaweiISCSIDriver, self).__init__(*args, **kwargs)
def get_volume_stats(self, refresh=False):
"""Get volume status."""
data = self._get_volume_stats(refresh=False)
data['storage_protocol'] = 'iSCSI'
data['driver_version'] = self.VERSION
return data
    @coordination.synchronized('huawei-mapping-{connector[host]}')
    def initialize_connection(self, volume, connector):
        """Map a volume to a host and return target iSCSI information.

        :param volume: Cinder volume object to export.
        :param connector: dict describing the connecting host; must contain
            ``initiator`` (IQN) and ``host``, may contain ``multipath``.
        :returns: dict with ``driver_volume_type`` set to ``'iscsi'`` and a
            ``data`` dict holding target portal(s)/IQN(s)/LUN id(s) and,
            when configured on the array, CHAP credentials.
        """
        lun_id, lun_type = self.get_lun_id_and_type(volume)
        initiator_name = connector['initiator']
        LOG.info(
            'initiator name: %(initiator_name)s, '
            'LUN ID: %(lun_id)s.',
            {'initiator_name': initiator_name,
             'lun_id': lun_id})
        # Resolve the array-side iSCSI endpoints configured for this connector.
        (iscsi_iqns,
         target_ips,
         portgroup_id) = self.client.get_iscsi_params(connector)
        LOG.info('initialize_connection, iscsi_iqn: %(iscsi_iqn)s, '
                 'target_ip: %(target_ip)s, '
                 'portgroup_id: %(portgroup_id)s.',
                 {'iscsi_iqn': iscsi_iqns,
                  'target_ip': target_ips,
                  'portgroup_id': portgroup_id},)
        # Create hostgroup if not exist.
        host_id = self.client.add_host_with_check(connector['host'])
        # Add initiator to the host.
        self.client.ensure_initiator_added(initiator_name,
                                           host_id)
        hostgroup_id = self.client.add_host_to_hostgroup(host_id)
        # Mapping lungroup and hostgroup to view.
        self.client.do_mapping(lun_id, hostgroup_id,
                               host_id, portgroup_id,
                               lun_type)
        # The host-side LUN id may differ from the array-side LUN id.
        hostlun_id = self.client.get_host_lun_id(host_id, lun_id,
                                                 lun_type)
        LOG.info("initialize_connection, host lun id is: %s.",
                 hostlun_id)
        chapinfo = self.client.find_chap_info(self.client.iscsi_info,
                                              initiator_name)
        # Return iSCSI properties.
        properties = {}
        properties['target_discovered'] = False
        properties['volume_id'] = volume.id
        multipath = connector.get('multipath', False)
        hostlun_id = int(hostlun_id)
        if not multipath:
            properties['target_portal'] = ('%s:3260' % target_ips[0])
            properties['target_iqn'] = iscsi_iqns[0]
            properties['target_lun'] = hostlun_id
        else:
            # Multipath: expose every portal/IQN pair; the host LUN id is the
            # same on every path.
            properties['target_iqns'] = [iqn for iqn in iscsi_iqns]
            properties['target_portals'] = [
                '%s:3260' % ip for ip in target_ips]
            properties['target_luns'] = [hostlun_id] * len(target_ips)
        # If use CHAP, return CHAP info.
        if chapinfo:
            # chapinfo is stored as "username;password" on the array side.
            chap_username, chap_password = chapinfo.split(';')
            properties['auth_method'] = 'CHAP'
            properties['auth_username'] = chap_username
            properties['auth_password'] = chap_password
        LOG.info("initialize_connection success. Return data: %s.",
                 strutils.mask_password(properties))
        return {'driver_volume_type': 'iscsi', 'data': properties}
@coordination.synchronized('huawei-mapping-{connector[host]}')
def terminate_connection(self, volume, connector, **kwargs):
"""Delete map between a volume and a host."""
lun_id, lun_type = self.get_lun_id_and_type(volume)
initiator_name = connector['initiator']
host_name = connector['host']
lungroup_id = None
LOG.info(
'terminate_connection: initiator name: %(ini)s, '
'LUN ID: %(lunid)s.',
{'ini': initiator_name,
'lunid': lun_id},)
portgroup = None
portgroup_id = None
view_id = None
left_lunnum = -1
ini = self.client.iscsi_info['initiators'].get(initiator_name)
if ini and ini.get('TargetPortGroup'):
portgroup = ini['TargetPortGroup']
if portgroup:
portgroup_id = self.client.get_tgt_port_group(portgroup)
host_id = huawei_utils.get_host_id(self.client, host_name)
if host_id:
mapping_view_name = constants.MAPPING_VIEW_PREFIX + host_id
view_id = self.client.find_mapping_view(mapping_view_name)
if view_id:
lungroup_id = self.client.find_lungroup_from_map(view_id)
# Remove lun from lungroup.
if lun_id and lungroup_id:
lungroup_ids = self.client.get_lungroupids_by_lunid(
lun_id, lun_type)
if lungroup_id in lungroup_ids:
self.client.remove_lun_from_lungroup(lungroup_id,
lun_id,
lun_type)
else:
LOG.warning("LUN is not in lungroup. "
"LUN ID: %(lun_id)s. "
"Lungroup id: %(lungroup_id)s.",
{"lun_id": lun_id,
"lungroup_id": lungroup_id})
# Remove portgroup from mapping view if no lun left in lungroup.
if lungroup_id:
left_lunnum = self.client.get_obj_count_from_lungroup(lungroup_id)
if portgroup_id and view_id and (int(left_lunnum) <= 0):
            if self.client.is_portgroup_associated_to_view(view_id,
                                                           portgroup_id):
self.client.delete_portgroup_mapping_view(view_id,
portgroup_id)
        if view_id and (int(left_lunnum) <= 0):
self.client.remove_chap(initiator_n |
randomdude999/mcinfo | mcinfo/recipes.py | Python | mit | 5,016 | 0.000199 | from __future__ import unicode_literals
class SmeltingRecipe:
    """Recipe obtained by smelting a single ingredient in a furnace."""

    def __init__(self, data):
        # ``data`` is the parsed recipe dict; only the furnace input matters.
        self.input = data["input"]

    def __str__(self):
        return "Smelt {}".format(self.input)
class BrewingRecipe:
    """Recipe that brews a modifier ingredient into a base potion."""

    def __init__(self, data):
        self.base = data["base"]
        self.modifier = data["modifier"]

    def __str__(self):
        return "Brew {} into {}".format(self.modifier, self.base)
class TradingRecipe:
    """Recipe for buying an item from a villager for emeralds, optionally
    together with a secondary payment item.

    Two format strings in ``__str__`` were garbled by stray separator
    characters ("Emeralds | " and a broken continuation line); they are
    reconstructed here.
    """

    def __init__(self, data):
        self.price_min = data["price_min"]
        self.price_max = data["price_max"]
        self.single_price = (self.price_min == 1 and self.price_max == 1)
        try:
            # Both keys must be present for a secondary item to count.
            self.secondary_item = data["secondary_item"]
            self.secondary_count = data["secondary_count"]
            self.has_secondary_item = True
        except KeyError:
            self.secondary_item = None
            self.secondary_count = 0
            self.has_secondary_item = False
        self.out_count_min = data["out_count_min"]
        self.out_count_max = data["out_count_max"]
        self.single_out = (self.out_count_min == 1 and self.out_count_max == 1)
        self.career = data["career"]

    def __str__(self):
        out = "Can be bought from {0} for ".format(self.career.capitalize())
        if self.single_price:
            out += "1 Emerald"
        else:
            out += "{0}-{1} Emeralds".format(self.price_min, self.price_max)
        if self.has_secondary_item:
            out += " and {0} {1}.".format(self.secondary_count,
                                          self.secondary_item)
        else:
            out += "."
        if not self.single_out:
            out += " Yields {0}-{1}.".format(self.out_count_min,
                                             self.out_count_max)
        return out
class ChestLootRecipe:
    """Recipe for an item found as generated chest loot."""

    def __init__(self, data):
        self.location = data["location"]
        self.chance = data["chance"]

    def __str__(self):
        return "Can be found from {} with {}% chance.".format(
            self.location, self.chance)
class MobDropRecipe:
    """Recipe for an item dropped by a mob, either commonly (count range)
    or rarely (percentage chance)."""

    def __init__(self, data):
        self.drop_type = data["drop_type"]
        self.source = data["source"]
        if self.drop_type == "common":
            self.min_count = data["min_count"]
            self.max_count = data["max_count"]
            self.drops_single = self.min_count == 1 and self.max_count == 1
        elif self.drop_type == "rare":
            self.drop_chance = data["drop_chance"]
        # Optional suffix like "when killed by a player".
        self.extra_conditions = data.get("extra_conditions", "")

    def __str__(self):
        if self.drop_type == "common":
            if self.drops_single:
                text = "{} drops 1 on death".format(self.source)
            else:
                text = "{} drops {}-{} on death".format(
                    self.source, self.min_count, self.max_count)
        else:
            text = "{} has {}% chance to drop on death".format(
                self.source, self.drop_chance)
        if self.extra_conditions:
            text += " " + self.extra_conditions
        return text + "."
class CraftingRecipe:
    """Crafting-table recipe, either shaped (symbol grid + legend) or
    shapeless (plain item list)."""

    def __init__(self, data):
        self.is_shaped = data["is_shaped"]
        if self.is_shaped:
            self.item_map = data["item_map"]
            self.grid = data["grid"]
            self.grid_size = len(self.grid)
        else:
            self.items = data["items"]

    def pretty_print_crafting(self):
        """Render the shaped grid as an ASCII table."""
        divider = ("+---" * self.grid_size) + "+\n"
        rendered = divider
        for row in self.grid:
            cells = "".join(
                " {0} |".format(cell if cell != "" else " ") for cell in row)
            rendered += "|" + cells + "\n" + divider
        return rendered

    def __str__(self):
        if not self.is_shaped:
            return "Shapeless" + "".join("\n* " + it for it in sorted(self.items))
        rendered = self.pretty_print_crafting()
        for symbol, item in sorted(self.item_map.items()):
            rendered += "{0}: {1}\n".format(symbol, item)
        return rendered
# Dispatch table: maps the "type" field of a recipe dict to the class that
# knows how to parse and pretty-print that kind of recipe.
type_map = {
    "smelting": SmeltingRecipe,
    "brewing": BrewingRecipe,
    "trading": TradingRecipe,
    "chest_loot": ChestLootRecipe,
    "mob_loot": MobDropRecipe,
    "crafting": CraftingRecipe
}
class Recipe:
    """Wrapper that dispatches raw recipe data to the matching recipe class
    via ``type_map``."""

    def __init__(self, data):
        method = data["type"]
        if method not in type_map:
            raise ValueError("No such crafting method")
        self.method = method
        self.recipe = type_map[method](data)

    def __str__(self):
        return str(self.recipe)
class RecipeCollection:
    """Ordered collection of ``Recipe`` objects parsed from a list of
    recipe dicts."""

    def __init__(self, data):
        self.recipes = [Recipe(item) for item in data]

    def __str__(self):
        rendered = ""
        for recipe in self.recipes:
            rendered += "{0} recipe: \n{1}\n".format(
                recipe.method.capitalize(), recipe.recipe)
        return rendered
|
ThomasHabets/python-pyhsm | Tests/test_yubikey_validate.py | Python | bsd-2-clause | 5,120 | 0.007227 | # Copyright (c) 2011, Yubico AB
# All rights reserved.
import os
import sys
import string
import struct
import unittest
import pyhsm
from Crypto.Cipher import AES
from pyhsm.yubikey import modhex_encode, modhex_decode
import test_common
class YubiKeyEmu():
    """
    Emulate the internal memory of a YubiKey.

    Holds the fields of the YubiKey "ticket" (user id, usage counter,
    timestamp, session counter plus a random filler) and can produce the
    AES-ECB encrypted OTP a real key would emit.
    """
    def __init__(self, user_id, use_ctr, timestamp, session_ctr):
        # user_id must be exactly UID_SIZE (6) bytes.
        if len(user_id) != pyhsm.defines.UID_SIZE:
            raise pyhsm.exception.YHSM_WrongInputSize(
                'user_id', pyhsm.defines.UID_SIZE, len(user_id))
        self.user_id = user_id
        self.use_ctr = use_ctr
        self.timestamp = timestamp
        self.session_ctr = session_ctr
        # 16-bit random filler, as on real hardware.
        self.rnd = struct.unpack('H', os.urandom(2))[0]
    def pack(self):
        """
        Return contents packed. Only add AES ECB encryption and modhex to
        get your own YubiKey OTP.
        """
        #define UID_SIZE 6
        #typedef struct {
        #  uint8_t userId[UID_SIZE];
        #  uint16_t sessionCtr; # NOTE: this is use_ctr
        #  uint24_t timestamp;
        #  uint8_t sessionUse; # NOTE: this is session_ctr
        #  uint16_t rnd;
        #  uint16_t crc;
        #} TICKET;
        # "<" little-endian; the 24-bit timestamp is split into a 16-bit low
        # part and an 8-bit high part to match the C struct layout above.
        fmt = "< %is H HB B H" % (pyhsm.defines.UID_SIZE)
        ts_high = (self.timestamp & 0x00ff0000) >> 16
        ts_low = self.timestamp & 0x0000ffff
        res = struct.pack(fmt, self.user_id, \
                              self.use_ctr, \
                              ts_low, ts_high, \
                              self.session_ctr, \
                              self.rnd)
        # Ticket CRC is the one's complement of the ISO13239 CRC.
        crc = 0xffff - crc16(res)
        return res + struct.pack('<H', crc)
    def get_otp(self, key):
        """
        Return the raw AES-ECB-encrypted ticket for this state (modhex
        encoding is applied by from_key(), not here).
        """
        packed = self.pack()
        obj = AES.new(key, AES.MODE_ECB)
        ciphertext = obj.encrypt(packed)
        return ciphertext
    def from_key(self, public_id, key):
        """
        Return what the YubiKey would have returned when the button was pressed.

        The result is modhex(public_id) + modhex(encrypted ticket).
        """
        otp = self.get_otp(key)
        from_key = modhex_encode(public_id.encode('hex')) + modhex_encode(otp.encode('hex'))
        return from_key
class YubiKeyRnd(YubiKeyEmu):
    """ YubiKeyEmu with everything but user_id randomized. """

    def __init__(self, user_id):
        # 7 random bytes: 4-byte timestamp, 2-byte session counter and a
        # 1-byte session-use value. (The base-class call below was garbled
        # in the original: "sessio | n_counter".)
        timestamp, session_counter, session_use = struct.unpack('IHB', os.urandom(7))
        YubiKeyEmu.__init__(self, user_id, session_counter, timestamp, session_use)
def crc16(data):
    """
    Calculate an ISO13239 CRC checksum of the input buffer.

    Bit-reflected CRC with polynomial 0x8408 and initial value 0xffff,
    processing one character of ``data`` at a time.
    """
    checksum = 0xffff
    for ch in data:
        checksum ^= ord(ch)
        for _ in range(8):
            lsb = checksum & 1
            checksum >>= 1
            if lsb:
                checksum ^= 0x8408
    return checksum
class TestYubikeyValidate(test_common.YHSM_TestCase):
    """Exercise YubiKey OTP / AEAD validation against a YubiHSM device."""

    def setUp(self):
        # The base-class reference was garbled in the original
        # ("test_common.YHSM_TestCas | e.setUp(self)").
        test_common.YHSM_TestCase.setUp(self)
        self.yk_key = 'F' * 16 # 128 bit AES key
        self.yk_uid = '\x4d\x01\x4d\x02\x4d\x4d'
        self.yk_rnd = YubiKeyRnd(self.yk_uid)
        self.yk_public_id = '4d4d4d4d4d4d'.decode('hex')
        secret = pyhsm.aead_cmd.YHSM_YubiKeySecret(self.yk_key, self.yk_uid)
        self.hsm.load_secret(secret)
        #self.kh_generate = 0x06 # key handle 0x9 is allowed to generate aeads
        #self.kh_validate = 0x1000 # key handle 0x1000 is allowed to validate aeads and have the same key as 0x9
        # YubiHSM includes key handle id in AES-CCM of aeads, so we must use same
        # key to generate and validate. Key 0x2000 has all flags.
        self.kh_generate = 0x2000
        self.kh_validate = 0x2000
        self.aead = self.hsm.generate_aead(self.yk_public_id, self.kh_generate)

    def test_validate_aead_cmp(self):
        """ Test that the AEAD generated contains our secrets. """
        secret = pyhsm.aead_cmd.YHSM_YubiKeySecret(self.yk_key, self.yk_uid)
        cleartext = secret.pack()
        self.assertTrue(self.hsm.validate_aead(self.yk_public_id, self.kh_validate, self.aead, cleartext))
        # Flipping the first byte must make validation fail.
        wrong_cleartext = 'X' + cleartext[1:]
        self.assertFalse(self.hsm.validate_aead(self.yk_public_id, self.kh_validate, self.aead, wrong_cleartext))

    def test_validate_yubikey(self):
        """ Test validate YubiKey OTP. """
        from_key = self.yk_rnd.from_key(self.yk_public_id, self.yk_key)
        self.assertTrue(pyhsm.yubikey.validate_yubikey_with_aead( \
            self.hsm, from_key, self.aead.data, self.kh_validate))

    def test_modhex_encode_decode(self):
        """ Test modhex encoding/decoding. """
        h = '4d014d024d4ddd5382b11195144da07d'
        self.assertEquals(h, modhex_decode( modhex_encode(h) ) )

    def test_split_id_otp(self):
        """ Test public_id + OTP split function. """
        public_id, otp, = pyhsm.yubikey.split_id_otp("ft" * 16)
        self.assertEqual(public_id, '')
        self.assertEqual(otp, "ft" * 16)
        public_id, otp, = pyhsm.yubikey.split_id_otp("cc" + "ft" * 16)
        self.assertEqual(public_id, 'cc')
        self.assertEqual(otp, "ft" * 16)
manassolanki/erpnext | erpnext/healthcare/doctype/physician_service_unit_schedule/physician_service_unit_schedule.py | Python | gpl-3.0 | 280 | 0.007143 | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt

# The __future__ import below was garbled in the original ("__f | uture__").
from __future__ import unicode_literals
from frappe.model.document import Document


class PhysicianServiceUnitSchedule(Document):
    """Child-table DocType for a physician's service unit schedule.

    No custom behaviour; all logic comes from the frappe ``Document`` base.
    """
    pass
|
grafke/Drone-workflow-controller | drone/job_runner/job_runner.py | Python | apache-2.0 | 3,220 | 0.00559 | import sys
from drone.actions.emr_launcher import launch_emr_task
from drone.actions.ssh_launcher import launch_ssh_task
from drone.job_runner.dependency_manager import dependencies_are_met
from drone.job_runner.job_progress_checker import check_running_job_progress
from drone.metadata.metadata import get_job_info, job_status, set_ready, set_running, set_failed
task_launcher = {'ssh': launch_ssh_task,
'emr': launch_emr_task}
def process(job_config, settings):
    """Advance every recorded run of ``job_config`` one step through its
    state machine.

    For each (schedule_time, status, ...) row of the job in the metadata
    store: retry failed runs that still have retries left, poll running
    ones, start ready ones, and promote not-ready runs whose dependencies
    are now met. An unknown status aborts the controller.
    """
    for job_id, schedule_time, execution_time, status, runs, uid in get_job_info(
            job_config.get('id'), db_name=settings.metadata):
        if status == job_status.get('failed'):
            # Re-run a failed job only while it has retries left.
            max_retries = int(job_config.get('retry')) if job_config.get('retry') else 0
            if max_retries > int(runs):
                settings.logger.debug(
                    '%s runs %s. set retries %s.' % (job_config.get('id'), runs, job_config.get('retry')))
                if dependencies_are_met(job_config, schedule_time, settings):
                    set_ready(job_config.get('id'), schedule_time, db_name=settings.metadata)
                    settings.logger.info('Job "%s" "%s" set as ready' % (job_config.get('id'), schedule_time))
                    run(job_config, schedule_time, settings)
        elif status == job_status.get('running'):
            check_running_job_progress(job_config, schedule_time, uid, settings)
        elif status == job_status.get('ready'):
            # This call was garbled in the original ("run(job_ | config, s | ettings)")
            # and appeared to drop the schedule_time argument required by run().
            run(job_config, schedule_time, settings)
        elif status == job_status.get('succeeded'):
            continue
        elif status == job_status.get('not_ready'):
            if dependencies_are_met(job_config, schedule_time, settings):
                set_ready(job_config.get('id'), schedule_time, db_name=settings.metadata)
                settings.logger.info('Job "%s" "%s" set as ready' % (job_config.get('id'), schedule_time))
                run(job_config, schedule_time, settings)
        else:
            settings.logger.error('Unknown job status "%s"' % status)
            sys.exit(1)
def run(job_config, schedule_time, settings):
    """Launch a single job run and record the outcome in the metadata store.

    The original validated the job type with ``assert`` inside a bare
    ``except:`` (stripped under ``python -O`` and swallowing everything);
    this keeps the same best-effort behaviour — warn but still attempt the
    dispatch — with an explicit check.
    """
    settings.logger.info('Starting job "%s" "%s"' % (job_config.get('id'), schedule_time))
    job_type = job_config.get('type')
    if job_type not in settings.supported_job_types:
        settings.logger.warning(
            'Unsupported job type %s. Valid types are %s' % (job_type, str(settings.supported_job_types)))
    task_launched_successfully, uid = task_launcher.get(job_type)(job_config, schedule_time, settings)
    if task_launched_successfully:
        set_running(job_config.get('id'), schedule_time, uid, db_name=settings.metadata)
        settings.logger.info('Started job "%s" "%s"' % (job_config.get('id'), schedule_time))
    else:
        set_failed(job_config.get('id'), schedule_time, db_name=settings.metadata)
        settings.logger.warning('Failed to start job "%s" "%s"' % (job_config.get('id'), schedule_time))
|
ndp-systemes/odoo-addons | stock_no_recompute/__openerp__.py | Python | agpl-3.0 | 1,399 | 0 | # -*- coding: utf8 -*-
#
#    Copyright (C) 2017 NDP Systèmes (<http://www.ndp-systemes.fr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Odoo/OpenERP module manifest: metadata, dependencies and install flags
# for the "Stock No Recompute Check" addon.
{
    'name': 'Stock No Recompute Check',
    'version': '0.1',
    'author': 'NDP Systèmes',
    'maintainer': 'NDP Systèmes',
    'category': 'Warehouse',
    'depends': ['stock'],
    'description': """
Stock No Recompute Check
========================
This module removes the use of the recompute_pack_op field on stock pickings
so that parallel processing of stock moves can be performed without locking.
""",
    'website': 'http://www.ndp-systemes.fr',
    'data': [],
    'demo': [],
    'test': [],
    'installable': True,
    'auto_install': False,
    'license': 'AGPL-3',
    'application': False,
}
|
Naeka/vosae-app | www/contacts/models/embedded/address.py | Python | agpl-3.0 | 2,877 | 0.000695 | # -*- coding:Utf-8 -*-
from django.utils.translation import pgettext_lazy
from mongoengine import EmbeddedDocument, fields
__all__ = (
'Address',
)
class Address(EmbeddedDocument):
    """An address wrapper which can be embedded in any object.

    Two spans were garbled by stray separator characters in the original
    ("Street addres | s" in a docstring and "for line i | n ret" in the
    final comprehension); both are reconstructed here.
    """

    TYPES = (
        ("HOME", pgettext_lazy("address type", "Home")),
        ("WORK", pgettext_lazy("address type", "Work")),
        ("DELIVERY", pgettext_lazy("address type", "Delivery")),
        ("BILLING", pgettext_lazy("address type", "Billing")),
        ("OTHER", pgettext_lazy("address type", "Other"))
    )

    label = fields.StringField(max_length=64)
    type = fields.StringField(choices=TYPES)
    postoffice_box = fields.StringField(max_length=64)
    street_address = fields.StringField(required=True, max_length=128)
    extended_address = fields.StringField(max_length=128)
    postal_code = fields.StringField(max_length=16)
    city = fields.StringField(max_length=64)
    state = fields.StringField(max_length=64)
    country = fields.StringField(max_length=64)
    geo_point = fields.GeoPointField()

    def __eq__(self, other):
        """Equal comparison should only be based on fields values"""
        if not isinstance(other, Address):
            return False
        # Compare every declared field pairwise instead of one giant
        # boolean chain.
        field_names = (
            'label', 'type', 'postoffice_box', 'street_address',
            'extended_address', 'postal_code', 'city', 'state',
            'country', 'geo_point',
        )
        return all(getattr(self, name) == getattr(other, name)
                   for name in field_names)

    @staticmethod
    def concat_fields(field1, field2):
        """
        Method used in the :func:`~contacts.models.Address.get_formatted` method
        to concatenate fields like state and country.
        """
        if field1 and field2:
            return ", ".join([field1, field2])
        # Return whichever field is set, or None when both are empty.
        return field1 or field2 or None

    def get_formatted(self):
        """
        Returns a concatenated list of :class:`~contacts.models.Address` attributes:

        - Street address
        - Extended address, post office box
        - Postal code, City
        - State, Country
        """
        ret = [
            self.street_address,
            Address.concat_fields(self.extended_address, self.postoffice_box),
            Address.concat_fields(self.postal_code, self.city),
            Address.concat_fields(self.state, self.country)
        ]
        # Returns only non-blank lines
        return [line for line in ret if line]
|
sharvanath/cassandra | bin/cqlsh.py | Python | apache-2.0 | 104,917 | 0.002678 | #!/bin/sh
# -*- mode: Python -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""":"
# bash code here; finds a suitable python interpreter and execs this file.
# prefer unqualified "python" if suitable:
python -c 'import sys; sys.exit(not (0x020700b0 < sys.hexversion < 0x03000000))' 2>/dev/null \
&& exec python "$0" "$@"
for pyver in 2.7; do
which python$pyver > /dev/null 2>&1 && exec python$pyver "$0" "$@"
done
echo "No appropriate python interpreter found." >&2
exit 1
":"""
from __future__ import with_statement
import cmd
import codecs
import ConfigParser
import csv
import getpass
import optparse
import os
import platform
import sys
import traceback
import warnings
import webbrowser
from StringIO import StringIO
from contextlib import contextmanager
from glob import glob
from uuid import UUID
if sys.version_info[0] != 2 or sys.version_info[1] != 7:
sys.exit("\nCQL Shell supports only Python 2.7\n")
# see CASSANDRA-10428
if platform.python_implementation().startswith('Jython'):
sys.exit("\nCQL Shell does not run on Jython\n")
UTF8 = 'utf-8'
CP65001 = 'cp65001' # Win utf-8 variant
description = "CQL Shell for Apache Cassandra"
version = "5.0.1"
readline = None
try:
# check if tty first, cause readline doesn't check, and only cares
# about $TERM. we don't want the funky escape code stuff to be
# output if not a tty.
if sys.stdin.isatty():
import readline
except ImportError:
pass
CQL_LIB_PREFIX = 'cassandra-driver-internal-only-'
CASSANDRA_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')
CASSANDRA_CQL_HTML_FALLBACK = 'https://cassandra.apache.org/doc/cql3/CQL-3.2.html'
# default location of local CQL.html
if os.path.exists(CASSANDRA_PATH + '/doc/cql3/CQL.html'):
# default location of local CQL.html
CASSANDRA_CQL_HTML = 'file://' + CASSANDRA_PATH + '/doc/cql3/CQL.html'
elif os.path.exists('/usr/share/doc/cassandra/CQL.html'):
# fallback to package file
CASSANDRA_CQL_HTML = 'file:///usr/share/doc/cassandra/CQL.html'
else:
# fallback to online version
CASSANDRA_CQL_HTML = CASSANDRA_CQL_HTML_FALLBACK
# On Linux, the Python webbrowser module uses the 'xdg-open' executable
# to open a file/URL. But that only works, if the current session has been
# opened from _within_ a desktop environment. I.e. 'xdg-open' will fail,
# if the session's been opened via ssh to a remote box.
#
# Use 'python' to get some information about the detected browsers.
# >>> import webbrowser
# >>> webbrowser._tryorder
# >>> webbrowser._browser
#
if len(webbrowser._tryorder) == 0:
CASSANDRA_CQL_HTML = CASSANDRA_CQL_HTML_FALLBACK
elif webbrowser._tryorder[0] == 'xdg-open' and os.environ.get('XDG_DATA_DIRS', '') == '':
# only on Linux (some OS with xdg-open)
webbrowser._tryorder.remove('xdg-open')
webbrowser._tryorder.append('xdg-open')
# use bundled libs for python-cql and thrift, if available. if there
# is a ../lib dir, use bundled libs there preferentially.
ZIPLIB_DIRS = [os.path.join(CASSANDRA_PATH, 'lib')]
myplatform = platform.system()
is_win = myplatform == 'Windows'
# Workaround for supporting CP65001 encoding on python < 3.3 (https://bugs.python.org/issue13216)
if is_win and sys.version_info < (3, 3):
codecs.register(lambda name: codecs.lookup(UTF8) if name == CP65001 else None)
if myplatform == 'Linux':
ZIPLIB_DIRS.append('/usr/share/cassandra/lib')
if os.environ.get('CQLSH_NO_BUNDLED', ''):
ZIPLIB_DIRS = ()
def find_zip(libprefix):
    """Return the best bundled zip whose basename starts with ``libprefix``.

    Scans each candidate lib directory in order and returns the
    lexicographically greatest match (probably the highest version when
    several exist); returns None when nothing matches anywhere.
    """
    for libdir in ZIPLIB_DIRS:
        candidates = glob(os.path.join(libdir, libprefix + '*.zip'))
        if candidates:
            return max(candidates)
    return None
cql_zip = find_zip(CQL_LIB_PREFIX)
if cql_zip:
ver = os.path.splitext(os.path.basename(cql_zip))[0][len(CQL_LIB_PREFIX):]
sys.path.insert(0, os.path.join(cql_zip, 'cassandra-driver-' + ver))
third_parties = ('futures-', 'six-')
for lib in third_parties:
lib_zip = find_zip(lib)
if lib_zip:
sys.path.insert(0, lib_zip)
warnings.filterwarnings("ignore", r".*blist.*")
try:
import cassandra
except ImportError, e:
sys.exit("\nPython Cassandra driver not installed, or not on PYTHONPATH.\n"
'You might try "pip install cassandra-driver".\n\n'
'Python: %s\n'
'Module load path: %r\n\n'
'Error: %s\n' % (sys.executable, sys.path, e))
from cassandra.auth import PlainTextAuthProvider
from cassandra.cluster import Cluster
from cassandra.marshal import int64_unpack
from cassandra.metadata import (ColumnMetadata, KeyspaceMetadata,
TableMetadata, protect_name, protect_names)
from cassandra.policies import WhiteListRoundRobinPolicy
from cassandra.query import SimpleStatement, ordered_dict_factory, TraceUnavailable
from cassandra.util import datetime_from_timestamp
# cqlsh should run correctly when run out of a Cassandra source tree,
# out of an unpacked Cassandra tarball, and after a proper package install.
cqlshlibdir = os.path.join(CASSANDRA_PATH, 'pylib')
if os.path.isdir(cqlshlibdir):
sys.path.insert(0, cqlshlibdir)
from cqlshlib import cql3handling, cqlhandling, pylexotron, sslhandling
from cqlshlib.copyutil import ExportTask, ImportTask
from cqlshlib.displaying import (ANSI_RESET, BLUE, COLUMN_NAME_COLORS, CYAN,
                                 RED, WHITE, FormattedValue, colorme)
from cqlshlib.formatting import (DEFAULT_DATE_FORMAT, DEFAULT_NANOTIME_FORMAT,
DEFAULT_TIMESTAMP_FORMAT, CqlType, DateTimeFormat,
format_by_type, formatter_for)
from cqlshlib.tracing import print_trace, print_trace_session
from cqlshlib.util import get_file_encoding_bomsize, trim_if_present
DEFAULT_HOST = '127.0.0.1'
DEFAULT_PORT = 9042
DEFAULT_SSL = False
DEFAULT_PROTOCOL_VERSION = 4
DEFAULT_CONNECT_TIMEOUT_SECONDS = 5
DEFAULT_REQUEST_TIMEOUT_SECONDS = 10
DEFAULT_FLOAT_PRECISION = 5
DEFAULT_DOUBLE_PRECISION = 5
DEFAULT_MAX_TRACE_WAIT = 10
if readline is not None and readline.__doc__ is not None and 'libedit' in readline.__doc__:
DEFAULT_COMPLETEKEY = '\t'
else:
DEFAULT_COMPLETEKEY = 'tab'
cqldocs = None
cqlruleset = None
epilog = """Connects to %(DEFAULT_HOST)s:%(DEFAULT_PORT)d by default. These
defaults can be changed by setting $CQLSH_HOST and/or $CQLSH_PORT. When a
host (and optional port number) are given on the command line, they take
precedence over any defaults.""" % globals()
parser = optparse.OptionParser(description=description, epilog=epilog,
usage="Usage: %prog [options] [host [port]]",
version='cqlsh ' + version)
parser.add_option("-C", "--color", action='store_true', dest='color',
help='Always use color output')
parser.add_option("--no-color", action='store_false', dest='color',
help='Never use color output')
parser.add_option("--browser", dest='browser', help="""The browser to use to display CQL help, where BROWSER can be:
- one of the supported browsers in https://docs.python.org/2/library/webbrowser.html.
- browser path followed by %s, example: /usr/bin/google-chrome-stable %s""")
parser.add_option('--ssl', action='store_true', help='Use SSL', default=False)
parser.add_option("-u", "--username", |
dchaplinsky/declarations.com.ua | declarations_site/cms_pages/models.py | Python | mit | 7,901 | 0.001194 | # coding: utf-8
from django.db import models
from django.utils.translation import get_language
from modelcluster.fields import ParentalKey
from wagtail.core.fields import RichTextField
from wagtail.core.models import Page, Orderable
from wagtail.images.edit_handlers import ImageChooserPanel
from wagtail.admin.edit_handlers import InlinePanel, FieldPanel, PageChooserPanel
from catalog.models import Region, Office
from catalog.utils import TranslatedField, orig_translate_url
class StaticPage(Page):
    """Wagtail page with Ukrainian (default) and English variants of both
    the title and the rich-text body."""
    # English title; the inherited ``title`` field holds the Ukrainian one.
    title_en = models.CharField(
        verbose_name="[EN] Заголовок",
        max_length=255
    )
    body = RichTextField(verbose_name="[UA] Текст сторінки")
    body_en = RichTextField(verbose_name="[EN] Текст сторінки")
    template = "cms_pages/static_page.jinja"
    # Template used for AJAX/partial rendering -- presumably without the
    # site chrome ("headless"); confirm against the template itself.
    ajax_template = "cms_pages/headless_static_page.jinja"
    class Meta:
        verbose_name = "Статична сторінка"
# Admin edit-form layout: both language variants of title and body.
StaticPage.content_panels = [
    FieldPanel('title', classname="full title"),
    FieldPanel('title_en', classname="full title"),
    FieldPanel('body', classname="full"),
    FieldPanel('body_en', classname="full"),
]
class RawHTMLPage(Page):
    """Like StaticPage, but the body is raw HTML (plain TextField) rather
    than a rich-text field -- no editor-side sanitisation."""
    # English title; the inherited ``title`` field holds the Ukrainian one.
    title_en = models.CharField(
        verbose_name="[EN] Заголовок",
        max_length=255
    )
    body = models.TextField(verbose_name="[UA] Текст сторінки")
    body_en = models.TextField(verbose_name="[EN] Текст сторінки")
    template = "cms_pages/static_page.jinja"
    ajax_template = "cms_pages/headless_static_page.jinja"
    class Meta:
        verbose_name = "Raw-HTML сторінка"
# Admin edit-form layout: both language variants of title and body.
RawHTMLPage.content_panels = [
    FieldPanel('title', classname="full title"),
    FieldPanel('title_en', classname="full title"),
    FieldPanel('body', classname="full"),
    FieldPanel('body_en', classname="full"),
]
class NewsPage(Page):
    """A single news article with optional lead paragraph and image."""
    lead = RichTextField(verbose_name="Лід", blank=True)
    body = RichTextField(verbose_name="Текст новини")
    # Publication timestamp, set manually in the admin (not auto_now_add).
    date_added = models.DateTimeField(verbose_name="Опубліковано")
    # Pin the article (keep it on top of listings).
    sticky = models.BooleanField(verbose_name="Закріпити новину",
                                 default=False)
    # Highlight the article as important.
    important = models.BooleanField(verbose_name="Важлива новина",
                                    default=False)
    image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='+'
    )
    template = "cms_pages/news_page.jinja"
    class Meta:
        verbose_name = "Новина"
# Admin edit-form layout.
NewsPage.content_panels = [
    FieldPanel('title', classname="full title"),
    FieldPanel('lead', classname="full"),
    FieldPanel('body', classname="full"),
    FieldPanel('date_added'),
    FieldPanel('sticky'),
    FieldPanel('important'),
    ImageChooserPanel('image'),
]
class NewsIndexPage(Page):
    """Listing page for news articles."""
    template = "cms_pages/news_index_page.jinja"

    def get_context(self, request, *args, **kwargs):
        """Extend the default page context with every live news article,
        newest first, under the ``latest_news`` key."""
        context = super().get_context(request, *args, **kwargs)
        context["latest_news"] = NewsPage.objects.live().order_by("-date_added")
        return context

    class Meta:
        verbose_name = "Сторінка новин"
class LinkFields(models.Model):
    """Abstract mixin for a menu link: a bilingual caption plus either an
    internal Wagtail page or an external URL."""
    caption = models.CharField(max_length=255, blank=True)
    caption_en = models.CharField(max_length=255, blank=True)
    # Resolves to caption/caption_en depending on the active language
    # (presumably -- TranslatedField is defined in catalog.utils).
    translated_caption = TranslatedField(
        'caption',
        'caption_en',
    )
    link_external = models.CharField("External link", blank=True, max_length=255)
    link_page = models.ForeignKey(
        'wagtailcore.Page',
        null=True,
        blank=True,
        related_name='+',
        on_delete=models.CASCADE
    )
    # Extra CSS class(es) applied when the link is rendered.
    extra_class = models.CharField(max_length=255, blank=True)
    @property
    def link(self):
        # Internal pages win over external URLs when both are set.
        if self.link_page:
            return self.link_page.url
        else:
            if "declarations.com.ua" not in self.link_external:
                return self.link_external
            # Links back into this very site get rewritten for the active
            # language (from the default "uk").
            language = get_language()
            return orig_translate_url(self.link_external, language, "uk")
    panels = [
        FieldPanel('caption'),
        FieldPanel('caption_en'),
        FieldPanel('link_external'),
        FieldPanel('extra_class'),
        PageChooserPanel('link_page')
    ]
    class Meta:
        abstract = True
class HomePageTopMenuLink(Orderable, LinkFields):
    # Orderable link rendered in the home page's top menu.
    page = ParentalKey('cms_pages.HomePage', related_name='top_menu_links')
class HomePageBottomMenuLink(Orderable, LinkFields):
    # Orderable link rendered in the home page's bottom menu.
    page = ParentalKey('cms_pages.HomePage', related_name='bottom_menu_links')
class HomePage(Page):
    """Site home page: intro text, news teaser count, optional promo
    ("branding") banner and an (unused) YouTube embed."""
    body = RichTextField(verbose_name="Текст сторінки")
    # How many news teasers the home page shows.
    news_count = models.IntegerField(
        default=6,
        verbose_name="Кількість новин на сторінку")
    youtube_embed_link = models.CharField("Embed для youtube", max_length=255, blank=True)
    youtube_embed_title = models.CharField("Заголовок youtube відео", max_length=255, blank=True)
    # Promo banner: click-through URL, campaign id and banner image.
    branding_link = models.CharField("Посилання для переходу по кліку на брендінг", max_length=255, blank=True)
    branding_slug = models.CharField("Ідентифікатор рекламної кампанії", max_length=255, blank=True)
    branding_image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='+',
        verbose_name="Зображення брендінгу"
    )
    template = "cms_pages/home.jinja"
    class Meta:
        verbose_name = "Головна сторінка"
# Admin edit-form layout for the home page.
HomePage.content_panels = [
    FieldPanel('title', classname="full title"),
    # TODO: remove once we'll finally give up with idea of embeded videos
    # FieldPanel('youtube_embed_link', classname="title"),
    # FieldPanel('youtube_embed_title', classname="title"),
    FieldPanel('branding_link', classname="title"),
    FieldPanel('branding_slug', classname="title"),
    ImageChooserPanel('branding_image'),
    FieldPanel('body', classname="full"),
    FieldPanel('news_count'),
    # Fixed corrupted relation name: must match HomePageTopMenuLink's
    # related_name ('top_menu_links').
    InlinePanel('top_menu_links', label="Меню зверху"),
    InlinePanel('bottom_menu_links', label="Меню знизу"),
]
class MetaData(models.Model):
    """SEO title/description overrides for a (region, office) pair.

    Either side may be NULL; at most one row may exist per combination.
    """
    region = models.ForeignKey(Region, blank=True, null=True, on_delete=models.SET_NULL)
    office = models.ForeignKey(Office, blank=True, null=True, on_delete=models.SET_NULL)
    title = models.CharField(max_length=255, blank=True)
    description = models.TextField(blank=True)

    class Meta:
        unique_together = ("region", "office")

    def __unicode__(self):
        # "<region>: <office>", omitting whichever side is not set.
        chunks = []
        if self.region is not None:
            chunks.append(self.region.region_name)
        if self.office is not None:
            chunks.append(self.office.name)
        return ": ".join(chunks)

    def __str__(self):
        return self.__unicode__()
class PersonMeta(models.Model):
    """SEO title/description overrides for a person's declaration page,
    keyed by (fullname, declaration year)."""
    fullname = models.CharField("Повне ім'я", max_length=150)
    year = models.IntegerField(
        "Рік декларації", blank=True, null=True, choices=(
            (2011, 2011),
            (2012, 2012),
            (2013, 2013),
            (2014, 2014),
            (2015, 2015),))
    title = models.CharField(max_length=255, blank=True)
    description = models.TextField(blank=True)

    class Meta:
        unique_together = ("year", "fullname")

    def __unicode__(self):
        # "<fullname>: <year>", omitting whichever part is missing.
        name_part = self.fullname if self.fullname is not None else None
        year_part = str(self.year) if self.year is not None else None
        return ": ".join(p for p in (name_part, year_part) if p is not None)

    def __str__(self):
        return self.__unicode__()
|
killerbat00/pyclock | vars.py | Python | mit | 809 | 0.003708 | clock = [
" __ __________",
"/ \\____/ __ / /\\______",
"-==- -/ /=/`/ / / .\\- -/\\",
"=- -=-/ _` ` / /## .\\=-/\\\\",
"-- =-/ /_/`/ / / ### /_///\\",
" -= / /_/`/ / /## ##/ /\\/\\",
"___/ /_/`/ / / ### /=/// \\",
"==/ /_/`/ / /##'##/ /\\/",
"_/ _`_`_ / / ###'/-/// ",
"/_______/_/## ##/ /\\/ ",
"\\ ____ \\X\\### /-/// ",
"-\\ \\\\\\\\\\\\X\##///\\/",
"=-\\_______\\X\\////"]
# ASCII-art digit strips consumed by the clock renderer.
# NOTE(review): this fragment appears to contain text-extraction artifacts:
# the stray " | " in the "'/ ' | ," entry below is not valid Python and has
# probably mangled a plain string literal -- confirm against the upstream
# pyclock sources before relying on the exact glyph data. Several entries
# also use unrecognised escape sequences such as '\/' (CPython keeps the
# backslash but warns on recent versions).
numbers = [
    '/\\',
    '\\ \\',
    '\/',
    ' | \\',
    ' \\',
    ' ',
    '/\\',
    ' / ',
    '\\/',
    '/\\',
    ' /\\',
    ' /',
    ' \\',
    '\\/\\',
    ' ',
    '/ ' | ,
    '\\/\\',
    ' /',
    '/ ',
    '\\/\\',
    '\\/',
    '/\\',
    ' \\',
    ' ',
    '/\\',
    '\\/\\',
    ' \\/',
    '/\\',
    '\\/\\',
    ' /'
    ]
|
forman/dectree | dectree/transpiler.py | Python | mit | 7,950 | 0.001887 | import os
import os.path
from collections import OrderedDict
from typing import List, Dict, Any, Tuple, Union
# noinspection PyPackageRequirements
import yaml # from pyyaml
import dectree.propfuncs as propfuncs
from .codegen import gen_code
from .types import TypeDefs
def transpile(src_file, out_file=None, **options: Dict[str, Any]) -> str:
    """
    Generate a decision tree function by transpiling *src_file* to *out_file* using the given *options*.
    Return the path to the generated file, if any, otherwise return ``None``.
    :param src_file: A file descriptor or a path-like object to the decision tree definition source file (YAML format)
    :param out_file: A file descriptor or a path-like object to the module output file (Python)
    :param options: options, refer to `dectree --help`
    :return: A path to the written module output file (Python) or None if *out_file* is a file descriptor
    :raises ValueError: if the definition is empty or a required section is missing/empty
    """
    try:
        fd = open(src_file)
        src_path = src_file
    except TypeError:
        # src_file is already an open file object rather than a path
        fd = src_file
        src_path = None
    try:
        # Security fix: safe_load() instead of load() -- the definition file
        # only needs plain YAML and must not be able to instantiate
        # arbitrary Python objects via YAML tags.
        src_code = yaml.safe_load(fd)
    finally:
        if src_path:
            fd.close()
    if not src_code:
        raise ValueError('Empty decision tree definition')
    sections = ('types', 'inputs', 'outputs', 'rules')
    if not all([section in src_code for section in sections]):
        raise ValueError('Invalid decision tree definition: missing section {} or all of them'.format(sections))
    for section in sections:
        if not src_code[section]:
            raise ValueError("Invalid decision tree definition: section '{}' is empty".format(section))
    # Normalise the four sections into ordered structures and generate code.
    types = _normalize_types(_to_omap(src_code['types'], recursive=True))
    input_defs = _to_omap(src_code['inputs'])
    output_defs = _to_omap(src_code['outputs'])
    rules = _normalize_rules(src_code['rules'])
    # Options given in the file are overridden by keyword options.
    src_options = dict(src_code.get('options') or {})
    src_options.update(options or {})
    py_code = gen_code(types, input_defs, output_defs, rules, **src_options)
    if out_file:
        try:
            fd = open(out_file, 'w')
            out_path = out_file
        except TypeError:
            # out_file is already an open file object; caller owns it.
            fd = out_file
            out_path = None
    else:
        assert src_path
        # Default output: same directory/basename as the source, ".py".
        dir_name = os.path.dirname(src_path)
        base_name = os.path.splitext(os.path.basename(src_path))[0]
        out_path = os.path.join(dir_name, base_name + ".py")
        fd = open(out_path, mode='w')
    try:
        fd.write(py_code)
    finally:
        # Robustness fix: close the output file even when the write fails
        # (previously it leaked on error). Caller-provided descriptors
        # (out_path is None) are left open, as before.
        if out_path is not None:
            fd.close()
    return out_path
def _normalize_types(types: Dict[str, Dict[str, str]]) -> TypeDefs:
    """Evaluate every property definition of every type.

    Each property value is a :mod:`dectree.propfuncs` expression (given as
    text in the definition file); evaluating it must yield a
    ``(func_params, func_body)`` pair. The stored value per property is the
    triple ``(source_text, func_params, func_body)``.

    NOTE(review): ``eval`` executes text from the definition file with
    propfuncs' module namespace as globals -- acceptable only because
    decision-tree sources are trusted input.
    """
    type_defs = OrderedDict()
    for type_name, type_properties in types.items():
        type_def = {}
        type_defs[type_name] = type_def
        for prop_name, prop_value in type_properties.items():
            try:
                # Evaluate against propfuncs' namespace; empty locals.
                prop_result = eval(prop_value, vars(propfuncs), {})
            except Exception:
                raise ValueError('Illegal value for property "{}" of type "{}": {}'.format(prop_name,
                                                                                           type_name,
                                                                                           prop_value))
            func_params, func_body = prop_result
            type_def[prop_name] = prop_value, func_params, func_body
    return type_defs
def _normalize_rules(raw_rules):
    """Normalise every raw rule into the parsed tuple representation."""
    return list(map(_normalize_rule, raw_rules))
def _normalize_rule(raw_rule: Union[str, List]):
    """Normalise one rule: a plain multi-line string is first converted to
    nested lists (via YAML), then parsed into ('if'/'elif'/'else'/'=')
    tuples by :func:`_parse_raw_rule`."""
    if isinstance(raw_rule, str):
        raw_rule = _load_raw_rule(raw_rule)
    return _parse_raw_rule(raw_rule)
def _parse_raw_rule(raw_rule: List[Union[Dict, List]]) -> List[Union[Tuple, List]]:
# print(raw_rule)
n = len(raw_rule)
parsed_rule = []
for i in range(n):
item = raw_rule[i]
stmt_part, stmt_body, assi | gnment = None, None, None
if isinstance(item, dict):
stmt_part, stmt_body = dict(item).popitem()
else:
assignment = item
if stmt_part:
stmt_tokens = stmt_part.split(None, 1)
if len(stmt_tokens) == 0:
raise ValueError('illegal rule part: {}'.format(stmt_part))
keyword = stmt_tokens[0]
if keyword == 'if':
if i != 0:
raise ValueError('"if" must be first in rule: {}'.format(stmt_part))
if len(stmt_tokens) != 2 or not stmt_tokens[1]:
raise ValueError('illegal rule part: {}'.format(stmt_part))
condition = stmt_tokens[1]
elif keyword == 'else':
if len(stmt_tokens) == 1:
if i < n - 2:
raise ValueError('"else" must be last in rule: {}'.format(stmt_part))
condition = None
else:
elif_stmt_tokens = stmt_tokens[1].split(None, 1)
if elif_stmt_tokens[0] == 'if':
keyword, condition = 'elif', elif_stmt_tokens[1]
else:
raise ValueError('illegal rule part: {}'.format(stmt_part))
elif keyword == 'elif':
if len(stmt_tokens) != 2 or not stmt_tokens[1]:
raise ValueError('illegal rule part: {}'.format(stmt_part))
condition = stmt_tokens[1]
else:
raise ValueError('illegal rule part: {}'.format(stmt_part))
if condition:
parsed_rule.append((keyword, condition, _parse_raw_rule(stmt_body)))
else:
parsed_rule.append((keyword, _parse_raw_rule(stmt_body)))
elif assignment:
# noinspection PyUnresolvedReferences
assignment_parts = assignment.split(None, 2)
if len(assignment_parts) != 3 \
or not assignment_parts[0].isidentifier() \
or assignment_parts[1] != '=' \
or not assignment_parts[2]:
raise ValueError('illegal rule part: {}'.format(stmt_part))
parsed_rule.append(('=', assignment_parts[0], assignment_parts[2]))
else:
raise ValueError('illegal rule part: {}'.format(stmt_part))
return parsed_rule
def _load_raw_rule(rule_code: str):
    """Parse a rule given as an indented multi-line string.

    Every non-comment line is turned into a YAML list item ("- ...") while
    keeping its indentation, so YAML reconstructs the nesting; comment
    lines are kept verbatim (YAML drops them).
    """
    raw_lines = rule_code.split('\n')
    yml_lines = []
    for raw_line in raw_lines:
        i = _count_leading_spaces(raw_line)
        indent = raw_line[0:i]
        content = raw_line[i:]
        if content:
            if content[0] != '#':
                yml_lines.append(indent + '- ' + content)
            else:
                yml_lines.append(indent + content)
    # Security fix: safe_load() -- rule text only needs plain YAML and must
    # not be able to construct arbitrary Python objects.
    return yaml.safe_load('\n'.join(yml_lines))
def _count_leading_spaces(s: str):
i = 0
for i in range(len(s)):
if not s[i].isspace():
return i
return i
def _to_omap(list_or_dict, recursive=False):
    """Convert YAML "ordered mapping" style data -- a list whose items are
    all single-entry dicts -- into an OrderedDict, preserving item order.

    With ``recursive=True`` nested lists and dicts are converted too;
    otherwise only the top level is. Anything not in omap form is returned
    unchanged (or as a recursive copy when ``recursive=True``).
    """
    if not list_or_dict:
        return list_or_dict
    if _is_list_of_one_key_dicts(list_or_dict):
        # omap form: flatten [{k1: v1}, {k2: v2}, ...] into one ordered dict.
        dict_copy = OrderedDict()
        for item in list_or_dict:
            key, item = dict(item).popitem()
            dict_copy[key] = _to_omap(item) if recursive else item
        return dict_copy
    if recursive:
        if isinstance(list_or_dict, list):
            list_copy = []
            for item in list_or_dict:
                list_copy.append(_to_omap(item, recursive=True))
            return list_copy
        if isinstance(list_or_dict, dict):
            # Plain dict: keep iteration order, convert values recursively.
            dict_copy = OrderedDict()
            for key, item in list_or_dict.items():
                dict_copy[key] = _to_omap(item, recursive=True)
            return dict_copy
    return list_or_dict
def _is_list_of_one_key_dicts(l):
try:
for item in l:
# noinspection PyUnusedLocal
(k, v), = item.items()
except (AttributeError, TypeError):
return False
return True
|
davidbgk/udata | udata/search/result.py | Python | agpl-3.0 | 3,239 | 0 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import copy
import logging
from bson.objectid import ObjectId
from flask import request
from werkzeug.urls import Href
from elasticsearch_dsl.result import Response
from udata.utils import Paginable
log = logging.getLogger(__name__)
class SearchResult(Paginable, Response):
    '''
    An ElasticSearch result wrapper for easy property access.

    Wraps an elasticsearch-dsl response, lazily resolving facets and the
    matching database objects, and exposes the pagination attributes
    expected by Paginable. (Fix: repaired two garbled source lines in
    get_aggregation's docstring and label_func's body.)
    '''
    def __init__(self, query, result, *args, **kwargs):
        super(SearchResult, self).__init__(result, *args, **kwargs)
        self.query = query
        # Lazily populated caches (see get_objects() and facets).
        self._objects = None
        self._facets = None

    @property
    def query_string(self):
        return self.query._query

    @property
    def facets(self):
        # Resolve facet values once, pairing each declared facet with its
        # aggregation result and the currently selected filter values.
        if self._facets is None:
            self._facets = {}
            for name, facet in self.query.facets.items():
                self._facets[name] = facet.get_values(
                    self.get_aggregation(name),
                    self.query.filter_values.get(name, ())
                )
        return self._facets

    @property
    def total(self):
        # 0 when the response carries no hits section at all.
        try:
            return self.hits.total
        except (KeyError, AttributeError):
            return 0

    @property
    def max_score(self):
        try:
            return self.hits.max_score
        except (KeyError, AttributeError):
            return 0

    @property
    def page(self):
        return (self.query.page or 1) if self.pages else 1

    @property
    def page_size(self):
        return self.query.page_size

    @property
    def class_name(self):
        return self.query.adapter.model.__name__

    def get_ids(self):
        try:
            return [hit['_id'] for hit in self.hits.hits]
        except KeyError:
            return []

    def get_objects(self):
        # Fetch the matching DB documents in one bulk query, keeping the
        # search ranking order.
        if not self._objects:
            ids = [ObjectId(id) for id in self.get_ids()]
            objects = self.query.model.objects.in_bulk(ids)
            self._objects = [objects.get(id) for id in ids]
            # Filter out DBref ie. indexed object not found in DB
            self._objects = [o for o in self._objects
                             if isinstance(o, self.query.model)]
        return self._objects

    @property
    def objects(self):
        return self.get_objects()

    def __iter__(self):
        for obj in self.get_objects():
            yield obj

    def __len__(self):
        return len(self.hits.hits)

    def __getitem__(self, index):
        return self.get_objects()[index]

    def get_aggregation(self, name):
        '''
        Fetch an aggregation result given its name
        As there is no way at this point know the aggregation type
        (ie. bucket, pipeline or metric)
        we guess it from the response attributes.
        Only bucket and metric types are handled
        '''
        agg = self.aggregations[name]
        if 'buckets' in agg:
            return agg['buckets']
        else:
            return agg

    def label_func(self, name):
        # Only facets declared on the query can labelize values.
        if name not in self.query.facets:
            return None
        return self.query.facets[name].labelize

    def labelize(self, name, value):
        func = self.label_func(name)
        return func(value) if func else value
|
openstack/python-openstacksdk | openstack/image/_base_proxy.py | Python | apache-2.0 | 9,912 | 0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import os
import six
from openstack import exceptions
from openstack import proxy
class BaseImageProxy(six.with_metaclass(abc.ABCMeta, proxy.Proxy)):
    """Abstract image-service proxy: shared upload/dedup logic; subclasses
    implement the version-specific create/upload calls."""
    # HTTP status codes worth retrying automatically.
    retriable_status_codes = [503]
    # Image property keys recording checksums and the swift object used for
    # upload -- used later to decide whether a re-upload is needed.
    _IMAGE_MD5_KEY = 'owner_specified.openstack.md5'
    _IMAGE_SHA256_KEY = 'owner_specified.openstack.sha256'
    _IMAGE_OBJECT_KEY = 'owner_specified.openstack.object'
    # NOTE(shade) shade keys were owner_specified.shade.md5 - we need to add
    #             those to freshness checks so that a shade->sdk transition
    #             doesn't result in a re-upload
    _SHADE_IMAGE_MD5_KEY = 'owner_specified.shade.md5'
    _SHADE_IMAGE_SHA256_KEY = 'owner_specified.shade.sha256'
    _SHADE_IMAGE_OBJECT_KEY = 'owner_specified.shade.object'
def create_image(
self, name, filename=None,
container=None,
md5=None, sha256=None,
disk_format=None, container_format=None,
disable_vendor_agent=True,
allow_duplicates=False, meta=None,
wait=False, timeout=3600,
data=None, validate_checksum=True,
**kwargs):
"""Upload an image.
:param str name: Name of the image to create. If it is a pathname
of an image, the name will be constructed from the extensionless
basename of the path.
:param str filename: The path to the file to upload, if needed.
(optional, defaults to None)
:param data: Image data (string or file-like object). It is mutually
exclusive with filename
:param str container: Name of the container in swift where images
should be uploaded for import if the cloud requires such a thing.
(optional, defaults to 'images')
:param str md5: md5 sum of the image file. If not given, an md5 will
be calculated.
:param str sha256: sha256 sum of the image file. If not given, an md5
will be calculated.
:param str disk_format: The disk format the image is in. (optional,
defaults to the os-client-config config value for this cloud)
:param str container_format: The container format the image is in.
(optional, defaults to the os-client-config config value for this
cloud)
:param bool disable_vendor_agent: Whether or not to append metadata
flags to the image to inform the cloud in question to not expect a
vendor agent to be runing. (optional, defaults to True)
:param allow_duplicates: If true, skips checks that enforce unique
image name. (optional, defaults to False)
:param meta: A dict of key/value pairs to use for metadata that
bypasses automatic type conversion.
:param bool wait: If true, waits for image to be created. Defaults to
true - however, be aware that one of the upload methods is always
synchronous.
:param timeout: Seconds to wait for image creation. None is forever.
:param bool validate_checksum: If true and cloud returns checksum,
compares return value with the one calculated or passed into this
call. If value does not match - raises exception. Default is
'false'
Additional kwargs will be passed to the image creation as additional
metadata for the image and will have all values converted to string
except for min_disk, min_ram, size and virtual_size which will be
converted to int.
If you are sure you have all of your data types correct or have an
advanced need to be explicit, use meta. If you are just a normal
consumer, using kwargs is likely the right choice.
If a value is in meta and kwargs, meta wins.
:returns: A ``munch.Munch`` of the Image object
:raises: SDKException if there are problems uploading
"""
if container is | None:
container = self._connection._OBJECT_AUTOCREATE_CONTAINER
if not meta:
meta = {}
if not disk_format:
disk_format = self._connection.c | onfig.config['image_format']
if not container_format:
# https://docs.openstack.org/image-guide/image-formats.html
container_format = 'bare'
if data and filename:
raise exceptions.SDKException(
'Passing filename and data simultaneously is not supported')
# If there is no filename, see if name is actually the filename
if not filename and not data:
name, filename = self._get_name_and_filename(
name, self._connection.config.config['image_format'])
if validate_checksum and data and not isinstance(data, bytes):
raise exceptions.SDKException(
'Validating checksum is not possible when data is not a '
'direct binary object')
if not (md5 or sha256) and validate_checksum:
if filename:
(md5, sha256) = self._connection._get_file_hashes(filename)
elif data and isinstance(data, bytes):
(md5, sha256) = self._connection._calculate_data_hashes(data)
if allow_duplicates:
current_image = None
else:
current_image = self.find_image(name)
if current_image:
props = current_image.get('properties', {})
md5_key = props.get(
self._IMAGE_MD5_KEY,
props.get(self._SHADE_IMAGE_MD5_KEY, ''))
sha256_key = props.get(
self._IMAGE_SHA256_KEY,
props.get(self._SHADE_IMAGE_SHA256_KEY, ''))
up_to_date = self._connection._hashes_up_to_date(
md5=md5, sha256=sha256,
md5_key=md5_key, sha256_key=sha256_key)
if up_to_date:
self.log.debug(
"image %(name)s exists and is up to date",
{'name': name})
return current_image
else:
self.log.debug(
"image %(name)s exists, but contains different "
"checksums. Updating.",
{'name': name})
if disable_vendor_agent:
kwargs.update(
self._connection.config.config['disable_vendor_agent'])
# If a user used the v1 calling format, they will have
# passed a dict called properties along
properties = kwargs.pop('properties', {})
properties[self._IMAGE_MD5_KEY] = md5 or ''
properties[self._IMAGE_SHA256_KEY] = sha256 or ''
properties[self._IMAGE_OBJECT_KEY] = '/'.join(
[container, name])
kwargs.update(properties)
image_kwargs = dict(properties=kwargs)
if disk_format:
image_kwargs['disk_format'] = disk_format
if container_format:
image_kwargs['container_format'] = container_format
if filename or data:
image = self._upload_image(
name, filename=filename, data=data, meta=meta,
wait=wait, timeout=timeout,
validate_checksum=validate_checksum,
**image_kwargs)
else:
image = self._create_image(**image_kwargs)
self._connection._get_cache(None).invalidate()
return image
    @abc.abstractmethod
    def _create_image(self, name, **image_kwargs):
        # Subclasses create the image record via their API version's call;
        # no payload upload happens here.
        pass
@abc.abstractmethod
def _upload_image(self, name, filename, data, me |
zuck/prometeo-erp | core/taxonomy/migrations/0001_initial.py | Python | lgpl-3.0 | 8,429 | 0.008067 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        # Auto-generated South schema migration: creates the Result, Tag,
        # Category and Vote tables and Vote's uniqueness constraint.
        # Adding model 'Result'
        db.create_table('taxonomy_result', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
            ('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
        ))
        db.send_create_signal('taxonomy', ['Result'])
        # Adding model 'Tag'
        db.create_table('taxonomy_tag', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('title', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)),
            ('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50, db_index=True)),
        ))
        db.send_create_signal('taxonomy', ['Tag'])
        # Adding model 'Category'
        db.create_table('taxonomy_category', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('parent', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='children', null=True, to=orm['taxonomy.Category'])),
            ('title', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)),
            ('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50, db_index=True)),
        ))
        db.send_create_signal('taxonomy', ['Category'])
        # Adding model 'Vote'
        db.create_table('taxonomy_vote', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
            ('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
            ('owner', self.gf('django.db.models.fields.related.ForeignKey')(related_name='poll_votes', to=orm['auth.User'])),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
        ))
        db.send_create_signal('taxonomy', ['Vote'])
        # Adding unique constraint on 'Vote', fields ['owner', 'content_type', 'object_id']
        db.create_unique('taxonomy_vote', ['owner_id', 'content_type_id', 'object_id'])
    def backwards(self, orm):
        # Reverse of forwards(): drop the constraint first, then the tables.
        # Removing unique constraint on 'Vote', fields ['owner', 'content_type', 'object_id']
        db.delete_unique('taxonomy_vote', ['owner_id', 'content_type_id', 'object_id'])
        # Deleting model 'Result'
        db.delete_table('taxonomy_result')
        # Deleting model 'Tag'
        db.delete_table('taxonomy_tag')
        # Deleting model 'Category'
        db.delete_table('taxonomy_category')
        # Deleting model 'Vote'
        db.delete_table('taxonomy_vote')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique | _together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': ' | True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'taxonomy.category': {
'Meta': {'ordering': "('title',)", 'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['taxonomy.Category']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'taxonomy.result': {
'Meta': {'object_name': 'Result'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'taxonomy.tag': {
'Meta': {'ordering': "('title',)", 'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'taxonomy.vote': {
'Meta': {'unique_together': "(('owner', 'content_type', 'object_id'),)", 'object_name': 'Vote'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'obj |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.