| column | dtype | stats |
|---|---|---|
| repo_name | string | lengths 5–100 |
| path | string | lengths 4–231 |
| language | string | 1 class (Python) |
| license | string | 15 classes |
| size | int64 | 6–947k |
| score | float64 | 0–0.34 |
| prefix | string | lengths 0–8.16k |
| middle | string | lengths 3–512 |
| suffix | string | lengths 0–8.17k |

Each record below is listed as `| repo_name | path | language | license | size | score |`, followed by the sample's prefix, middle, and suffix text.
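The prefix, middle, and suffix columns carve each source file into three spans around a masked region, so a full sample can be rebuilt by plain concatenation. A minimal sketch of reading one record, assuming the rows are exported as JSON Lines (the `fim_samples.jsonl` file name is hypothetical; the column names come from the schema above):

```python
# Reassemble one fill-in-the-middle sample from a hypothetical JSONL export.
import json

with open("fim_samples.jsonl") as f:
    row = json.loads(f.readline())  # one record per line, keyed by the columns above

# The original source slice is the three spans concatenated in order.
source = row["prefix"] + row["middle"] + row["suffix"]
print(row["repo_name"], row["path"], row["license"], row["size"], row["score"])
```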
| varogami/c4rdz | c4rdz-gui.py | Python | gpl-3.0 | 9,876 | 0.006683 |
#!/usr/bin/env python
import random, os.path
#import basic pygame modules
import pygame
from pygame.locals import *
#see if we can load more than standard BMP
if not pygame.image.get_extended():
raise SystemExit("Sorry, extended image module required")
#game constants
MAX_SHOTS = 2 #most player bullets onscreen
ALIEN_ODDS = 22 #chances a new alien appears
BOMB_ODDS = 60 #chances a new bomb will drop
ALIEN_RELOAD = 12 #frames between new aliens
SCREENRECT = Rect(0, 0, 640, 480)
SCORE = 0
main_dir = os.path.split(os.path.abspath(__file__))[0]
def load_image(file):
"loads an image, prepares it for play"
file = os.path.join(main_dir, 'data', file)
try:
surface = pygame.image.load(file)
except pygame.error:
raise SystemExit('Could not load image "%s" %s'%(file, pygame.get_error()))
return surface.convert()
def load_images(*files):
imgs = []
for file in files:
imgs.append(load_image(file))
return imgs
class dummysound:
def play(self): pass
def load_sound(file):
if not pygame.mixer: return dummysound()
file = os.path.join(main_dir, 'data', file)
try:
sound = pygame.mixer.Sound(file)
return sound
except pygame.error:
print ('Warning, unable to load, %s' % file)
return dummysound()
# each type of game object gets an init and an
# update function. the update function is called
# once per frame, and it is when each object should
# change its current position and state. the Player
# object actually gets a "move" function instead of
# update, since it is passed extra information about
# the keyboard
class Player(pygame.sprite.Sprite):
speed = 10
bounce = 24
gun_offset = -11
images = []
def __init__(self):
pygame.sprite.Sprite.__init__(self, self.containers)
self.image = self.images[0]
self.rect = self.image.get_rect(midbottom=SCREENRECT.midbottom)
self.reloading = 0
self.origtop = self.rect.top
self.facing = -1
def move(self, direction):
if direction: self.facing = direction
self.rect.move_ip(direction*self.speed, 0)
self.rect = self.rect.clamp(SCREENRECT)
if direction < 0:
self.image = self.images[0]
elif direction > 0:
self.image = self.images[1]
self.rect.top = self.origtop - (self.rect.left//self.bounce%2)
def gunpos(self):
pos = self.facing*self.gun_offset + self.rect.centerx
return pos, self.rect.top
class Alien(pygame.sprite.Sprite):
speed = 13
animcycle = 12
images = []
def __init__(self):
pygame.sprite.Sprite.__init__(self, self.containers)
self.image = self.images[0]
self.rect = self.image.get_rect()
self.facing = random.choice((-1,1)) * Alien.speed
self.frame = 0
if self.facing < 0:
self.rect.right = SCREENRECT.right
def update(self):
self.rect.move_ip(self.facing, 0)
if not SCREENRECT.contains(self.rect):
self.facing = -self.facing;
self.rect.top = self.rect.bottom + 1
self.rect = self.rect.clamp(SCREENRECT)
self.frame = self.frame + 1
self.image = self.images[self.frame//self.animcycle%3]
class Explosion(pygame.sprite.Sprite):
defaultlife = 12
animcycle = 3
images = []
def __init__(self, actor):
pygame.sprite.Sprite.__init__(self, self.containers)
self.image = self.images[0]
self.rect = self.image.get_rect(center=actor.rect.center)
self.life = self.defaultlife
def update(self):
self.life = self.life - 1
self.image = self.images[self.life//self.animcycle%2]
if self.life <= 0: self.kill()
class Shot(pygame.sprite.Sprite):
speed = -11
images = []
def __init__(self, pos):
pygame.sprite.Sprite.__init__(self, self.containers)
self.image = self.images[0]
self.rect = self.image.get_rect(midbottom=pos)
def update(self):
self.rect.move_ip(0, self.speed)
if self.rect.top <= 0:
self.kill()
class Bomb(pygame.sprite.Sprite):
speed = 9
images = []
def __init__(self, alien):
pygame.sprite.Sprite.__init__(self, self.containers)
self.image = self.images[0]
self.rect = self.image.get_rect(midbottom=
alien.rect.move(0,5).midbottom)
def update(self):
self.rect.move_ip(0, self.speed)
if self.rect.bottom >= 470:
Explosion(self)
self.kill()
class Score(pygame.sprite.Sprite):
def __init__(self):
pygame.sprite.Sprite.__init__(self)
self.font = pygame.font.Font(None, 20)
self.font.set_italic(1)
self.color = Color('white')
self.lastscore = -1
self.update()
self.rect = self.image.get_rect().move(10, 450)
def update(self):
if SCORE != self.lastscore:
self.lastscore = SCORE
msg = "Score: %d" % SCORE
self.image = self.font.render(msg, 0, self.color)
def main(winstyle = 0):
# Initialize pygame
pygame.init()
if pygame.mixer and not pygame.mixer.get_init():
print ('Warning, no sound')
pygame.mixer = None
# Set the display mode
winstyle = 0 # |FULLSCREEN
bestdepth = pygame.display.mode_ok(SCREENRECT.size, winstyle, 32)
screen = pygame.display.set_mode(SCREENRECT.size, winstyle, bestdepth)
#Load images, assign to sprite classes
#(do this before the classes are used, after screen setup)
img = load_image('hearts.jpg')
Player.images = [img, pygame.transform.flip(img, 1, 0)]
img = load_image('explosion1.gif')
Explosion.images = [img, pygame.transform.flip(img, 1, 1)]
Alien.images = load_images('alien1.gif', 'alien2.gif', 'alien3.gif')
Bomb.images = [load_image('bomb.gif')]
Shot.images = [load_image('shot.gif')]
#decorate the game window
icon = pygame.transform.scale(Alien.images[0], (32, 32))
pygame.display.set_icon(icon)
pygame.display.set_caption('Pygame Aliens')
pygame.mouse.set_visible(0)
#create the background, tile the bgd image
bgdtile = load_image('table.jpg')
background = pygame.Surface(SCREENRECT.size)
for x in range(0, SCREENRECT.width, bgdtile.get_width()):
background.blit(bgdtile, (x, 0))
screen.blit(background, (0,0))
pygame.display.flip()
#load the sound effects
boom_sound = load_sound('boom.wav')
shoot_sound = load_sound('car_door.wav')
if pygame.mixer:
music = os.path.join(main_dir, 'data', 'house_lo.wav')
pygame.mixer.music.load(music)
pygame.mixer.music.play(-1)
# Initialize Game Groups
aliens = pygame.sprite.Group()
shots = pygame.sprite.Group()
bombs = pygame.sprite.Group()
all = pygame.sprite.RenderUpdates()
lastalien = pygame.sprite.GroupSingle()
#assign default groups to each sprite class
Player.containers = all
Alien.containers = aliens, all, lastalien
Shot.containers = shots, all
Bomb.containers = bombs, all
Explosion.containers = all
Score.containers = all
#Create Some Starting Values
alienreload = ALIEN_RELOAD
kills = 0
clock = pygame.time.Clock()
#initialize our starting sprites
global SCORE
player = Player()
Alien() #note, this 'lives' because it goes into a sprite group
if pygame.font:
all.add(Score())
while player.alive():
#get input
for event in pygame.event.get():
if event.type == QUIT or \
(event.type == KEYDOWN and event.key == K_ESCAPE):
return
keystate = pygame.key.get_pressed()
# clear/erase the last drawn sprites
all.clear(screen, background)
| SergeyCherepanov/ansible | ansible/ansible/modules/database/postgresql/postgresql_idx.py | Python | mit | 17,072 | 0.002284 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Andrey Klychkov (@Andersson007) <aaklychkov@mail.ru>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: postgresql_idx
short_description: Create or drop indexes from a PostgreSQL database
description:
- Create or drop indexes from a PostgreSQL database.
- For more information see U(https://www.postgresql.org/docs/current/sql-createindex.html),
U(https://www.postgresql.org/docs/current/sql-dropindex.html).
version_added: '2.8'
options:
idxname:
description:
- Name of the index to create or drop.
type: str
required: true
aliases:
- name
db:
description:
- Name of database to connect to and where the index will be created/dropped.
type: str
aliases:
- login_db
session_role:
description:
- Switch to session_role after connecting.
The specified session_role must be a role that the current login_user is a member of.
- Permissions checking for SQL commands is carried out as though
the session_role were the one that had logged in originally.
type: str
schema:
description:
- Name of a database schema where the index will be created.
type: str
state:
description:
- Index state.
- I(state=present) implies the index will be created if it does not exist.
- I(state=absent) implies the index will be dropped if it exists.
type: str
default: present
choices: [ absent, present ]
table:
description:
- Table to create the index on.
- Mutually exclusive with I(state=absent).
type: str
required: true
columns:
description:
- List of columns to be covered by the index.
- Mutually exclusive with I(state=absent).
type: list
aliases:
- column
cond:
description:
- Index conditions.
- Mutually exclusive with I(state=absent).
type: str
idxtype:
description:
- Index type (like btree, gist, gin, etc.).
- Mutually exclusive with I(state=absent).
type: str
aliases:
- type
concurrent:
description:
- Enable or disable concurrent mode (CREATE / DROP INDEX CONCURRENTLY).
- Note that if I(concurrent=no), the table will be locked (ACCESS EXCLUSIVE) during the build.
For more information about the lock levels see U(https://www.postgresql.org/docs/current/explicit-locking.html).
- If the build process is interrupted for any reason when I(concurrent=yes), the index becomes invalid.
In this case it should be dropped and created again.
- Mutually exclusive with I(cascade=yes).
type: bool
default: yes
tablespace:
description:
- Set a tablespace for the index.
- Mutually exclusive with I(state=absent).
required: false
type: str
storage_params:
description:
- Storage parameters like fillfactor, vacuum_cleanup_index_scale_factor, etc.
- Mutually exclusive with I(state=absent).
type: list
cascade:
description:
- Automatically drop objects that depend on the index,
and in turn all objects that depend on those objects.
See U(https://www.postgresql.org/docs/current/sql-dropindex.html).
- Used only with I(state=absent).
- Mutually exclusive with I(concurrent=yes).
type: bool
default: no
notes:
- The index building process can affect database performance.
- To avoid table locks on production databases, use I(concurrent=yes) (default behavior).
- The default authentication assumes that you are either logging in as or
sudo'ing to the postgres account on the host.
- This module uses psycopg2, a Python PostgreSQL database adapter. You must
ensure that psycopg2 is installed on the host before using this module.
- If the remote host is the PostgreSQL server (which is the default case), then
PostgreSQL must also be installed on the remote host.
- For Ubuntu-based systems, install the postgresql, libpq-dev, and python-psycopg2 packages
on the remote host before using this module.
requirements:
- psycopg2
author:
- Andrey Klychkov (@Andersson007)
extends_documentation_fragment: postgres
'''
EXAMPLES = r'''
- name: Create btree index test_idx concurrently if it does not exist, covering columns id and name of table products
postgresql_idx:
db: acme
table: products
columns: id,name
name: test_idx
- name: Create btree index test_idx concurrently with tablespace called ssd and storage parameter
postgresql_idx:
db: acme
table: products
columns:
- id
- name
idxname: test_idx
tablespace: ssd
storage_params:
- fillfactor=90
- name: Create gist index test_gist_idx concurrently on column geo_data of table map
postgresql_idx:
db: somedb
table: map
idxtype: gist
columns: geo_data
idxname: test_gist_idx
# Note: for the example below pg_trgm extension must be installed for gin_trgm_ops
- name: Create gin index gin0_idx not concurrently on column comment of table test
postgresql_idx:
idxname: gin0_idx
table: test
columns: comment gin_trgm_ops
concurrent: no
idxtype: gin
- name: Drop btree test_idx concurrently
postgresql_idx:
db: mydb
idxname: test_idx
state: absent
- name: Drop test_idx cascade
postgresql_idx:
db: mydb
idxname: test_idx
state: absent
cascade: yes
concurrent: no
- name: Create btree index test_idx concurrently on columns id,comment where column id > 1
postgresql_idx:
db: mydb
table: test
columns: id,comment
idxname: test_idx
cond: id > 1
'''
RETURN = r'''
name:
description: Index name.
returned: always
type: str
sample: 'foo_idx'
state:
description: Index state.
returned: always
type: str
sample: 'present'
schema:
description: Schema where index exists.
returned: always
type: str
sample: 'public'
tablespace:
description: Tablespace where index exists.
returned: always
type: str
sample: 'ssd'
query:
description: Query that the module tried to execute.
returned: always
type: str
sample: 'CREATE INDEX CONCURRENTLY foo_idx ON test_table USING BTREE (id)'
storage_params:
description: Index storage parameters.
returned: always
type: list
sample: [ "fillfactor=90" ]
valid:
description: Index validity.
returned: always
type: bool
sample: true
'''
import traceback
PSYCOPG2_IMP_ERR = None
try:
import psycopg2
HAS_PSYCOPG2 = True
except ImportError:
HAS_PSYCOPG2 = False
PSYCOPG2_IMP_ERR = traceback.format_exc()
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.database import SQLParseError
from ansible.module_utils.postgres import postgres_common_argument_spec
from ansible.module_utils._text import to_native
from ansible.module_utils.six import iteritems
VALID_IDX_TYPES = ('BTREE', 'HASH', 'GIST', 'SPGIST', 'GIN', 'BRIN')
# ===========================================
# PostgreSQL module specific support methods.
#
class Index(object):
def __init__(self, module, cursor, schema, name):
self.name = name
if schema:
self.schema = schema
else:
self.schema = 'public'
self.module = module
self.cursor = cursor
self.info = {
'name': self.name,
'state': 'absent',
'schema': '',
'tblname': '',
'tblspace': '',
'valid': True,
'storage_params': [],
}
self.exists = False
self.__exists_in_db()
self.executed_query = ''
def get_info(self):
"""
Getter to refresh and return table info
"""
self.__exists_in_db()
return self.info
def __exists_in_db(self):
"""
Check index and collect info
"""
query = ("SELECT i.schemaname, i.tablename, i.tablespace, "
"pi.in
| chintak/scikit-image | skimage/morphology/setup.py | Python | bsd-3-clause | 2,125 | 0.000471 |
#!/usr/bin/env python
import os
from skimage._build import cython
base_path = os.path.abspath(os.path.dirname(__file__))
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('morphology', parent_package, top_path)
config.add_data_dir('tests')
cython(['ccomp.pyx'], working_path=base_path)
cython(['cmorph.pyx'], working_path=base_path)
cython(['_watershed.pyx'], working_path=base_path)
cython(['_skeletonize_cy.pyx'], working_path=base_path)
cython(['_pnpoly.pyx'], working_path=base_path)
cython(['_convex_hull.pyx'], working_path=base_path)
cython(['_greyreconstruct.pyx'], working_path=base_path)
config.add_extension('ccomp', sources=['ccomp.c'],
include_dirs=[get_numpy_include_dirs()])
config.add_extension('cmorph', sources=['cmorph.c'],
include_dirs=[get_numpy_include_dirs()])
config.add_extension('_watershed', sources=['_watershed.c'],
include_dirs=[get_numpy_include_dirs()])
config.add_extension('_skeletonize_cy', sources=['_skeletonize_cy.c'],
include_dirs=[get_numpy_include_dirs()])
config.add_extension('_pnpoly', sources=['_pnpoly.c'],
include_dirs=[get_numpy_include_dirs(), '../_shared'])
config.add_extension('_convex_hull', sources=['_convex_hull.c'],
include_dirs=[get_numpy_include_dirs()])
config.add_extension('_greyreconstruct', sources=['_greyreconstruct.c'],
include_dirs=[get_numpy_include_dirs()])
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer='scikit-image Developers',
author='Damian Eads',
maintainer_email='scikit-image@googlegroups.com',
description='Morphology Wrapper',
url='https://github.com/scikit-image/scikit-image',
license='SciPy License (BSD Style)',
**(configuration(top_path='').todict())
)
| MindPass/Code | Interface_graphique/mindmap/svgwrite-1.1.6/tests/test_svg.py | Python | gpl-3.0 | 802 | 0.009975 |
#!/usr/bin/env python
#coding:utf-8
# Author: mozman --<mozman@gmx.at>
# Purpose: test svg element
# Created: 25.09.2010
# Copyright (C) 2010, Manfred Moitzi
# License: MIT License
import sys
import unittest
from svgwrite.container import SVG, Symbol
class TestSVG(unittest.TestCase):
def test_constructor(self):
svg = SVG(insert=(10,20), size=(100,200))
self.assertTrue(isinstance(svg, Symbol))
self.assertEqual(svg.tostring(), '<svg height="200" width="100" x="10" y="20"><defs /></svg>')
def test_add_svg_as_subelement(self):
svg = SVG(id='svg')
subsvg = SVG(id='subsvg')
svg.add(subsvg)
self.assertEqual(svg.tostring(), '<svg id="svg"><defs /><svg id="subsvg"><defs /></svg></svg>')
if __name__=='__main__':
unittest.main()
| DanielGabris/radius_restserver | src/settings/__init__.py | Python | mit | 149 | 0.020134 |
from __future__ import absolute_import
from .dev import Dev # noqa
try:
from .production import Production # noqa
except ImportError:
pass
| Alexoner/web-crawlers | scripts/rails.ctrip.com/ctripRails/middlewares/__init__.py | Python | gpl-2.0 | 98 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .randomproxy import ProxyDownloaderMiddleware
| openvenues/address_deduper | address_deduper/__init__.py | Python | mit | 765 | 0.006536 |
import sys

from flask import Flask
import config
from db import db_from_config
from address_deduper.views import init_views
from address_normalizer.deduping.near_duplicates import *
def create_app(env, **kw):
app = Flask(__name__)
specified_config = kw.get('config')
if specified_config:
__import__('address_normalizer.' + specified_config)
config.current_env = env
conf = config.valid_configs.get(env)
if not conf:
sys.exit('Invalid config, choices are [%s]' % ','.join(config.valid_configs.keys()))
app.config.from_object(conf)
app.url_map.strict_slashes = False
db = db_from_config(app.config)
AddressNearDupe.configure(db, geohash_precision=app.config['GEOHASH_PRECISION'])
init_views(app)
return app
| Pulgama/supriya | supriya/ext/book.py | Python | mit | 2,339 | 0.000855 |
import base64
import pathlib
import pickle
import textwrap
from docutils.nodes import FixedTextElement, General, SkipNode
from uqbar.book.extensions import Extension
from uqbar.strings import normalize
from supriya.ext import websafe_audio
from supriya.io import Player
class RenderExtension(Extension):
template = normalize(
"""
<audio controls src="{file_path}">
Your browser does not support the <code>audio</code> element.
</audio>
"""
)
class render_block(General, FixedTextElement):
pass
@classmethod
def setup_console(cls, console, monkeypatch):
monkeypatch.setattr(
Player,
"__call__",
lambda self: console.push_proxy(cls(self.renderable, self.render_kwargs)),
)
@classmethod
def setup_sphinx(cls, app):
app.add_node(
cls.render_block,
html=[cls.visit_block_html, None],
latex=[cls.visit_block_latex, None],
text=[cls.visit_block_text, cls.depart_block_text],
)
def __init__(self, renderable, render_kwargs):
self.renderable = pickle.loads(pickle.dumps(renderable))
self.render_kwargs = pickle.loads(pickle.dumps(render_kwargs))
def to_docutils(self):
code = "\n".join(
textwrap.wrap(
base64.b64encode(
pickle.dumps((self.renderable, self.render_kwargs))
).decode()
)
)
node = self.render_block(code, code)
return [node]
@classmethod
def render(cls, node, output_path):
output_path.mkdir(exist_ok=True)
renderable, render_kwargs = pickle.loads(
base64.b64decode("".join(node[0].split()))
)
return websafe_audio(
renderable.__render__(render_directory_path=output_path, **render_kwargs)
)
@staticmethod
def visit_block_html(self, node):
absolute_file_path = RenderExtension.render(
node, pathlib.Path(self.builder.outdir) / "_images"
)
relative_file_path = (
pathlib.Path(self.builder.imgpath) / absolute_file_path.name
)
result = RenderExtension.template.format(file_path=relative_file_path)
self.body.append(result)
raise SkipNode
| fxia22/ASM_xf | PythonD/site_python/numarray/random_array/__init__.py | Python | gpl-2.0 | 218 | 0.004587 |
__doc__ = """Random number array generators for numarray.
This package was ported to numarray from Numeric's RandomArray and
provides functions to generate numarrays of random numbers.
"""
from RandomArray2 import *
| linea-it/qlf | backend/framework/qlf/dashboard/migrations/0014_product.py | Python | gpl-3.0 | 995 | 0.00402 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2019-01-29 16:08
from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('dashboard', '0013_auto_20181002_1939'),
]
operations = [
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('value', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)),
('key', models.CharField(help_text='Metric Key', max_length=30)),
('mjd', models.FloatField(help_text='MJD', null=True)),
('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='product_job', to='dashboard.Job')),
],
),
]
| yingyun/RendererEvaluator | utils/Shader2Char.py | Python | gpl-2.0 | 360 | 0.013889 |
#20140125 Cui.Yingyun
#Convert C style code to Char style
import sys
file = sys.stdin
if len(sys.argv) > 1:
file = open(sys.argv[1])
else:
print "Input argument which specify shader program"
sys.exit(0);
lines = file.readlines()
print '\"\\'
for line in lines[:-1]:
print line.rstrip() + '\\n\\'
print lines[-1].rstrip() + '\\n\"'
file.close()
| franck-talbart/codelet_tuning_infrastructure | ctr-common/plugins/34f27d92-f558-4121-b862-85216c54e18f/main.py | Python | gpl-3.0 | 2,154 | 0.010678 |
#************************************************************************
# Codelet Tuning Infrastructure
# Copyright (C) 2010-2015 Intel Corporation, CEA, GENCI, and UVSQ
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#************************************************************************
# Authors: Franck Talbart, Mathieu Bordet, Nicolas Petit
from cti_hapi import database_manager, database
from cti_hapi.main import HapiPlugin, hapi_command
import sys
class CategoryPlugin(HapiPlugin):
@hapi_command("list")
def list_cmd(self, params):
""" Returns a list of category
Args:
self: class of the plugin
params: working parameters
Returns:
Nothing
"""
db = database.Database()
result = database_manager.search({'NAME':["plugin_uid"], 'TYPE':"=", 'VAL':str(self.plugin_uid)},
db,
"category",
fields=["entry_info.entry_uid", "category.name"]
)
for r in result:
print r[0] + ":" + r[1]
#---------------------------------------------------------------------------
# By pass the authentification system
# Needed by the doc plugin
def check_passwd(self):
return True
#---------------------------------------------------------------------------
if __name__ == "__main__":
p = CategoryPlugin()
exit(p.main(sys.argv))
| sloe/analyseapp | models/menu.py | Python | apache-2.0 | 523 | 0.026769 |
response.title = settings.title
response.subtitle = settings.subtitle
response.meta.author = '%(author)s <%(author_email)s>' % settings
response.meta.keywords = settings.keywords
response.meta.description = settings.description
response.menu = [
(T('Index'),URL('default','index')==URL(),URL('default','index'),[]),
(T('Video'),URL('default','video')==URL(),URL('default','video'),[]),
(T('Info'), False, "http://www.oarstack.com/2015/04/oarstack-analysis/", []),
]
response.google_analytics_id="UA-52135133-2"
| thatguystone/vault | vault/cmd.py | Python | mit | 136 | 0.029412 |
import argparse
import sys

def vault(args):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
if __name__ == "__main__":
vault(sys.argv)
| anpolsky/phaxio-python | docs/source/conf.py | Python | apache-2.0 | 5,715 | 0.001225 |
# -*- coding: utf-8 -*-
#
# phaxio-python documentation build configuration file, created by
# sphinx-quickstart on Sun Jan 8 20:17:15 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../../'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.ifconfig',
# 'sphinx.ext.githubpages',
# 'sphinx.ext.autosectionlabel',
'sphinx.ext.autosummary'
]
autosummary_generate = True
autodoc_default_flags = ['members', 'undoc-members']
# skips documenting to_dict and to_str in model types
def skip_member(app, what, name, obj, skip, options):
if name in ['to_dict', 'to_str']:
return True
return skip
# skips all docstrings in model types, but leave the :rtype: tags so we have type information and links
def remove_module_docstring(app, what, name, obj, options, lines):
if name.startswith("phaxio.swagger_client"):
lines[:] = [x for x in lines if 'rtype' in x]
def setup(app):
app.connect('autodoc-skip-member', skip_member)
app.connect("autodoc-process-docstring", remove_module_docstring)
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'phaxio-python'
copyright = u'2017, Ari Polsky'
author = u'Ari Polsky'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'.2'
# The full version, including alpha/beta/rc tags.
release = u'.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'phaxio-pythondoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'phaxio-python.tex', u'phaxio-python Documentation',
u'Ari Polsky', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'phaxio-python', u'phaxio-python Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'phaxio-python', u'phaxio-python Documentation',
author, 'phaxio-python', 'One line description of project.',
'Miscellaneous'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
| kpavel/docker-py | docker/auth/auth.py | Python | apache-2.0 | 6,366 | 0 |
# Copyright 2013 dotCloud inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import json
import logging
import os
import six
from .. import errors
INDEX_NAME = 'docker.io'
INDEX_URL = 'https://{0}/v1/'.format(INDEX_NAME)
DOCKER_CONFIG_FILENAME = os.path.join('.docker', 'config.json')
LEGACY_DOCKER_CONFIG_FILENAME = '.dockercfg'
log = logging.getLogger(__name__)
def resolve_repository_name(repo_name):
if '://' in repo_name:
raise errors.InvalidRepository(
'Repository name cannot contain a scheme ({0})'.format(repo_name)
)
index_name, remote_name = split_repo_name(repo_name)
if index_name[0] == '-' or index_name[-1] == '-':
raise errors.InvalidRepository(
'Invalid index name ({0}). Cannot begin or end with a'
' hyphen.'.format(index_name)
)
return resolve_index_name(index_name), remote_name
def resolve_index_name(index_name):
index_name = convert_to_hostname(index_name)
if index_name == 'index.'+INDEX_NAME:
index_name = INDEX_NAME
return index_name
def split_repo_name(repo_name):
parts = repo_name.split('/', 1)
if len(parts) == 1 or (
'.' not in parts[0] and ':' not in parts[0] and parts[0] != 'localhost'
):
# This is a docker index repo (ex: username/foobar or ubuntu)
return INDEX_NAME, repo_name
return tuple(parts)
def resolve_authconfig(authconfig, registry=None):
"""
Returns the authentication data from the given auth configuration for a
specific registry. As with the Docker client, legacy entries in the config
with full URLs are stripped down to hostnames before checking for a match.
Returns None if no match was found.
"""
# Default to the public index server
registry = resolve_index_name(registry) if registry else INDEX_NAME
log.debug("Looking for auth entry for {0}".format(repr(registry)))
if registry in authconfig:
log.debug("Found {0}".format(repr(registry)))
return authconfig[registry]
for key, config in six.iteritems(authconfig):
if resolve_index_name(key) == registry:
log.debug("Found {0}".format(repr(key)))
return config
log.debug("No entry found")
return None
def convert_to_hostname(url):
return url.replace('http://', '').replace('https://', '').split('/', 1)[0]
def decode_auth(auth):
if isinstance(auth, six.string_types):
auth = auth.encode('ascii')
s = base64.b64decode(auth)
login, pwd = s.split(b':', 1)
return login.decode('utf8'), pwd.decode('utf8')
def encode_header(auth):
auth_json = json.dumps(auth).encode('ascii')
return base64.urlsafe_b64encode(auth_json)
def parse_auth(entries):
"""
Parses authentication entries
Args:
entries: Dict of authentication entries.
Returns:
Authentication registry.
"""
conf = {}
for registry, entry in six.iteritems(entries):
username, password = decode_auth(entry['auth'])
log.debug(
'Found entry (registry={0}, username={1})'
.format(repr(registry), repr(username))
)
conf[registry] = {
'username': username,
'password': password,
'email': entry.get('email'),
'serveraddress': registry,
}
return conf
def find_config_file(config_path=None):
environment_path = os.path.join(
os.environ.get('DOCKER_CONFIG'),
os.path.basename(DOCKER_CONFIG_FILENAME)
) if os.environ.get('DOCKER_CONFIG') else None
paths = [
config_path, # 1
environment_path, # 2
os.path.join(os.path.expanduser('~'), DOCKER_CONFIG_FILENAME), # 3
os.path.join(
os.path.expanduser('~'), LEGACY_DOCKER_CONFIG_FILENAME
) # 4
]
for path in paths:
if path and os.path.exists(path):
return path
return None
def load_config(config_path=None):
"""
Loads authentication data from a Docker configuration file in the given
root directory or if config_path is passed use given path.
Lookup priority:
explicit config_path parameter > DOCKER_CONFIG environment variable >
~/.docker/config.json > ~/.dockercfg
"""
config_file = find_config_file(config_path)
if not config_file:
log.debug("File doesn't exist")
return {}
try:
with open(config_file) as f:
data = json.load(f)
if data.get('auths'):
log.debug("Found 'auths' section")
return parse_auth(data['auths'])
else:
log.debug("Couldn't find 'auths' section")
f.seek(0)
return parse_auth(json.load(f))
except (IOError, KeyError, ValueError) as e:
# Likely missing new Docker config file or it's in an
# unknown format, continue to attempt to read old location
# and format.
log.debug(e)
log.debug("Attempting to parse legacy auth file format")
try:
data = []
with open(config_file) as f:
for line in f.readlines():
data.append(line.strip().split(' = ')[1])
if len(data) < 2:
# Not enough data
raise errors.InvalidConfigFile(
'Invalid or empty configuration file!'
)
username, password = decode_auth(data[0])
return {
INDEX_NAME: {
'username': username,
'password': password,
'email': data[1],
'serveraddress': INDEX_URL,
}
}
except Exception as e:
log.debug(e)
pass
log.debug("All parsing attempts failed - returning empty config")
return {}
| siosio/intellij-community | python/testData/refactoring/changeSignature/newParameterWithSignatureDefaultMakesSubsequentExistingParametersUseKeywordArguments.before.py | Python | apache-2.0 | 82 | 0.02439 |
def foo(a, b=None):
pass
foo("a", "b")
foo("a")
foo("a", b="b")
foo("a", None)
| plentific/django-encrypted-fields | encrypted_fields/tests.py | Python | mit | 9,124 | 0.001316 |
# -*- coding: utf-8 -*-
import re
import unittest
import django
from django.conf import settings
from django.db import models, connection
from django.test import TestCase
from django.utils import timezone
from .fields import (
EncryptedCharField,
EncryptedTextField,
EncryptedDateTimeField,
EncryptedIntegerField,
EncryptedDateField,
EncryptedFloatField,
EncryptedEmailField,
EncryptedBooleanField,
)
from keyczar import keyczar, readers
# Test class that encapsulates some Keyczar functions.
# Requirements are to implement __init__, decrypt(), encrypt()
class TestCrypter(object):
def __init__(self, keyname, *args, **kwargs):
self.keydata = readers.FileReader(keyname)
self.crypter = keyczar.Crypter(self.keydata)
def encrypt(self, cleartext):
return self.crypter.Encrypt(cleartext)
def decrypt(self, ciphertext):
return self.crypter.Decrypt(ciphertext)
class TestModel(models.Model):
char = EncryptedCharField(max_length=255, null=True, blank=True)
prefix_char = EncryptedCharField(max_length=255, prefix='ENCRYPTED:::', blank=True)
decrypt_only = EncryptedCharField(max_length=255, decrypt_only=True, blank=True)
short_char = EncryptedCharField(
max_length=50, null=True, enforce_max_length=True, blank=True)
text = EncryptedTextField(null=True, blank=True)
datetime = EncryptedDateTimeField(null=True, blank=True)
integer = EncryptedIntegerField(null=True, blank=True)
date = EncryptedDateField(null=True, blank=True)
floating = EncryptedFloatField(null=True, blank=True)
email = EncryptedEmailField(null=True, blank=True)
boolean = EncryptedBooleanField(default=False, blank=True)
char_custom_crypter = EncryptedCharField(
max_length=255, null=True,crypter_klass=TestCrypter, blank=True)
class FieldTest(TestCase):
IS_POSTGRES = settings.DATABASES['default']['ENGINE'] == 'django.db.backends.postgresql_psycopg2'
def get_db_value(self, field, model_id):
cursor = connection.cursor()
cursor.execute(
'select {0} '
'from encrypted_fields_testmodel '
'where id = {1};'.format(field, model_id)
)
return cursor.fetchone()[0]
def test_char_field_encrypted_custom(self):
plaintext = 'Oh hi, test reader!'
model = TestModel()
model.char_custom_crypter = plaintext
model.save()
ciphertext = self.get_db_value('char_custom_crypter', model.id)
self.assertNotEqual(plaintext, ciphertext)
self.assertTrue('test' not in ciphertext)
fresh_model = TestModel.objects.get(id=model.id)
self.assertEqual(fresh_model.char_custom_crypter, plaintext)
def test_prefix_char_field_encrypted(self):
plaintext = 'Oh hi, test reader!'
model = TestModel()
model.prefix_char = plaintext
model.save()
ciphertext = self.get_db_value('prefix_char', model.id)
self.assertNotEqual(plaintext, ciphertext)
self.assertTrue('test' not in ciphertext)
self.assertTrue(ciphertext.startswith('ENCRYPTED:::'))
def test_decrypt_only_field(self):
known_plaintext = 'Oh hi, test reader!'
known_ciphertext = (
'ADQA_82aYN2v_PzXcNPZprS-Ak_xbPmHj8TRuj8sU74ydIJeWtgpKK'
'Irmvw9ZnZCRpXRfZ6blOaBWhjsw62nNu7vQXWJXMCdmw'
)
model = TestModel()
model.decrypt_only = known_ciphertext
model.save()
plaintext = self.get_db_value('decrypt_only', model.id)
self.assertEquals(plaintext, known_plaintext)
def test_decrypt_only_plaintext(self):
known_plaintext = 'I am so plain and ordinary'
model = TestModel()
model.decrypt_only = known_plaintext
model.save()
plaintext = self.get_db_value('decrypt_only', model.id)
self.assertEquals(plaintext, known_plaintext)
def test_char_field_encrypted(self):
plaintext = 'Oh hi, test reader!'
model = TestModel()
model.char = plaintext
model.save()
ciphertext = self.get_db_value('char', model.id)
self.assertNotEqual(plaintext, ciphertext)
self.assertTrue('test' not in ciphertext)
fresh_model = TestModel.objects.get(id=model.id)
self.assertEqual(fresh_model.char, plaintext)
def test_unicode_encrypted(self):
plaintext = u'Oh hi, test reader! 🐱'
model = TestModel()
model.char = plaintext
model.save()
ciphertext = self.get_db_value('char', model.id)
self.assertNotEqual(plaintext, ciphertext)
self.assertTrue('test' not in ciphertext)
fresh_model = TestModel.objects.get(id=model.id)
self.assertEqual(fresh_model.char, plaintext)
def test_short_char_field_encrypted(self):
""" Test the max_length validation
of an encrypted char field """
plaintext = 'Oh hi, test reader!'
model = TestModel()
model.short_char = plaintext
self.assertRaises(ValueError, model.save)
def test_text_field_encrypted(self):
plaintext = 'Oh hi, test reader!' * 10
model = TestModel()
model.text = plaintext
model.save()
ciphertext = self.get_db_value('text', model.id)
self.assertNotEqual(plaintext, ciphertext)
self.assertTrue('test' not in ciphertext)
fresh_model = TestModel.objects.get(id=model.id)
self.assertEqual(fresh_model.text, plaintext)
def test_datetime_field_encrypted(self):
plaintext = timezone.now()
model = TestModel()
model.datetime = plaintext
model.save()
ciphertext = self.get_db_value('datetime', model.id)
# Django's normal date serialization format
self.assertTrue(re.search('^\d\d\d\d-\d\d-\d\d', ciphertext) is None)
fresh_model = TestModel.objects.get(id=model.id)
self.assertEqual(fresh_model.datetime, plaintext)
def test_integer_field_encrypted(self):
plaintext = 42
model = TestModel()
model.integer = plaintext
model.save()
ciphertext = self.get_db_value('integer', model.id)
self.assertNotEqual(plaintext, ciphertext)
self.assertNotEqual(plaintext, str(ciphertext))
fresh_model = TestModel.objects.get(id=model.id)
self.assertEqual(fresh_model.integer, plaintext)
def test_date_field_encrypted(self):
plaindate = timezone.now().date()
model = TestModel()
model.date = plaindate
model.save()
ciphertext = self.get_db_value('date', model.id)
fresh_model = TestModel.objects.get(id=model.id)
self.assertNotEqual(ciphertext, plaindate.isoformat())
self.assertEqual(fresh_model.date, plaindate)
def test_float_field_encrypted(self):
plaintext = 42.44
model = TestModel()
model.floating = plaintext
model.save()
ciphertext = self.get_db_value('floating', model.id)
self.assertNotEqual(plaintext, ciphertext)
self.assertNotEqual(plaintext, str(ciphertext))
fresh_model = TestModel.objects.get(id=model.id)
self.assertEqual(fresh_model.floating, plaintext)
def test_email_field_encrypted(self):
plaintext = 'aron.jones@gmail.com' # my email address, btw
model = TestModel()
model.email = plaintext
model.save()
ciphertext = self.get_db_value('email', model.id)
self.assertNotEqual(plaintext, ciphertext)
self.assertTrue('aron' not in ciphertext)
fresh_model = TestModel.objects.get(id=model.id)
self.assertEqual(fresh_model.email, plaintext)
def test_boolean_field_encrypted(self):
plaintext = True
model = TestModel()
model.boolean = plaintext
model.save()
ciphertext = self.get_db_value('boolean', model.id)
self.assertNotEqual(plaintext, ciphertext)
self.assertNotEqual(True, ciphertext)
self.assertNotEqual('True', ciphertext)
self.assertNotEqual('true', ciphertext)
self.assertNot
| RuiNascimento/krepo | script.module.lambdascrapers/lib/lambdascrapers/sources_ lambdascrapers/en/series9.py | Python | gpl-2.0 | 7,221 | 0.008725 |
# -*- coding: UTF-8 -*-
'''
series9 scraper for Exodus forks.
Nov 9 2018 - Checked
Updated and refactored by someone.
Originally created by others.
'''
import re,traceback,urllib,urlparse
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import directstream
from resources.lib.modules import log_utils
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['seriesonline.io','series9.io','gomovies.pet']
self.base_link = 'https://series9.co/'
self.search_link = '/movie/search/%s'
def matchAlias(self, title, aliases):
try:
for alias in aliases:
if cleantitle.get(title) == cleantitle.get(alias['title']):
return True
except:
return False
def movie(self, imdb, title, localtitle, aliases, year):
try:
aliases.append({'country': 'us', 'title': title})
url = {'imdb': imdb, 'title': title, 'year': year, 'aliases': aliases}
url = urllib.urlencode(url)
return url
except:
failure = traceback.format_exc()
log_utils.log('Series9 - Exception: \n' + str(failure))
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
aliases.append({'country': 'us', 'title': tvshowtitle})
url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year, 'aliases': aliases}
url = urllib.urlencode(url)
return url
except:
failure = traceback.format_exc()
log_utils.log('Series9 - Exception: \n' + str(failure))
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
if url == None: return
url = urlparse.parse_qs(url)
url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url])
url['title'], url['premiered'], url['season'], url['episode'] = title, premiered, season, episode
url = urllib.urlencode(url)
return url
except:
failure = traceback.format_exc()
log_utils.log('Series9 - Exception: \n' + str(failure))
return
def searchShow(self, title, season, aliases, headers):
try:
title = cleantitle.normalize(title)
search = '%s Season %01d' % (title, int(season))
url = urlparse.urljoin(self.base_link, self.search_link % cleantitle.geturl(search))
r = client.request(url, headers=headers, timeout='15')
r = client.parseDOM(r, 'div', attrs={'class': 'ml-item'})
r = zip(client.parseDOM(r, 'a', ret='href'), client.parseDOM(r, 'a', ret='title'))
r = [(i[0], i[1], re.findall('(.*?)\s+-\s+Season\s+(\d)', i[1])) for i in r]
r = [(i[0], i[1], i[2][0]) for i in r if len(i[2]) > 0]
url = [i[0] for i in r if self.matchAlias(i[2][0], aliases) and i[2][1] == season][0]
url = urlparse.urljoin(self.base_link, '%s/watching.html' % url)
return url
except:
failure = traceback.format_exc()
log_utils.log('Series9 - Exception: \n' + str(failure))
return
def searchMovie(self, title, year, aliases, headers):
try:
title = cleantitle.normalize(title)
url = urlparse.urljoin(self.base_link, self.search_link % cleantitle.geturl(title))
r = client.request(url, headers=headers, timeout='15')
r = client.parseDOM(r, 'div', attrs={'class': 'ml-item'})
r = zip(client.parseDOM(r, 'a', ret='href'), client.parseDOM(r, 'a', ret='title'))
results = [(i[0], i[1], re.findall('\((\d{4})', i[1])) for i in r]
try:
r = [(i[0], i[1], i[2][0]) for i in results if len(i[2]) > 0]
url = [i[0] for i in r if self.matchAlias(i[1], aliases) and (year == i[2])][0]
except:
url = None
pass
if (url == None):
url = [i[0] for i in results if self.matchAlias(i[1], aliases)][0]
url = urlparse.urljoin(self.base_link, '%s/watching.html' % url)
return url
except:
failure = traceback.format_exc()
log_utils.log('Series9 - Exception: \n' + str(failure))
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url == None: return sources
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
aliases = eval(data['aliases'])
headers = {}
if 'tvshowtitle' in data:
ep = data['episode']
url = '%s/film/%s-season-%01d/watching.html?ep=%s' % (self.base_link, cleantitle.geturl(data['tvshowtitle']), int(data['season']), ep)
r = client.request(url, headers=headers, timeout='10', output='geturl')
if url == None:
url = self.searchShow(data['tvshowtitle'], data['season'], aliases, headers)
else:
url = self.searchMovie(data['title'], data['year'], aliases, headers)
if url == None: raise Exception()
r = client.request(url, headers=headers, timeout='10')
r = client.parseDOM(r, 'div', attrs={'class': 'les-content'})
if 'tvshowtitle' in data:
ep = data['episode']
links = client.parseDOM(r, 'a', attrs={'episode-data': ep}, ret='player-data')
else:
links = client.parseDOM(r, 'a', ret='player-data')
for link in links:
if '123movieshd' in link or 'seriesonline' in link:
r = client.request(link, headers=headers, timeout='10')
r = re.findall('(https:.*?redirector.*?)[\'\"]', r)
for i in r:
try: sources.append({'source': 'gvideo', 'quality': directstream.googletag(i)[0]['quality'], 'language': 'en', 'url': i, 'direct': True, 'debridonly': False})
except: pass
else:
try:
host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(link.strip().lower()).netloc)[0]
if not host in hostDict: raise Exception()
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
sources.append({'source': host, 'quality': 'SD', 'language': 'en', 'url': link, 'direct': False, 'debridonly': False})
except:
pass
return sources
except:
failure = traceback.format_exc()
log_utils.log('Series9 - Exception: \n' + str(failure))
return sources
def resolve(self, url):
if "google" in url:
return directstream.googlepass(url)
else:
return url
| stonier/ecto | test/benchmark/metrics.py | Python | bsd-3-clause | 4,501 | 0.004888 |
#!/usr/bin/env python
#
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import ecto
import ecto_test
import sys
def test_nodelay():
plasm = ecto.Plasm()
ping = ecto_test.Ping("Ping")
metrics = ecto_test.Metrics("Metrics", queue_size=10)
plasm.connect(ping[:] >> metrics[:])
sched = ecto.Scheduler(plasm)
sched.execute(niter=10000)
print "Hz:", metrics.outputs.hz, " Latency in seconds: %f" % metrics.outputs.latency_seconds
# these are kinda loose
assert metrics.outputs.hz > 5000
assert metrics.outputs.latency_seconds < 0.0001
def test_20hz():
plasm = ecto.Plasm()
ping = ecto_test.Ping("Ping")
throttle = ecto_test.Throttle("Throttle", rate=20)
metrics = ecto_test.Metrics("Metrics", queue_size=10)
plasm.connect(ping[:] >> throttle[:],
throttle[:] >> metrics[:])
sched = ecto.Scheduler(plasm)
sched.execute(niter=100)
print "Hz:", metrics.outputs.hz, " Latency in seconds: %f" % metrics.outputs.latency_seconds
# these are kinda loose
assert 19 < metrics.outputs.hz < 21
assert 0.04 < metrics.outputs.latency_seconds < 0.06
def makeplasm(n_nodes):
plasm = ecto.Plasm()
ping = ecto_test.Ping("Ping")
throttle = ecto_test.Sleep("Sleep_0", seconds=1.0/n_nodes)
plasm.connect(ping[:] >> throttle[:])
for j in range(n_nodes-1): # one has already been added
throttle_next = ecto_test.Sleep("Sleep_%u" % (j+1), seconds=1.0/n_nodes)
plasm.connect(throttle, "out", throttle_next, "in")
throttle = throttle_next
metrics = ecto_test.Metrics("Metrics", queue_size=4)
plasm.connect(throttle[:] >> metrics[:])
# o = open('graph.dot', 'w')
# print >>o, plasm.viz()
# o.close()
# print "\n", plasm.viz(), "\n"
return (plasm, metrics)
def test_st(niter, n_nodes):
(plasm, metrics) = makeplasm(n_nodes)
#sched = ecto.Scheduler(plasm)
#sched.execute(niter)
sched = ecto.Scheduler(plasm)
sched.execute(niter)
print "Hz:", metrics.outputs.hz, " Latency in seconds:", metrics.outputs.latency_seconds
assert 0.95 < metrics.outputs.hz < 1.05
assert 0.95 < metrics.outputs.latency_seconds < 1.05
#
# It is hard to test the middle cases, i.e. if you have one thread
# per node, things should run at n_nodes hz and 1 second latency but
# if there are less than that, things are somewhere in the middle.
# Also your latency tends to be worse as you have to wait for the
# graph to "fill up"
#
def test_tp(niter, n_nodes):
(plasm, metrics) = makeplasm(n_nodes)
sched = ecto.Scheduler(plasm)
sched.execute(niter=niter)
print "Hz:", metrics.outputs.hz, " Latency in seconds:", metrics.outputs.latency_seconds
assert n_nodes * 0.95 < metrics.outputs.hz < n_nodes * 1.05
assert 0.9 < metrics.outputs.latency_seconds < 1.1
test_nodelay()
test_20hz()
test_st(5, 5)
test_st(5, 12)
test_tp(20, 15)
test_tp(20, 10)
test_tp(20, 5)
| kgao/MediaDrop | mediadrop/lib/auth/tests/static_query_test.py | Python | gpl-3.0 | 1,790 | 0.006145 |
# -*- coding: utf-8 -*-
# This file is a part of MediaDrop (http://www.mediadrop.net),
# Copyright 2009-2014 MediaDrop contributors
# For the exact contribution history, see the git revision log.
# The source code in this file is dual licensed under the MIT license or
# the GPLv3 or (at your option) any later version.
# See LICENSE.txt in the main project directory, for more information.
from mediadrop.lib.auth.query_result_proxy import StaticQuery
from mediadrop.lib.test.pythonic_testcase import *
class StaticQueryTest(PythonicTestCase):
def setUp(self):
self.query = StaticQuery([1, 2, 3, 4, 5])
def test_can_return_all_items(self):
assert_equals([1, 2, 3, 4, 5], self.query.all())
def test_can_return_all_items_with_iteration(self):
assert_equals([1, 2, 3, 4, 5], list(self.query))
def test_can_use_offset(self):
assert_equals([3, 4, 5], self.query.offset(2).all())
def test_can_build_static_query(self):
assert_equals([1, 2], list(self.query.limit(2)))
def test_knows_number_of_items(self):
all_items = self.query.offset(1).all()
assert_length(4, all_items)
assert_equals(4, self.query.count())
assert_equals(4, len(self.query))
def test_supports_slicing(self):
assert_equals([3, 4, 5], self.query[2:])
assert_equals(3, self.query.offset(1)[2])
def test_can_return_first_item(self):
assert_equals(1, self.query.first())
list(self.query) # consume all other items
assert_none(self.query.first())
import unittest
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(StaticQueryTest))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| JShadowMan/package | python/module/calc/calc.py | Python | mit | 239 | 0.016736 |
#!/usr/bin/python35
def add(x, y):
return x + y
def dec(x, y):
return x - y
def div(x, y):
if y == 0:
return 0
return x / y
def mult(x, y):
return x * y
if __name__ == '__main__':
print('Module: Calc')
| RobSpectre/garfield | garfield/phone_numbers/migrations/0002_phonenumber_related_sim.py | Python | mit | 626 | 0.001597 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-11-01 20:02
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('phone_numbers', '0001_initial'),
('sims', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='phonenumber',
name='related_sim',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='phone_numbers', to='sims.Sim'),
),
]
| SanketDG/networkx | networkx/exception.py | Python | bsd-3-clause | 1,828 | 0.005476 |
# -*- coding: utf-8 -*-
"""
**********
Exceptions
**********
Base exceptions and errors for NetworkX.
"""
__author__ = """Aric Hagberg (hagberg@lanl.gov)\nPieter Swart (swart@lanl.gov)\nDan Schult(dschult@colgate.edu)\nLoïc Séguin-C. <loicseguin@gmail.com>"""
# Copyright (C) 2004-2016 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
#
# Exception handling
# the root of all Exceptions
class NetworkXException(Exception):
"""Base class for exceptions in NetworkX."""
class NetworkXError(NetworkXException):
"""Exception for a serious error in NetworkX"""
class NetworkXPointlessConcept(NetworkXException):
"""Harary, F. and Read, R. "Is the Null Graph a Pointless Concept?"
In Graphs and Combinatorics Conference, George Washington University.
New York: Springer-Verlag, 1973.
"""
class NetworkXAlgorithmError(NetworkXException):
"""Exception for unexpected termination of algorithms."""
class NetworkXUnfeasible(NetworkXAlgorithmError):
"""Exception raised by algorithms trying to solve a problem
instance that has no feasible solution."""
class NetworkXNoPath(NetworkXUnfeasible):
"""Exception for algorithms that should return a path when running
on graphs where such a path does not exist."""
class NetworkXNoCycle(NetworkXUnfeasible):
"""Exception for algorithms that should return a cycle when running
on graphs where such a cycle does not exist."""
class NetworkXUnbounded(NetworkXAlgorithmError):
"""Exception raised by algorithms trying to solve a maximization
or a minimization problem instance that is unbounded."""
class NetworkXNotImplemented(NetworkXException):
"""Exception raised by algorithms not implemented for a type of graph."""
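
# -----------------------------------------------------------------------------
# Hedged usage sketch (not part of exception.py): algorithms raise the most
# specific subclass and callers catch at whatever level they care about.
# Catching NetworkXUnfeasible here also catches its NetworkXNoPath child.
def _demo_exception_hierarchy():
    try:
        raise NetworkXNoPath("no path between u and v")
    except NetworkXUnfeasible as exc:
        return "caught: %s" % exc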
|
nerdvegas/rez
|
example_packages/hello_world/package.py
|
Python
|
apache-2.0
| 381
| 0.002625
|
name = "hello_world"
version = "1.0.0"
authors = [
"ajohns"
]
description = \
    """
    Python-based hello world example package.
"""
tools = [
"hello"
]
requires = [
"python"
]
uuid = "examples.hello_world_py"
build_command = 'python {root}/build.py {install}'
def commands():
env.PYTHONPATH.append("{root}/python")
env.PATH.append("{root}/bin")
|
sephiroth6/nodeshot
|
nodeshot/networking/net/models/ip.py
|
Python
|
gpl-3.0
| 2,430
| 0.002881
|
from netfields import InetAddressField, CidrAddressField
from django.db import models
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from nodeshot.core.base.models import BaseAccessLevel
from ..managers import NetAccessLevelManager
from choices import IP_PROTOCOLS
class Ip(BaseAccessLevel):
""" IP Address Model """
interface = models.ForeignKey('net.Interface', verbose_name=_('interface'))
address = InetAddressField(verbose_name=_('ip address'), unique=True, db_index=True)
protocol = models.CharField(_('IP Protocol Version'), max_length=4, choices=IP_PROTOCOLS, default=IP_PROTOCOLS[0][0], blank=True)
netmask = CidrAddressField(_('netmask (CIDR, eg: 10.40.0.0/24)'), blank=True, null=True)
objects = NetAccessLevelManager()
class Meta:
app_label = 'net'
permissions = (('can_view_ip', 'Can view ip'),)
verbose_name = _('ip address')
verbose_name_plural = _('ip addresses')
def __unicode__(self):
return '%s: %s' % (self.protocol, self.address)
def clean(self, *args, **kwargs):
""" TODO """
# netaddr.IPAddress('10.40.2.1') in netaddr.IPNetwork('10.40.0.0/24')
pass
    def save(self, *args, **kwargs):
"""
Determines ip protocol version automatically.
Stores address in interface shortcuts for convenience.
"""
        self.protocol = 'ipv%d' % self.address.version
# save
super(Ip, self).save(*args, **kwargs)
# TODO: do we really need this?
# save shortcut on interfaces
#ip_cached_list = self.interface.ip_addresses
        ## if not present in interface shortcuts add it to the list
#if str(self.address) not in ip_cached_list:
# # recalculate cached_ip_list
# recalculated_ip_cached_list = []
# for ip in self.interface.ip_set.all():
# recalculated_ip_cached_list.append(str(ip.address))
# # rebuild string in format "<ip_1>, <ip_2>"
# self.interface.data['ip_addresses'] = recalculated_ip_cached_list
# self.interface.save()
@property
def owner(self):
return self.interface.owner
if 'grappelli' in settings.INSTALLED_APPS:
@staticmethod
def autocomplete_search_fields():
return ('address__icontains',)
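
# Hedged sketch (not the project's code) of the validation that the clean()
# TODO above hints at, assuming the netaddr package referenced in the inline
# comment:
#
#     import netaddr
#     def clean(self, *args, **kwargs):
#         if self.address and self.netmask and \
#                 netaddr.IPAddress(str(self.address)) not in \
#                 netaddr.IPNetwork(str(self.netmask)):
#             raise ValidationError(_('IP address is not within the given netmask'))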
|
ferdhika31/smarttraffic
|
hmmtagger/java2python_runtime.py
|
Python
|
gpl-3.0
| 108
| 0
|
def ternary(cond1, result1, result2):
if cond1:
return result1
else:
return result2
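
if __name__ == '__main__':
    # Usage sketch: ternary() mirrors Java's `cond ? a : b`; in idiomatic
    # Python the conditional expression `result1 if cond1 else result2`
    # would be written inline instead.
    assert ternary(3 > 0, 'pos', 'non-pos') == 'pos'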
|
anaviltripathi/pgmpy
|
pgmpy/inference/dbn_inference.py
|
Python
|
mit
| 18,559
| 0.00167
|
from itertools import tee, chain, combinations
from collections import defaultdict
from pgmpy.factors import Factor
from pgmpy.factors.Factor import factor_product
from pgmpy.inference import Inference, BeliefPropagation
class DBNInference(Inference):
def __init__(self, model):
"""
Class for performing inference using Belief Propagation method
for the input Dynamic Bayesian Network.
For the exact inference implementation, the interface algorithm
is used which is adapted from [1].
Parameters:
----------
model: Dynamic Bayesian Network
            Model for which inference is to be performed
Examples:
--------
>>> from pgmpy.factors import TabularCPD
>>> from pgmpy.models import DynamicBayesianNetwork as DBN
>>> from pgmpy.inference import DBNInference
>>> dbnet = DBN()
>>> dbnet.add_edges_from([(('Z', 0), ('X', 0)), (('X', 0), ('Y', 0)),
... (('Z', 0), ('Z', 1))])
>>> z_start_cpd = TabularCPD(('Z', 0), 2, [[0.5, 0.5]])
>>> x_i_cpd = TabularCPD(('X', 0), 2, [[0.6, 0.9],
... [0.4, 0.1]],
... evidence=[('Z', 0)],
... evidence_card=2)
>>> y_i_cpd = TabularCPD(('Y', 0), 2, [[0.2, 0.3],
... [0.8, 0.7]],
... evidence=[('X', 0)],
... evidence_card=2)
>>> z_trans_cpd = TabularCPD(('Z', 1), 2, [[0.4, 0.7],
... [0.6, 0.3]],
... evidence=[('Z', 0)],
... evidence_card=2)
>>> dbnet.add_cpds(z_start_cpd, z_trans_cpd, x_i_cpd, y_i_cpd)
>>> dbnet.initialize_initial_state()
>>> dbn_inf = DBNInference(dbnet)
>>> dbn_inf.start_junction_tree.nodes()
[(('X', 0), ('Z', 0)), (('X', 0), ('Y', 0))]
>>> dbn_inf.one_and_half_junction_tree.nodes()
[(('Z', 1), ('Z', 0)),
(('Y', 1), ('X', 1)),
(('Z', 1), ('X', 1))]
References:
----------
[1] Dynamic Bayesian Networks: Representation, Inference and Learning
by Kevin Patrick Murphy
http://www.cs.ubc.ca/~murphyk/Thesis/thesis.pdf
Public Methods:
--------------
forward_inference
backward_inference
query
"""
super(DBNInference, self).__init__(model)
self.interface_nodes_0 = model.get_interface_nodes(time_slice=0)
self.interface_nodes_1 = model.get_interface_nodes(time_slice=1)
start_markov_model = self.start_bayesian_model.to_markov_model()
one_and_half_markov_model = self.one_and_half_model.to_markov_model()
combinations_slice_0 = tee(combinations(self.interface_nodes_0, 2), 2)
combinations_slice_1 = combinations(self.interface_nodes_1, 2)
start_markov_model.add_edges_from(combinations_slice_0[0])
one_and_half_markov_model.add_edges_from(chain(combinations_slice_0[1], combinations_slice_1))
self.one_and_half_junction_tree = one_and_half_markov_model.to_junction_tree()
self.start_junction_tree = start_markov_model.to_junction_tree()
self.start_interface_clique = self._get_clique(self.start_junction_tree, self.interface_nodes_0)
self.in_clique = self._get_clique(self.one_and_half_junction_tree, self.interface_nodes_0)
self.out_clique = self._get_clique(self.one_and_half_junction_tree, self.interface_nodes_1)
def _shift_nodes(self, nodes, time_slice):
"""
Shifting the nodes to a certain required timeslice.
Parameters:
----------
nodes: list, array-like
List of node names.
nodes that are to be shifted to some other time slice.
time_slice: int
time slice where to shift the nodes.
"""
return [(node[0], time_slice) for node in nodes]
def _get_clique(self, junction_tree, nodes):
"""
Extracting the cliques from the junction tree which are a subset of
the given nodes.
Parameters:
----------
junction_tree: Junction tree
from which the nodes are to be extracted.
nodes: iterable container
A container of nodes (list, dict, set, etc.).
"""
return [clique for clique in junction_tree.nodes() if set(nodes).issubset(clique)][0]
def _get_evidence(self, evidence_dict, time_slice, shift):
"""
Getting the evidence belonging to a particular timeslice.
Parameters:
----------
evidence: dict
a dict key, value pair as {var: state_of_var_observed}
None if no evidence
time: int
the evidence corresponding to the time slice
shift: int
shifting the evidence corresponding to the given time slice.
"""
if evidence_dict:
return {(node[0], shift): evidence_dict[node] for node in evidence_dict if node[1] == time_slice}
def _marginalize_factor(self, nodes, factor):
"""
Marginalizing the factor selectively for a set of variables.
Parameters:
----------
nodes: list, array-like
A container of nodes (list, dict, set, etc.).
factor: factor
factor which is to be marginalized.
"""
marginalizing_nodes = list(set(factor.scope()).difference(nodes))
return factor.marginalize(marginalizing_nodes, inplace=False)
def _update_belief(self, belief_prop, clique, clique_potential, message=None):
"""
Method for updating the belief.
Parameters:
----------
belief_prop: Belief Propagation
Belief Propagation which needs to be updated.
in_clique: clique
The factor which needs to be updated corresponding to the input clique.
out_clique_potential: factor
Multiplying factor which will be multiplied to the factor corresponding to the clique.
"""
old_factor = belief_prop.junction_tree.get_factors(clique)
belief_prop.junction_tree.remove_factors(old_factor)
if message:
if message.scope() and clique_potential.scope():
new_factor = old_factor * message
                new_factor = new_factor / clique_potential
else:
new_factor = old_factor
else:
new_factor = old_factor * clique_potential
belief_prop.junction_tree.add_factors(new_factor)
belief_prop.calibrate()
def _get_factor(self, belief_prop, evidence):
"""
Extracts the required factor from the junction tree.
Parameters:
----------
belief_prop: Belief Propagation
Belief Propagation which needs to be updated.
evidence: dict
a dict key, value pair as {var: state_of_var_observed}
"""
final_factor = factor_product(*belief_prop.junction_tree.get_factors())
if evidence:
for var in evidence:
if var in final_factor.scope():
final_factor.reduce([(var, evidence[var])])
return final_factor
def _shift_factor(self, factor, shift):
"""
Shifting the factor to a certain required time slice.
Parameters:
----------
factor: Factor
The factor which needs to be shifted.
shift: int
The new timeslice to which the factor should belong to.
"""
new_scope = self._shift_nodes(factor.scope(), shift)
return Factor(new_scope, factor.cardinality, factor.values)
def forward_inference(self, variables, evidence=None, args=None):
"""
Forward inference method using belief propagation.
Parameters:
----------
variables: list
list of variables for which you want to compute the probability
evidence: dict
|
punalpatel/st2
|
st2common/st2common/validators/api/reactor.py
|
Python
|
apache-2.0
| 3,644
| 0.003293
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
from apscheduler.triggers.cron import CronTrigger
from st2common.exceptions.apivalidation import ValueValidationException
from st2common.constants.triggers import SYSTEM_TRIGGER_TYPES
from st2common.constants.triggers import CRON_TIMER_TRIGGER_REF
from st2common.util import schema as util_schema
import st2common.operators as criteria_operators
__all__ = [
'validate_criteria',
'validate_trigger_parameters'
]
allowed_operators = criteria_operators.get_allowed_operators()
def validate_criteria(criteria):
if not isinstance(criteria, dict):
raise ValueValidationException('Criteria should be a dict.')
for key, value in six.iteritems(criteria):
operator = value.get('type', None)
if operator is None:
raise ValueValidationException('Operator not specified for field: ' + key)
if operator not in allowed_operators:
raise ValueValidationException('For field: ' + key + ', operator ' + operator +
' not in list of allowed operators: ' +
str(allowed_operators.keys()))
pattern = value.get('pattern', None)
if pattern is None:
raise ValueValidationException('For field: ' + key + ', no pattern specified ' +
'for operator ' + operator)
def validate_trigger_parameters(trigger_type_ref, parameters):
"""
This function validates parameters for system triggers (e.g. webhook and timers).
Note: Eventually we should also validate parameters for user defined triggers which correctly
specify JSON schema for the parameters.
:param trigger_type_ref: Reference of a trigger type.
:type trigger_type_ref: ``str``
:param parameters: Trigger parameters.
:type parameters: ``dict``
"""
if not trigger_type_ref:
return None
if trigger_type_ref not in SYSTEM_TRIGGER_TYPES:
# Not a system trigger, skip validation for now
return None
parameters_schema = SYSTEM_TRIGGER_TYPES[trigger_type_ref]['parameters_schema']
cleaned = util_schema.validate(instance=parameters, schema=parameters_schema,
cls=util_schema.CustomValidator, use_default=True,
allow_default_none=True)
# Additional validation for CronTimer trigger
    # TODO: If we need to add more checks like this we should consider abstracting this out.
if trigger_type_ref == CRON_TIMER_TRIGGER_REF:
# Validate that the user provided parameters are valid. This is required since JSON schema
# allows arbitrary strings, but not any arbitrary string is a valid CronTrigger argument
# Note: Constructor throws ValueError on invalid parameters
CronTrigger(**parameters)
return cleaned
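
# Hedged usage sketch (not part of the module): a criteria dict of the shape
# validate_criteria() accepts -- each field names an operator plus a pattern.
# "equals" is assumed to be among the operators returned by
# get_allowed_operators().
#
#     validate_criteria({
#         'trigger.payload.status': {'type': 'equals', 'pattern': 'ok'},
#     })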
|
sn3p/Orrery
|
data/data_to_json.py
|
Python
|
mit
| 4,117
| 0.005101
|
#!/usr/bin/env python
"""
Extract minor planet orbital elements and discovery dates to json.
Orbital elements are extracted from the file MPCORB.DAT:
https://minorplanetcenter.net/iau/MPCORB/MPCORB.DAT
Discovery dates are extracted from the file NumberedMPs.txt:
https://minorplanetcenter.net/iau/lists/NumberedMPs.txt
Usage:
======
./data_to_json.py [-h] [-c] [N]
Parse orbital and discovery data to json.
positional arguments:
N maximum number of results
optional arguments:
-h, --help show this help message and exit
-c, --compact output as compact json format
TODO:
=====
- Get range between discovery dates
- Create an API (python server)
"""
OUTPUT_FILE = 'catalog.json'
MPCORB_FILE = 'MPCORB.DAT'
NUMMPS_FILE = 'NumberedMPs.txt'
import os, sys, json, argparse
from time import time
from datetime import datetime
from itertools import izip
from operator import itemgetter
# Change working directory to the module path
os.chdir(os.path.dirname(os.path.realpath(__file__)))
# Datetime to Julian date
def dt2jd(dt):
dt = dt - datetime(2000, 1, 1)
return dt.days + (dt.seconds + dt.microseconds / 1000000) / 86400 + 2451544.5
# Packed date to Datetime
def pd2dt(pd):
y = int(str(int(pd[0], 36)) + pd[1:3])
m = int(pd[3], 36)
d = int(pd[4], 36)
return datetime(y, m, d)
# Packed to Julian date
def pd2jd(pd):
dt = pd2dt(pd)
return dt2jd(dt)
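
# Worked example (illustrative): MPC packed dates encode the century, month
# and day digits in base 36 ('I'=18, 'J'=19, 'K'=20, ..., 'N'=23), so
# pd2dt('K134N') == datetime(2013, 4, 23): 'K'->20 joined with '13' gives the
# year 2013, '4' the month, and 'N'->23 the day.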
def main(argv):
    # Parse arguments
parser = argparse.ArgumentParser(description='Parse orbital and discovery data to json.')
parser.add_argument('amount', metavar='N', type=int, nargs='?', help='maximum number of results')
parser.add_argument('-c', '--compact', action='store_true', dest='compact', help='output as compact json format')
args = parser.parse_args()
print 'Extracting MPC discovery dates and orbital elements ...'
start_time = time()
# Extract the discovery dates from NumberedMPs.txt
# For a description of the format see https://minorplanetcenter.net/iau/lists/NumberedMPs000001.html
mpcs_disc = {}
for line in open(NUMMPS_FILE):
nr = int(line[1:7].strip().replace('(', ''))
# Extract the discovery date (YYYY MM DD) and convert it to Julian date
date = datetime.strptime(line[41:51], '%Y %m %d')
mpcs_disc[nr] = dt2jd(date)
"""
Extract the orbital elements from MPCORB.DAT
For a description of the format see https://minorplanetcenter.net/iau/info/MPOrbitFormat.html
The following columns are extracted:
epoch = Date for which the information is valid (packed date)
a = Semi-major axis (AU)
e = Orbital eccentricity (0..1)
i = Inclination to the ecliptic (degrees)
W = Longitude of ascending node (degrees)
w = Argument of perihelion (degrees)
M = Mean anomaly (degrees)
n = Mean daily motion (degrees per day)
"""
mpcs = []
count = 0
for line in open(MPCORB_FILE):
nr = line[167:173].strip().replace('(', '')
if not nr: continue
nr = int(nr)
# Skip if discovery date is missing
        if nr not in mpcs_disc:
print 'Skipping MPC #%d (no discovery date found)' % (nr)
continue
# Extract the orbital elements
_, _, _, epoch, M, w, W, i, e, n, a, _ = line.split(None, 11)
mpc = (mpcs_disc[nr], pd2jd(epoch), float(a), float(e), float(i), float(W), float(w), float(M), float(n))
mpcs.append(mpc)
# Maximum requested reached?
count += 1
if count == args.amount: break
# Sort by discovery date
mpcs.sort(key=itemgetter(0))
if args.compact:
output = mpcs
else:
keys = ['disc', 'epoch', 'a', 'e', 'i', 'W', 'w', 'M', 'n']
output = [dict(izip(keys, mpc)) for mpc in mpcs]
with open(OUTPUT_FILE, 'w') as outfile:
json.dump(output, outfile)
# json.dump(output, outfile, indent=2, separators=(',', ':'))
print 'Finished extracting %d MPCs in %s seconds.' % (len(mpcs), time()-start_time)
if __name__ == '__main__':
main(sys.argv[1:])
|
stormi/tsunami
|
src/secondaires/navigation/equipage/volontes/relacher_gouvernail.py
|
Python
|
bsd-3-clause
| 3,566
| 0.000563
|
# -*-coding:Utf-8 -*
# Copyright (c) 2013 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""File containing the RelacherGouvernail volition."""
import re
from secondaires.navigation.equipage.ordres.relacher_gouvernail import \
RelacherGouvernail as OrdreRelacherGouvernail
from secondaires.navigation.equipage.ordres.long_deplacer import LongDeplacer
from secondaires.navigation.equipage.volonte import Volonte
class RelacherGouvernail(Volonte):
    """Class representing a volition.
    This volition simply asks the sailor holding the helm to release it.
    As with most volitions, the sailor is then encouraged to return to
    his assigned room afterwards.
    """
cle = "relacher_gouvernail"
ordre_court = re.compile(r"^rg$", re.I)
ordre_long = re.compile(r"^relacher\s+gouvernail?$", re.I)
    def choisir_matelots(self, exception=None):
        """Return the sailor most capable of carrying out this volition."""
navire = self.navire
equipage = navire.equipage
gouvernail = self.navire.gouvernail
if gouvernail is None or gouvernail.tenu is None:
return None
personnage = gouvernail.tenu
matelot = equipage.get_matelot_depuis_personnage(personnage)
return matelot
    def executer(self, matelot):
        """Execute the volition."""
if matelot is None:
self.terminer()
return
navire = self.navire
ordres = []
matelot.invalider_ordres("virer")
relacher = OrdreRelacherGouvernail(matelot, navire)
ordres.append(relacher)
ordres.append(self.revenir_affectation(matelot))
self.ajouter_ordres(matelot, ordres)
    def crier_ordres(self, personnage):
        """Have the character shout the order."""
msg = "{} s'écrie : relâchez la barre !".format(
personnage.distinction_audible)
self.navire.envoyer(msg)
@classmethod
    def extraire_arguments(cls, navire):
        """Extract the arguments of the volition."""
return ()
|
gloaec/trifle
|
src/trifle/server/views/__init__.py
|
Python
|
gpl-3.0
| 200
| 0.02
|
from trifle.server.views.frontend import frontend
from trifle.server.views.api import api
from trifle.server.views.monitor import monitor
from trifle.server.views.configure import configure
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/social/backends/mailru.py
|
Python
|
agpl-3.0
| 1,693
| 0
|
"""
Mail.ru OAuth2 backend, docs at:
http://psa.matiasaguirre.net/docs/backends/mailru.html
"""
from hashlib import md5
from social.p3 import unquote
from social.backends.oauth import BaseOAuth2
class MailruOAuth2(BaseOAuth2):
"""Mail.ru authentication backend"""
name = 'mailru-oauth2'
ID_KEY = 'uid'
AUTHORIZATION_URL = 'https://connect.mail.ru/oauth/authorize'
ACCESS_TOKEN_URL = 'https://connect.mail.ru/oauth/token'
ACCESS_TOKEN_METHOD = 'POST'
EXTRA_DATA = [('refresh_token', 'refresh_token'),
('expires_in', 'expires')]
def get_user_details(self, response):
"""Return user details from Mail.ru request"""
fullname, first_name, last_name = self.get_user_names(
first_name=unquote(response['first_name']),
last_name=unquote(response['last_name'])
)
return {'username': unquote(response['nick']),
                'email': unquote(response['email']),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
def user_data(self, access_token, *args, **kwargs):
"""Return user data from Mail.ru REST API"""
        key, secret = self.get_key_and_secret()
data = {'method': 'users.getInfo',
'session_key': access_token,
'app_id': key,
'secure': '1'}
param_list = sorted(list(item + '=' + data[item] for item in data))
data['sig'] = md5(
(''.join(param_list) + secret).encode('utf-8')
).hexdigest()
return self.get_json('http://www.appsmail.ru/platform/api',
params=data)[0]
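
# Worked example (illustrative): for data {'a': '1', 'b': '2'} and secret 's',
# the signature computed above is md5('a=1b=2s') -- parameters are sorted,
# joined as key=value pairs with no separator, then the secret is appended.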
|
RicostruzioneTrasparente/rt-scrapers
|
providers/Halley.py
|
Python
|
gpl-3.0
| 6,427
| 0.012916
|
# Halley provider definition
# Mandatory imports
from .Provider import Provider
from rfeed import *
# Optional imports
import requests, mimetypes, logging
logging.basicConfig(level=logging.INFO)
from bs4 import BeautifulSoup as bs
# Halley provider class inherit from the Provider one defined in providers/Provider.py file
#
# Inherited attributes:
# - output_format
# - tz
# - language
# - feed_base_url
# - docs_base_url
# - specs_base_url
# - options
#
# Inherited methods:
# - dt
#
# Specific methods to customize:
# - opts: using options from csv file properly
# - urls: extract single item urls from index page
# - item: extract and structure data from single item page
#
# WARNING: class name is also the value of provider column in elenco_albi.csv
#
class Halley(Provider):
# Mandatory attributes
input_format = "DD/MM/YYYY"
# Optional attributes
# ...
# Transform and prepare options from CSV row (options column)
def opts(self, opt):
self.options["base_url"] = "http://halleyweb.com/%s/mc/" % opt
return self # Mandatory for chaining
# Scrape index page and return single item urls
def urls(self):
index_page_url = self.options["base_url"] + "mc_gridev_messi_datigrid.php"
index_page_response = requests.get(index_page_url)
# Manage exceptions and return consistent values
if index_page_response.status_code != 200:
logging.warning("Index page %s unavailable!" % index_page_url)
return []
# Parsing with BeautifulSoup
index_page_soup = bs(index_page_response.content,"lxml")
logging.info("Scraping %s:" % index_page_url)
# Very simple scraping of single item urls
for row in index_page_soup.findAll("row"):
single_page_id = row['id'].strip()
single_page_url = self.options["base_url"] + "mc_gridev_dettaglio.php?id_pubbl=%s" % single_page_id
yield single_page_url
# Scrape a single item page from its url and return structured data as Item() instance (from rfeed)
def item(self,single_page_url):
single_page_response = requests.get(single_page_url)
if single_page_response.status_code != 200 or "non può essere visualizzato" in single_page_response.text:
print("Single page %s unavailable!" % single_page_url)
return None # None items are dropped in final feed
single_page_soup = bs(single_page_response.content,"lxml")
logging.debug("- Scraping %s" % single_page_url)
### MAIN SCRAPING LOGIC ###
contents = []
for cell in single_page_soup.select("td"):
if cell.findAll('a'):
contents.append([])
for a in cell.findAll('a'):
                    if a.get('onclick'):
href = a['onclick'].replace("window.open('","").replace("');","")
elif a.get('href'):
href = a['href']
else:
href = ""
contents[-1].append({
"content": self.clean_string(a.text),
"href": self.options["base_url"] + self.clean_string(href)
})
else:
contents.append(self.clean_string(cell.text).strip(':'))
document = dict([tuple(contents[i:i+2]) for i in range(0,len(contents),2)])
document["Documento"] = document["Documento"] if "Documento" in document and isinstance(document["Documento"],list) else []
document["Allegati"] = document["Allegati"] if "Allegati" in document and isinstance(document["Allegati"],list) else []
### END SCRAPING LOGIC ###
# Return scraping data as an Item() instance
return Item(
title = document["Oggetto Atto"],
link = single_page_url,
description = document["Oggetto Atto"],
pubDate = self.format_datetime(document.get("Data Atto") or document.get("Data Inizio Pubblicazione")),
guid = Guid(single_page_url),
categories = [
c
for c in [
Category(
domain = self.specs_base_url + "#" + "item-category-uid",
category = "%s/%s" % (document["Anno di Pubblicazione"], document["Numero Pubblicazione"])
),
Category(
domain = self.specs_base_url + "#" + "item-category-type",
category = document["Tipo Atto"]
) if document.get("Tipo Atto") else None,
Category(
domain = self.specs_base_url + "#" + "item-category-pubStart",
category = self.format_datetime(document.get("Data Inizio Pubblicazione") or document.get("Data Atto"))
),
Category(
domain = self.specs_base_url + "#" + "item-category-pubEnd",
category = self.format_datetime(document["Data Fine Pubblicazione"])
) if document.get("Data Fine Pubblicazione") else None,
Category(
domain = self.specs_base_url + "#" + "item-category-unit",
category = document["Mittente"]
) if document.get("Mittente") else None
]
if c is not None
],
enclosure = [
Enclosure(
url = enclosure["href"],
length = 3000,
type = mimetypes.guess_type(enclosure["content"])[0] or "application/octet-stream"
)
for enclosure in document["Documento"] + document["Allegati"]
]
)
# Simple and generic wrapper around item() method if a list of urls is passed
# Unavailable items are filtered out
def items(self, single_page_urls):
for single_page_url in single_page_urls:
try:
item = self.item(single_page_url)
except Exception as e:
logging.warning("Error scraping page %s: %s" % ( single_page_url , e ))
continue
if item:
yield item
# Public method called by scraper.py
def scrape(self):
return self.items(self.urls())
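
# Hedged usage sketch (not part of the module): scraper.py is assumed to
# drive the provider roughly as
#
#     feed_items = Halley().opts("c_a123").scrape()
#
# where "c_a123" stands in for a made-up municipality code. opts() returns
# self so calls chain; urls() yields detail-page urls and items() maps them
# to rfeed Item()s, dropping pages that fail to scrape.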
|
eshijia/magnum
|
magnum/common/pythonk8sclient/swagger_client/models/v1_persistent_volume_claim_list.py
|
Python
|
apache-2.0
| 5,511
| 0.001633
|
# coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
class V1PersistentVolumeClaimList(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
Swagger model
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'kind': 'str',
'api_version': 'str',
'metadata': 'V1ListMeta',
'items': 'list[V1PersistentVolumeClaim]'
}
self.attribute_map = {
'kind': 'kind',
'api_version': 'apiVersion',
'metadata': 'metadata',
'items': 'items'
}
self._kind = None
self._api_version = None
self._metadata = None
self._items = None
@property
def kind(self):
"""
Gets the kind of this V1PersistentVolumeClaimList.
kind of object, in CamelCase; cannot be updated; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#types-kinds
:return: The kind of this V1PersistentVolumeClaimList.
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""
Sets the kind of this V1PersistentVolumeClaimList.
kind of object, in CamelCase; cannot be updated; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#types-kinds
:param kind: The kind of this V1PersistentVolumeClaimList.
:type: str
"""
self._kind = kind
@property
def api_version(self):
"""
Gets the api_version of this V1PersistentVolumeClaimList.
version of the schema the object should have; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#resources
:return: The api_version of this V1PersistentVolumeClaimList.
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""
Sets the api_version of this V1PersistentVolumeClaimList.
version of the schema the object should have; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#resources
:param api_version: The api_version of this V1PersistentVolumeClaimList.
:type: str
"""
self._api_version = api_version
@property
def metadata(self):
"""
Gets the metadata of this V1PersistentVolumeClaimList.
standard list metadata; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#types-kinds
:return: The metadata of this V1PersistentVolumeClaimList.
:rtype: V1ListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""
Sets the metadata of this V1PersistentVolumeClaimList.
        standard list metadata; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#types-kinds
:param metadata: The metadata of this V1PersistentVolumeClaimList.
:type: V1ListMeta
"""
self._metadata = metadata
@property
def items(self):
"""
Gets the items of this V1PersistentVolumeClaimList.
a list of persistent volume claims; see http://releases.k8s.io/v1.0.4/docs/persistent-volumes.md#persistentvolumeclaims
:return: The items of this V1PersistentVolumeClaimList.
:rtype: list[V1PersistentVolumeClaim]
"""
return self._items
@items.setter
def items(self, items):
"""
Sets the items of this V1PersistentVolumeClaimList.
a list of persistent volume claims; see http://releases.k8s.io/v1.0.4/docs/persistent-volumes.md#persistentvolumeclaims
:param items: The items of this V1PersistentVolumeClaimList.
:type: list[V1PersistentVolumeClaim]
"""
self._items = items
def to_dict(self):
"""
Return model properties dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
else:
result[attr] = value
return result
def to_str(self):
"""
Return model properties str
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
|
ta2-1/pootle
|
pootle/apps/pootle_app/management/commands/set_filetype.py
|
Python
|
gpl-3.0
| 2,012
| 0.001988
|
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'
from django.core.management.base import CommandError
from pootle_format.models import Format
from pootle_project.models import Project
from . import PootleCommand
class Command(PootleCommand):
    help = "Manage Store formats."
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'filetype',
action='store',
help="File type to set")
parser.add_argument(
'--from-filetype',
action='store',
help="Only convert Stores of this file type")
        parser.add_argument(
'--matching',
action='store',
help="Glob match Store path excluding extension")
def get_projects(self):
if not self.projects:
return Project.objects.all()
return Project.objects.filter(code__in=self.projects)
def get_filetype(self, name):
try:
return Format.objects.get(name=name)
except Format.DoesNotExist:
raise CommandError("Unrecognized filetype '%s'" % name)
def handle_all(self, **options):
filetype = self.get_filetype(options["filetype"])
from_filetype = (
options["from_filetype"]
and self.get_filetype(options["from_filetype"])
or None)
for project in self.get_projects():
# add the filetype to project, and convert the stores
project.filetype_tool.add_filetype(filetype)
project.filetype_tool.set_filetypes(
filetype,
from_filetype=from_filetype,
matching=options["matching"])
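
# Hedged usage sketch (not part of the module): with the arguments defined
# above, a conversion run would look roughly like
#
#     pootle set_filetype xliff --from-filetype po --matching "*template*"
#
# Project selection is assumed to come from the PootleCommand base class,
# which populates the self.projects used by get_projects() above.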
|
szaghi/MaTiSSe
|
release/MaTiSSe-1.2.0/matisse/matisse.py
|
Python
|
gpl-3.0
| 4,821
| 0.010786
|
#!/usr/bin/env python
"""
MaTiSSe.py, Markdown To Impressive Scientific Slides
"""
from __future__ import print_function
import argparse
import os
import sys
from matisse_config import MatisseConfig
from presentation import Presentation
__appname__ = "MaTiSSe.py"
__description__ = "MaTiSSe.py, Markdown To Impressive Scientific Slides"
__long_description__ = "MaTiSSe.py, Markdown To Impressive Scientific Slides. It is a very simple and stupid-to-use (KISS) presentation maker based on simple markdown syntax producing high quality first-class html/css presentation with great support for scientific contents."
__version__ = "1.2.0"
__author__ = "Stefano Zaghi"
__author_email__ = "stefano.zaghi@gmail.com"
__license__ = "GNU General Public License v3 (GPLv3)"
__url__ = "https://github.com/szaghi/MaTiSSe"
__sample__ = r"""
---
theme:
  - background: black
---
# Part 1
## Section 1
### Subsection 1
#### Slide 1
##### A H5 heading
Lorem ipsum dolor sit amet...
##### Math
$$
x=\frac{-b\pm\sqrt{b^2-4ac}}{2a}
$$
$note
$content{Just a note environment}
$endnote
"""
def main():
"""Main function."""
cliparser = argparse.ArgumentParser(prog=__appname__, description='MaTiSSe.py, Markdown To Impressive Scientific Slides')
cliparser.add_argument('-v', '--version', action='version', help='Show version', version='%(prog)s ' + __version__)
cliparser.add_argument('-i', '--input', required=False, action='store', default=None, help='Input file name of markdown source to be parsed')
cliparser.add_argument('-o', '--output', required=False, action='store', default=None, help='Output directory name containing the presentation files')
cliparser.add_argument('-t', '--theme', required=False, action='store', default=None, help='Select a builtin theme for initializing a new sample presentation')
cliparser.add_argument('-hs', '--highlight-style', required=False, action='store', default='github.css', help='Select the highlight.js style (default github.css); select "disable" to disable highligth.js', metavar='STYLE.CSS')
cliparser.add_argument('-s', '--sample', required=False, action='store', default=None, help='Generate a new sample presentation as skeleton of your one')
cliparser.add_argument('--toc-at-chap-beginning', required=False, action='store', default=None, help='Insert Table of Contents at each chapter beginning (default no): to activate indicate the TOC depth', metavar='TOC-DEPTH')
cliparser.add_argument('--toc-at-sec-beginning', required=False, action='store', default=None, help='Insert Table of Contents at each section beginning (default no): to activate indicate the TOC depth', metavar='TOC-DEPTH')
cliparser.add_argument('--toc-at-subsec-beginning', required=False, action='store', default=None, help='Insert Table of Contents at each subsection beginning (default no): to activate indicate the TOC depth', metavar='TOC-DEPTH')
cliparser.add_argument('--print-preamble', required=False, action='store_true', default=None, help='Print the preamble data as parsed from source')
cliparser.add_argument('--print-css', required=False, action='store_true', default=None, help='Print the css as parsed from source (if done)')
cliparser.add_argument('--print-options', required=False, action='store_true', default=None, help='Print the available options for each presentation element')
cliparser.add_argument('--print-highlight-styles', required=False, action='store_true', default=None, help='Print the available highlight.js style (default github.css)')
cliparser.add_argument('--print-themes', required=False, action='store_true', default=None, help='Print the list of the builtin themes')
cliparser.add_argument('--verbose', required=False, action='store_true', default=False, help='More verbose printing messages (default no)')
cliparser.add_argument('--online-MathJax', required=False, action='store_true', default=None, help='Use online rendering of LaTeX equations by means of online MathJax service; default use offline, local copy of MathJax engine')
cliargs = cliparser.parse_args()
config = MatisseConfig(cliargs=cliargs)
if cliargs.input:
if not os.path.exists(cliargs.input):
sys.stderr.write('Error: input file "' + cliargs.input + '" not found!')
sys.exit(1)
else:
            with open(cliargs.input, 'r') as mdf:
                source = mdf.read()
presentation = Presentation()
if config.verbose:
print('Parsing source ' + cliargs.input)
presentation.parse(config=config, source=source)
if cliargs.output:
output = cliargs.output
else:
output = os.path.splitext(os.path.basename(cliargs.input))[0]
output = os.path.normpath(output)
config.make_output_tree(output=output)
presentation.save(config=config, output=output)
if __name__ == '__main__':
main()
|
wevote/WeVoteServer
|
quick_info/models.py
|
Python
|
mit
| 40,696
| 0.002408
|
# quick_info/models.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
# Diagrams here: https://docs.google.com/drawings/d/1fEs_f2-4Du9knJ8FXn6PQ2BcmXL4zSkMYh-cp75EeLE/edit
from ballot.models import OFFICE, CANDIDATE, POLITICIAN, MEASURE, KIND_OF_BALLOT_ITEM_CHOICES
from django.db import models
from exception.models import handle_exception, handle_record_found_more_than_one_exception,\
handle_record_not_saved_exception
import wevote_functions.admin
from wevote_functions.functions import convert_to_int, positive_value_exists
from wevote_settings.models import fetch_next_we_vote_id_quick_info_integer, \
fetch_next_we_vote_id_quick_info_master_integer, fetch_site_unique_id_prefix
# Language Codes: http://www.mcanerin.com/en/articles/meta-language.asp
# Country Codes: http://www.mcanerin.com/en/articles/ccTLD.asp
SPANISH = 'es'
ENGLISH = 'en'
TAGALOG = 'tl'
VIETNAMESE = 'vi'
CHINESE = 'zh'
LANGUAGE_CHOICES = (
(ENGLISH, 'English'),
(SPANISH, 'Spanish'),
(TAGALOG, 'Tagalog'),
(VIETNAMESE, 'Vietnamese'),
(CHINESE, 'Chinese'),
)
NOT_SPECIFIED = 'not_specified'
BALLOTPEDIA = 'ballotpedia'
DIRECT_ENTRY = 'direct'
WIKIPEDIA = 'wikipedia'
SOURCE_SITE_CHOICES = (
(NOT_SPECIFIED, 'Not Specified'),
(BALLOTPEDIA, 'Ballotpedia'),
(DIRECT_ENTRY, 'Direct Entry'),
(WIKIPEDIA, 'Wikipedia'),
)
logger = wevote_functions.admin.get_logger(__name__)
class QuickInfo(models.Model):
"""
The information that shows when you click an info icon next to a ballot item
"""
# We are relying on built-in Python id field
# The we_vote_id identifier is unique across all We Vote sites, and allows us to share our org info with other
# organizations
# It starts with "wv" then we add on a database specific identifier like "3v" (WeVoteSetting.site_unique_id_prefix)
# then the string "info", and then a sequential integer like "123".
# We keep the last value in WeVoteSetting.we_vote_id_last_quick_info_integer
we_vote_id = models.CharField(
verbose_name="we vote permanent id", max_length=255, default=None, null=True, blank=True, unique=True)
# The language that this text is in
language = models.CharField(max_length=5, choices=LANGUAGE_CHOICES, default=ENGLISH)
info_text = models.TextField(null=True, blank=True)
info_html = models.TextField(null=True, blank=True)
ballot_item_display_name = models.CharField(verbose_name="text name for ballot item for quick display",
max_length=255, null=True, blank=True)
# See also more_info_credit_text
more_info_credit = models.CharField(max_length=15, choices=SOURCE_SITE_CHOICES, default=NOT_SPECIFIED,
null=True, blank=True)
# A link to any location with more information about this quick information
more_info_url = models.URLField(blank=True, null=True, verbose_name='url with more the full entry for this info')
    last_updated = models.DateTimeField(verbose_name='date entered', null=True, auto_now=True)  # TODO Convert to date_last_changed
# The unique id of the last person who edited this entry.
last_editor_we_vote_id = models.CharField(
verbose_name="last editor we vote id", max_length=255, null=True, blank=True, unique=False)
# This is the office that the quick_info refers to.
# Either contest_measure is filled, contest_office OR candidate, but not all three
    contest_office_we_vote_id = models.CharField(
verbose_name="we vote permanent id for the contest_office", max_length=255, null=True, blank=True, unique=False)
# This is the candidate/politician that the quick_info refers to.
# Either candidate is filled, contest_office OR contest_measure, but not all three
candidate_campaign_we_vote_id = models.CharField(
verbose_name="we vote permanent id for the candidate", max_length=255, null=True,
blank=True, unique=False)
# Useful for queries based on Politicians
politician_we_vote_id = models.CharField(
verbose_name="we vote permanent id for politician", max_length=255, null=True,
blank=True, unique=False)
    # This is the measure/initiative/proposition that the quick_info refers to.
# Either contest_measure is filled, contest_office OR candidate, but not all three
contest_measure_we_vote_id = models.CharField(
verbose_name="we vote permanent id for the contest_measure", max_length=255, null=True,
blank=True, unique=False)
# There are many ballot items that don't have (or need) a custom quick_info entry, and can reference a general
# entry. This field is the we_vote_id of the master quick_info entry that has the general text.
quick_info_master_we_vote_id = models.CharField(
verbose_name="we vote id of other entry which is the master", max_length=255, default=None, null=True,
blank=True, unique=True)
# The unique ID of the election containing this contest. (Provided by Google Civic)
google_civic_election_id = models.PositiveIntegerField(
verbose_name="google civic election id", default=0, null=True, blank=True)
def __unicode__(self):
return self.we_vote_id
class Meta:
ordering = ('last_updated',)
# We override the save function so we can auto-generate we_vote_id
def save(self, *args, **kwargs):
# Even if this organization came from another source we still need a unique we_vote_id
if self.we_vote_id:
self.we_vote_id = self.we_vote_id.strip().lower()
if self.we_vote_id == "" or self.we_vote_id is None: # If there isn't a value...
# ...generate a new id
site_unique_id_prefix = fetch_site_unique_id_prefix()
next_local_integer = fetch_next_we_vote_id_quick_info_integer()
# "wv" = We Vote
# site_unique_id_prefix = a generated (or assigned) unique id for one server running We Vote
# "info" = tells us this is a unique id for a quick_info entry
# next_integer = a unique, sequential integer for this server - not necessarily tied to database id
self.we_vote_id = "wv{site_unique_id_prefix}info{next_integer}".format(
site_unique_id_prefix=site_unique_id_prefix,
next_integer=next_local_integer,
)
super(QuickInfo, self).save(*args, **kwargs)
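    # Worked example (illustrative): with site_unique_id_prefix "3v" and a
    # next integer of 123, the generated id is "wv3vinfo123", matching the
    # format described in the class-level comments above.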
def is_english(self):
if self.language == ENGLISH:
return True
return False
def is_spanish(self):
if self.language == SPANISH:
return True
return False
def is_vietnamese(self):
if self.language == VIETNAMESE:
return True
return False
def is_chinese(self):
if self.language == CHINESE:
return True
return False
def is_tagalog(self):
if self.language == TAGALOG:
return True
return False
def get_kind_of_ballot_item(self):
if positive_value_exists(self.contest_office_we_vote_id):
return OFFICE
elif positive_value_exists(self.candidate_campaign_we_vote_id):
return CANDIDATE
elif positive_value_exists(self.politician_we_vote_id):
return POLITICIAN
elif positive_value_exists(self.contest_measure_we_vote_id):
return MEASURE
return None
def get_ballot_item_we_vote_id(self):
if positive_value_exists(self.contest_office_we_vote_id):
return self.contest_office_we_vote_id
elif positive_value_exists(self.candidate_campaign_we_vote_id):
return self.candidate_campaign_we_vote_id
elif positive_value_exists(self.politician_we_vote_id):
return self.politician_we_vote_id
elif positive_value_exists(self.contest_measure_we_vote_id):
return self.contest_measure_we_vote_id
return None
def more_info_credit_text(self):
if self.more_info_credit == BALLOTPEDIA:
return "Courtesy of Ballotpedia.org"
if self.more_info_credit == WIKIPEDIA:
            return "Courtesy of Wikipedia.org"
pybuilder/pybuilder
|
src/unittest/python/graph_utils_tests.py
|
Python
|
apache-2.0
| 2,592
| 0.003472
|
# -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2020 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase
from pybuilder.graph_utils import Graph
class GraphUtilsTests(TestCase):
def test_should_find_trivial_cycle_in_graph_when_there_is_one(self):
graph_with_trivial_cycle = Graph({"a": "a"})
self.assertIsNotNone(graph_with_trivial_cycle.assert_no_trivial_cycles_present())
def test_should_find_trivial_cycle_in_graph_when_there_are_two(self):
graph_with_trivial_cycles = Graph({"a": "a", "b": "b"})
self.assertIsNotNone(graph_with_trivial_cycles.assert_no_trivial_cycles_present())
def test_should_find_trivial_cycle_in_graph_when_searching_for_cycles(self):
graph_with_trivial_cycle = Graph({"a": "a"})
self.assertIsNotNone(graph_with_trivial_cycle.assert_no_cycles_present())
def test_should_not_find_trivial_cycles_in_graph_when_there_are_none(self):
graph_without_trivial_cycle = Graph({"a": "b", "b": "c", "d": "e"})
graph_without_trivial_cycle.assert_no_trivial_cycles_present()
def test_should_not_find_cycles_in_graph_when_there_are_none(self):
graph_without_cycle = Graph({"a": "b", "b": "c", "d": "e"})
graph_without_cycle.assert_no_cycles_present()
def test_should_find_simple_nontrivial_cycle_in_graph_when_there_is_one(self):
graph_with_simple_cycle = Graph({"a": "b", "b": "a"})
self.assertIsNotNone(graph_with_simple_cycle.assert_no_cycles_present())
def test_should_find_long_nontrivial_cycle_in_graph_when_there_is_one(self):
graph_with_long_cycle = Graph({"a": "b", "b": "c", "c": "d", "d": "b"})
self.assertIsNotNone(graph_with_long_cycle.assert_no_cycles_present())
def test_should_find_long_nontrivial_cycle_in_graph_when_there_are_two(self):
graph_with_long_cycle = Graph({"a": "b", "b": "c", "c": "a", "d": "e", "e": "f", "f": "d"})
self.assertIsNotNone(graph_with_long_cycle.assert_no_cycles_present())
|
skipzone/Illumicone
|
simulator/coneLayouts/cone.py
|
Python
|
gpl-3.0
| 1,276
| 0.007053
|
#!/usr/bin/env python
from __future__ import division
import math
import optparse
import sys
#-------------------------------------------------------------------------------
# Illumicone simulator, based on code from: https://github.com/zestyping/openpixelcontrol
NUM_STRINGS = 48
PIXELS_PER_STRING = 100
SCALE = 7 # You can also think of this as the length of the strands. Use 7 for cone_med, 10 for cone_lg.
print "scale: " + str(SCALE)
PIXEL_DISTANCE = SCALE / PIXELS_PER_STRING
print "\npixel distance: " + str(PIXEL_DISTANCE)
HEIGHT = math.sqrt(SCALE * SCALE / 2)
print "\ncone height: " + str(HEIGHT)
MIN_RADIUS = HEIGHT * .1 # i.e. the radius of the little circle on top
print "\nmin radius: " + str(MIN_RADIUS)
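# Worked numbers (illustrative) for SCALE = 7: PIXEL_DISTANCE = 0.07,
# HEIGHT = sqrt(49 / 2) ~= 4.95 and MIN_RADIUS ~= 0.495. In the loop below
# each pixel steps down and out by the same PIXEL_DISTANCE, so the strands
# descend at 45 degrees in the radius/height plane.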
#-------------------------------------------------------------------------------
result = ['[']
theta = 0
for s in range(NUM_STRINGS):
theta = 2 * math.pi * s / NUM_STRINGS
for p in range(PIXELS_PER_STRING):
z = HEIGHT - PIXEL_DISTANCE * p
radius = PIXEL_DISTANCE * p + MIN_RADIUS
x = math.cos(theta) * radius
y = math.sin(theta) * radius
result.append(' {"point": [%.4f, %.4f, %.4f]},' % (x, y, z))
# trim off last comma
result[-1] = result[-1][:-1]
result.append(']')
print '\n'.join(result)
|
deanishe/alfred-fakeum
|
src/libs/faker/providers/color/__init__.py
|
Python
|
mit
| 5,864
| 0
|
# coding=utf-8
from __future__ import unicode_literals
from collections import OrderedDict
from .. import BaseProvider
localized = True
class Provider(BaseProvider):
all_colors = OrderedDict((
("AliceBlue", "#F0F8FF"),
("AntiqueWhite", "#FAEBD7"),
("Aqua", "#00FFFF"),
("Aquamarine", "#7FFFD4"),
("Azure", "#F0FFFF"),
("Beige", "#F5F5DC"),
("Bisque", "#FFE4C4"),
("Black", "#000000"),
("BlanchedAlmond", "#FFEBCD"),
("Blue", "#0000FF"),
("BlueViolet", "#8A2BE2"),
("Brown", "#A52A2A"),
("BurlyWood", "#DEB887"),
("CadetBlue", "#5F9EA0"),
("Chartreuse", "#7FFF00"),
("Chocolate", "#D2691E"),
("Coral", "#FF7F50"),
("CornflowerBlue", "#6495ED"),
("Cornsilk", "#FFF8DC"),
("Crimson", "#DC143C"),
("Cyan", "#00FFFF"),
("DarkBlue", "#00008B"),
("DarkCyan", "#008B8B"),
("DarkGoldenRod", "#B8860B"),
("DarkGray", "#A9A9A9"),
("DarkGreen", "#006400"),
("DarkKhaki", "#BDB76B"),
("DarkMagenta", "#8B008B"),
("DarkOliveGreen", "#556B2F"),
("DarkOrange", "#FF8C00"),
("DarkOrchid", "#9932CC"),
("DarkRed", "#8B0000"),
("DarkSalmon", "#E9967A"),
("DarkSeaGreen", "#8FBC8F"),
("DarkSlateBlue", "#483D8B"),
("DarkSlateGray", "#2F4F4F"),
("DarkTurquoise", "#00CED1"),
("DarkViolet", "#9400D3"),
("DeepPink", "#FF1493"),
("DeepSkyBlue", "#00BFFF"),
("DimGray", "#696969"),
("DodgerBlue", "#1E90FF"),
("FireBrick", "#B22222"),
("FloralWhite", "#FFFAF0"),
("ForestGreen", "#228B22"),
("Fuchsia", "#FF00FF"),
("Gainsboro", "#DCDCDC"),
("GhostWhite", "#F8F8FF"),
("Gold", "#FFD700"),
("GoldenRod", "#DAA520"),
("Gray", "#808080"),
("Green", "#008000"),
("GreenYellow", "#ADFF2F"),
("HoneyDew", "#F0FFF0"),
("HotPink", "#FF69B4"),
("IndianRed", "#CD5C5C"),
("Indigo", "#4B0082"),
("Ivory", "#FFFFF0"),
("Khaki", "#F0E68C"),
("Lavender", "#E6E6FA"),
("LavenderBlush", "#FFF0F5"),
("LawnGreen", "#7CFC00"),
("LemonChiffon", "#FFFACD"),
("LightBlue", "#ADD8E6"),
("LightCoral", "#F08080"),
("LightCyan", "#E0FFFF"),
("LightGoldenRodYellow", "#FAFAD2"),
("LightGray", "#D3D3D3"),
("LightGreen", "#90EE90"),
("LightPink", "#FFB6C1"),
("LightSalmon", "#FFA07A"),
("LightSeaGreen", "#20B2AA"),
("LightSkyBlue", "#87CEFA"),
("LightSlateGray", "#778899"),
("LightSteelBlue", "#B0C4DE"),
("LightYellow", "#FFFFE0"),
("Lime", "#00FF00"),
("LimeGreen", "#32CD32"),
("Linen", "#FAF0E6"),
("Magenta", "#FF00FF"),
("Maroon", "#800000"),
("MediumAquaMarine", "#66CDAA"),
("MediumBlue", "#0000CD"),
("MediumOrchid", "#BA55D3"),
("MediumPurple", "#9370DB"),
("MediumSeaGreen", "#3CB371"),
("MediumSlateBlue", "#7B68EE"),
("MediumSpringGreen", "#00FA9A"),
("MediumTurquoise", "#48D1CC"),
("MediumVioletRed", "#C71585"),
("MidnightBlue", "#191970"),
("MintCream", "#F5FFFA"),
("MistyRose", "#FFE4E1"),
("Moccasin", "#FFE4B5"),
("NavajoWhite", "#FFDEAD"),
("Navy", "#000080"),
("OldLace", "#FDF5E6"),
("Olive", "#808000"),
("OliveDrab", "#6B8E23"),
("Orange", "#FFA500"),
("OrangeRed", "#FF4500"),
("Orchid", "#DA70D6"),
("PaleGoldenRod", "#EEE8AA"),
("PaleGreen", "#98FB98"),
("PaleTurquoise", "#AFEEEE"),
("PaleVioletRed", "#DB7093"),
("PapayaWhip", "#FFEFD5"),
("PeachPuff", "#FFDAB9"),
("Peru", "#CD853F"),
("Pink", "#FFC0CB"),
("Plum", "#DDA0DD"),
("PowderBlue", "#B0E0E6"),
("Purple", "#800080"),
("Red", "#FF0000"),
("RosyBrown", "#BC8F8F"),
("RoyalBlue", "#4169E1"),
("SaddleBrown", "#8B4513"),
("Salmon", "#FA8072"),
        ("SandyBrown", "#F4A460"),
("SeaGreen", "#2E8B57"),
("SeaShell", "#FFF5EE"),
("Sienna", "#A0522D"),
("Silver", "#C0C0C0"),
("SkyBlue", "#87CEEB"),
("SlateBlue", "#6A5ACD"),
("SlateGray", "#708090"),
("Snow", "#FFFAFA"),
("SpringGreen", "#00FF7F"),
("SteelBlue", "#4682B4"),
("Tan", "#D2B48C"),
("Teal", "#008080"),
("Thistle", "#D8BFD8"),
("Tomato", "#FF6347"),
("Turquoise", "#40E0D0"),
("Violet", "#EE82EE"),
("Wheat", "#F5DEB3"),
("White", "#FFFFFF"),
("WhiteSmoke", "#F5F5F5"),
("Yellow", "#FFFF00"),
("YellowGreen", "#9ACD32"),
))
safe_colors = (
'black', 'maroon', 'green', 'navy', 'olive',
'purple', 'teal', 'lime', 'blue', 'silver',
'gray', 'yellow', 'fuchsia', 'aqua', 'white',
)
def color_name(self):
return self.random_element(self.all_colors.keys())
def safe_color_name(self):
return self.random_element(self.safe_colors)
def hex_color(self):
return "#{0}".format(
("%x" %
self.random_int(
1, 16777215)).ljust(
6, '0'))
def safe_hex_color(self):
color = ("%x" % self.random_int(0, 255)).ljust(3, '0')
return "#{0}{0}{1}{1}{2}{2}".format(*color)
def rgb_color(self):
return ','.join(map(str, (self.random_int(0, 255) for _ in range(3))))
def rgb_css_color(self):
return 'rgb(%s)' % ','.join(
map(str, (self.random_int(0, 255) for _ in range(3))))
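
    # Worked examples (illustrative): safe_hex_color picks 0..255, formats it
    # as hex, right-pads to 3 chars and doubles each character, e.g.
    # 255 -> 'ff' -> 'ff0' -> '#ffff00'. Note that hex_color's ljust also pads
    # on the right, so small values give e.g. 0xf -> '#f00000', not '#00000f'.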
|
liangtianyou/ST
|
stserver/libcommon/san/stssan.py
|
Python
|
gpl-3.0
| 644
| 0.019169
|
#-*- coding: utf-8 -*-
import traceback
from libcommon import utils
from libcommon import commonlib
from libcommon.logger import stsdebug
#----------------------------------
# Check whether the SAN service is running
#----------------------------------
def get_san_status():
san_status = False
try:
retcode,proc = utils.cust_popen([commonlib.ISCSI_SCST,'status'])
result = proc.stdout.read()
if utils.list_match(result,['iSCSI-SCST target is running','iscsi-scstd .* is running']):
san_status = True
except:
        stsdebug.write(stsdebug.get_line(), "stssan", traceback.format_exc())
return san_status
|
Nasdin/ReinforcementLearning-AtariGame
|
A3CModel.py
|
Python
|
bsd-3-clause
| 2,624
| 0.001143
|
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
def norm_col_init(weights, std=1.0):
x = torch.randn(weights.size())
x *= std / torch.sqrt((x**2).sum(1, keepdim=True))
return x
def weights_init(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
weight_shape = list(m.weight.data.size())
fan_in = np.prod(weight_shape[1:4])
fan_out = np.prod(weight_shape[2:4]) * weight_shape[0]
w_bound = np.sqrt(6. / (fan_in + fan_out))
m.weight.data.uniform_(-w_bound, w_bound)
m.bias.data.fill_(0)
elif classname.find('Linear') != -1:
weight_shape = list(m.weight.data.size())
fan_in = weight_shape[1]
fan_out = weight_shape[0]
        w_bound = np.sqrt(6. / (fan_in + fan_out))
m.weight.data.uniform_(-w_bound, w_bound)
m.bias.data.fill_(0)
class A3Clstm(torch.nn.Module):
def __init__(self, num_inputs, action_space):
super(A3Clstm, self).__init__()
# convolutional neural networks
self.conv1 = nn.Conv2d(num_inputs, 32, 5, stride=1, padding=2)
self.maxp1 = nn.MaxPool2d(2, 2)
self.conv2 = nn.Conv2d(32, 32, 5, stride=1, padding=1)
        self.maxp2 = nn.MaxPool2d(2, 2)
self.conv3 = nn.Conv2d(32, 64, 4, stride=1, padding=1)
self.maxp3 = nn.MaxPool2d(2, 2)
self.conv4 = nn.Conv2d(64, 64, 3, stride=1, padding=1)
self.maxp4 = nn.MaxPool2d(2, 2)
# LSTM Cells
self.lstm = nn.LSTMCell(1024, 512)
num_outputs = action_space.n
# The critic layer
self.critic_linear = nn.Linear(512, 1)
# The actor layer
self.actor_linear = nn.Linear(512, num_outputs)
self.apply(weights_init)
self.actor_linear.weight.data = norm_col_init(
self.actor_linear.weight.data, 0.01)
self.actor_linear.bias.data.fill_(0)
self.critic_linear.weight.data = norm_col_init(
self.critic_linear.weight.data, 1.0)
self.critic_linear.bias.data.fill_(0)
self.lstm.bias_ih.data.fill_(0)
self.lstm.bias_hh.data.fill_(0)
self.train()
# forward propagation
def forward(self, inputs):
inputs, (hx, cx) = inputs
x = F.relu(self.maxp1(self.conv1(inputs)))
x = F.relu(self.maxp2(self.conv2(x)))
x = F.relu(self.maxp3(self.conv3(x)))
x = F.relu(self.maxp4(self.conv4(x)))
x = x.view(x.size(0), -1)
hx, cx = self.lstm(x, (hx, cx))
x = hx
return self.critic_linear(x), self.actor_linear(x), (hx, cx)
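
# -----------------------------------------------------------------------------
# Hedged usage sketch (not part of the module): one forward step with a dummy
# 80x80 single-channel frame. An 80x80 input is assumed because the four
# conv+pool stages then yield 64 x 4 x 4 = 1024 features, matching the
# LSTMCell input size above. The action space is faked with a minimal
# stand-in exposing `.n`, as gym's Discrete space does.
if __name__ == '__main__':
    import types
    model = A3Clstm(num_inputs=1, action_space=types.SimpleNamespace(n=6))
    obs = torch.zeros(1, 1, 80, 80)
    hx, cx = torch.zeros(1, 512), torch.zeros(1, 512)
    value, logits, (hx, cx) = model((obs, (hx, cx)))
    print(value.shape, logits.shape)  # torch.Size([1, 1]) torch.Size([1, 6])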
|
levilucio/SyVOLT
|
mbeddr2C_MM/Contracts/HAssignmentInstance_CompleteLHS.py
|
Python
|
mit
| 94,636
| 0.01379
|
from core.himesis import Himesis, HimesisPreConditionPatternLHS
import uuid
class HAssignmentInstance_CompleteLHS(HimesisPreConditionPatternLHS):
def __init__(self):
"""
Creates the himesis graph representing the AToM3 model HAssignmentInstance_CompleteLHS.
"""
# Flag this instance as compiled now
self.is_compiled = True
super(HAssignmentInstance_CompleteLHS, self).__init__(name='HAssignmentInstance_CompleteLHS', num_nodes=0, edges=[])
# Set the graph attributes
self["mm__"] = []
self["MT_constraint__"] = """#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================
return True
"""
self["name"] = """"""
self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'AssignmentInstance')
# Nodes that represent match classes
# match class AssemblyConnector() node
self.add_node()
self.vs[0]["MT_pre__attr1"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[0]["MT_label__"] = """1"""
self.vs[0]["MT_dirty__"] = False
self.vs[0]["mm__"] = """MT_pre__AssemblyConnector"""
self.vs[0]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'')
# match class InstancePortRef() node
self.add_node()
self.vs[1]["MT_pre__attr1"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[1]["MT_label__"] = """2"""
self.vs[1]["MT_dirty__"] = False
self.vs[1]["mm__"] = """MT_pre__InstancePortRef"""
self.vs[1]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'')
# match class ComponentInstance() node
self.add_node()
self.vs[2]["MT_pre__attr1"] = """
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[2]["MT_label__"] = """3"""
self.vs[2]["MT_dirty__"] = False
self.vs[2]["mm__"] = """MT_pre__ComponentInstance"""
self.vs[2]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'')
# match class RequiredPort() node
self.add_node()
self.vs[3]["MT_pre__attr1"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[3]["MT_label__"] = """4"""
self.vs[3]["MT_dirty__"] = False
self.vs[3]["mm__"] = """MT_pre__RequiredPort"""
self.vs[3]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'')
# match class AtomicComponent() node
self.add_node()
self.vs[4]["MT_pre__attr1"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[4]["MT_label__"] = """5"""
self.vs[4]["MT_dirty__"] = False
self.vs[4]["mm__"] = """MT_pre__AtomicComponent"""
self.vs[4]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'')
# match class InstancePortRef() node
self.add_node()
self.vs[5]["MT_pre__attr1"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[5]["MT_label__"] = """6"""
self.vs[5]["MT_dirty__"] = False
self.vs[5]["mm__"] = """MT_pre__InstancePortRef"""
self.vs[5]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'')
# match class ComponentInstance() node
self.add_node()
self.vs[6]["MT_pre__attr1"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
self.vs[6]["MT_label__"] = """7"""
self.vs[6]["MT_dirty__"] = False
self.vs[6]["mm__"] = """MT_pre__ComponentInstance"""
self.vs[6]["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'')
# match class ProvidedPort() node
self.add_node()
self.vs[7]["MT_pre__attr1"] = """
#===================================================
|
fedora-conary/conary
|
config/components/invariant/test.py
|
Python
|
apache-2.0
| 625
| 0
|
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
filters = ('test', ('%(testdir)s/',))
|
uucastine/soundbooth
|
soundbooth/urls.py
|
Python
|
bsd-3-clause
| 658
| 0.00304
|
from django.conf import settings
from django.conf.urls import include, url
from django.views.generic import TemplateView
from django.contrib import admin
admin.autodiscover()
from booth.views import HomepageView
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('allauth.urls')),
url(r'^', include('booth.urls', namespace='booth')),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url("^$", HomepageView.as_view(), name="homepage")
]
if settings.DEBUG:
import debug_toolbar
urlpatterns.append(
url(r'^__debug__/', include(debug_toolbar.urls)),
)
|
malramsay64/MD-Molecules-Hoomd
|
test/figures_test.py
|
Python
|
mit
| 1,269
| 0.003155
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Malcolm Ramsay <malramsay64@gmail.com>
#
# Distributed under terms of the MIT license.
"""Test function from the generation of figures."""
import math
import gsd.hoomd
from hypothesis import given
from hypothesis.strategies import floats
from statdyn.analysis.order import compute_voronoi_neighs
from statdyn.figures import colour
from statdyn.figures.configuration import plot, snapshot2data
@given(floats(min_value=-math.pi, max_value=math.pi))
def test_colour_orientation(orientation):
"""Ensure hex values being returned by colour_orientation."""
int(colour.colour_orientation(orientation)[1:], 16)
def test_plot():
with gsd.hoomd.open('test/data/trajectory-13.50-3.00.gsd') as trj:
plot(trj[0], repeat=True, offset=True)
def test_snapshot2data():
    with gsd.hoomd.open('test/data/trajectory-13.50-3.00.gsd') as trj:
snapshot2data(trj[0])
def test_order():
with gsd.hoomd.open('test/data/trajectory-13.50-3.00.gsd') as trj:
order_list = compute_voronoi_neighs(trj[0].configuration.box,
trj[0].particles.position)
plot(trj[0], repeat=True, offset=True, order_list=order_list)
|
mpetyx/palmdrop
|
venv/lib/python2.7/site-packages/cms/migrations/0036_auto__add_field_cmsplugin_changed_date.py
|
Python
|
apache-2.0
| 19,523
| 0.003739
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Dummy migration
pass
def backwards(self, orm):
# Dummy migration
pass
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [],
{'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['auth.Permission']", 'symmetrical': 'False',
'blank': 'True'})
},
'auth.permission': {
'Meta': {
'ordering': "('content_type__app_label', 'content_type__model', 'codename')",
'unique_together': "(('content_type', 'codename'),)",
'object_name': 'Permission'},
'codename': (
'django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['contenttypes.ContentType']"}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': (
                'django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [],
{'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [],
{'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['auth.Group']", 'symmetrical': 'False',
'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [],
{'max_length': '30', 'blank': 'True'}),
'password': (
'django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': (
'django.db.models.fields.related.ManyToManyField', [],
{'to': "orm['auth.Permission']", 'symmetrical': 'False',
'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [],
{'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [],
{'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [],
{'default': 'datetime.datetime.now'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [],
{'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.CMSPlugin']", 'null': 'True',
'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [],
{'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [],
{'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'})
},
'cms.globalpagepermission': {
'Meta': {'object_name': 'GlobalPagePermission'},
'can_add': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_moderate': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_recover_page': (
'django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': (
'django.db.models.fields.BooleanField', [], {'default': 'False'}),
'group': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [],
{'symmetrical': 'False', 'to': "orm['sites.Site']",
'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [],
{'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'cms.page': {
'Meta': {'ordering': "('site', 'tree_id', 'lft')",
'object_name': 'Page'},
'changed_by': (
'django.db.models.fields.CharField', [], {'max_length': '70'}),
'changed_date': ('django.db.models.fields.DateTimeField', [],
{'auto_now': 'True', 'blank': 'True'}),
'created_by': (
'django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [],
{'auto_now_add': 'True', 'blank': 'True'}),
'id': (
'django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [],
{'default': 'True', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [],
{'db_index': 'True'}),
'limit_visibility_in_menu': (
'django.db.models.fields
|
kg-bot/SupyBot
|
plugins/GUI/frontend/frontend.py
|
Python
|
gpl-3.0
| 9,802
| 0.00153
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
###
# Copyright (c) 2011, Valentin Lorentz
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
# Standard library
from __future__ import print_function
import threading
import hashlib
import socket
import time
import sys
import re
# Third-party modules
from PyQt4 import QtCore, QtGui
# Local modules
import connection
import window
# FIXME: internationalize
_ = lambda x:x
refreshingTree = threading.Lock()
class ConfigurationTreeRefresh:
def __init__(self, eventsManager, window):
if not refreshingTree.acquire(False):
return
self._eventsManager = eventsManager
parentItem = QtGui.QStandardItemModel()
window.connect(parentItem, QtCore.SIGNAL('itemClicked()'),
window.configurationItemActivated)
window.configurationTree.setModel(parentItem)
self.items = {'supybot': parentItem}
hash_ = eventsManager.sendCommand('config search ""')
eventsManager.hook(hash_, self.slot)
def slot(self, reply):
"""Slot called when a childs list is got."""
childs = reply.split(', ')
for child in childs:
if '\x02' in child:
hash_ = self._eventsManager.sendCommand('more')
self._eventsManager.hook(hash_, self.slot)
break
elif ' ' in child:
refreshingTree.release()
break
splitted = child.split('.')
parent, name = '.'.join(splitted[0:-1]), splitted[-1]
item = QtGui.QStandardItem(name)
item.name = QtCore.QString(child)
self.items[parent].appendRow(item)
self.items[child] = item
class Connection(QtGui.QTabWidget, connection.Ui_connection):
"""Represents the connection dialog."""
def __init__(self, parent=None):
QtGui.QWidget.__init__(self, parent)
self.setupUi(self)
def accept(self):
"""Signal called when the button 'accept' is clicked."""
self.state.text = _('Connecting...')
if not self._connect():
self.state.text = _('Connection failed.')
return
self.state.text = _('Connected. Loading GUI...')
window = Window(self._eventsManager)
window.show()
window.commandEdit.setFocus()
self._eventsManager.callbackConnectionClosed = window.connectionClosed
self._eventsManager.defaultCallback = window.replyReceived
self.hide()
def _connect(self):
"""Connects to the server, using the filled fields in the GUI.
        Return whether or not the connection succeeded. Note that a successful
connection with a failed authentication is interpreted as successful.
"""
server = str(self.editServer.text()).split(':')
username = str(self.editUsername.text())
password = str(self.editPassword.text())
assert len(server) == 2
assert re.match('[0-9]+', server[1])
assert ' ' not in username
assert ' ' not in password
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server[1] = int(server[1])
try:
sock.connect(tuple(server))
except socket.error:
return False
sock.settimeout(0.01)
self._eventsManager = EventsManager(sock)
self._eventsManager.sendCommand('identify %s %s' %
(username, password))
return True
def reject(self):
"""Signal called when the button 'close' is clicked."""
exit()
class Window(QtGui.QTabWidget, window.Ui_window):
"""Represents the main window."""
def __init__(self, eventsManager, parent=None):
QtGui.QWidget.__init__(self, parent)
self._eventsManager = eventsManager
self.setupUi(self)
self.connect(self.commandEdit, QtCore.SIGNAL('returnPressed()'),
self.commandSendHandler)
self.connect(self.commandSend, QtCore.SIGNAL('clicked()'),
self.commandSendHandler)
self.connect(self.refreshConfigurationTree, QtCore.SIGNAL('clicked()'),
self._refreshConfigurationTree)
def commandSendHandler(self):
"""Slot called when the user clicks 'Send' or presses 'Enter' in the
raw commands tab."""
command = self.commandEdit.text()
self.commandEdit.clear()
try:
# No hooking, because the callback would be the default callback
self._eventsManager.sendCommand(command)
s = _('<-- ') + command
except socket.error:
s = _('(not sent) <-- ') + command
self.commandsHistory.appendPlainText(s)
def replyReceived(self, reply):
"""Called by the events manager when a reply to a raw command is
received."""
self.commandsHistory.appendPlainText(_('--> ') + reply.decode('utf8'))
def connectionClosed(self):
"""Called by the events manager when a special message has to be
displayed."""
        self.commandsHistory.appendPlainText(_('* connection closed *'))
self.commandEdit.readOnly = True
self._eventsManager.stop()
def _refreshConfigurationTree(self):
"""Slot called when the user clicks 'Refresh' under the configuration
tree."""
ConfigurationTreeRefresh(self._eventsManager, self)
    def configurationItemActivated(self, item):
print(repr(item))
class EventsManager(QtCore.QObject):
"""This class handles all incoming messages, and call the associated
callback (using hook() method)"""
    def __init__(self, sock):
        QtCore.QObject.__init__(self)  # required so connect() works on this QObject subclass
        self._sock = sock
self.defaultCallback = lambda x:x
self._currentLine = ''
self._hooks = {} # FIXME: should be cleared every minute
self._timerGetReplies = QtCore.QTimer()
self.connect(self._timerGetReplies, QtCore.SIGNAL('timeout()'),
self._getReplies);
self._timerGetReplies.start(100)
self._timerCleanHooks = QtCore.QTimer()
self.connect(self._timerCleanHooks, QtCore.SIGNAL('timeout()'),
self._cleanHooks);
self._timerCleanHooks.start(100)
def _getReplies(self):
"""Called by the QTimer; fetches the messages and calls the hooks."""
currentLine = self._currentLine
        self._currentLine = ''  # reset the buffer that was just consumed
if not '\n' in currentLine:
try:
data = self._sock.recv(65536)
if not data: # Frontend closed connection
self.callbackConnectionClosed()
return
|
maxalbert/Pytess
|
pytess/__init__.py
|
Python
|
mit
| 1,265
| 0.003162
|
"""
# Pytess
Pure Python tessellation of points into polygons, including
Delaunay/Thiessen and Voronoi polygons. Built as a
convenient user interface for Bill Simons/Carson Farmer's python port of
Steven Fortune's C++ version of a Delaunay triangulator.
## Platforms
Tested on Python version 2.x.
## Dependencies
Pure Python, no dependencies.
## Installing it
Pytess is installed with pip from the commandline:
pip install pytess
## Usage
To triangulate a set of points, simply do:
import pytess
points = [(1,1), (5,5), (3,5), (8,1)]
    triangles = pytess.triangulate(points)
And for voronoi diagrams:
import pytess
points = [(1,1), (5,5), (3,5), (8,1)]
voronoipolys = pytess.voronoi(points)
## More Information:
- [Home Page](http://github.com/karimbahgat/Pytess)
- [API Documentation](http://pythonhosted.org/Pytess)
## License:
This code is free to share, use, reuse,
and modify according to the MIT license, see license.txt
## Credits:
I just made it more convenient to use for end-users and uploaded it to PyPi.
The real credit goes to Bill Simons/Carson Farmer and Steven Fortune for
implementing the algorithm in the first place.
Karim Bahgat (2015)
"""
__version__ = "0.1.0"
from .main import *
|
kyuridenamida/atcoder-tools
|
atcodertools/fileutils/load_text_file.py
|
Python
|
mit
| 103
| 0
|
def load_text_file(text_file: str) -> str:
    with open(text_file, 'r') as f:
        return f.read()
|
hotpoor-for-Liwei/hj_hackathon_201607
|
vendor/qiniu/services/processing/pfop.py
|
Python
|
mit
| 1,697
| 0.001394
|
# -*- coding: utf-8 -*-
from qiniu import config
from qiniu import http
class PersistentFop(object):
"""持久化处理类
该类用于主动触发异步持久化操作,具体规格参考:
http://developer.qiniu.com/docs/v6/api/reference/fop/pfop/pfop.html
Attributes:
auth: 账号管理密钥对,Auth对象
bucket: 操作资源所在空间
pipeline: 多媒体处理队列,详见 https://portal.qiniu.com/mps/pipeline
notify_url: 持久化处理结果通知URL
"""
def __init__(self, auth, bucket, pipeline=None, notify_url=None):
"""初始化持久化处理类"""
self.auth = auth
|
self.buc
|
ket = bucket
self.pipeline = pipeline
self.notify_url = notify_url
def execute(self, key, fops, force=None):
"""执行持久化处理:
Args:
key: 待处理的源文件
fops: 处理详细操作,规格详见 http://developer.qiniu.com/docs/v6/api/reference/fop/
force: 强制执行持久化处理开关
Returns:
一个dict变量,返回持久化处理的persistentId,类似{"persistentId": 5476bedf7823de4068253bae};
一个ResponseInfo对象
"""
ops = ';'.join(fops)
data = {'bucket': self.bucket, 'key': key, 'fops': ops}
if self.pipeline:
data['pipeline'] = self.pipeline
if self.notify_url:
data['notifyURL'] = self.notify_url
if force == 1:
data['force'] = 1
url = 'http://{0}/pfop'.format(config.get_default('default_api_host'))
return http._post_with_auth(url, data, self.auth)
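# A minimal usage sketch (the keys, bucket, and fop string below are
# placeholder values, not real credentials; Auth ships with the qiniu package):
#     from qiniu import Auth
#     auth = Auth('<access_key>', '<secret_key>')
#     pfop = PersistentFop(auth, 'example-bucket', pipeline='example-pipeline')
#     ret, info = pfop.execute('input.flv', ['avthumb/mp4'], force=1)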
|
hequn8128/flink
|
flink-python/pyflink/ml/tests/test_params.py
|
Python
|
apache-2.0
| 6,487
| 0.001233
|
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import array
import unittest
from pyflink import keyword
from pyflink.ml.api.param import ParamInfo, TypeConverters, Params
from pyflink.ml.lib.param.colname import HasSelectedCols, HasOutputCol
class ParamsTest(unittest.TestCase):
def test_default_behavior(self):
params = Params()
        not_optional = ParamInfo("a", "", is_optional=False)
        with self.assertRaises(ValueError):
            params.get(not_optional)
# get optional without default param
optional_without_default = ParamInfo("a", "")
with self.assertRaises(ValueError):
params.get(optional_without_default)
def test_get_optional_param(self):
param_info = ParamInfo(
"key",
"",
has_default_value=True,
default_value=None,
type_converter=TypeConverters.to_string)
params = Params()
self.assertIsNone(params.get(param_info))
val = "3"
params.set(param_info, val)
self.assertEqual(val, params.get(param_info))
params.set(param_info, None)
self.assertIsNone(params.get(param_info))
def test_remove_contains_size_clear_is_empty(self):
param_info = ParamInfo(
"key",
"",
has_default_value=True,
default_value=None,
type_converter=TypeConverters.to_string)
params = Params()
self.assertEqual(params.size(), 0)
self.assertTrue(params.is_empty())
val = "3"
params.set(param_info, val)
self.assertEqual(params.size(), 1)
self.assertFalse(params.is_empty())
params_json = params.to_json()
params_new = Params.from_json(params_json)
self.assertEqual(params.get(param_info), val)
self.assertEqual(params_new.get(param_info), val)
params.clear()
self.assertEqual(params.size(), 0)
self.assertTrue(params.is_empty())
def test_to_from_json(self):
import jsonpickle
param_info = ParamInfo(
"key",
"",
has_default_value=True,
default_value=None,
type_converter=TypeConverters.to_string)
param_info_new = jsonpickle.decode(jsonpickle.encode(param_info))
self.assertEqual(param_info_new, param_info)
params = Params()
val = "3"
params.set(param_info, val)
params_new = Params.from_json(params.to_json())
self.assertEqual(params_new.get(param_info), val)
class ParamTypeConversionTests(unittest.TestCase):
"""
Test that param type conversion happens.
"""
def test_list(self):
l = [0, 1]
for lst_like in [l, range(2), tuple(l), array.array('l', l)]:
converted = TypeConverters.to_list(lst_like)
self.assertEqual(type(converted), list)
self.assertListEqual(converted, l)
def test_list_float_or_list_int(self):
l = [0, 1]
for lst_like in [l, range(2), tuple(l), array.array('l', l)]:
converted1 = TypeConverters.to_list_float(lst_like)
converted2 = TypeConverters.to_list_int(lst_like)
self.assertEqual(type(converted1), list)
self.assertEqual(type(converted2), list)
self.assertListEqual(converted1, l)
self.assertListEqual(converted2, l)
def test_list_string(self):
l = ["aa", "bb"]
for lst_like in [l, tuple(l)]:
converted = TypeConverters.to_list_string(lst_like)
self.assertEqual(type(converted), list)
self.assertListEqual(converted, l)
def test_float(self):
data = 1.45
converted = TypeConverters.to_float(data)
self.assertEqual(type(converted), float)
self.assertEqual(converted, data)
def test_int(self):
data = 1234567890
converted = TypeConverters.to_int(data)
self.assertEqual(type(converted), int)
self.assertEqual(converted, data)
def test_string(self):
data = "1234567890"
converted = TypeConverters.to_string(data)
self.assertEqual(type(converted), str)
self.assertEqual(converted, data)
def test_boolean(self):
data = True
converted = TypeConverters.to_boolean(data)
self.assertEqual(type(converted), bool)
self.assertEqual(converted, data)
class MockVectorAssembler(HasSelectedCols, HasOutputCol):
@keyword
def __init__(self, *, selected_cols=None, output_col=None):
self._params = Params()
kwargs = self._input_kwargs
self._set(**kwargs)
def get_params(self):
return self._params
class TestWithParams(unittest.TestCase):
def test_set_params_with_keyword_arguments(self):
assembler = MockVectorAssembler(selected_cols=["a", "b"], output_col="features")
params = assembler.get_params()
self.assertEqual(params.size(), 2)
self.assertEqual(assembler.get(HasSelectedCols.selected_cols), ["a", "b"])
self.assertEqual(assembler.get(HasOutputCol.output_col), "features")
def test_set_params_with_builder_mode(self):
assembler = MockVectorAssembler()\
.set_selected_cols(["a", "b"])\
.set_output_col("features")
params = assembler.get_params()
self.assertEqual(params.size(), 2)
self.assertEqual(assembler.get(HasSelectedCols.selected_cols), ["a", "b"])
self.assertEqual(assembler.get(HasOutputCol.output_col), "features")
|
google/earthengine-community
|
samples/python/apidocs/ee_dictionary_aside.py
|
Python
|
apache-2.0
| 1,056
| 0.002841
|
# Copyright 2021 The Google Earth Engine Community Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ee  # added for completeness; running also requires ee.Authenticate()/ee.Initialize()
# [START earthengine__apidocs__ee_dictionary_aside]
# A dictionary (e.g. results of ee.Image.reduceRegion of an S2 image).
dic = {
'B1': 182,
'B2': 219,
'B3': 443
}
def print_dic(dic):
"""Prints the dictionary."""
print('ee.Dictionary from client-side dictionary:', dic.getInfo())
# Print a message when constructing the ee.Dictionary.
ee_dic = ee.Dictionary(dic).aside(print_dic)
# [END earthengine__apidocs__ee_dictionary_aside]
|
hjanime/VisTrails
|
vistrails/core/vistrail/port_spec.py
|
Python
|
bsd-3-clause
| 20,455
| 0.002982
|
###############################################################################
##
## Copyright (C) 2014-2015, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
from __future__ import division
from itertools import izip
import operator
from vistrails.core.data_structures.bijectivedict import Bidict
from vistrails.core.modules.utils import create_port_spec_string, parse_port_spec_string
from vistrails.core.system import get_vistrails_basic_pkg_id, \
get_module_registry
from vistrails.core.utils import enum, VistrailsInternalError
from vistrails.core.vistrail.port_spec_item import PortSpecItem
from vistrails.db.domain import DBPortSpec, IdScope
from ast import literal_eval
import unittest
import copy
PortEndPoint = enum('PortEndPoint',
['Invalid', 'Source', 'Destination'])
################################################################################
class PortSpec(DBPortSpec):
port_type_map = Bidict([('input', 'destination'),
('output', 'source'),
('invalid', 'invalid')])
end_point_map = Bidict([('source', PortEndPoint.Source),
('destination', PortEndPoint.Destination),
('invalid', PortEndPoint.Invalid)])
##########################################################################
# Constructors and copy
def __init__(self, *args, **kwargs):
signature = None
if 'signature' in kwargs:
signature = kwargs['signature']
del kwargs['signature']
sigstring = None
if 'sigstring' in kwargs:
sigstring = kwargs['sigstring']
del kwargs['sigstring']
defaults = None
if 'defaults' in kwargs:
defaults = kwargs['defaults']
del kwargs['defaults']
labels = None
if 'labels' in kwargs:
labels = kwargs['labels']
del kwargs['labels']
values = None
if 'values' in kwargs:
values = kwargs['values']
del kwargs['values']
entry_types = None
if 'entry_types' in kwargs:
entry_types = kwargs['entry_types']
del kwargs['entry_types']
if 'items' in kwargs and 'portSpecItems' not in kwargs:
kwargs['portSpecItems'] = kwargs['items']
del kwargs['items']
if 'optional' not in kwargs:
kwargs['optional'] = 0 # False
elif not isinstance(kwargs['optional'], (int, long)):
if isinstance(kwargs['optional'], bool):
if kwargs['optional']:
kwargs['optional'] = 1
else:
kwargs['optional'] = 0
else:
raise VistrailsInternalError("Cannot parse 'optional' kw "
"-- must be an int or bool")
if 'min_conns' not in kwargs:
kwargs['min_conns'] = 0
elif kwargs['optional'] == 1 and kwargs['min_conns'] > 0:
raise VistrailsInternalError("A mandatory port cannot be set "
"to optional")
if 'max_conns' not in kwargs:
kwargs['max_conns'] = -1
if kwargs['min_conns'] >= 0 and kwargs['max_conns'] >= 0 and \
kwargs['min_conns'] > kwargs['max_conns']:
raise VistrailsInternalError("Minimum number of connections "
"cannot be greater than maximum "
"number of connections")
if 'sort_key' not in kwargs:
kwargs['sort_key'] = -1
if 'depth' not in kwargs:
kwargs['depth'] = 0
if 'id' not in kwargs:
kwargs['id'] = -1
if 'tooltip' in kwargs:
self._tooltip = kwargs['tooltip']
del kwargs['tooltip']
else:
self._tooltip = None
if 'docstring' in kwargs:
self._docstring = kwargs['docstring']
del kwargs['docstring']
else:
self._docstring = None
if 'shape' in kwargs:
self._shape = kwargs['shape']
del kwargs['shape']
else:
self._shape = None
DBPortSpec.__init__(self, *args, **kwargs)
if sum(1 for container in (self.port_spec_items, signature, sigstring)
if container) > 1:
raise ValueError("Please specify only one of portSpecItems,"
" signature, or sigstring kwargs.")
self.create_spec_items(self.port_spec_items, signature, sigstring,
defaults, labels, values, entry_types)
self._short_sigstring = None
# if signature is not None:
# self.create_entries(signature)
# if not self.sigstring and self._entries is not None:
# # create sigstring from entries
# self.create_sigstring_and_descriptors()
# DAKOOP: removed this---we will check in module_registry and pipeline
# validation, this way, we can let errors go all the way up
# elif self._entries is None and self.sigstring:
# # create entries from sigstring
# self.create_entries_and_descriptors()
# else:
# raise VistrailsInternalError("Need to specify signature or "
# "sigstring to create PortSpec")
# if self._entries is not None and self._tooltip is None:
# self.create_tooltip()
self.is_valid = True
def __copy__(self):
return PortSpec.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = DBPortSpec.do_copy(self, new_ids, id_scope, id_remap)
cp._short_sigstring = self._short_sigstring
cp._tooltip = self._tooltip
cp._shape = self._shape
cp._docstring = self._docstring
cp.is_valid = self.is_valid
cp.__class__ = PortSpec
# if cp._entries is not None:
# cp.create_tooltip()
return cp
@staticmethod
def convert(_port_spec):
if _port_spec.__class__ == PortSpec:
return
_port_spec.__class__ = PortSpec
for _port_spec_item in _port_spec.db_portSpecItems:
PortSpecItem.convert(_port_spec_item)
_port_spec._short_sigstring = None
_port
|
DarkFenX/Pyfa
|
gui/builtinStatsViews/resistancesViewFull.py
|
Python
|
gpl-3.0
| 9,855
| 0.003146
|
# =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================
# noinspection PyPackageRequirements
import wx
import gui.globalEvents as GE
import gui.mainFrame
from gui.bitmap_loader import BitmapLoader
from gui.pyfa_gauge import PyGauge
from gui.statsView import StatsView
from gui.utils import fonts
from gui.utils.numberFormatter import formatAmount
class ResistancesViewFull(StatsView):
name = "resistancesViewFull"
def __init__(self, parent):
StatsView.__init__(self)
self.parent = parent
self._cachedValues = []
self.showEffective = True
self.activeFit = None
self.mainFrame = gui.mainFrame.MainFrame.getInstance()
self.mainFrame.Bind(GE.EFFECTIVE_HP_TOGGLED, self.ehpSwitch)
def getHeaderText(self, fit):
return "Resistances"
def getTextExtentW(self, text):
width, height = self.parent.GetTextExtent(text)
return width
def populatePanel(self, contentPanel, headerPanel):
contentSizer = contentPanel.GetSizer()
self.panel = contentPanel
self.headerPanel = headerPanel
# Custom header EHP
headerContentSizer = self.headerPanel.Parent.GetHeaderContentSizer()
self.stEff = wx.StaticText(headerPanel, wx.ID_ANY, "( Effective HP: ")
headerContentSizer.Add(self.stEff)
headerPanel.GetParent().AddToggleItem(self.stEff)
self.labelEhp = wx.StaticText(headerPanel, wx.ID_ANY, "0")
headerContentSizer.Add(self.labelEhp, 0)
headerPanel.GetParent().AddToggleItem(self.labelEhp)
stCls = wx.StaticText(headerPanel, wx.ID_ANY, " )")
headerPanel.GetParent().AddToggleItem(stCls)
headerContentSizer.Add(stCls)
# headerContentSizer.Add(wx.StaticLine(headerPanel, wx.ID_ANY), 1, wx.ALIGN_CENTER)
# Display table
col = 0
row = 0
sizerResistances = wx.GridBagSizer()
contentSizer.Add(sizerResistances, 0, wx.EXPAND, 0)
# Add an empty label, then the rest.
sizerResistances.Add(wx.StaticText(contentPanel, wx.ID_ANY), wx.GBPosition(row, col), wx.GBSpan(1, 1))
col += 1
toolTipText = {"em": "Electromagnetic resistance", "thermal": "Thermal resistance",
"kinetic": "Kinetic resistance", "explosive": "Explosive resistance"}
for damageType in ("em", "thermal", "kinetic", "explosive"):
bitmap = BitmapLoader.getStaticBitmap("%s_big" % damageType, contentPanel, "gui")
tooltip = wx.ToolTip(toolTipText[damageType])
bitmap.SetToolTip(tooltip)
sizerResistances.Add(bitmap, wx.GBPosition(row, col), wx.GBSpan(1, 1), wx.ALIGN_CENTER)
col += 1
self.stEHPs = wx.Button(contentPanel, style=wx.BU_EXACTFIT, label="EHP")
self.stEHPs.SetToolTip(wx.ToolTip("Click to toggle between effective HP and raw HP"))
self.stEHPs.Bind(wx.EVT_BUTTON, self.toggleEHP)
for i in range(4):
sizerResistances.AddGrowableCol(i + 1)
sizerResistances.Add(self.stEHPs, wx.GBPosition(row, col), wx.GBSpan(1, 1), wx.ALIGN_CENTER)
col = 0
row += 1
gaugeColours = (((38, 133, 198), (52, 86, 98)), ((198, 38, 38), (83, 65, 67)), ((163, 163, 163), (74, 90, 93)),
((198, 133, 38), (81, 83, 67)))
toolTipText = {"shield": "Shield resistance", "armor": "Armor resistance", "hull": "Hull resistance",
"damagePattern": "Incoming damage pattern"}
for tankType in ("shield", "armor", "hull", "separator", "damagePattern"):
if tankType != "separator":
bitmap = BitmapLoader.getStaticBitmap("%s_big" % tankType, contentPanel, "gui")
tooltip = wx.ToolTip(toolTipText[tankType])
bitmap.SetToolTip(tooltip)
sizerResistances.Add(bitmap, wx.GBPosition(row, col), wx.GBSpan(1, 1), wx.ALIGN_CENTER)
col += 1
else:
sizerResistances.Add(wx.StaticLine(contentPanel, wx.ID_ANY), wx.GBPosition(row, col), wx.GBSpan(1, 6),
wx.EXPAND | wx.ALIGN_CENTER)
row += 1
col = 0
continue
currGColour = 0
font = wx.Font(fonts.NORMAL, wx.SWISS, wx.NORMAL, wx.NORMAL, False)
for damageType in ("em", "thermal", "kinetic", "explosive"):
box = wx.BoxSizer(wx.HORIZONTAL)
sizerResistances.Add(box, wx.GBPosition(row, col), wx.GBSpan(1, 1), wx.ALIGN_CENTER)
# Fancy gauges addon
pgColour = gaugeColours[currGColour]
fc = pgColour[0]
bc = pgColour[1]
currGColour += 1
lbl = PyGauge(contentPanel, font, 100)
lbl.SetMinSize((48, 16))
lbl.SetBackgroundColour(wx.Colour(bc[0], bc[1], bc[2]))
lbl.SetBarColour(wx.Colour(fc[0], fc[1], fc[2]))
lbl.SetBarGradient()
lbl.SetFractionDigits(1)
setattr(self, "gaugeResistance%s%s" % (tankType.capitalize(), damageType.capitalize()), lbl)
box.Add(lbl, 0, wx.ALIGN_CENTER)
col += 1
box = wx.BoxSizer(wx.VERTICAL)
box.SetMinSize(wx.Size(self.getTextExtentW("WWWWk"), -1))
lbl = wx.StaticText(contentPanel, wx.ID_ANY, "0" if tankType != "damagePattern" else "")
box.Add(lbl, 0, wx.ALIGN_CENTER)
setattr(self, "labelResistance%sEhp" % tankType.capitalize(), lbl)
sizerResistances.Add(box, wx.GBPosition(row, col), wx.GBSpan(1, 1), wx.ALIGN_CENTER)
row += 1
col = 0
self.stEHPs.SetToolTip(wx.ToolTip("Click to toggle between effective HP and raw HP"))
def toggleEHP(self, event):
wx.PostEvent(self.mainFrame, GE.EffectiveHpToggled(effective=self.stEHPs.GetLabel() == "HP"))
def ehpSwitch(self, event):
event.Skip()
self.showEffective = event.effective
fitID = self.mainFrame.getActiveFit()
wx.PostEvent(self.mainFrame, GE.FitChanged(fitIDs=(fitID,)))
def refreshPanel(self, fit):
        # If we did anything interesting, we'd update our labels to reflect the new fit's stats here
if fit is None and not self.showEffective:
self.showEffective = True
wx.PostEvent(self.mainFrame, GE.EffectiveHpToggled(effective=True))
return
self.stEHPs.SetLabel("EHP" if self.showEffective else "HP")
self.activeFit = fit.ID if fit is not None else None
for tankType in ("shield", "armor", "hull"):
for damageType in ("em", "thermal", "kinetic", "explosive"):
if fit is not None:
resonanceType = tankType if tankType != "hull" else ""
resonance = "%s%sDamageResonance" % (resonanceType, damageType.capitalize())
resonance = resonance[0].lower() + resonance[1:]
resonance = (1 - fit.ship.getModifiedItemAttr(resonance)) * 100
else:
resonance = 0
lbl = getattr(self, "gaugeResistance%s%s" % (tankType.capitalize(), damageType.capitalize()))
lbl.SetValue(resonance)
ehp = (fit.ehp if self.showEffective else fit.hp
|
WarrenWeckesser/scipy
|
benchmarks/benchmarks/fftpack_pseudo_diffs.py
|
Python
|
bsd-3-clause
| 2,237
| 0
|
""" Benchmark functions for fftpack.pseudo_diffs module
"""
from numpy import arange, sin, cos, pi, exp, tanh, sign
from .common import Benchmark, safe_import
with safe_import():
from scipy.fftpack import diff, fft, ifft, tilbert, hilbert, shift, fftfreq
def direct_diff(x, k=1, period=None):
fx = fft(x)
n = len(fx)
if period is None:
period = 2*pi
w = fftfreq(n)*2j*pi/period*n
if k < 0:
w = 1 / w**k
w[0] = 0.0
else:
w = w**k
if n > 2000:
w[250:n-250] = 0.0
return ifft(w*fx).real
def direct_tilbert(x, h=1, period=None):
fx = fft(x)
n = len(fx)
if period is None:
period = 2*pi
w = fftfreq(n)*h*2*pi/period*n
w[0] = 1
w = 1j/tanh(w)
w[0] = 0j
return ifft(w*fx)
def direct_hilbert(x):
fx = fft(x)
n = len(fx)
w = fftfreq(n)*n
w = 1j*sign(w)
return ifft(w*fx)
def direct_shift(x, a, period=None):
n = len(x)
if period is None:
k = fftfreq(n)*1j*n
else:
k = fftfreq(n)*2j*pi/period*n
return ifft(fft(x)*exp(k*a)).real
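# Note: each direct_* helper above reproduces its scipy.fftpack counterpart by
# building the spectral multiplier explicitly and applying it in Fourier space,
# so the Bench class below can compare the library call against a plain FFT
# implementation of the same transform.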
class Bench(Benchmark):
params = [
[100, 256, 512, 1000, 1024, 2048, 2048*2, 2048*4],
['fft', 'direct'],
]
param_names = ['size', 'type']
def setup(self, size, type):
size = int(size)
x = arange(size)*2*pi/size
a = 1
self.a = a
if size < 2000:
self.f = sin(x)*cos(4*x)+exp(sin(3*x))
self.sf = sin(x+a)*cos(4*(x+a))+exp(sin(3*(x+a)))
else:
self.f = sin(x)*cos(4*x)
self.sf = sin(x+a)*cos(4*(x+a))
def time_diff(self, size, soltype):
if soltype == 'fft':
diff(self.f, 3)
else:
direct_diff(self.f, 3)
def time_tilbert(self, size, soltype):
if soltype == 'fft':
tilbert(self.f, 1)
else:
direct_tilbert(self.f, 1)
def time_hilbert(self, size, soltype):
        if soltype == 'fft':
hilbert(self.f)
else:
direct_hilbert(self.f)
def time_shift(self, size, soltype):
if soltype == 'fft':
shift(self.f, self.a)
else:
direct_shift(self.f, self.a)
|
alcemirsantos/algorithms-py
|
tests/data_stuctures/test_mockdata.py
|
Python
|
mit
| 400
| 0.0025
|
import unittest
from src.data_structures.mockdata import MockData
class TestMockData (unittest.TestCase):
def setUp(self):
        self.data = MockData()
def test_random_data(self):
data = MockData()
a_set = data.get_random_elements(10)
        self.assertTrue(len(a_set) == 10, "the data should have 10 elements!")
if __name__ == '__main__':
unittest.main()
|
fx2003/tensorflow-study
|
TensorFlow实战/《TensorFlow实战》代码/3_2_HelloWorld.py
|
Python
|
mit
| 1,784
| 0.001682
|
#%%
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
print(mnist.train.images.shape, mnist.train.labels.shape)
print(mnist.test.images.shape, mnist.test.labels.shape)
print(mnist.validation.images.shape, mnist.validation.labels.shape)
import tensorflow as tf
sess = tf.InteractiveSession()
x = tf.placeholder(tf.float32, [None, 784])
W = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
y = tf.nn.softmax(tf.matmul(x, W) + b)
y_ = tf.placeholder(tf.float32, [None, 10])
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1]))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
tf.global_variables_initializer().run()
for i in range(1000):
batch_xs, batch_ys = mnist.train.next_batch(100)
train_step.run({x: batch_xs, y_: batch_ys})
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
print(accuracy.eval({x: mnist.test.images, y_: mnist.test.labels}))
|
open-forcefield-group/smarty
|
smarty/__init__.py
|
Python
|
mit
| 392
| 0.002551
|
try:
import openeye
# These can only be imported if openeye tools are available
from smarty.atomtyper import *
from smarty.sampler import *
from smarty.utils import *
from smarty.sampler_smirky import *
except Exception as e:
print(e)
    print('Warning: Cannot import openeye toolkit; not all functionality will be available.')
from smarty.score_utils import *
|
eflynch/pygamelan
|
pygamelan/core.py
|
Python
|
gpl-2.0
| 2,581
| 0.019372
|
#####################################################################
#
# core.py
#
# Copyright (c) 2015, Eran Egozy
#
# Released under the MIT License (http://opensource.org/licenses/MIT)
#
#####################################################################
import kivy
from kivy.app import App
from kivy.core.window import Window
from kivy.uix.widget import Widget
from kivy.clock import Clock
import traceback
class BaseWidget(Widget):
"""Has some common core functionality we want in all
our apps - handling key up/down, closing the app, and update on every frame.
The subclass of BaseWidget can optionally define these methods, which will
get called if defined:
def on_key_down(self, keycode, modifiers):
def on_key_up(self, keycode):
def on_close(self):
def on_update(self):
"""
def __init__(self, **kwargs):
super(BaseWidget, self).__init__(**kwargs)
if hasattr(self.__class__, 'on_init'):
Clock.schedule_once(self._init, 0)
# keyboard up / down messages
self.down_keys = []
kb = Window.request_keyboard(target=self, callback=None)
if hasattr(self.__class__, 'on_key_down'):
kb.bind(on_key_down=self._key_down)
if hasattr(self.__class__, 'on_key_up'):
kb.bind(on_key_up=self._key_up)
# get called when app is about to shut down
if hasattr(self.__class__, 'on_close'):
Window.bind(on_close=self._close)
# create a clock to poll us every frame
if hasattr(self.__class__, 'on_update'):
Clock.schedule_interval(self._update, 0)
def _key_down(self, keyboard, keycode, text, modifiers):
if not keycode[1] in self.down_keys:
self.down_keys.append(keycode[1])
self.on_key_down(keycode, modifiers)
def _key_up(self, keyboard, keycode):
if keycode[1] in self.down_keys:
self.down_keys.remove(keycode[1])
self.on_key_up(keycode)
def _close(self, *args):
self.on_close()
def _update(self, dt):
self.on_update()
g_terminate_funcs = []
def register_terminate_func(f) :
global g_terminate_funcs
g_terminate_funcs.append(f)
def run(widget):
"""Pass in a widget, and this will automatically run it. Will also
run termination functions (g_terminate_funcs) at the end of the run,
even if it was caused by a program crash
"""
class MainApp(App):
def build(self):
return widget()
try:
MainApp().run()
except:
traceback.print_exc()
global g_terminate_funcs
for t in g_terminate_funcs:
t()
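# A minimal usage sketch (MyWidget is a hypothetical subclass; it defines only
# the optional hooks it needs, as described in the BaseWidget docstring):
#     class MyWidget(BaseWidget):
#         def on_key_down(self, keycode, modifiers):
#             print('key down:', keycode)
#         def on_update(self):
#             pass  # polled once per frame
#     run(MyWidget)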
|
pabigot/pyxb
|
tests/drivers/test-ctd-simple.py
|
Python
|
apache-2.0
| 1,997
| 0.007511
|
# -*- coding: utf-8 -*-
import logging
if __name__ == '__main__':
logging.basicConfig()
_log = logging.getLogger(__name__)
import pyxb.binding.generate
import pyxb.binding.datatypes as xsd
import pyxb.utils.domutils
from xml.dom import Node
import os.path
schema_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../schemas/test-ctd-simple.xsd'))
code = pyxb.binding.generate.GeneratePython(schema_location=schema_path)
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
import unittest
class TestCTDSimple (unittest.TestCase):
def testClause4 (self):
self.assertTrue(clause4._IsSimpleTypeContent())
self.assertTrue(clause4._TypeDefinition == xsd.string)
self.assertEqual(None, clause4._TypeDefinition._CF_length.value())
def testClause3 (self):
self.assertTrue(clause3._IsSimpleTypeContent())
self.assertTrue(issubclass(clause3, clause4))
self.assertTrue(clause3._TypeDefinition == xsd.string)
def testClause2 (self):
self.assertTrue(clause2._IsSimpleTypeContent())
self.assertTrue(issubclass(clause2, ctype))
self.assertTrue(issubclass(clause2._TypeDefinition, xsd.string))
self.assertEqual(6, clause2._TypeDefinition._CF_length.value())
def testClause1_1 (self):
self.assertTrue(clause1_1._IsSimpleTypeContent())
self.assertTrue(issubclass(clause1_1, clause4))
self.assertTrue(issubclass(clause1_1._TypeDefinition, xsd.string))
self.assertEqual(2, clause1_1._TypeDefinition._CF_minLength.value())
        self.assertEqual(4, clause1_1._TypeDefinition._CF_maxLength.value())
def testClause1_2 (self):
self.assertTrue(clause1_2._IsSimpleTypeContent())
        self.assertTrue(issubclass(clause1_2, clause4))
self.assertTrue(issubclass(clause1_2._TypeDefinition, xsd.string))
self.assertEqual(6, clause1_2._TypeDefinition._CF_length.value())
if __name__ == '__main__':
unittest.main()
|
opencord/maas
|
library/maas_item.py
|
Python
|
apache-2.0
| 6,217
| 0.006434
|
#!/usr/bin/python
# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DOCUMENTATION = '''
---
module: maas_item
short_description: Manage MAAS items
options:
maas:
description:
- URL of MAAS server
default: http://localhost/MAAS/api/1.0/
key:
description:
- MAAS API key
required: yes
name:
description:
- name of the item
required: yes
state:
description:
- possible states for this item
choices: ['present', 'absent', 'query']
default: present
requirements: [ipaddress, requests_oauthlib, maasclient]
author: David Bainbridge
'''
EXAMPLES = '''
examples:
maas_item:
maas: http://my.maas.server.com/MAAS/api/1.0/
key: 'xBvr9dx5k7S52myufC:fqBXV7hJgXegNZDw9c:K8hsmL47XjAppfQy2pDVW7G49p6PELgp'
name: MyItem
state: present
maas_item:
maas: http://my.maas.server.com/MAAS/api/1.0/
key: 'xBvr9dx5k7S52myufC:fqBXV7hJgXegNZDw9c:K8hsmL47XjAppfQy2pDVW7G49p6PELgp'
name: MyDeadItem
state: absent
'''
import sys
import json
import ipaddress
import requests
from maasclient.auth import MaasAuth
from maasclient import MaasClient
# For some reason the maasclient doesn't provide a put method. So
# we will add it here
def put(client, url, params=None):
return requests.put(url=client.auth.api_url + url,
auth=client._oauth(), data=params)
# Attempt to interpret the given value as a JSON object, if that fails
# just return it as a string
def string_or_object(val):
try:
return json.loads(val)
except:
return val
# Return a copy of the given dictionary with any `null` valued entries
# removed
def remove_null(d_in):
d = d_in.copy()
to_remove = []
for k in d.keys():
        if d[k] is None:
to_remove.append(k)
for k in to_remove:
del d[k]
return d
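# Illustrative check of the helper above (hypothetical keys):
# remove_null({'name': 'x', 'power_type': None}) -> {'name': 'x'}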
# Determine if two dictionaries are different
def different(have, want):
have_keys = have.keys()
for key in want.keys():
if (key in have_keys and want[key] != have[key]) or key not in have_keys:
return True
return False
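# Illustrative behaviour of different() (hypothetical values): only keys
# present in `want` are compared, so extra keys in `have` are ignored.
# different({'a': 1, 'b': 2}, {'a': 1}) -> False
# different({'a': 1}, {'a': 2}) -> True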
# Get an item from MAAS using its name, if not found return None
def get_item(maas, name):
res = maas.get('/items/%s/' % name)
if res.ok:
return json.loads(res.text)
return None
# Create an item based on the value given
def create_item(maas, item):
merged = item.copy()
# merged['op'] = 'new'
res = maas.post('/items/', merged)
if res.ok:
return { 'error': False, 'status': get_item(maas, merged['name']) }
return { 'error': True, 'status': string_or_object(res.text) }
# Delete an item based on the name
def delete_item(maas, name):
res = maas.delete('/items/%s/' % name)
if res.ok:
return { 'error': False }
return { 'error': True, 'status': string_or_object(res.text) }
def update_item(maas, have, want):
merged = have.copy()
merged.update(want)
res = put(maas, '/items/%s/' % merged['name'], merged)
if res.ok:
return { 'error': False, 'status': get_item(maas, merged['name']) }
return { 'error': True, 'status': string_or_object(res.text) }
def main():
module = AnsibleModule(
argument_spec = dict(
maas=dict(default='http://localhost/MAAS/api/1.0/'),
key=dict(required=True),
name=dict(required=True),
state=dict(default='present', choices=['present', 'absent', 'query'])
),
supports_check_mode = False
)
maas = module.params['maas']
key = module.params['key']
state = module.params['state']
    # Construct a sparsely populated desired state
desired = remove_null({
'name': module.params['name'],
})
# Authenticate into MAAS
auth = MaasAuth(maas, key)
maas = MaasClient(auth)
# Attempt to get the item from MAAS
item = get_item(maas, desired['name'])
# Actions if the item does not currently exist
if not item:
if state == 'query':
            # If this is a query, return that it was not found
module.exit_json(changed=False, found=False)
elif state == 'present':
# If this should be present, then attempt to create it
res = create_item(maas, desired)
if res['error']:
module.fail_json(msg=res['status'])
else:
module.exit_json(changed=True, item=res['status'])
else:
# If this should be absent, then we are done and in the desired state
module.exit_json(changed=False)
        # Done with item-does-not-exist actions
return
# Actions if the item does exist
if state == 'query':
# If this is a query, return the item
module.exit_json(changed=False, found=True, item=item)
elif state == 'present':
        # If we want this to exist, check whether it is different and
        # needs to be updated
if different(item, desired):
res = update_item(maas, item, desired)
            if res['error']:
module.fail_json(msg=res['status'])
else:
module.exit_json(changed=True, item=res['status'])
else:
            # No differences, so nothing to change
module.exit_json(changed=False, item=item)
else:
# If we don't want this item, then delete it
res = delete_item(maas, item['name'])
if res['error']:
            module.fail_json(msg=res['status'])
else:
module.exit_json(changed=True, item=item)
# this is magic, see lib/ansible/module_common.py
#<<INCLUDE_ANSIBLE_MODULE_COMMON>>
if __name__ == '__main__':
main()
|
esben/setuptools_scm
|
setuptools_scm/git.py
|
Python
|
mit
| 1,196
| 0
|
from .utils import do, do_ex, trace
from .version import meta
from os.path import abspath, realpath
FILES_COMMAND = 'git ls-files'
DEFAULT_DESCRIBE = 'git describe --dirty --tags --long --match *.*'
def parse(root, describe_command=DEFAULT_DESCRIBE):
real_root, _, ret = do_ex('git rev-parse --show-toplevel', root)
if ret:
return
trace('real root', real_root)
if abspath(realpath(real_root)) != abspath(realpath(root)):
return
    rev_node, _, ret = do_ex('git rev-parse --verify --quiet HEAD', root)
if ret:
return meta('0.0')
rev_node = rev_node[:7]
    out, err, ret = do_ex(describe_command, root)
if '-' not in out and '.' not in out:
revs = do('git rev-list HEAD', root)
count = revs.count('\n')
if ret:
out = rev_node
return meta('0.0', distance=count + 1, node=out)
if ret:
return
dirty = out.endswith('-dirty')
if dirty:
out = out.rsplit('-', 1)[0]
tag, number, node = out.rsplit('-', 2)
number = int(number)
if number:
return meta(tag, distance=number, node=node, dirty=dirty)
else:
return meta(tag, dirty=dirty, node=node)
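# Illustrative outcome (assumed tag/hash values): if `git describe` prints
# "v1.2-3-gabcdef0-dirty", parse() above strips the "-dirty" suffix and
# returns meta('v1.2', distance=3, node='gabcdef0', dirty=True).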
|
1and1/artifactory-debian
|
dput-webdav/webdav.py
|
Python
|
apache-2.0
| 17,864
| 0.004646
|
# -*- coding: utf-8 -*-
# pylint: disable=locally-disabled, star-args
""" WebDAV upload method for dput.
Install to "/usr/share/dput/webdav.py".
"""
from __future__ import with_statement
import re
import os
import sys
import cgi
import netrc
import socket
import fnmatch
import getpass
import httplib
import urllib2
import urlparse
import unittest
from contextlib import closing
from email import parser as rfc2822_parser
try:
import dputhelper
except ImportError:
sys.path.insert(0, "/usr/share/dput/helper")
import dputhelper
# Block size for upload streaming
CHUNK_SIZE = 16 * 1024
def trace(msg, **kwargs):
"""Emit log traces in debug mode."""
if trace.debug:
print("D: webdav: " + (msg % kwargs))
trace.debug = False
def log(msg, **kwargs):
"""Emit log message to stderr."""
sys.stdout.flush()
sys.stderr.write("webdav: " + (msg % kwargs) + "\n")
sys.stderr.flush()
def _resolve_credentials(fqdn, login):
"""Look up special forms of credential references."""
result = login
if "$" in result:
result = os.path.expandvars(result)
if result.startswith("netrc:"):
result = result.split(':', 1)[1]
if result:
result = os.path.abspath(os.path.expanduser(result))
accounts = netrc.netrc(result or None)
account = accounts.authenticators(fqdn)
if not account or not(account[0] or account[1]):
raise dputhelper.DputUploadFatalException("Cannot find account for host %s in %s netrc file" % (
fqdn, result or "default"))
# account is (login, account, password)
user, pwd = account[0] or account[1], account[2] or ""
result = "%s:%s" % (user, pwd)
else:
if result.startswith("file:"):
result = os.path.abspath(os.path.expanduser(result.split(':', 1)[1]))
with closing(open(result, "r")) as handle:
result = handle.read().strip()
try:
user, pwd = result.split(':', 1)
except ValueError:
user, pwd = result, ""
trace("Resolved login credentials to %(user)s:%(pwd)s", user=user, pwd='*' * len(pwd))
return result
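# Illustrative credential forms accepted above (paths are hypothetical):
# "jane:secret" (used verbatim), "file:~/.dput.cred" (contents of the file,
# as user:pass), and "netrc:~/.netrc" (looked up by host FQDN); environment
# variables such as "$DPUT_LOGIN" are expanded first.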
class PromptingPasswordMgr(urllib2.HTTPPasswordMgr):
""" Custom password manager that prompts for a password once, if none is available otherwise.
Based on code in dput 0.9.6 (http method).
"""
def __init__(self, login):
urllib2.HTTPPasswordMgr.__init__(self)
self.login = login
def find_user_password(self, realm, authuri):
"""Prompt for a password once and remember it, unless already provided in the configuration."""
authuri = self.reduce_uri(authuri)[0]
authinfo = urllib2.HTTPPasswordMgr.find_user_password(self, realm, authuri)
if authinfo == (None, None):
credentials = self.login
if ':' in credentials:
authinfo = credentials.split(':', 1)
else:
password = getpass.getpass(" Password for %s:" % realm)
self.add_password(realm, authuri, credentials, password)
authinfo = credentials, password
return authinfo
def _distro2repo(distro, repo_mappings):
"""Map distribution names to repo names according to config settings."""
# Parse the mapping config
mappings = [(i.split('=', 1) if '=' in i else (i, i)) for i in repo_mappings.split()]
# Try to find a match
result = distro
for pattern, target in mappings:
if fnmatch.fnmatchcase(distro.lower(), pattern.lower()):
result = target
break
trace("Mapped distro '%(distro)s' to '%(repo)s'", distro=distro, repo=result)
return result
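# Illustrative mapping config (hypothetical): with repo_mappings set to
# "unstable=snapshots *-backports=backports", a distribution of "unstable"
# maps to the "snapshots" repo via the fnmatch loop above.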
def _resolve_incoming(fqdn, login, incoming, changes=None, cli_params=None, repo_mappings=""):
"""Resolve the given `incoming` value to a working URL."""
# Build fully qualified URL
scheme, netloc, path, params, query, anchor = urlparse.urlparse(incoming, scheme="http", allow_fragments=True)
if scheme not in ("http", "https"):
raise dputhelper.DputUploadFatalException("Unsupported URL scheme '%s'" % scheme)
url = urlparse.urlunparse((scheme, netloc or fqdn, path.rstrip('/') + '/', params, query, None))
# Parse anchor to parameters
url_params = dict(cgi.parse_qsl(anchor or '', keep_blank_values=True))
# Read changes from stream or file
pkgdata = {}
if changes:
try:
changes.read # pylint: disable=maybe-no-member
except AttributeError:
with closing(open(changes, "r")) as handle:
changes = handle.read()
else:
changes = changes.read() # pylint: disable=maybe-no-member
if changes.startswith("-----BEGIN PGP SIGNED MESSAGE-----"):
# Let someone else check this, we don't care a bit; gimme the data already
trace("Extracting package metadata from PGP signed message...")
changes = changes.split("-----BEGIN PGP")[1].replace('\r', '').split('\n\n', 1)[1]
pkgdata = dict([(key.lower().replace('-', '_'), val.strip())
for key, val in rfc2822_parser.HeaderParser().parsestr(changes).items()
])
# Extend changes metadata
pkgdata["loginuser"] = login.split(':')[0]
if "version" in pkgdata:
pkgdata["upstream"] = re.split(r"[-~]", pkgdata["version"])[0]
pkgdata.update(dict(
fqdn=fqdn, repo=_distro2repo(pkgdata.get("distribution", "unknown"), repo_mappings),
))
pkgdata.update(cli_params or {}) # CLI options can overwrite anything
trace("Collected metadata:\n %(meta)s", meta="\n ".join(["%s = %s" % (key, val)
for key, val in sorted(pkgdata.items())
if '\n' not in val # only print 'simple' values
]))
# Interpolate `url`
try:
try:
url.format
except AttributeError:
url = url % pkgdata # Python 2.5
else:
url = url.format(**pkgdata) # Python 2.6+
except KeyError, exc:
raise dputhelper.DputUploadFatalException("Unknown key (%s) in incoming templates '%s'" % (exc, incoming))
trace("Resolved incoming to `%(url)s' params=%(params)r", url=ur
|
l, params=url_params)
return url, url_params
def _url_connection(url, method, skip_host=False, skip_accept_encoding=False):
"""Create HTTP[S] connection for `url`."""
scheme, netloc, path, params, query, _ = urlparse.urlparse(url)
result = conn = (httplib.HTTPSConnection if scheme == "https" else httplib.HTTPConnection)(netloc)
conn.debuglevel = int(trace.debug)
try:
conn.putrequest(method, urlparse.urlunparse((None, None, path, params, query, None)), skip_host, skip_accept_encoding)
conn.putheader("User-Agent", "dput")
conn.putheader("Connection", "close")
conn = None
finally:
if conn:
conn.close() # close in case of errors
return result
def _file_url(filepath, url):
"""Return URL for the given `filepath` in the DAV collection `url`."""
basename = os.path.basename(filepath)
return urlparse.urljoin(url.rstrip('/') + '/', basename)
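# Illustrative (hypothetical names): _file_url("/tmp/foo_1.0_all.deb",
# "http://host/repo") -> "http://host/repo/foo_1.0_all.deb"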
def _dav_put(filepath, url, login, progress=None):
"""Upload `filepath` to given `url` (referring to a WebDAV collection)."""
fileurl = _file_url(filepath, url)
sys.stdout.write(" Uploading %s: " % os.path.basename(filepath))
sys.stdout.flush()
size = os.path.getsize(filepath)
with closing(open(filepath, 'r')) as handle:
if progress:
handle = dputhelper.FileWithProgress(handle, ptype=progress, progressf=sys.stdout, size=size)
trace("HTTP PUT to URL: %s" % fileurl)
try:
conn = _url_connection(fileurl, "PUT")
try:
conn.putheader("Authorization", 'Basic %s' % login.encode('base64').replace('\n', '').strip())
conn.putheader("Content-Length", str(size))
conn.endheaders()
conn.debuglevel = 0
while True:
data = handle.read(CHUNK_SIZE)
if not data:
break
|
achanda/refstack
|
refstack/decorators.py
|
Python
|
apache-2.0
| 409
| 0.002445
|
#-*- coding: utf-8 -*-
# This file based on MIT licensed code at: https://github.com/imwilsonxu/fbone
from functools import wraps
from flask import abort
from flask.ext.login import current_user
def admin_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if not current_user.is_admin():
abort(403)
return f(*args, **kwargs)
return decorated_function
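# Illustrative usage of the decorator above (hypothetical Flask view):
# @app.route('/admin')
# @admin_required
# def dashboard():
#     return 'admins only'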
|
lscsoft/gwdetchar
|
gwdetchar/omega/__init__.py
|
Python
|
gpl-3.0
| 1,099
| 0
|
# coding=utf-8
# Copyright (C) Duncan Macleod (2015)
#
# This file is part of the GW DetChar python package.
#
# GW DetChar is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GW DetChar is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GW DetChar. If not, see <http://www.gnu.org/licenses/>.
"""Methods and utilties for performing Omega pipline scans
See Chatterji 2005 [thesis] for details on the Q-pipeline.
"""
__author__ = 'Duncan Macleod <duncan.macleod@ligo.org>'
__credits__ = 'Alex Urban <alexander.urban@ligo.org>'
# -- imports ------------------------------------------------------------------
# import pyomega utils
from .core import *
|
chocopoche/mangopay2-python-sdk
|
setup.py
|
Python
|
mit
| 1,575
| 0.001905
|
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'DESCRIPTION.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='jestocke-mangopaysdk',
version='3.0.6',
description='A client library written in python to work with mangopay v2 api',
long_description='This SDK is a client library for interacting with the Mangopay API.',
url='https://github.com/Mangopay/mangopay2-python-sdk',
author='Mangopay (www.mangopay.com)',
author_email='support@mangopay.com',
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
        'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
],
keywords='mangopay api development emoney sdk',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
    install_requires=['requests', 'simplejson', 'blinker', 'six'],
extras_require={
'dev': ['responses', 'nose', 'coverage', 'httplib2',
'pyopenssl', 'ndg-httpsclient', 'pyasn1', 'exam'],
'test': ['responses', 'nose', 'coverage', 'httplib2',
'pyopenssl', 'ndg-httpsclient', 'pyasn1', 'exam'],
},
entry_points={
'console_scripts': [
'sample=sample:main',
],
},
)
|
kaikai581/NOvA
|
xsec-2019/check_light_syst/check_entries/file_lists/pull_common_files.py
|
Python
|
gpl-2.0
| 1,096
| 0.014599
|
#!/usr/bin/env python
from __future__ import print_function
import os
import subprocess
syst_names = ['nominal','lightdown','lightup','ckv','calibneg','calibpos','calibshape']
flists = [syst_name+'.txt' for syst_name in syst_names]
def process_dataset(ds_idx):
flist = flists[ds_idx]
fns = []
with open(flist, 'r') as inf:
for line in inf:
fns.append(line.rstrip('\n'))
# create output directory
out_base = '/pnfs/nova/scratch/users/slin'
out_path = os.path.join(out_base,'numuccinc','evt_matched_cafs',syst_names[ds_idx])
    if not os.path.exists(out_path):
os.makedirs(out_path)
for fn in fns:
bashout = subprocess.check_output('samweb locate-file {}'.format(fn), shell=True)
bashout = bashout.split('\n')
for line in bashout:
if 'dcache' in line:
location = line.split(':')[1]
cmd = 'rsync -v {} {}'.format(os.path.join(location, fn), out_path)
os.system(cmd)
if __name__ == '__main__':
    for i in range(len(syst_names)):
process_dataset(i)
|
hdoria/HnTool
|
HnTool/modules/php.py
|
Python
|
gpl-2.0
| 4,760
| 0.005252
|
# -*- coding: utf-8 -*-
#
# HnTool rules - php
# Copyright (C) 2009-2010 Candido Vieira <cvieira.br@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
import os
import ConfigParser
import HnTool.modules.util
from HnTool.modules.rule import Rule as MasterRule
class Rule(MasterRule):
def __init__(self, options):
MasterRule.__init__(self, options)
self.short_name="php
|
"
self.long_name="Checks security problems on php config file"
self.type="config"
self.required_files = ['/etc/php5/apache2/php.ini', '/etc/php5/cli/php.ini', '/etc/php.ini']
def requires(self):
return self.required_files
def analyze(self, options):
        check_results = self.check_results
conf_files = self.required_files
for php_conf in conf_files:
if os.path.isfile(php_conf):
config = ConfigParser.ConfigParser()
try:
config.read(php_conf)
                except ConfigParser.ParsingError, strerror:
check_results['info'].append('Could not parse %s: %s' % (php_conf, strerror))
continue
if not config.has_section('PHP'):
check_results['info'].append('%s is not a PHP config file' % (php_conf))
continue
if config.has_option('PHP', 'register_globals'):
rg = config.get('PHP', 'register_globals').lower()
if rg == 'on':
check_results['medium'].append('Register globals is on (%s)' % (php_conf))
elif rg == 'off':
check_results['ok'].append('Register globals is off (%s)' % (php_conf))
else:
check_results['info'].append('Unknown value for register globals (%s)' % (php_conf))
else:
check_results['info'].append('Register globals not found (%s)' % (php_conf))
if config.has_option('PHP', 'safe_mode'):
sm = config.get('PHP', 'safe_mode').lower()
if sm == 'on':
check_results['low'].append('Safe mode is on (fake security) (%s)' % (php_conf))
elif sm == 'off':
check_results['info'].append('Safe mode is off (%s)' % (php_conf))
else:
check_results['info'].append('Unknown value for safe mode (%s)' % (php_conf))
else:
check_results['info'].append('Safe mode not found (%s)' % (php_conf))
if config.has_option('PHP', 'display_errors'):
de = config.get('PHP', 'display_errors').lower()
if de == 'on':
check_results['medium'].append('Display errors is on (stdout) (%s)' % (php_conf))
elif de == 'off':
check_results['ok'].append('Display errors is off (%s)' % (php_conf))
elif de == 'stderr':
check_results['info'].append('Display errors set to stderr (%s)' % (php_conf))
else:
check_results['info'].append('Unknown value for display errors (%s)' % (php_conf))
else:
check_results['info'].append('Display errors not found (%s)' % (php_conf))
if config.has_option('PHP', 'expose_php'):
ep = config.get('PHP', 'expose_php').lower()
if ep == 'on':
check_results['low'].append('Expose PHP is on (%s)' % (php_conf))
elif ep == 'off':
check_results['ok'].append('Expose PHP is off (%s)' % (php_conf))
else:
check_results['info'].append('Unknown value for expose PHP (%s)' % (php_conf))
else:
check_results['info'].append('Expose PHP not found (%s)' % (php_conf))
return check_results
|
dmsimard/ansible
|
lib/ansible/modules/git.py
|
Python
|
gpl-3.0
| 53,677
| 0.002459
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: git
author:
- "Ansible Core Team"
- "Michael DeHaan"
version_added: "0.0.1"
short_description: Deploy software (or files) from git checkouts
description:
- Manage I(git) checkouts of repositories to deploy files or software.
options:
repo:
description:
- git, SSH, or HTTP(S) protocol address of the git repository.
type: str
required: true
aliases: [ name ]
dest:
description:
- The path of where the repository should be checked out. This
is equivalent to C(git clone [repo_url] [directory]). The repository
named in I(repo) is not appended to this path and the destination directory must be empty. This
parameter is required, unless I(clone) is set to C(no).
type: path
required: true
version:
description:
- What version of the repository to check out. This can be
          the literal string C(HEAD), a branch name, a tag name.
It can also be a I(SHA-1) hash, in which case I(refspec) needs
to be specified if the given revision is not already available.
type: str
default: "HEAD"
accept_hostkey:
description:
- If C(yes), ensure that "-o StrictHostKeyChecking=no" is
present as an ssh option.
type: bool
default: 'no'
version_added: "1.5"
accept_newhostkey:
description:
- As of OpenSSH 7.5, "-o StrictHostKeyChecking=accept-new" can be
          used, which is safer and will only accept host keys which are
          not present or are the same. If C(yes), ensure that
"-o StrictHostKeyChecking=accept-new" is present as an ssh option.
type: bool
default: 'no'
version_added: "2.12"
ssh_opts:
description:
- Creates a wrapper script and exports the path as GIT_SSH
which git then automatically uses to override ssh arguments.
An example value could be "-o StrictHostKeyChecking=no"
(although this particular option is better set by
I(accept_hostkey)).
type: str
version_added: "1.5"
key_file:
description:
- Specify an optional private key file path, on the target host, to use for the checkout.
type: path
version_added: "1.5"
reference:
description:
- Reference repository (see "git clone --reference ...").
version_added: "1.4"
remote:
description:
- Name of the remote.
type: str
default: "origin"
refspec:
description:
- Add an additional refspec to be fetched.
If version is set to a I(SHA-1) not reachable from any branch
or tag, this option may be necessary to specify the ref containing
the I(SHA-1).
Uses the same syntax as the C(git fetch) command.
An example value could be "refs/meta/config".
type: str
version_added: "1.9"
force:
description:
- If C(yes), any modified files in the working
repository will be discarded. Prior to 0.7, this was always
'yes' and could not be disabled. Prior to 1.9, the default was
`yes`.
type: bool
default: 'no'
version_added: "0.7"
depth:
description:
- Create a shallow clone with a history truncated to the specified
          number of revisions. The minimum possible value is C(1), otherwise
ignored. Needs I(git>=1.9.1) to work correctly.
type: int
version_added: "1.2"
clone:
description:
- If C(no), do not clone the repository even if it does not exist locally.
type: bool
default: 'yes'
version_added: "1.9"
update:
description:
- If C(no), do not retrieve new revisions from the origin repository.
- Operations like archive will work on the existing (old) repository and might
not respond to changes to the options version or remote.
type: bool
default: 'yes'
version_added: "1.2"
executable:
description:
- Path to git executable to use. If not supplied,
the normal mechanism for resolving binary paths will be used.
type: path
version_added: "1.4"
bare:
description:
- If C(yes), repository will be created as a bare repo, otherwise
it will be a standard repo with a workspace.
type: bool
default: 'no'
version_added: "1.4"
umask:
description:
- The umask to set before doing any checkouts, or any other
repository maintenance.
type: raw
version_added: "2.2"
recursive:
description:
- If C(no), repository will be cloned without the --recursive
option, skipping sub-modules.
type: bool
default: 'yes'
version_added: "1.6"
single_branch:
description:
- Clone only the history leading to the tip of the specified I(branch).
type: bool
default: 'no'
version_added: '2.11'
track_submodules:
description:
- If C(yes), submodules will track the latest commit on their
master branch (or other branch specified in .gitmodules). If
C(no), submodules will be kept at the revision specified by the
main project. This is equivalent to specifying the --remote flag
to git submodule update.
type: bool
default: 'no'
version_added: "1.8"
verify_commit:
description:
- If C(yes), when cloning or checking out a I(version) verify the
signature of a GPG signed commit. This requires git version>=2.1.0
to be installed. The commit MUST be signed and the public key MUST
be present in the GPG keyring.
type: bool
default: 'no'
version_added: "2.0"
archive:
description:
- Specify archive file path with extension. If specified, creates an
archive file of the specified format containing the tree structure
for the source tree.
Allowed archive formats ["zip", "tar.gz", "tar", "tgz"].
- This will clone and perform git archive from local directory as not
all git servers support git archive.
type: path
version_added: "2.4"
archive_prefix:
description:
- Specify a prefix to add to each file path in archive. Requires I(archive) to be specified.
version_added: "2.10"
type: str
separate_git_dir:
description:
- The path to place the cloned repository. If specified, Git repository
can be separated from working tree.
type: path
version_added: "2.7"
gpg_whitelist:
description:
- A list of trusted GPG fingerprints to compare to the fingerprint of the
GPG-signed commit.
- Only used when I(verify_commit=yes).
- Use of this feature requires Git 2.6+ due to its reliance on git's C(--raw) flag to C(verify-commit) and C(verify-tag).
type: list
elements: str
default: []
version_added: "2.9"
requirements:
- git>=1.7.1 (the command line tool)
notes:
- "If the task seems to be hanging, first verify remote host is in C(known_hosts).
SSH will prompt user to authorize the first contact with a remote host. To avoid this prompt,
one solution is to use the option accept_hostkey. Another solut
|
grs/amqp_subscriptions
|
d.py
|
Python
|
apache-2.0
| 2,628
| 0.003044
|
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import print_function
import optparse
from proton import Url
from proton.handlers import MessagingHandler
from proton.reactor import Container
from proton_extension import Capabilities
class Recv(MessagingHandler):
def __init__(self, url, id, subscription, count):
super(Recv, self).__init__()
self.url = Url(url)
self.id = id
self.subscription = subscription
self.expected = count
self.received = 0
def on_start(self, event):
        # shared, container-scoped id, non-durable
event.container.container_id = self.id
event.container.create_receiver(self.url, name=self.subscription, options=[Capabilities('shared')])
def on_message(self, event):
if self.expected == 0 or self.received < self.expected:
print(event.message.body)
self.received += 1
if self.received == self.expected:
event.receiver.close()
event.connection.close()
parser = optparse.OptionParser(usage="usage: %prog [options]")
parser.add_option("-a", "--address", default="localhost:5672/examples",
help="address from which messages are received (default %default)")
parser.add_option("-m", "--messages", type="int", default=100,
help="number of messages to receive; 0 receives indefinitely (default %default)")
parser.add_option("-i", "--id", default="client-d",
help="client's connection identifier (default %default)")
parser.add_option("-s", "--subscription", default="subscription-d",
help="client's subscription identifier (default %default)")
opts, args = parser.parse_args()
try:
Container(Recv(opts.address, opts.id, opts.subscription, opts.messages)).run()
except KeyboardInterrupt: pass
|
dturner-tw/pants
|
src/python/pants/backend/graph_info/tasks/sorttargets.py
|
Python
|
apache-2.0
| 1,081
| 0.007401
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.build_graph.build_graph import sort_targets
from pants.task.console_task import ConsoleTask
class SortTargets(ConsoleTask):
"""Topologically sort the targets."""
@classmethod
def register_options(cls, register):
super(SortTargets, cls).register_options(register)
register('--reverse', action='store_true', default=False,
help='Sort least-dependent to most-dependent.')
def console_output(self, targets):
sorted_targets = sort_targets(targets)
# sort_targets already returns targets in reverse topologically sorted order.
if not self.get_options().reverse:
sorted_targets = reversed(sorted_targets)
for target in sorted_targets:
if target in self.context.target_roots:
yield target.address.reference()
|
mrawls/kepler-makelc
|
lc_functions.py
|
Python
|
mit
| 5,357
| 0.022961
|
import numpy as np
from pyraf import iraf
from pyraf.iraf import kepler
'''
Useful functions for Kepler light curve processing
Use this with the program 'makelc.py'
Originally by Jean McKeever
Edited and improved by Meredith Rawls
'''
# calculate orbital phase
# times must be a list of observation times in the same units as BJD0
# it returns 'phases': orbital phases from 0 to 1
# (cycle counts are computed internally but are not returned)
def phasecalc(times, period=100, BJD0=2454833):
phases = []
cycles = []
for i in range(0, len(times)):
fracP = (times[i] - BJD0) / period
if fracP < 0:
phases.append(fracP % 1)
cycles.append(int(fracP))
else:
phases.append(fracP % 1)
cycles.append(int(fracP) + 1)
#print(fracP, phases[i])
return phases
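# Illustrative call (hypothetical time value): with the defaults
# period=100 and BJD0=2454833, phasecalc([2454883.0]) -> [0.5],
# i.e. halfway through the first orbital cycle.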
# remove long-term trends
# uses a simple 3rd-order polynomial by default
# operates on one array at a time (e.g., after all quarters have been combined)
def long_detrend(t, flux, order=3):
model = np.polyfit(t, flux, order)
fit = np.zeros(len(t))
# apply the model coefficients to create the fit
for i in range(0, order+1):
fit += model[i]*np.power(t, (order-i))
    #flux = flux/fit*1e6 - 1e6 # put it in ppm >:(
flux = flux/fit*np.median(flux) # don't put it in ppm, because ppm is annoying
return t, flux
# Delete any observation that has one or more NaN values.
# Assumes there are six parallel arrays... use dummy arrays if you don't have 6
# columns of interest to operate on (sorry).
# Operates on one quarter at a time
def nan_delete(time, flux, ferr, other1, other2, other3):
a = []
a = [time, flux, ferr, other1, other2, other3]
    atrans = np.transpose(a)
newatrans = []
newa = []
for row in atrans:
# only save rows that DON'T contain a NaN value
if np.isnan(row).any() != True:
newatrans.append(row)
newa = np.transpose(newatrans)
newtime = newa[0]
newflux = newa[1]
newferr = newa[2]
newother1 = newa[3]
newother2 = newa[4]
newother3 = newa[5]
return newtime, newflux, newferr, newother1, newother2, newother3
# Put data from different quarters on the same AVERAGE level
# operates on a list of arrays (multiple quarters) all at once
# DON'T USE THIS ONE
# def normalize_qtr_avg(flux):
# sumflux = 0
# npts = 0
# for arr in flux:
# sumflux += np.nansum(arr)
# npts += len(arr[arr>0])
# avgflux = sumflux/npts # overall average for all quarters
# for arr in flux:
# avg_arr = np.mean(arr[arr>0]) # average for an individual quarter
# arr += avgflux - avg_arr
# return flux
# Put data from different quarters on the same MEDIAN level
# operates on a list of arrays (multiple quarters) all at once
def normalize_qtr_med(flux):
sumflux = 0
npts = 0
for arr in flux:
sumflux += np.nansum(arr)
npts += len(arr)
avgflux = sumflux/npts # overall average for all quarters
for arr in flux:
med_arr = np.median(arr) # median for an individual quarter
arr += avgflux - med_arr
return flux
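# Illustrative effect (hypothetical levels): quarters with medians 980 and
# 1020 are each shifted so their medians sit at the overall per-point
# average, removing the jump between quarters.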
# Line up the gaps within each quarter
# operates on a list of arrays (multiple quarters) all at once
def lineup_qtr_gaps(time, flux, maskstart, maskend):
diffs = np.zeros(len(time) - 1)
for i in range(0,len(time) - 1): # loop through quarters
# calculate differences between flux points at quarter start/end
start = 0
end = -1
for idx, mask in enumerate(maskstart):
while (time[i][end] > maskstart[idx] and time[i][end] < maskend[idx]):
#print('end', end, time[i][end], maskstart[idx], maskend[idx])
end -= 1
while (time[i+1][start] > maskstart[idx] and time[i+1][start] < maskend[idx]):
#print('start', start, time[i+1][start], maskstart[idx], maskend[idx])
start += 1
diffs[i] = (flux[i][end] - flux[i+1][start])
# maxi will find the point with the largest change in flux
maxi = lambda z: np.where(max(abs(z)) == abs(z))[0][0]
cntr = 0 # counter
max_val = max(abs(diffs))
while max_val > 100: #original value here was 100
# this is the index of the largest change in flux, so it needs adjusting
ind = maxi(diffs)
# this is the actual change in flux associated with that index
diff = diffs[ind]
# adjust the flux at this spot and its neighbor so they meet
flux[ind] = flux[ind] - diff/2.0
flux[ind+1] = flux[ind+1] + diff/2.0
diffs = np.zeros(len(time) - 1)
for i in range(0, len(time) - 1):
# calculate differences between flux points at quarter start/end, again
start = 0
end = -1
for idx, mask in enumerate(maskstart):
while time[i][end] > maskstart[idx] and time[i][end] < maskend[idx]:
#print('end', end, time[i][end], maskstart[idx], maskend[idx])
end -= 1
while time[i+1][start] > maskstart[idx] and time[i+1][start] < maskend[idx]:
#print('start', start, time[i+1][start], maskstart[idx], maskend[idx])
start += 1
diffs[i] = (flux[i][end] - flux[i+1][start])
cntr += 1 # count how many times this while-loop happens
max_val = max(abs(diffs))
# print(max_val, cntr)
return time, flux
# performs detrending with cotrending basis vectors (cbvs)
# lcin and lcout must both be FITS filenames
def kepcotrend(lcin, lcout, cbvfile, maskfile=''):
iraf.kepcotrend(infile=lcin, outfile=lcout, cbvfile=cbvfile,
vectors='1 2', method='simplex', fitpower=1, iterate='yes', sigmaclip=2.0,
maskfile=maskfile, scinterp='None', plot='no', clobber='yes', verbose='no')
return
|
grlee77/numpy
|
numpy/core/overrides.py
|
Python
|
bsd-3-clause
| 8,273
| 0.000363
|
"""Implementation of __array_function__ overrides from NEP-18."""
import collections
import functools
import os
import textwrap
from numpy.core._multiarray_umath import (
add_docstring, implement_array_function, _get_implementing_args)
from numpy.compat._inspect import getargspec
ARRAY_FUNCTION_ENABLED = bool(
int(os.environ.get('NUMPY_EXPERIMENTAL_ARRAY_FUNCTION', 1)))
array_function_like_doc = (
"""like : array_like
Reference object to allow the creation of arrays which are not
NumPy arrays. If an array-like passed in as ``like`` supports
the ``__array_function__`` protocol, the result will be defined
by it. In this case, it ensures the creation of an array object
compatible with that passed in via this argument.
.. note::
The ``like`` keyword is an experimental feature pending on
acceptance of :ref:`NEP 35 <NEP35>`."""
)
def set_array_function_like_doc(public_api):
if public_api.__doc__ is not None:
public_api.__doc__ = public_api.__doc__.replace(
"${ARRAY_FUNCTION_LIKE}",
array_function_like_doc,
)
return public_api
add_docstring(
implement_array_function,
"""
Implement a function with checks for __array_function__ overrides.
All arguments are required, and can only be passed by position.
Parameters
----------
implementation : function
Function that implements the operation on NumPy array without
overrides when called like ``implementation(*args, **kwargs)``.
public_api : function
Function exposed by NumPy's public API originally called like
``public_api(*args, **kwargs)`` on which arguments are now being
checked.
relevant_args : iterable
Iterable of arguments to check for __array_function__ methods.
args : tuple
Arbitrary positional arguments originally passed into ``public_api``.
kwargs : dict
        Arbitrary keyword arguments originally passed into ``public_api``.
Returns
-------
Result from calling ``implementation()`` or an ``__array_function__``
method, as appropriate.
Raises
------
TypeError : if no implementation is found.
""")
# exposed for testing purposes; used internally by implement_array_function
add_docstring(
_get_implementing_args,
"""
Collect arguments on which to call __array_function__.
Parameters
----------
relevant_args : iterable of array-like
Iterable of possibly array-like arguments to check for
__array_function__ methods.
Returns
-------
Sequence of arguments with __array_function__ methods, in the order in
which they should be called.
""")
ArgSpec = collections.namedtuple('ArgSpec', 'args varargs keywords defaults')
def verify_matching_signatures(implementation, dispatcher):
"""Verify that a dispatcher function has the right signature."""
implementation_spec = ArgSpec(*getargspec(implementation))
dispatcher_spec = ArgSpec(*getargspec(dispatcher))
if (implementation_spec.args != dispatcher_spec.args or
implementation_spec.varargs != dispatcher_spec.varargs or
implementation_spec.keywords != dispatcher_spec.keywords or
(bool(implementation_spec.defaults) !=
bool(dispatcher_spec.defaults)) or
(implementation_spec.defaults is not None and
len(implementation_spec.defaults) !=
len(dispatcher_spec.defaults))):
raise RuntimeError('implementation and dispatcher for %s have '
'different function signatures' % implementation)
if implementation_spec.defaults is not None:
if dispatcher_spec.defaults != (None,) * len(dispatcher_spec.defaults):
raise RuntimeError('dispatcher functions can only use None for '
'default argument values')
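# Illustrative pair (assumed, not from NumPy) that satisfies the check above:
# the dispatcher mirrors the implementation's argument names, but all of its
# defaults must be None.
# def _clip_dispatcher(a, a_min=None, a_max=None): return (a,)
# def clip(a, a_min=None, a_max=None): ...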
def set_module(module):
"""Decorator for overriding __module__ on a function or class.
Example usage::
@set_module('numpy')
def example():
pass
assert example.__module__ == 'numpy'
"""
def decorator(func):
if module is not None:
func.__module__ = module
return func
return decorator
# Call textwrap.dedent here instead of in the function so as to avoid
# calling dedent multiple times on the same text
_wrapped_func_source = textwrap.dedent("""
@functools.wraps(implementation)
def {name}(*args, **kwargs):
relevant_args = dispatcher(*args, **kwargs)
return implement_array_function(
implementation, {name}, relevant_args, args, kwargs)
""")
def array_function_dispatch(dispatcher, module=None, verify=True,
docs_from_dispatcher=False):
"""Decorator for adding dispatch with the __array_function__ protocol.
See NEP-18 for example usage.
Parameters
----------
dispatcher : callable
Function that when called like ``dispatcher(*args, **kwargs)`` with
arguments from the NumPy function call returns an iterable of
array-like arguments to check for ``__array_function__``.
module : str, optional
__module__ attribute to set on new function, e.g., ``module='numpy'``.
By default, module is copied from the decorated function.
verify : bool, optional
        If True, verify that the signatures of the dispatcher and decorated
        function match exactly: all required and optional arguments
should appear in order with the same names, but the default values for
all optional arguments should be ``None``. Only disable verification
if the dispatcher's signature needs to deviate for some particular
reason, e.g., because the function has a signature like
``func(*args, **kwargs)``.
docs_from_dispatcher : bool, optional
If True, copy docs from the dispatcher function onto the dispatched
function, rather than from the implementation. This is useful for
functions defined in C, which otherwise don't have docstrings.
Returns
-------
Function suitable for decorating the implementation of a NumPy function.
"""
if not ARRAY_FUNCTION_ENABLED:
def decorator(implementation):
if docs_from_dispatcher:
add_docstring(implementation, dispatcher.__doc__)
if module is not None:
implementation.__module__ = module
return implementation
return decorator
def decorator(implementation):
if verify:
verify_matching_signatures(implementation, dispatcher)
if docs_from_dispatcher:
add_docstring(implementation, dispatcher.__doc__)
# Equivalently, we could define this function directly instead of using
# exec. This version has the advantage of giving the helper function a
        # more interpretable name. Otherwise, the original function does not
# show up at all in many cases, e.g., if it's written in C or if the
# dispatcher gets an invalid keyword argument.
source = _wrapped_func_source.format(name=implementation.__name__)
source_object = compile(
source, filename='<__array_function__ internals>', mode='exec')
scope = {
'implementation': implementation,
'dispatcher': dispatcher,
'functools': functools,
'implement_array_function': implement_array_function,
}
exec(source_object, scope)
public_api = scope[implementation.__name__]
if module is not None:
public_api.__module__ = module
public_api._implementation = implementation
return public_api
return decorator
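# Minimal usage sketch of the decorator above (names assumed for
# illustration, mirroring NumPy's own pattern):
# def _broadcast_to_dispatcher(array, shape, subok=None):
#     return (array,)
# @array_function_dispatch(_broadcast_to_dispatcher, module='numpy')
# def broadcast_to(array, shape, subok=False):
#     ...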
def array_function_from_dispatcher(
implementation, module=None, verify=True, docs_from_dispatcher=True):
"""Like array_function_dispatcher, but with function arguments flipped."""
def decorator(dispatcher):
return array_function_dispatch(
dispatcher, module, verify=verify,
|
srcLurker/home-assistant
|
homeassistant/components/media_player/mpd.py
|
Python
|
mit
| 7,141
| 0
|
"""
Support to interact with a Music Player Daemon.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.mpd/
"""
import logging
import socket
import voluptuous as vol
from homeassistant.components.media_player import (
MEDIA_TYPE_MUSIC, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, PLATFORM_SCHEMA,
SUPPORT_PREVIOUS_TRACK, SUPPORT_TURN_OFF, SUPPORT_TURN_ON,
SUPPORT_VOLUME_SET, SUPPORT_PLAY_MEDIA, MEDIA_TYPE_PLAYLIST,
MediaPlayerDevice)
from homeassistant.const import (
STATE_OFF, STATE_PAUSED, STATE_PLAYING, CONF_PORT, CONF_PASSWORD,
CONF_HOST)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['python-mpd2==0.5.5']
_LOGGER = logging.getLogger(__name__)
CONF_LOCATION = 'location'
DEFAULT_LOCATION = 'MPD'
DEFAULT_PORT = 6600
SUPPORT_MPD = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_TURN_OFF | \
    SUPPORT_TURN_ON | SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | \
SUPPORT_PLAY_MEDIA
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_LOCATION, default=DEFAULT_LOCATION): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the MPD platform."""
daemon = config.get(CONF_HOST)
port = config.get(CONF_PORT)
location = config.get(CONF_LOCATION)
password = config.get(CONF_PASSWORD)
import mpd
# pylint: disable=no-member
try:
mpd_client = mpd.MPDClient()
mpd_client.connect(daemon, port)
if password is not None:
mpd_client.password(password)
mpd_client.close()
mpd_client.disconnect()
except socket.error:
_LOGGER.error("Unable to connect to MPD")
return False
except mpd.CommandError as error:
if "incorrect password" in str(error):
_LOGGER.error("MPD reported incorrect password")
return False
else:
raise
add_devices([MpdDevice(daemon, port, location, password)])
class MpdDevice(MediaPlayerDevice):
"""Representation of a MPD server."""
# pylint: disable=no-member
def __init__(self, server, port, location, password):
"""Initialize the MPD device."""
import mpd
self.server = server
self.port = port
self._name = location
self.password = password
self.status = None
self.currentsong = None
self.client = mpd.MPDClient()
self.client.timeout = 10
self.client.idletimeout = None
self.update()
def update(self):
"""Get the latest data and update the state."""
import mpd
try:
self.status = self.client.status()
self.currentsong = self.client.currentsong()
except (mpd.ConnectionError, BrokenPipeError, ValueError):
# Cleanly disconnect in case connection is not in valid state
try:
self.client.disconnect()
except mpd.ConnectionError:
pass
self.client.connect(self.server, self.port)
if self.password is not None:
self.client.password(self.password)
self.status = self.client.status()
self.currentsong = self.client.currentsong()
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the media state."""
if self.status['state'] == 'play':
return STATE_PLAYING
elif self.status['state'] == 'pause':
return STATE_PAUSED
else:
return STATE_OFF
@property
def media_content_id(self):
"""Content ID of current playing media."""
return self.currentsong['id']
@property
def media_content_type(self):
"""Content type of current playing media."""
return MEDIA_TYPE_MUSIC
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
# Time does not exist for streams
return self.currentsong.get('time')
@property
def media_title(self):
"""Title of current playing media."""
name = self.currentsong.get('name', None)
title = self.currentsong.get('title', None)
if name is None and title is None:
return "None"
elif name is None:
return title
elif title is None:
return name
else:
return '{}: {}'.format(name, title)
@property
def media_artist(self):
"""Artist of current playing media (Music track only)."""
return self.currentsong.get('artist')
@property
def media_album_name(self):
"""Album of current playing media (Music track only)."""
return self.currentsong.get('album')
@property
def volume_level(self):
"""Return the volume level."""
return int(self.status['volume'])/100
@property
def supported_media_commands(self):
"""Flag of media commands that are supported."""
return SUPPORT_MPD
def turn_off(self):
"""Service to send the MPD the command to stop playing."""
self.client.stop()
def turn_on(self):
"""Service to send the MPD the command to start playing."""
self.client.play()
def set_volume_level(self, volume):
"""Set volume of media player."""
self.client.setvol(int(volume * 100))
def volume_up(self):
"""Service to send the MPD the command for volume up."""
current_volume = int(self.status['volume'])
if current_volume <= 100:
self.client.setvol(current_volume + 5)
def volume_down(self):
"""Service to send the MPD the command for volume down."""
current_volume = int(self.status['volume'])
if current_volume >= 0:
self.client.setvol(current_volume - 5)
def media_play(self):
"""Service to send the MPD the command for play/pause."""
self.client.pause(0)
def media_pause(self):
"""Service to send the MPD the command for play/pause."""
self.client.pause(1)
def media_next_track(self):
"""Service to send the MPD the command for next track."""
self.client.next()
def media_previous_track(self):
"""Service to send the MPD the command for previous track."""
self.client.previous()
def play_media(self, media_type, media_id, **kwargs):
"""Send the media player the command for playing a playlist."""
_LOGGER.info(str.format("Playing playlist: {0}", media_id))
if media_type == MEDIA_TYPE_PLAYLIST:
self.client.clear()
self.client.load(media_id)
self.client.play()
else:
_LOGGER.error(str.format("Invalid media type. Expected: {0}",
MEDIA_TYPE_PLAYLIST))
|
Lukasa/urllib3
|
test/contrib/test_socks.py
|
Python
|
mit
| 21,538
| 0
|
import threading
import socket
from urllib3.contrib import socks
from urllib3.exceptions import ConnectTimeoutError, NewConnectionError
from dummyserver.server import DEFAULT_CERTS
from dummyserver.testcase import IPV4SocketDummyServerTestCase
from nose.plugins.skip import SkipTest
try:
import ssl
from urllib3.util import ssl_ as better_ssl
HAS_SSL = True
except ImportError:
ssl = None
better_ssl = None
HAS_SSL = False
SOCKS_NEGOTIATION_NONE = b'\x00'
SOCKS_NEGOTIATION_PASSWORD = b'\x02'
SOCKS_VERSION_SOCKS4 = b'\x04'
SOCKS_VERSION_SOCKS5 = b'\x05'
def _get_free_port(host):
"""
Gets a free port by opening a socket, binding it, checking the assigned
port, and then closing it.
"""
s = socket.socket()
s.bind((host, 0))
port = s.getsockname()[1]
s.close()
return port
def _read_exactly(sock, amt):
"""
Read *exactly* ``amt`` bytes from the socket ``sock``.
"""
data = b''
while amt > 0:
chunk = sock.recv(amt)
data += chunk
amt -= len(chunk)
return data
def _read_until(sock, char):
"""
Read from the socket until the character is received.
"""
chunks = []
while True:
        chunk = sock.recv(1)
chunks.append(chunk)
if chunk == char:
break
return b''.join(chunks)
def _address_from_socket(sock):
"""
Returns the address from the SOCKS socket
"""
addr_type = sock.recv(1)
if addr_type == b'\x01':
ipv4_addr = _read_exactly(sock, 4)
return socket.inet_ntoa(ipv4_addr)
elif addr_type == b'\x04':
ipv6_addr = _read_exactly(sock, 16)
return socket.inet_ntop(socket.AF_INET6, ipv6_addr)
elif addr_type == b'\x03':
addr_len = ord(sock.recv(1))
return _read_exactly(sock, addr_len)
else:
raise RuntimeError("Unexpected addr type: %r" % addr_type)
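# Note: the address-type bytes handled above follow RFC 1928 (SOCKS5):
# 0x01 = IPv4, 0x03 = domain name, 0x04 = IPv6.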
def handle_socks5_negotiation(sock, negotiate, username=None,
password=None):
"""
Handle the SOCKS5 handshake.
Returns a generator object that allows us to break the handshake into
steps so that the test code can intervene at certain useful points.
"""
received_version = sock.recv(1)
assert received_version == SOCKS_VERSION_SOCKS5
nmethods = ord(sock.recv(1))
methods = _read_exactly(sock, nmethods)
if negotiate:
assert SOCKS_NEGOTIATION_PASSWORD in methods
send_data = SOCKS_VERSION_SOCKS5 + SOCKS_NEGOTIATION_PASSWORD
sock.sendall(send_data)
# This is the password negotiation.
negotiation_version = sock.recv(1)
assert negotiation_version == b'\x01'
ulen = ord(sock.recv(1))
provided_username = _read_exactly(sock, ulen)
plen = ord(sock.recv(1))
provided_password = _read_exactly(sock, plen)
if username == provided_username and password == provided_password:
sock.sendall(b'\x01\x00')
else:
sock.sendall(b'\x01\x01')
sock.close()
yield False
return
else:
assert SOCKS_NEGOTIATION_NONE in methods
send_data = SOCKS_VERSION_SOCKS5 + SOCKS_NEGOTIATION_NONE
sock.sendall(send_data)
# Client sends where they want to go.
received_version = sock.recv(1)
command = sock.recv(1)
reserved = sock.recv(1)
addr = _address_from_socket(sock)
port = _read_exactly(sock, 2)
port = (ord(port[0:1]) << 8) + (ord(port[1:2]))
# Check some basic stuff.
assert received_version == SOCKS_VERSION_SOCKS5
assert command == b'\x01' # Only support connect, not bind.
assert reserved == b'\x00'
# Yield the address port tuple.
succeed = yield addr, port
if succeed:
# Hard-coded response for now.
response = (
SOCKS_VERSION_SOCKS5 + b'\x00\x00\x01\x7f\x00\x00\x01\xea\x60'
)
else:
# Hard-coded response for now.
response = SOCKS_VERSION_SOCKS5 + b'\x01\00'
sock.sendall(response)
yield True # Avoid StopIteration exceptions getting fired.
def handle_socks4_negotiation(sock, username=None):
"""
Handle the SOCKS4 handshake.
Returns a generator object that allows us to break the handshake into
steps so that the test code can intervene at certain useful points.
"""
received_version = sock.recv(1)
command = sock.recv(1)
port = _read_exactly(sock, 2)
port = (ord(port[0:1]) << 8) + (ord(port[1:2]))
addr = _read_exactly(sock, 4)
provided_username = _read_until(sock, b'\x00')[:-1] # Strip trailing null.
if addr == b'\x00\x00\x00\x01':
# Magic string: means DNS name.
addr = _read_until(sock, b'\x00')[:-1] # Strip trailing null.
else:
addr = socket.inet_ntoa(addr)
# Check some basic stuff.
assert received_version == SOCKS_VERSION_SOCKS4
assert command == b'\x01' # Only support connect, not bind.
if username is not None and username != provided_username:
sock.sendall(b'\x00\x5d\x00\x00\x00\x00\x00\x00')
sock.close()
yield False
return
# Yield the address port tuple.
succeed = yield addr, port
if succeed:
response = b'\x00\x5a\xea\x60\x7f\x00\x00\x01'
else:
response = b'\x00\x5b\x00\x00\x00\x00\x00\x00'
sock.sendall(response)
yield True # Avoid StopIteration exceptions getting fired.
class TestSocks5Proxy(IPV4SocketDummyServerTestCase):
"""
Test the SOCKS proxy in SOCKS5 mode.
"""
def test_basic_request(self):
def request_handler(listener):
sock = listener.accept()[0]
handler = handle_socks5_negotiation(sock, negotiate=False)
addr, port = next(handler)
self.assertEqual(addr, '16.17.18.19')
            self.assertEqual(port, 80)
handler.send(True)
while True:
buf = sock.recv(65535)
if buf.endswith(b'\r\n\r\n'):
break
sock.sendall(b'HTTP/1.1 200 OK\r\n'
b'Server: SocksTestServer\r\n'
b'Content-Length: 0\r\n'
b'\r\n')
sock.close()
self._start_server(request_handler)
proxy_url = "socks5://%s:%s" % (self.host, self.port)
pm = socks.SOCKSProxyManager(proxy_url)
self.addCleanup(pm.clear)
response = pm.request('GET', 'http://16.17.18.19')
self.assertEqual(response.status, 200)
self.assertEqual(response.data, b'')
self.assertEqual(response.headers['Server'], 'SocksTestServer')
def test_local_dns(self):
def request_handler(listener):
sock = listener.accept()[0]
handler = handle_socks5_negotiation(sock, negotiate=False)
addr, port = next(handler)
self.assertIn(addr, ['127.0.0.1', '::1'])
            self.assertEqual(port, 80)
handler.send(True)
while True:
buf = sock.recv(65535)
if buf.endswith(b'\r\n\r\n'):
break
sock.sendall(b'HTTP/1.1 200 OK\r\n'
b'Server: SocksTestServer\r\n'
b'Content-Length: 0\r\n'
b'\r\n')
sock.close()
self._start_server(request_handler)
proxy_url = "socks5://%s:%s" % (self.host, self.port)
pm = socks.SOCKSProxyManager(proxy_url)
response = pm.request('GET', 'http://localhost')
self.assertEqual(response.status, 200)
self.assertEqual(response.data, b'')
self.assertEqual(response.headers['Server'], 'SocksTestServer')
def test_correct_header_line(self):
def request_handler(listener):
sock = listener.accept()[0]
handler = handle_socks5_negotiation(sock, negotiate=False)
addr, port = next(handler)
self.assertEqual(addr, b'example.com')
            self.assertEqual(port, 80)
handler.send(True)
buf = b''
while True:
buf += sock.recv(655
|
tundebabzy/frappe
|
frappe/model/db_query.py
|
Python
|
mit
| 21,717
| 0.027766
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
from six import iteritems, string_types
"""build query for doclistview and return results"""
import frappe, json, copy, re
import frappe.defaults
import frappe.share
import frappe.permissions
from frappe.utils import flt, cint, getdate, get_datetime, get_time, make_filter_tuple, get_filter, add_to_date
from frappe import _
from frappe.model import optional_fields
from frappe.model.utils.user_settings import get_user_settings, update_user_settings
from datetime import datetime
class DatabaseQuery(object):
def __init__(self, doctype, user=None):
self.doctype = doctype
self.tables = []
self.conditions = []
self.or_conditions = []
self.fields = None
self.user = user or frappe.session.user
self.ignore_ifnull = False
self.flags = frappe._dict()
def execute(self, query=None, fields=None, filters=None, or_filters=None,
docstatus=None, group_by=None, order_by=None, limit_start=False,
limit_page_length=None, as_list=False, with_childnames=False, debug=False,
ignore_permissions=False, user=None, with_comment_count=False,
join='left join', distinct=False, start=None, page_length=None, limit=None,
ignore_ifnull=False, save_user_settings=False, save_user_settings_fields=False,
update=None, add_total_row=None, user_settings=None):
if not ignore_permissions and not frappe.has_permission(self.doctype, "read", user=user):
frappe.flags.error_message = _('Insufficient Permission for {0}').format(frappe.bold(self.doctype))
            raise frappe.PermissionError(self.doctype)
# filters and fields swappable
# its hard to remember what comes first
if (isinstance(fields, dict)
or (isinstance(fields, list) and fields and isinstance(fields[0], list))):
# if fields is given as dict/list of list, its probably filters
filters, fields = fields, filters
        elif fields and isinstance(filters, list) \
            and len(filters) > 1 and isinstance(filters[0], string_types):
# if `filters` is a list of strings, its probably fields
filters, fields = fields, filters
if fields:
self.fields = fields
else:
self.fields = ["`tab{0}`.`name`".format(self.doctype)]
if start: limit_start = start
if page_length: limit_page_length = page_length
if limit: limit_page_length = limit
self.filters = filters or []
self.or_filters = or_filters or []
self.docstatus = docstatus or []
self.group_by = group_by
self.order_by = order_by
self.limit_start = 0 if (limit_start is False) else cint(limit_start)
self.limit_page_length = cint(limit_page_length) if limit_page_length else None
self.with_childnames = with_childnames
self.debug = debug
self.join = join
self.distinct = distinct
self.as_list = as_list
self.ignore_ifnull = ignore_ifnull
self.flags.ignore_permissions = ignore_permissions
self.user = user or frappe.session.user
self.update = update
self.user_settings_fields = copy.deepcopy(self.fields)
if user_settings:
self.user_settings = json.loads(user_settings)
if query:
result = self.run_custom_query(query)
else:
result = self.build_and_run()
if with_comment_count and not as_list and self.doctype:
self.add_comment_count(result)
if save_user_settings:
self.save_user_settings_fields = save_user_settings_fields
self.update_user_settings()
return result
def build_and_run(self):
args = self.prepare_args()
args.limit = self.add_limit()
if args.conditions:
args.conditions = "where " + args.conditions
if self.distinct:
args.fields = 'distinct ' + args.fields
query = """select %(fields)s from %(tables)s %(conditions)s
%(group_by)s %(order_by)s %(limit)s""" % args
return frappe.db.sql(query, as_dict=not self.as_list, debug=self.debug, update=self.update)
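# Sketch of a rendered query for illustration (hypothetical doctype and values,
# not from the original source):
#   select `tabToDo`.`name` from `tabToDo`
#   where `tabToDo`.`owner` = 'test@example.com'
#   order by `tabToDo`.`modified` desc limit 0, 20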
def prepare_args(self):
self.parse_args()
self.sanitize_fields()
self.extract_tables()
self.set_optional_columns()
self.build_conditions()
args = frappe._dict()
if self.with_childnames:
for t in self.tables:
if t != "`tab" + self.doctype + "`":
self.fields.append(t + ".name as '%s:name'" % t[4:-1])
# query dict
args.tables = self.tables[0]
# left join parent, child tables
for child in self.tables[1:]:
args.tables += " {join} {child} on ({child}.parent = {main}.name)".format(join=self.join,
child=child, main=self.tables[0])
if self.grouped_or_conditions:
self.conditions.append("({0})".format(" or ".join(self.grouped_or_conditions)))
args.conditions = ' and '.join(self.conditions)
if self.or_conditions:
args.conditions += (' or ' if args.conditions else "") + \
' or '.join(self.or_conditions)
self.set_field_tables()
args.fields = ', '.join(self.fields)
self.set_order_by(args)
self.validate_order_by_and_group_by(args.order_by)
args.order_by = args.order_by and (" order by " + args.order_by) or ""
self.validate_order_by_and_group_by(self.group_by)
args.group_by = self.group_by and (" group by " + self.group_by) or ""
return args
def parse_args(self):
"""Convert fields and filters from strings to list, dicts"""
if isinstance(self.fields, string_types):
if self.fields == "*":
self.fields = ["*"]
else:
try:
self.fields = json.loads(self.fields)
except ValueError:
self.fields = [f.strip() for f in self.fields.split(",")]
for filter_name in ["filters", "or_filters"]:
filters = getattr(self, filter_name)
if isinstance(filters, string_types):
filters = json.loads(filters)
if isinstance(filters, dict):
fdict = filters
filters = []
for key, value in iteritems(fdict):
filters.append(make_filter_tuple(self.doctype, key, value))
setattr(self, filter_name, filters)
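# Sketch for illustration (assuming a hypothetical doctype "ToDo"): a dict
# filter like {"status": "Open"} becomes roughly [["ToDo", "status", "=", "Open"]]
# via make_filter_tuple; a JSON string in `fields` is json.loads'ed, and a plain
# string such as "name, owner" falls back to the comma split above.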
def sanitize_fields(self):
'''
regex : ^.*[,();].*
purpose : The regex will look for malicious patterns like `,`, '(', ')', ';' in each
field, which may lead to SQL injection.
example :
field = "`DocType`.`issingle`, version()"
As field contains `,` and mysql function `version()`, with the help of regex
the system will filter out this field.
'''
regex = re.compile('^.*[,();].*')
blacklisted_keywords = ['select', 'create', 'insert', 'delete', 'drop', 'update', 'case']
blacklisted_functions = ['concat', 'concat_ws', 'if', 'ifnull', 'nullif', 'coalesce',
'connection_id', 'current_user', 'database', 'last_insert_id', 'session_user',
'system_user', 'user', 'version']
def _raise_exception():
frappe.throw(_('Cannot use sub-query or function in fields'), frappe.DataError)
for field in self.fields:
if regex.match(field):
if any(keyword in field.lower() for keyword in blacklisted_keywords):
_raise_exception()
if any("{0}(".format(keyword) in field.lower() \
for keyword in blacklisted_functions):
_raise_exception()
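# Sketch for illustration (the regex and blacklists above are local to
# sanitize_fields, so this just walks through the logic):
#   regex.match("`tabDocType`.`issingle`, version()") is truthy (contains `,`
#   and parentheses) and "version(" is blacklisted, so _raise_exception() fires;
#   "count(name)" also matches the regex but hits no blacklisted term, so it
#   passes through untouched.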
def extract_tables(self):
"""extract tables from fields"""
self.tables = ['`tab' + self.doctype + '`']
# add tables from fields
if self.fields:
for f in self.fields:
if ( not ("tab" in f and "." in f) ) or ("locate(" in f) or ("count(" in f):
continue
table_name = f.split('.')[0]
if table_name.lower().startswith('group_concat('):
table_name = table_name[13:]
if table_name.lower().startswith('ifnull('):
table_name = table_name[7:]
if not table_name[0]=='`':
table_name = '`' + table_name + '`'
if not table_name in self.tables:
self.append_table(table_name)
def append_table(self, table_name):
self.tables.append(table_name)
doctype = table_name[4:-1]
if (not self.flags.ignore_permissions) and (not frappe.has_permission(doctype)):
frappe.flags.error_message = _('Insufficient Permission for {0}').format(frappe.bold(doctype))
raise frappe.PermissionError(doctype)
def set_field_tables(self):
'''If there is more than one table, the fieldname must not be ambiguous.
If the fieldname is not explicitly mentioned, set the default table'''
if len(self.tables) > 1:
for i, f in enumerate(self.fields):
if '.' not in f:
self.fields[i] = '{0}.{1}'.format(self.tables[0], f)
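# Sketch for illustration: with self.tables == ["`tabToDo`", "`tabNote`"], a
# bare field "status" is rewritten to "`tabToDo`.status" so the generated SQL
# stays unambiguous across the joined tables.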
def set_op
|
anksp21/Community-Zenpacks
|
ZenPacks.community.powerware/setup.py
|
Python
|
gpl-2.0
| 3,281
| 0.009448
|
###########################################################################
#
# This program is part of Zenoss Core, an open source monitoring platform.
# Copyright (C) 2008, Zenoss Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 as published by
# the Free Software Foundation.
#
# For complete information please visit: http://www.zenoss.com/oss/
#
###########################################################################
################################
# These variables are overwritten by Zenoss when the ZenPack is exported
# or saved. Do not modify them directly here.
NAME = 'ZenPacks.community.powerware'
VERSION = '1.0'
AUTHOR = 'Alexander Vorobiyov'
LICENSE = ''
NAMESPACE_PACKAGES = ['ZenPacks', 'ZenPacks.community']
PACKAGES = ['ZenPacks', 'ZenPacks.community', 'ZenPacks.community.powerware']
INSTALL_REQUIRES = []
COMPAT_ZENOSS_VERS = '>=2.2'
PREV_ZENPACK_NAME = ''
# STOP_REPLACEMENTS
################################
# Zenoss will not overwrite any changes you make below here.
from setuptools import setup, find_packages
setup(
# This ZenPack metadata should usually be edited with the Zenoss
# ZenPack edit page. Whenever the edit page is submitted it will
# overwrite the values below (the ones it knows about) with new values.
name = NAME,
version = VERSION,
author = AUTHOR,
license = LICENSE,
# This is the version spec which indicates what versions of Zenoss
# this ZenPack is compatible with
compatZenossVers = COMPAT_ZENOSS_VERS,
# previousZenPackName is a facility for telling Zenoss that the name
# of this ZenPack has changed. If no ZenPack with the current name is
# installed, then a ZenPack with the previous name (if installed) will be upgraded.
prevZenPackName = PREV_ZENPACK_NAME,
# Indicate to setuptools which namespace packages the zenpack
# participates in
namespace_packages = NAMESPACE_PACKAGES,
# Tell setuptools what packages this zenpack provides.
packages = find_packages(),
# Tell setuptools to figure out for itself which files to include
# in the binary egg when it is built.
include_package_data = True,
# Tell setuptools what non-python files should also be included
# with the binary egg.
package_data = {
'': ['*.txt', '../COPYRIGHT.txt', '../LICENSE.txt'],
NAME: ['objects/*','skins/*/*','services/*', 'reports/*/*',
'modeler/*/*', 'daemons/*', 'lib/*', 'libexec/*'],
},
# Indicate dependencies on other python modules or ZenPacks. This line
# is modified by zenoss when the ZenPack edit page is submitted. Zenoss
# tries to add or delete the names it manages at the beginning of this
# list, so any manual additions should be added to the end. Things will
# go poorly if this line is broken into multiple lines or modified too
# dramatically.
install_requires = INSTALL_REQUIRES,
# Every ZenPack egg must define exactly one zenoss.zenpacks entry point
# of this form.
entry_points = {
'zenoss.zenpacks': '%s = %s' % (NAME, NAME),
},
# All ZenPack eggs must be installed in unzipped form.
zip_safe = False,
)
|
Maethorin/pivocram
|
tests/unit/test_pivocram.py
|
Python
|
mit
| 8,498
| 0.00353
|
# -*- coding: utf-8 -*-
from tests import base
from app import pivocram
class PivocramConnectTest(base.TestCase):
def setUp(self):
self.connect = pivocram.Connect('PIVOTAL_TEST_TOKEN')
def test_should_have_the_pivotal_api_url(self):
self.connect.PIVOTAL_URL.should.be.equal('https://www.pivotaltracker.com/services/v5')
def test_should_have_header_with_token(self):
self.connect.headers.should.be.equal({'X-TrackerToken': 'PIVOTAL_TEST_TOKEN'})
def test_should_have_projects_url_for_list(self):
self.connect.projects_url().should.be.equal('https://www.pivotaltracker.com/services/v5/projects')
def test_should_have_projects_url_for_item(self):
self.connect.projects_url(123).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123')
def test_should_have_account_member_url(self):
self.connect.account_member_url(123, 333).should.be.equal('https://www.pivotaltracker.com/services/v5/accounts/123/memberships/333')
def test_should_have_iterations_url(self):
self.connect.iterations_url(123, 1).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/iterations/1')
def test_should_have_project_story_url(self):
self.connect.project_story_url(123, 1234).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/stories/1234')
def test_should_have_project_story_tasks_url(self):
self.connect.project_story_tasks_url(123, 1234).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/stories/1234/tasks')
def test_should_have_project_story_task_url(self):
self.connect.project_story_task_url(123, 1234, 12345).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/stories/1234/tasks/12345')
@base.TestCase.mock.patch('app.pivocram.requests')
def test_should_make_get(self, req_mock):
response = self.mock.MagicMock()
response.json.return_value = 'req-response'
req_mock.get.return_value = response
self.connect.get('url').should.be.equal('req-response')
req_mock.get.assert_called_with('url', headers={'X-TrackerToken': 'PIVOTAL_TEST_TOKEN'})
@base.TestCase.mock.patch('app.pivocram.requests')
def test_should_make_put(self, req_mock):
response = self.mock.MagicMock()
response.json.return_value = 'req-response'
req_mock.put.return_value = response
self.connect.put('url', {'data': 'value'}).should.be.equal('req-response')
req_mock.put.assert_called_with('url', {'data': 'value'}, headers={'X-TrackerToken': 'PIVOTAL_TEST_TOKEN'})
def test_should_get_projects_list(self):
self.connect.get = self.mock.MagicMock(return_value='req-response')
self.connect.projects_url = self.mock.MagicMock(return_value='url-projects')
self.connect.get_projects().should.be.equal('req-response')
self.connect.get.assert_called_with('url-projects')
def test_should_get_project(self):
self.connect.get = self.mock.MagicMock(return_value='req-response')
self.connect.projects_url = self.mock.MagicMock(return_value='url-projects')
self.connect.get_project(123).should.be.equal('req-response')
self.connect.get.assert_called_with('url-projects')
self.connect.projects_url.assert_called_with(123)
def test_should_get_project_member(self):
self.connect.get = self.mock.MagicMock(return_value='req-response')
self.connect.account_member_url = self.mock.MagicMock(return_value='url-project-member')
self.connect.get_account_member(123, 333).should.be.equal('req-response')
self.connect.get.assert_called_with('url-project-member')
self.connect.account_member_url.assert_called_with(123, 333)
def test_should_get_project_story_tasks(self):
self.connect.get = self.mock.MagicMock(return_value='req-response')
self.connect.project_story_tasks_url = self.mock.MagicMock(return_value='url-tasks')
self.connect.get_project_story_tasks(123, 1234).should.be.equal('req-response')
self.connect.get.assert_called_with('url-tasks')
self.connect.project_story_tasks_url.assert_called_with(123, 1234)
def test_should_get_iteration_stories(self):
self.connect.get = self.mock.MagicMock(return_value='req-response')
self.connect.iterations_url = self.mock.MagicMock(return_value='url-iterations')
self.connect.get_current_iteration(123, 1).should.be.equal('req-response')
self.connect.get.assert_called_with('url-iterations')
self.connect.iterations_url.assert_called_with(123, 1)
def test_should_update_story(self):
self.connect.put = self.mock.MagicMock(return_value='req-response')
self.connect.project_story_url = self.mock.MagicMock(return_value='url-stories')
self.connect.update_story(123, 1234, {'data': 'value'}).should.be.equal('req-response')
self.connect.put.assert_called_with('url-stories', {'data': 'value'})
self.connect.project_story_url.assert_called_with(123, 1234)
def test_should_update_story_task(self):
self.connect.put = self.mock.MagicMock(return_value='req-response')
self.connect.project_story_task_url = self.mock.MagicMock(return_value='url-stories')
self.connect.update_story_task(123, 1234, 12345, {'data': 'value'}).should.be.equal('req-response')
self.connect.put.assert_called_with('url-stories', {'data': 'value'})
self.connect.project_story_task_url.assert_called_with(123, 1234, 12345)
class PivocramClientTest(base.TestCase):
project_mock = {"current_iteration_number": 1}
def setUp(self):
user = self.mock.MagicMock()
user.pivotal_token = 'PIVOTAL_TEST_TOKEN'
self.client = pivocram.Client(user, project_id='PROJECT-ID')
def test_should_have_connect_attribute(self):
self.assertTrue(isinstance(self.client.connect, pivocram.Connect))
def test_should_be_create_with_project_id(self):
self.client.project_id.should.be.equal('PROJECT-ID')
def test_should_have_property_list_stories(self):
self.client._current_iteration = 'CURRENT'
self.client.current_iteration.should.be.equal('CURRENT')
def test_should_have_method_to_get_story(self):
self.client.get_story('STORY-ID').should.be.equal(None)
def test_should_have_method_to_list_story_tasks(self):
self.client.connect = self.mock.MagicMock()
self.client.connect.get_project_story_tasks.return_value = [1, 2, 3]
self.client.get_story_tasks('STORY-ID').should.be.equal([1, 2, 3])
self.client.connect.get_project_story_tasks.assert_called_with('PROJECT-ID', 'STORY-ID')
def test_should_have_method_to_get_story_task(self):
self.client.get_story_task('STORY-ID', 'TASKS-ID').should.be.equal(None)
def test_should_get_projects(self):
self.client.connect = self.mock.MagicMock()
self.client.connect.get_projects.return_value = [1, 2, 3]
self.client.get_projects().should.be.equal([1, 2, 3])
def test_should_get_empty_if_no_projects(self):
self.client.connect = self.mock.MagicMock()
self.client.connect.get_projects.return_value = []
self.client.get_projects().should.be.equal([])
def test_should_set_current_iteration(self):
self.client.connect = self.mock.MagicMock()
self.client.connect.get_project.return_value = self.project_mock
self.client._current_iteration_number = None
self.client.current_iteration_number.should.be.equal(1)
self.client.connect.get_project.assert_called_with('PROJECT-ID')
def test_should_get_current_stories(self):
self.client.connect = self.mock.MagicMock()
self.client.connect.get_current_iteration.return_value = {'stories': [1, 2, 3]}
self.client.current_iteration.should.be.equal({'stories': [1, 2, 3]})
def test_should_update_story(self):
self.client.connect = self.mock.MagicMock()
self.client.connect.update_story.return_value = {'id': 1234}
self.client.update_story(1234, {'data'
|
robobrobro/coffer
|
coffer/command/commands/__init__.py
|
Python
|
mit
| 53
| 0
|
""" Functions and cla
|
sses dealing with commands. """
|
|
mitdbg/modeldb
|
client/verta/verta/_swagger/_public/modeldb/model/ModeldbFindHydratedProjectsByTeam.py
|
Python
|
mit
| 1,079
| 0.012048
|
# THIS FILE IS AUTO-GENERATED. DO NOT EDIT
from verta._swagger.base_type import BaseType
class ModeldbFindHydratedProjectsByTeam(BaseType):
def __init__(self, find_projects=None, org_id=None, name=None, id=None):
required = {
"find_projects": False,
"org_id": False,
"name": False,
"id": False,
}
self.find_projects = find_projects
self.org_id = org_id
self.name = name
self.id = id
for k, v in required.items():
if self[k] is None and v:
raise ValueError('attribute {} is required'.format(k))
@staticmethod
def from_json(d):
from .ModeldbFindProjects import ModeldbFindProjects
tmp = d.get('find_projects', None)
if tmp is not None:
d['find_projects'] = ModeldbFindProjects.from_json(tmp)
tmp = d.get('org_id', None)
if tmp is not None:
d['org_id'] = tmp
tmp = d.get('name', None)
if tmp is not None:
d['name'] = tmp
tmp = d.get('id', None)
if tmp is not None:
d['id'] = tmp
return ModeldbFindHydratedProjectsByTeam(**d)
|
laborautonomo/bitmask_client
|
src/leap/bitmask/services/eip/darwinvpnlauncher.py
|
Python
|
gpl-3.0
| 6,729
| 0
|
# -*- coding: utf-8 -*-
# darwinvpnlauncher.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Darwin VPN launcher implementation.
"""
import commands
import getpass
import logging
import os
import sys
from leap.bitmask.services.eip.vpnlauncher import VPNLauncher
from leap.bitmask.services.eip.vpnlauncher import VPNLauncherException
from leap.bitmask.util import get_path_prefix
logger = logging.getLogger(__name__)
class EIPNoTunKextLoaded(VPNLauncherException):
pass
class DarwinVPNLauncher(VPNLauncher):
"""
VPN launcher for the Darwin Platform
"""
COCOASUDO = "cocoasudo"
# XXX need the good old magic translate for these strings
# (look for magic in 0.2.0 release)
SUDO_MSG = ("Bitmask needs administrative privileges to run "
"Encrypted Internet.")
INSTALL_MSG = ("\"Bitmask needs administrative privileges to install "
"missing scripts and fix permissions.\"")
# Hardcode the installation path for OSX for security, openvpn is
# run as root
INSTALL_PATH = "/Applications/Bitmask.app/"
INSTALL_PATH_ESCAPED = os.path.realpath(os.getcwd() + "/../../")
OPENVPN_BIN = 'openvpn.leap'
OPENVPN_PATH = "%s/Contents/Resources/openvpn" % (INSTALL_PATH,)
OPENVPN_PATH_ESCAPED = "%s/Contents/Resources/openvpn" % (
INSTALL_PATH_ESCAPED,)
OPENVPN_BIN_PATH = "%s/Contents/Resources/%s" % (INSTALL_PATH,
OPENVPN_BIN)
UP_SCRIPT = "%s/client.up.sh" % (OPENVPN_PATH,)
DOWN_SCRIPT = "%
|
s/client.down.sh" % (OPENVPN_PATH,)
OPENVPN_DOWN_PLUGIN = '%s/openvpn-down-root.so' % (OPENVPN_PATH,)
UPDOWN_FILES = (UP_SCRIPT, DOWN_SCRIPT, OPENVPN_DOWN_PLUGIN)
OTHER_FILES = []
@classmethod
def cmd_for_missing_scripts(kls, frompath):
"""
Returns a command that can copy the missing scripts.
:rtype: str
"""
to = kls.OPENVPN_PATH_ESCAPED
cmd = "#!/bin/sh\n"
cmd += "mkdir -p {0}\n".format(to)
cmd += "cp '{0}'/* {1}\n".format(frompath, to)
cmd += "chmod 744 {0}/*".format(to)
return cmd
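# Sketch of the generated script for illustration, with <to> standing for
# OPENVPN_PATH_ESCAPED and a hypothetical frompath of "/tmp/scripts":
#   #!/bin/sh
#   mkdir -p <to>
#   cp '/tmp/scripts'/* <to>
#   chmod 744 <to>/*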
@classmethod
def is_kext_loaded(kls):
"""
Checks if the needed kext is loaded before launching openvpn.
:returns: True if kext is loaded, False otherwise.
:rtype: bool
"""
return bool(commands.getoutput('kextstat | grep "leap.tun"'))
@classmethod
def _get_icon_path(kls):
"""
Returns the absolute path to the app icon.
:rtype: str
"""
resources_path = os.path.abspath(
os.path.join(os.getcwd(), "../../Contents/Resources"))
return os.path.join(resources_path, "bitmask.tiff")
@classmethod
def get_cocoasudo_ovpn_cmd(kls):
"""
Returns a string with the cocoasudo command needed to run openvpn
as admin with a nice password prompt. The actual command needs to be
appended.
:rtype: (str, list)
"""
# TODO add translation support for this
sudo_msg = ("Bitmask needs administrative privileges to run "
"Encrypted Internet.")
iconpath = kls._get_icon_path()
has_icon = os.path.isfile(iconpath)
args = ["--icon=%s" % iconpath] if has_icon else []
args.append("--prompt=%s" % (sudo_msg,))
return kls.COCOASUDO, args
@classmethod
def get_cocoasudo_installmissing_cmd(kls):
"""
Returns a string with the cocoasudo command needed to install missing
files as admin with a nice password prompt. The actual command needs to
be appended.
:rtype: (str, list)
"""
# TODO add translation support for this
install_msg = ('"Bitmask needs administrative privileges to install '
'missing scripts and fix permissions."')
iconpath = kls._get_icon_path()
has_icon = os.path.isfile(iconpath)
args = ["--icon=%s" % iconpath] if has_icon else []
args.append("--prompt=%s" % (install_msg,))
return kls.COCOASUDO, args
@classmethod
def get_vpn_command(kls, eipconfig, providerconfig, socket_host,
socket_port="unix", openvpn_verb=1):
"""
Returns the OSX implementation for the vpn launching command.
Might raise:
EIPNoTunKextLoaded,
OpenVPNNotFoundException,
VPNLauncherException.
:param eipconfig: eip configuration object
:type eipconfig: EIPConfig
:param providerconfig: provider specific configuration
:type providerconfig: ProviderConfig
:param socket_host: either socket path (unix) or socket IP
:type socket_host: str
:param socket_port: either string "unix" if it's a unix socket,
or port otherwise
:type socket_port: str
:param openvpn_verb: the openvpn verbosity wanted
:type openvpn_verb: int
:return: A VPN command ready to be launched.
:rtype: list
"""
if not kls.is_kext_loaded():
raise EIPNoTunKextLoaded
# we use `super` in order to send the class to use
command = super(DarwinVPNLauncher, kls).get_vpn_command(
eipconfig, providerconfig, socket_host, socket_port, openvpn_verb)
cocoa, cargs = kls.get_cocoasudo_ovpn_cmd()
cargs.extend(command)
command = cargs
command.insert(0, cocoa)
command.extend(['--setenv', "LEAPUSER", getpass.getuser()])
return command
@classmethod
def get_vpn_env(kls):
"""
Returns a dictionary with the custom env for the platform.
This is mainly used for setting LD_LIBRARY_PATH to the correct
path when distributing a standalone client
:rtype: dict
"""
ld_library_path = os.path.join(get_path_prefix(), "..", "lib")
ld_library_path.encode(sys.getfilesystemencoding())
return {
"DYLD_LIBRARY_PATH": ld_library_path
}
|
mindbody/API-Examples
|
SDKs/Python/test/test_client_api.py
|
Python
|
bsd-2-clause
| 5,280
| 0
|
# coding: utf-8
"""
MINDBODY Public API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.api.client_api import ClientApi # noqa: E501
from swagger_client.rest import ApiException
class TestClientApi(unittest.TestCase):
"""ClientApi unit test stubs"""
def setUp(self):
self.api = swagger_client.api.client_api.ClientApi() # noqa: E501
def tearDown(self):
pass
def test_client_add_arrival(self):
"""Test case for client_add_arrival
Add an arrival for a client. # noqa: E501
"""
pass
def test_client_add_client(self):
"""Test case for client_add_client
Add a client to a site. # noqa: E501
"""
pass
def test_client_add_contact_log(self):
"""Test case for client_add_contact_log
Add a contact log to a client's account. # noqa: E501
"""
pass
def test_client_get_active_client_memberships(self):
"""Test case for client_get_active_client_memberships
Get a client's active memberships. # noqa: E501
"""
pass
def test_client_get_client_account_balances(self):
"""Test case for client_get_client_account_balances
Get account balance information for one or more client(s). # noqa: E501
"""
pass
def test_client_get_client_contracts(self):
"""Test case for client_get_client_contracts
Get contracts that a client has purchased. # noqa: E501
"""
pass
def test_client_get_client_formula_notes(self):
"""Test case for client_get_client_formula_notes
Get a client's formula notes. # noqa: E501
"""
pass
def test_client_get_client_indexes(self):
"""Test case for client_get_client_indexes
Get a site's configured client indexes and client index values.  # noqa: E501
"""
pass
def test_client_get_client_purchases(self):
"""Test case for client_get_client_purchases
Get a client's purchase history. # noqa: E501
"""
pass
def test_client_get_client_referral_types(self):
"""Test case for client_get_client_referral_types
Get a site's configured client referral types. # noqa: E501
"""
pass
def test_client_get_client_services(self):
"""Test case for client_get_client_services
Get pricing options that a client has purchased. # noqa: E501
"""
pass
def test_client_get_client_visits(self):
"""Test case for client_get_client_visits
Get a client's visit history. # noqa: E501
"""
pass
def test_client_get_clients(self):
"""Test case for client_get_clients
Get clients. # noqa: E501
"""
pass
def test_client_get_contact_logs(self):
"""Test case for client_get_contact_logs
Get contact logs on a client's account. # noqa: E501
"""
pass
def test_client_get_cross_regional_client_associations(self):
"""Test case for client_get_cross_regional_client_associations
Get a client's cross regional site associations. # noqa: E501
"""
pass
def test_client_get_custom_client_fields(self):
"""Test case for client_get_custom_client_fields
Get a site's configured custom client fields. # noqa: E501
"""
pass
def test_client_get_required_client_fields(self):
"""Test case for client_get_required_client_fields
Get client required fields for a site. # noqa: E501
"""
pass
def test_client_send_password_reset_email(self):
"""Test case for client_send_password_reset_email
Send a password reset email to a client. # noqa: E501
"""
pass
def test_client_update_client(self):
"""Test case for client_update_client
Update a client at a site. # noqa: E501
"""
pass
def test_client_update_client_service(self):
"""Test case for client_update_client_service
Update a client's purchase pricing option. # noqa: E501
"""
pass
def test_client_update_client_visit(self):
"""Test case for client_update_client_visit
Update a client's visit. # noqa: E501
"""
pass
def test_client_update_contact_log(self):
"""Test case for client_update_contact_log
Update a contact log on a client's account. # noqa: E501
"""
pass
def test_client_upload_client_document(self):
"""Test case for client_upload_client_document
Upload a document to a client's profile. # noqa: E501
"""
pass
def test_client_upload_client_photo(self):
"""Test case for client_upload_client_photo
Upload a profile photo to a client's profile. # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
|
agry/NGECore2
|
scripts/houses/player_house_corellia_large_style_02.py
|
Python
|
lgpl-3.0
| 706
| 0.022663
|
import sys
from services.housing import HouseTemplate
from engine.resources.scene import Point3D
def setup(housingTemplates):
houseTemplate = HouseTemplate("object/tangible/deed/player_house_deed/shared_corellia_house_large_style_02_deed.iff", "object/building/player/shared_player_house_generic_large_style_02.iff", 5)
houseTemplate.addBuildingSign("object/tangible/sign/player/shared_house_address.iff", Point3D(float(-13.4), float(3), float(9.1)))
houseTemplate.addPlaceablePlanet("corellia")
houseTemplate.addPlaceablePlanet("talus")
houseTemplate.setDefaultItemLimit(500)
houseTemplate.setBaseMaintenanceRate(26)
housingTemplates.put(houseTemplate.getDeedTemplate(), houseTemplate)
return
|
MiniLight/DeepCL
|
python/benchmarking/deepcl_benchmark2.py
|
Python
|
mpl-2.0
| 11,420
| 0.009632
|
#!/usr/bin/python
# This was originally created to target inclusion in soumith's benchmarks as
# https://github.com/soumith/convnet-benchmarks
# extending this to handle also a couple of clarke and storkey type layers
# and some plausible mnist layers
from __future__ import print_function
import os
import sys
import time
import numpy as np
import random
import json
import subprocess
import PyDeepCL
cmd_line = 'cd python; python setup.py build_ext -i; PYTHONPATH=. python'
for arg in sys.argv:
cmd_line += ' ' + arg
print('cmd_line: [' + cmd_line + ']')
# benchmarking/deepcl_benchmark2.py'
num_epochs = 10
batch_size = 128 # always use this, seems pretty standard
runs = [
# format for single layer: ('[label]','[inputplanes]i[inputsize]-[numfilters]c[filtersize]', 'layer')
# format for full net: ('[label]', '[netdef]', 'fullnet')
('soumith1', '3i128-96c11', 'layer'),
('soumith2', '64i64-128c9', 'layer'),
('soumith3', '128i32-128c9', 'layer'),
('soumith4', '128i16-128c7', 'layer'),
('soumith5', '384i13-384c3', 'layer'),
('maddison-convolve', '128i19-128c3', 'layer'),
('maddison-fc', '128i19-361n', 'layer'),
('mnist-c1', '1i28-8c5', 'layer'),
('mnist-c2', '8i14-16c5', 'layer'),
('mnist-fc', '16i7-150n', 'layer'),
('mnist-full', '1i24-8c5z-relu-mp2-16c5z-relu-mp3-150n-tanh-10n', 'fullnet'),
('mnist-full-factorized', '1i24-8c5z-relu-mp2-16c5z-relu-mp3-150n-tanh-10n', 'fullnet'),
('maddison-full', '8i19-12*128c3z-relu-361n', 'fullnet'),
('maddison-full-factorized', '8i19-12*(128c3z-relu)-361n', 'fullnet')
]
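# Note on the net_string format, as parsed by time_layer/time_fullnet below:
# '3i128-96c11' means 3 input planes at 128x128 feeding a 96-filter 11x11
# convolution, '128i19-361n' a 361-neuron fully-connected layer; 'fullnet'
# entries hand the remainder of the string to NetdefToNet.createNetFromNetdef.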
def write_results( label, net_string, layer, benchmark_type, direction, time_ms ):
global cmd_line
results_dict = {}
results_dict['label'] = label
results_dict['type'] = benchmark_type
results_dict['format'] = 'v0.4'
results_dict['direction'] = direction
results_dict['net_string'] = net_string
if layer is not None:
results_dict['layer_string'] = layer.asString()
results_dict['time_ms'] = str(time_ms)
results_dict['cmd_line'] = cmd_line
f = open('results.txt', 'a')
json.dump(results_dict, f)
f.write( '\n' )
f.close()
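# Sketch of one appended line in results.txt (illustrative values only):
# {"label": "soumith1", "type": "layer", "format": "v0.4",
#  "direction": "forward", "net_string": "3i128-96c11",
#  "layer_string": "...", "time_ms": "123.4", "cmd_line": "..."}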
def time_layer(num_epochs, label, batch_size, net_string):
print('building network...')
input_string, layer_string = net_string.split('-')
input_planes, input_size = map(lambda x: int(x), input_string.split('i'))
cl = PyDeepCL.DeepCL()
net = PyDeepCL.NeuralNet( cl, input_planes, input_size )
net.addLayer( PyDeepCL.ForceBackpropMaker() ) # this forces the next layer to backpropagate gradients
# to this layer
print( net.asString() )
if 'c' in layer_string:
num_filters, filter_size = map(lambda x: int(x), layer_string.split('c'))
net.addLayer( PyDeepCL.ConvolutionalMaker().numFilters(num_filters)
.filterSize(filter_size).biased() )
elif 'n' in layer_string:
num_neurons = int(layer_string.split('n')[0])
net.addLayer( PyDeepCL.FullyConnectedMaker().numPlanes(num_neurons).imageSize(1).biased() )
else:
raise Exception('layer_string {layer_string} not recognized'.format(
layer_string=layer_string))
print( net.asString() )
net.addLayer( PyDeepCL.FullyConnectedMaker().numPlanes(1).imageSize(1) )
net.addLayer( PyDeepCL.SoftMaxMaker() )
print( net.asString() )
images = np.zeros((batch_size, input_planes, input_size, input_size), dtype=np.float32)
images[:] = np.random.uniform(-0.5, 0.5, images.shape)
labels = np.zeros((batch_size,), dtype=np.int32)
print('warming up...')
#try:
net.setBatchSize(batch_size)
# warm up forward
for i in range(9):
last = time.time()
net.forward( images )
now = time.time()
print(' warm up forward all-layer time', ( now - last ) * 1000, 'ms' )
last = now
net.backwardFromLabels(labels)
now = time.time()
print(' warm up backward all-layer time', (now - last) * 1000, 'ms' )
last = now
layer = net.getLayer(2)
print('running forward prop timings:')
for i in range(num_epochs):
layer.forward()
now = time.time()
print('forward layer total time', ( now - last) * 1000, 'ms' )
print('forward layer average time', ( now - last ) * 1000 / float(num_epochs), 'ms' )
# forward_time_per_layer_ms = ( now - last ) / float(num_epochs) * 1000
# writeResults( label + ', ' + net_string + ', ' + layer.asString() + ', forward=' + str( ( now - last ) / float(num_epochs) * 1000 ) + 'ms' )
write_results( label=label, net_string=net_string, layer=layer, direction='forward',
benchmark_type='layer', time_ms=( now - last ) / float(num_epochs) * 1000 )
print('warm up backwards again')
layer.backward()
layer.backward()
print('warm up backwards done. start timings:')
now = time.time()
last = now
for i in range(num_epochs):
layer.backward()
now = time.time()
print('backward layer total time', (now - last)*1000, 'ms' )
print('backward layer average time', ( now - last ) * 1000 / float(num_epochs), 'ms' )
# writeResults( label + ', ' + net_string + ', ' + layer.asString() + ', backward=' + str( ( now - last ) / float(num_epochs) * 1000 ) + 'ms' )
write_results( label=label, net_string=net_string, layer=layer,
direction='backward', benchmark_type='layer', time_ms=( now - last ) / float(num_epochs) * 1000 )
last = now
def time_fullnet(num_epochs, label, batch_size, net_string):
print('building network...')
split_net_string = net_string.split('-')
input_string = split_net_string[0]
netdef = '-'.join(split_net_string[1:])
input_planes, input_size = map(lambda x: int(x), input_string.split('i'))
cl = PyDeepCL.DeepCL()
net = PyDeepCL.NeuralNet(cl, input_planes, input_size )
PyDeepCL.NetdefToNet.createNetFromNetdef(net, netdef)
print( net.asString() )
images = np.zeros((batch_size, input_planes, input_size, input_size), dtype=np.float32)
images[:] = np.random.uniform(-0.5, 0.5, images.shape)
labels = np.zeros((batch_size,), dtype=np.int32)
print('warming up...')
#try:
net.setBatchSize(batch_size)
# warm up forward
for i in range(8):
last = time.time()
net.forward( images )
now = time.time()
print(' warm up forward all-layer time', (now - last)*1000.0, 'ms')
last = now
print('warming up backward:')
last = time.time()
net.backwardFromLabels(labels)
now = time.time()
print(' warm up backward time', (now - last) * 1000, 'ms' )
last = now
net.backwardFromLabels(labels)
now = time.time()
print(' warm up backward time', (now - last) * 1000, 'ms' )
total_forward = 0
total_backward = 0
last = time.time()
num_epochs = 0
while total_forward < 1000 or total_backward < 0: # make sure we collect sufficient timing
# for epoch in range(num_epochs):
print('epoch {epoch}'.format(epoch=num_epochs+1))
print('run forward for real...')
# last = time.time()
net.forward(images)
now = time.time()
diff = now - last
forward_ms = diff * 1000.0
total_forward += forward_ms
print('forward time: {forward_ms}ms'.format(
forward_ms=forward_ms))
last = now
print('backward for real:')
# last = time.time()
net.backwardFromLabels(labels)
now = time.time()
diff = now - last
backward_ms = diff * 1000.0
total_backward += backward_ms
print('backward time: {backward_ms}ms'.format(
backward_ms=backward_ms))
last = now
num_epochs += 1
print('num_epochs: {num_epochs}'.format(num_epochs=num_epochs))
average_forward = total_forward / num_epochs
average_backward = total_backward / num_epochs
print('average forward time: {forward_ms}ms'.format(
forward_ms=average_forward))
print('average backward time: {backward_ms}ms'.format(
backward_ms=average_backward))
write_results( label=label, net_st
|
stxnext-csr/volontulo
|
apps/volontulo/views/organizations.py
|
Python
|
mit
| 6,645
| 0
|
# -*- coding: utf-8 -*-
u"""
.. module:: organizations
"""
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from django.shortcuts import redirect
from django.shortcuts import render
from django.utils.text import slugify
from django.views.generic import View
from apps.volontulo.forms import VolounteerToOrganizationContactForm
from apps.volontulo.lib.email import send_mail
from apps.volontulo.models import Offer
from apps.volontulo.models import Organization
from apps.volontulo.models import UserProfile
from apps.volontulo.utils import correct_slug
def organizations_list(request):
u"""View responsible for listing all organizations.
:param request: WSGIRequest instance
"""
organizations = Organization.objects.all()
return render(
request,
"organizations/list.html",
{'organizations': organizations},
)
class OrganizationsCreate(View):
u"""Class view supporting creation of new organization."""
@staticmethod
@login_required
def get(request):
u"""Method responsible for rendering form for new organization."""
return render(
request,
"organizations/organization_form.html",
{'organization': Organization()}
)
@staticmethod
@login_required
def post(request):
u"""Method responsible for saving new organization."""
if not (
request.POST.get('name') and
request.POST.get('address') and
request.POST.get('description')
):
messages.error(
request,
u"Należy wypełnić wszystkie pola formularza."
)
return render(
request,
"organizations/organization_form.html",
{'organization': Organization()}
)
organization = Organization(
name=request.POST.get('name'),
address=request.POST.get('address'),
description=request.POST.get('description'),
)
organization.save()
request.user.userprofile.organizations.add(organization)
messages.success(
request,
u"Organizacja została dodana."
)
return redirect(
'organization_view',
slug=slugify(organization.name),
id_=organization.id,
)
@correct_slug(Organization, 'organization_form', 'name')
@login_required
def organization_form(request, slug, id_): # pylint: disable=unused-argument
u"""View responsible for editing organization.
Edition will only work, if logged user has been registered as organization.
"""
org = Organization.objects.get(pk=id_)
users = [profile.user.email for profile in org.userprofiles.all()]
if (
request.user.is_authenticated() and
request.user.email not in users
):
messages.error(
request,
u'Nie masz uprawnień do edycji tej organizacji.'
)
return redirect(
reverse(
'organization_view',
args=[slugify(org.name), org.id]
)
)
if not (
request.user.is_authenticated() and
UserProfile.objects.get(user=request.user).organizations
):
return redirect('homepage')
if request.method == 'POST':
if (
request.POST.get('name') and
request.POST.get('address') and
request.POST.get('description')
):
org.name = request.POST.get('name')
org.address = request.POST.get('address')
org.description = request.POST.get('description')
org.save()
messages.success(
request,
u'Oferta została dodana/zmieniona.'
)
return redirect(
reverse(
'organization_view',
args=[slugify(org.name), org.id]
)
)
else:
messages.error(
request,
u"Należy wypełnić wszystkie pola formularza."
)
return render(
request,
"organizations/organization_form.html",
{'organization': org},
)
@correct_slug(Organization, 'organization_view', 'name')
def organization_view(request, slug, id_): # pylint: disable=unused-argument
u"""View responsible for viewing organization."""
org = get_object_or_404(Organization, id=id_)
offers = Offer.objects.filter(organization_id=id_)
allow_contact = True
allow_edit = False
allow_offer_create = False
if (
request.user.is_authenticated() and
request.user.userprofile in org.userprofiles.all()
):
allow_contact = False
allow_edit = True
allow_offer_create = True
if request.method == 'POST':
form = VolounteerToOrganizationContactForm(request.POST)
if form.is_valid():
# send email to first organization user (I assume it's main user)
profile = Organization.objects.get(id=id_).userprofiles.all()[0]
send_mail(
request,
'volunteer_to_organisation',
[
profile.user.email,
request.POST.get('email'),
],
{k: v for k, v in request.POST.items()},
)
messages.success(request, u'Email został wysłany.')
else:
messages.error(
request,
u"Formularz zawiera nieprawidłowe dane: {}".format(form.errors)
)
return render(
request,
"organizations/organization_view.html",
{
'organization': org,
'contact_form': form,
'offers': offers,
'allow_contact': allow_contact,
'allow_edit': allow_edit,
'allow_offer_create': allow_offer_create,
},
)
return render(
request,
"organizations/organization_view.html",
{
'organization': org,
'contact_form': VolounteerToOrganizationContactForm(),
'offers': offers,
'allow_contact': allow_contact,
'allow_edit': allow_edit,
'allow_offer_create': allow_offer_create,
}
)
|
emitrom/integra-openstack-ui
|
workflows/post/post.py
|
Python
|
apache-2.0
| 151
| 0
|
class Post(object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
def __iter__(self):
return iter(self.__dict__)
|
SaptakS/open-event-orga-server
|
app/api/attendees.py
|
Python
|
gpl-3.0
| 2,552
| 0.002351
|
from flask.ext.restplus import Namespace
from app.api.tickets import ORDER, TICKET
from app.helpers.ticketing import TicketingManager
from app.api.helpers.helpers import (
requires_auth,
can_access, replace_event_id)
from app.api.helpers.utils import POST_RESPONSES
from app.api.helpers.utils import Resource
from app.api.helpers import custom_fields as fields
api = Namespace('attendees', description='Attendees', path='/')
ATTENDEE = api.model('TicketHolder', {
'id': fields.Integer(),
'firstname': fields.String(),
'lastname': fields.String(),
'email': fields.Email(),
'checked_in': fields.Boolean(),
'order': fields.Nested(ORDER, allow_null=False),
'ticket': fields.Nested(TICKET, allow_null=False)
})
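# Sketch of a marshalled attendee for illustration (field names from the model
# above, values hypothetical):
# {"id": 1, "firstname": "Ada", "lastname": "Lovelace",
#  "email": "ada@example.com", "checked_in": false,
#  "order": {...}, "ticket": {...}}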
@api.route('/events/<string:event_id>/attendees/')
class AttendeesList(Resource):
@requires_auth
@replace_event_id
@can_access
@api.doc('check_in_toggle', responses=POST_RESPONSES)
@api.marshal_list_with(ATTENDEE)
def get(self, event_id):
"""Get attendees of the event"""
return TicketingManager.get_attendees(event_id)
@api.route('/events/<string:event_id>/attendees/check_in_toggle/<holder_identifier>')
class AttendeeCheckInToggle(Resource):
@requires_auth
@replace_event_id
@can_access
@api.doc('check_in_toggle', responses=POST_RESPONSES)
@api.marshal_with(ATTENDEE)
def post(self, event_id, holder_identifier):
"""Toggle and Attendee's Checked in State"""
holder = TicketingManager.attendee_check_in_out(event_id, holder_identifier)
return holder, 200
@api.route('/events/<string:event_id>/attendees/check_in_toggle/<holder_identifier>/check_in')
class AttendeeCheckIn(Resource):
@requires_auth
@replace_event_id
@can_access
@api.doc('check_in_toggle', responses=POST_RESPONSES)
@api.marshal_with(ATTENDEE)
def post(self, event_id, holder_identifier):
"""Check in attendee"""
holder = TicketingManager.attendee_check_in_out(event_id, holder_identifier, True)
return holder, 200
@api.route('/events/<string:event_id>/attendees/check_in_toggle/<holder_identifier>/check_out')
class AttendeeCheckOut(Resource):
@requires_auth
@replace_event_id
@can_access
@api.doc('check_in_toggle', responses=POST_RESPONSES)
@api.marshal_with(ATTENDEE)
def post(self, event_id, holder_identifier):
"""Check out attendee"""
holder = TicketingManager.attendee_check_in_out(event_id, holder_identifier, False)
return holder, 200
|
DePierre/owtf
|
framework/db/command_register.py
|
Python
|
bsd-3-clause
| 2,347
| 0.001704
|
#!/usr/bin/env python
'''
Component to handle data storage and search of all commands run
'''
from framework.dependency_management.dependency_resolver import BaseComponent
from framework.dependency_management.interfaces import CommandRegisterInterface
from framework.lib.general import cprint
from framework.db import models
from framework.db.target_manager import target_required
class CommandRegister(BaseComponent, CommandRegisterInterface):
COMPONENT_NAME = "command_register"
def __init__(self):
self.register_in_service_locator()
self.config = self.get_component("config")
self.db = self.get_component("db")
self.plugin_output = None
self.target = None
def init(self):
self.target = self.get_component("target")
self.plugin_output = self.get_component("plugin_output")
def AddCommand(self, Command):
self.db.session.merge(models.Command(
start_time=Command['Start'],
end_time=Command['End'],
success=Command['Success'],
target_id=Command['Target'],
plugin_key=Command['PluginKey'],
modified_command=Command['ModifiedCommand'].strip(),
original_command=Command['OriginalCommand'].strip()
))
self.db.session.commit()
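# Note: session.merge() gives this method upsert-like behaviour -- if a Command
# row with the same primary key already exists its columns are updated,
# otherwise a new row is inserted (standard SQLAlchemy merge semantics).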
def DeleteCommand(self, Command):
command_obj = self.db.session.query(models.Command).get(Command)
self.db.session.delete(command_obj)
self.db.session.commit()
@target_required
def CommandAlreadyRegistered(self, original_command, target_id=None):
register_entry = self.db.session.query(models.Command).get(original_command)
if register_entry:
# If the command was completed and the plugin output to which it
# is referring exists
if register_entry.success:
if self.plugin_output.PluginOutputExists(register_entry.plugin_key, register_entry.target_id):
return self.target.GetTargetURLForID(register_entry.target_id)
else:
self.DeleteCommand(original_command)
return None
else: # Command failed
self.DeleteCommand(original_command)
return self.target.GetTargetURLForID(register_entry.target_id)
return None
|
Christoph/tag-connect
|
keyvis_add/ml.py
|
Python
|
mit
| 2,394
| 0.002924
|
import numpy as np
import keras
from keras.datasets import mnist
from keras.models import Model, Sequential
from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D
from keras.layers import Flatten, Reshape
from keras import regularizers
from plotly import offline as py
import plotly.graph_objs as go
from plotly import tools
py.init_notebook_mode()
# Loads the training and test data sets (ignoring class labels)
(x_train, _), (x_test, _) = mnist.load_data()
# Scales the training and test data to range between 0 and 1.
max_value = float(x_train.max())
x_train = x_train.astype('float32') / max_value
x_test = x_test.astype('float32') / max_value
# Reshape
x_train = x_train.reshape((len(x_train), np.prod(x_train.shape[1:])))
x_test = x_test.reshape((len(x_test), np.prod(x_test.shape[1:])))
# x_train.shape
# Autoencoder
input_dim = x_train.shape[1]
encoding_dim = 32
compression_factor = float(input_dim) / encoding_dim
print("Compression factor: %s" % compression_factor)
autoencoder = Sequential()
autoencoder.add(
Dense(encoding_dim, input_shape=(input_dim,), activation='relu')
)
autoencoder.add(
Dense(input_dim, activation='sigmoid')
)
autoencoder.summary()
input_img = Input(shape=(input_dim,))
encoder_layer = autoencoder.layers[0]
encoder = Model(input_img, encoder_layer(input_img))
encoder.summary()
autoencoder.compile(optimizer='adam', loss='binary_crossentropy')
autoencoder.fit(x_train, x_train,
epochs=50,
batch_size=256,
shuffle=True,
validation_data=(x_test, x_test))
num_images = 10
np.random.seed(42)
random_test_images = np.random.randint(x_test.shape[0], size=num_images)
encoded_imgs = encoder.predict(x_test)
decoded_imgs = autoencoder.predict(x_test)
encoded_imgs[0]
decoded_imgs[0]
fig = tools.make_subplots(rows=1, cols=3, print_grid=False)
t1 = go.Heatmap(z=x_test[random_test_images[0]].reshape(28, 28), showscale=False)
fig.append_trace(t1, 1, 1)
# fig.append_trace(trace2, 1, 2)
# fig.append_trace(trace3, 1, 3)
for i in map(str,range(1, 4)):
y = 'yaxis'+ i
x = 'xaxis' + i
fig['layout'][y].update(autorange='reversed',
showticklabels=False, ticks='', scaleanchor='x')
fig['layout'][x].update(showticklabels=False, ticks='')
fig['layout'].update(height=600)
py.iplot(fig)
|
tomjelinek/pcs
|
pcs/common/resource_agent/const.py
|
Python
|
gpl-2.0
| 285
| 0
|
# OCF 1.0 doesn't define unique groups, they are defined since OCF 1.1. Pcs
# transforms OCF 1.0 agents to OCF 1.1 structure and therefore needs to create
# a group name for OCF 1.0 unique attrs. The name is: {this_prefix}{attr_name}
DEFAULT_UNIQUE_GROUP_PREFIX = "_pcs_unique_group_"
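# Example of the rule above: an OCF 1.0 unique attribute named "ip" ends up in
# a generated group "_pcs_unique_group_ip".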
|
kayhayen/Nuitka
|
tests/programs/absolute_import/foobar/foobar.py
|
Python
|
apache-2.0
| 1,043
| 0
|
# Copyright 2021, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Python tests originally created or extracted from other people's work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Using absolute import, do from module imports.
"""
from __future__ import absolute_import, print_function
from foobar import util
from . import local # pylint: disable=unused-import
class Foobar(object):
def __init__(self):
print(util.someFunction())
|
gkarakou/systemd-denotify
|
denotify/mailer.py
|
Python
|
gpl-3.0
| 8,509
| 0.007051
|
#!/usr/bin/python2
import threading
from systemd import journal
from threading import Thread
import smtplib
import email.utils
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
class Mailer(threading.Thread):
"""
Mailer
:desc: Class that sends an email
Extends Thread
"""
def __init__(self):
"""
__init__
:desc: Constructor function that calls parent
"""
Thread.__init__(self)
def run(self, stri, dictio):
"""
run
:desc : Function that does the heavy lifting
:params : The string to be mailed and a dict
containing config options necessary for the mail to be delivered.
"""
"""
dictionary = dictio
msg = MIMEMultipart("alternative")
#get it from the queue?
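# Sketch of the expected `dictio` shape (keys taken from the lookups below,
# values hypothetical):
#   {'email_subject': 'journal alert', 'email_to': 'ops@example.com',
#    'email_from': 'denotify', 'smtp': True, 'auth': False,
#    'smtp_host': 'localhost', 'smtp_port': 25,
#    'smtps': False, 'starttls': False}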
stripped = stri.strip()
part1 = MIMEText(stripped, "plain")
msg['Subject'] = dictionary['email_subject']
#http://pymotw.com/2/smtplib/
msg['To'] = email.utils.formataddr(('Recipient', dictionary['email_to']))
msg['From'] = email.utils.formataddr((dictionary['email_from'], dictionary['email_from']))
msg.attach(part1)
if dictionary['smtp'] == True:
# no auth
if dictionary['auth'] == False:
s = smtplib.SMTP()
s.connect(host=str(dictionary['smtp_host']), port=dictionary['smtp_port'])
try:
send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
journal.send("systemd-denotify: "+message)
finally:
s.quit()
del s
# auth
elif dictionary['auth'] == True:
s = smtplib.SMTP()
s.connect(host=str(dictionary['smtp_host']), port=dictionary['smtp_port'])
s.login(str(dictionary['auth_user']), str(dictionary['auth_password']))
try:
send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string().strip())
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
journal.send("systemd-denotify: "+message)
finally:
s.quit()
del s
else:
pass
#smtps
if dictionary['smtps'] == True:
# no auth ?
if dictionary['auth'] == False:
try:
if len(dictionary['smtps_cert']) > 0 and len(dictionary['smtps_key']) > 0:
s = smtplib.SMTP_SSL(host=str(dictionary['smtps_host']), port=dictionary['smtps_port'], keyfile=dictionary['smtps_key'], certfile=dictionary['smtps_cert'])
s.ehlo_or_helo_if_needed()
send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
else:
s = smtplib.SMTP_SSL(host=str(dictionary['smtps_host']), port=dictionary['smtps_port'])
s.ehlo_or_helo_if_needed()
send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
journal.send("systemd-denotify: "+message)
finally:
s.quit()
del s
# auth
elif dictionary['auth'] == True:
try:
#check whether it is a real file and pem encoded
if len(dictionary['smtps_cert']) > 0 and len(dictionary['smtps_key']) > 0:
s = smtplib.SMTP_SSL(host=str(dictionary['smtps_host']), port=dictionary['smtps_port'], keyfile=dictionary['smtps_key'], certfile=dictionary['smtps_cert'])
s.ehlo_or_helo_if_needed()
s.login(dictionary['auth_user'], dictionary['auth_password'])
send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
else:
s = smtplib.SMTP_SSL(host=str(dictionary['smtps_host']), port=dictionary['smtps_port'])
s.ehlo_or_helo_if_needed()
s.login(dictionary['auth_user'], dictionary['auth_password'])
send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
journal.send("systemd-denotify: "+message)
finally:
s.quit()
del s
else:
pass
#starttls
if dictionary['starttls'] == True:
# no auth
if dictionary['auth'] == False:
try:
s = smtplib.SMTP()
s.connect(host=str(dictionary['starttls_host']), port=dictionary['starttls_port'])
s.ehlo()
#http://pymotw.com/2/smtplib/
if s.has_extn("STARTTLS"):
#check whether it is a real file and pem encoded
if len(dictionary['starttls_cert']) > 0 and len(dictionary['starttls_key']) > 0:
s.starttls(keyfile=dictionary['starttls_key'], certfile=dictionary['starttls_cert'])
s.ehlo()
send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
else:
s.starttls()
s.ehlo()
send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
journal.send("systemd-denotify: "+message)
finally:
s.quit()
del s
# auth
elif dictionary['auth'] == True:
try:
s = smtplib.SMTP()
s.connect(host=str(dictionary['starttls_host']), port=dictionary['starttls_port'])
#http://pymotw.com/2/smtplib/
s.ehlo()
if s.has_extn("STARTTLS"):
#check whether it is a real file and pem encoded
if len(dictionary['starttls_cert']) >0 and len(dictionary['starttls_key'])>0:
s.starttls(keyfile=dictionary['starttls_key'], certfile=dictionary['starttls_cert'])
s.ehlo()
s.login(str(dictionary['auth_user']).strip(), str(dictionary['auth_password']))
send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
else:
s.starttls()
s.ehlo()
s.login(str(dictionary['auth_user']).strip(), str(dictionary['auth_password']))
send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string())
except Exception as ex:
|
pgdr/ert
|
python/tests/gui/ide/test_proper_name_format_argument.py
|
Python
|
gpl-3.0
| 595
| 0.001681
|
from ert_gui.ide.keywords.definitions import ProperNameFormatArgument
from ecl.test import ExtendedTestCase
class ProperNameFormatArgumentTest(ExtendedTestCase):
def test_proper_name_format_argument(self):
argument = ProperNameFormatArgument()
self.assertTrue(argument.validate("NAME%d"))
self.assertTrue(argument.validate("__NA%dME__"))
self.assertTrue(argument.validate("<NAME>%d"))
self.assertTrue(argument.validate("%d-NAME-"))
self.assertFalse(argument.validate("-%dNA ME-"))
self.assertFalse(argument.validate("NAME*%d"))
|
moonso/vcf_parser
|
tests/test_split_variants.py
|
Python
|
mit
| 9,713
| 0.014414
|
import pytest
from vcf_parser.utils import split_variants, format_variant
from vcf_parser import HeaderParser
def get_header(header_lines = None):
"""Initiate a HeaderParser and return it"""
header_parser = HeaderParser()
if not header_lines:
header_lines = [
'##fileformat=VCFv4.2',
'##FILTER=<ID=LowQual,Description="Low quality">',
'##INFO=<ID=MQ,Number=1,Type=Float,Description="RMS Mapping Quality">',
'##INFO=<ID=CNT,Number=A,Type=Integer,Description="Number of times '\
'this allele was found in external db">',
'##contig=<ID=1,length=249250621,assembly=b37>',
'##INFO=<ID=DP_HIST,Number=R,Type=String,Description="Histogram for '\
'DP; Mids: 2.5|7.5|12.5|17.5|22.5|27.5|32.5|37.5|42.5|47.5|52.5|57.5|'\
'62.5|67.5|72.5|77.5|82.5|87.5|92.5|97.5">',
'##FORMAT=<ID=AD,Number=.,Type=Integer,Description="Allelic depths for'\
' the ref and alt alleles in the order listed">',
'##INFO=<ID=CSQ,Number=.,Type=String,Description="Consequence type as'\
' predicted by VEP. Format: Allele|Gene|Feature">',
'##FORMAT=<ID=DP,Number=1,Type=Integer,Description="Read Depth">',
'##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">',
'##FORMAT=<ID=GQ,Number=1,Type=String,Description="GenotypeQuality">',
'##reference=file:///human_g1k_v37.fasta',
'#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tfather\tmother\tproband'
]
for line in header_lines:
if line.startswith('##'):
header_parser.parse_meta_data(line)
elif line.startswith('#'):
header_parser.parse_header_line(line)
return header_parser
def test_simple_split():
"""
Test how genotypes behave in a simple split
"""
header_parser = get_header()
variant_line = "3\t947379\t.\tA\tT,C\t100\tPASS\tMQ=1;CNT=5,8;"\
"DP_HIST=12,43,22\tGT:GQ:AD:DP\t1/1:60:0,7,0:12\t0/2:60:7,0,10:17"\
"\t1/2:60:0,7,8:16"
variant = format_variant(
line = variant_line,
header_parser=header_parser,
check_info=True
)
splitted_variants = []
for variant in split_variants(variant, header_parser):
splitted_variants.append(variant)
assert len(splitted_variants) == 2
first_variant = splitted_variants[0]
second_variant = splitted_variants[1]
    # Test that the split variants keep the same reference
assert first_variant['REF'] == 'A'
assert second_variant['REF'] == 'A'
    # Test that the alternative allele was split properly
assert first_variant['ALT'] == 'T'
assert second_variant['ALT'] == 'C'
    # Test that a simple INFO field (Number=1) is handled correctly
assert first_variant['info_dict']['MQ'] == ['1']
assert second_variant['info_dict']['MQ'] == ['1']
    # Test that an INFO field with Number='A' is handled correctly
assert first_variant['info_dict']['CNT'] == ['5']
assert second_variant['info_dict']['CNT'] == ['8']
    # Test that an INFO field with Number='R' is handled correctly
assert first_variant['info_dict']['DP_HIST'] == ['12', '43']
assert second_variant['info_dict']['DP_HIST'] == ['12', '22']
    # Test that the genotypes are in the correct format
assert first_variant['father'] == "1/1:60:0,7:12"
assert second_variant['father'] == "0/0:60:0,0:12"
assert first_variant['mother'] == "0/0:60:7,0:17"
assert second_variant[
|
'mother'] == "0/1:60:7,10:17"
assert first_variant['proband'] == "0/1:60:0,7:16"
assert second_variant['proband'] == "0/1:60:0,8:16"
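# Hedged sketch (not vcf_parser's actual code) of the per-allele INFO
# selection the CNT/DP_HIST asserts in this test imply: Number='A'
# fields carry one entry per ALT allele, Number='R' fields carry the
# REF entry first and then one entry per ALT allele.
def pick_info_entries(values, number, alt_index):
    """values: comma-split INFO entries; alt_index: 0-based ALT position."""
    if number == 'A':
        return [values[alt_index]]
    if number == 'R':
        return [values[0], values[alt_index + 1]]
    return values  # e.g. Number=1 fields are copied unchanged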
def test_split_minimal():
"""
    Test splitting a VCF line without genotypes
"""
header_lines = [
'##fileformat=VCFv4.2',
'##FILTER=<ID=LowQual,Description="Low quality">',
'##INFO=<ID=MQ,Number=1,Type=Float,Description="RMS Mapping Quality">',
'##contig=<ID=1,length=249250621,assembly=b37>',
'#CHROM\tPOS\t
|
ID\tREF\tALT\tQUAL\tFILTER\tINFO'
]
header_parser = get_header(header_lines)
variant_line = "3\t947379\t.\tA\tT,C\t100\tPASS\tMQ=1"
variant = format_variant(
line = variant_line,
header_parser=header_parser,
check_info=True
)
splitted_variants = []
for variant in split_variants(variant, header_parser):
splitted_variants.append(variant)
assert len(splitted_variants) == 2
def test_csq_split():
"""
    Test that splitting works for CSQ fields
"""
header_parser = get_header()
variant_line = "3\t947379\t.\tA\tT,C\t100\tPASS\tCSQ=T|148398|NM_152486.2,"\
"C|148398|NM_152486.2\tGT:GQ:AD:DP\t1/1:60:0,7,0:12\t0/2:60:7,0,10:17"\
"\t1/2:60:0,7,8:16"
variant = format_variant(
line = variant_line,
header_parser=header_parser,
check_info=True
)
splitted_variants = []
for variant in split_variants(variant, header_parser):
splitted_variants.append(variant)
assert len(splitted_variants) == 2
first_variant = splitted_variants[0]
second_variant = splitted_variants[1]
assert first_variant['info_dict']['CSQ'] == ['T|148398|NM_152486.2']
assert second_variant['info_dict']['CSQ'] == ['C|148398|NM_152486.2']
assert list(first_variant['vep_info'].keys()) == ['T']
assert list(second_variant['vep_info'].keys()) == ['C']
assert first_variant['vep_info']['T'] == [{
'Allele':'T',
'Gene':'148398',
'Feature':'NM_152486.2'
}]
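# Hedged sketch of the per-allele CSQ filtering these asserts imply
# (a hypothetical helper, not vcf_parser's implementation): keep only
# the CSQ entries whose Allele field matches this variant's ALT.
def pick_csq_entries(csq_value, alt):
    return [entry for entry in csq_value.split(',')
            if entry.split('|')[0] == alt]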
def test_csq_split_missing_allele():
"""
    Test that splitting works for CSQ fields when one allele is missing
"""
header_parser = get_header()
variant_line = "3\t947379\t.\tA\tT,C\t100\tPASS\tCSQ=T|148398|NM_152486.2"\
"\tGT:GQ:AD:DP\t1/1:60:0,7,0:12\t0/2:60:7,0,10:17"\
"\t1/2:60:0,7,8:16"
variant = format_variant(
line = variant_line,
header_parser=header_parser,
check_info=True
)
splitted_variants = []
for variant in split_variants(variant, header_parser):
splitted_variants.append(variant)
assert len(splitted_variants) == 2
first_variant = splitted_variants[0]
second_variant = splitted_variants[1]
assert first_variant['info_dict']['CSQ'] == ['T|148398|NM_152486.2']
    # the second allele has no CSQ annotation, so the key is absent entirely
    with pytest.raises(KeyError):
        assert second_variant['info_dict']['CSQ'] == ['']
assert list(first_variant['vep_info'].keys()) == ['T']
assert list(second_variant['vep_info'].keys()) == ['C']
assert second_variant['vep_info']['C'] == []
def test_wrong_number_of_A_entrys():
"""
    Test splitting when a Number='A' INFO field has the wrong number of entries
"""
header_parser = get_header()
    # CNT should have two entries since Number=A
variant_line = "3\t947379\t.\tA\tT,C\t100\tPASS\tMQ=1;CNT=5;"\
"DP_HIST=12,43,22\tGT:GQ:AD:DP\t1/1:60:0,7,0:12\t0/2:60:7,0,10:17"\
"\t1/2:60:0,7,8:16"
variant = format_variant(
line = variant_line,
header_parser=header_parser,
check_info=False
)
splitted_variants = []
for variant in split_variants(variant, header_parser):
splitted_variants.append(variant)
assert len(splitted_variants) == 2
first_variant = splitted_variants[0]
second_variant = splitted_variants[1]
    # vcf_parser should fall back to the first annotation for both alleles
assert first_variant['info_dict']['CNT'] == ['5']
assert second_variant['info_dict']['CNT'] == ['5']
def test_wrong_number_of_R_entrys():
"""
    Test splitting when a Number='R' INFO field has the wrong number of entries
"""
header_parser = get_header()
    # DP_HIST should have three entries since Number=R
variant_line = "3\t947379\t.\tA\tT,C\t100\tPASS\tMQ=1;CNT=5,8;"\
"DP_HIST=12,43\tGT:GQ:AD:DP\t1/1:60:0,7,0:12\t0/2:60:7,0,10:17"\
"\t1/2:60:0,7,8:16"
    # But then we need to skip the info check
    variant = format_variant(
        line = variant_line,
        header_parser=header_parser,
        check_info=False
    )
|
zlorb/mitmproxy
|
mitmproxy/tools/console/flowdetailview.py
|
Python
|
mit
| 5,591
| 0.000358
|
import urwid
from mitmproxy import http
from mitmproxy.tools.console import common, searchable
from mitmproxy.utils import human
from mitmproxy.utils import strutils
def maybe_timestamp(base, attr):
if base is not None and getattr(base, attr):
return human.format_timestamp_with_milli(getattr(base, attr))
else:
return "active"
def flowdetails(state, flow: http.HTTPFlow):
text = []
sc = flow.server_conn
cc = fl
|
ow.client_conn
req = flow.request
resp = flow.response
metadata = flow.metadata
if metadata is not None and len(metadata) > 0:
parts = [(str(k), repr(v)) for k, v in metadata.items()]
text.append(urwid.Text([("head", "Metadata:")]))
text.extend(common.format_keyvals(parts, indent=4))
if sc is not None and sc.ip_address:
text.append(urwid.Text([("head", "Serve
|
r Connection:")]))
parts = [
("Address", human.format_address(sc.address)),
]
if sc.ip_address:
parts.append(("Resolved Address", human.format_address(sc.ip_address)))
if resp:
parts.append(("HTTP Version", resp.http_version))
if sc.alpn_proto_negotiated:
parts.append(("ALPN", sc.alpn_proto_negotiated))
text.extend(
common.format_keyvals(parts, indent=4)
)
c = sc.cert
if c:
text.append(urwid.Text([("head", "Server Certificate:")]))
parts = [
("Type", "%s, %s bits" % c.keyinfo),
("SHA1 digest", c.digest("sha1")),
("Valid to", str(c.notafter)),
("Valid from", str(c.notbefore)),
("Serial", str(c.serial)),
(
"Subject",
urwid.BoxAdapter(
urwid.ListBox(
common.format_keyvals(
c.subject,
key_format="highlight"
)
),
len(c.subject)
)
),
(
"Issuer",
urwid.BoxAdapter(
urwid.ListBox(
common.format_keyvals(
c.issuer,
key_format="highlight"
)
),
len(c.issuer)
)
)
]
if c.altnames:
parts.append(
(
"Alt names",
", ".join(strutils.bytes_to_escaped_str(x) for x in c.altnames)
)
)
text.extend(
common.format_keyvals(parts, indent=4)
)
if cc is not None:
text.append(urwid.Text([("head", "Client Connection:")]))
parts = [
("Address", "{}:{}".format(cc.address[0], cc.address[1])),
]
if req:
parts.append(("HTTP Version", req.http_version))
if cc.tls_version:
parts.append(("TLS Version", cc.tls_version))
if cc.sni:
parts.append(("Server Name Indication", cc.sni))
if cc.cipher_name:
parts.append(("Cipher Name", cc.cipher_name))
if cc.alpn_proto_negotiated:
parts.append(("ALPN", cc.alpn_proto_negotiated))
text.extend(
common.format_keyvals(parts, indent=4)
)
parts = []
if cc is not None and cc.timestamp_start:
parts.append(
(
"Client conn. established",
maybe_timestamp(cc, "timestamp_start")
)
)
if cc.tls_established:
parts.append(
(
"Client conn. TLS handshake",
maybe_timestamp(cc, "timestamp_tls_setup")
)
)
if sc is not None and sc.timestamp_start:
parts.append(
(
"Server conn. initiated",
maybe_timestamp(sc, "timestamp_start")
)
)
parts.append(
(
"Server conn. TCP handshake",
maybe_timestamp(sc, "timestamp_tcp_setup")
)
)
if sc.tls_established:
parts.append(
(
"Server conn. TLS handshake",
maybe_timestamp(sc, "timestamp_tls_setup")
)
)
if req is not None and req.timestamp_start:
parts.append(
(
"First request byte",
maybe_timestamp(req, "timestamp_start")
)
)
parts.append(
(
"Request complete",
maybe_timestamp(req, "timestamp_end")
)
)
if resp is not None and resp.timestamp_start:
parts.append(
(
"First response byte",
maybe_timestamp(resp, "timestamp_start")
)
)
parts.append(
(
"Response complete",
maybe_timestamp(resp, "timestamp_end")
)
)
if parts:
# sort operations by timestamp
parts = sorted(parts, key=lambda p: p[1])
text.append(urwid.Text([("head", "Timing:")]))
text.extend(common.format_keyvals(parts, indent=4))
return searchable.Searchable(text)
|
imatge-upc/trecvid-2015
|
scripts/python/evaluate.py
|
Python
|
mit
| 4,066
| 0.021397
|
import numpy as np
from get_params import get_params
import os
import pickle
""" Returns mAP for each query. """
def relnotrel( fileGT, id_q, rankingShots ):
    '''Takes a ground truth file (fileGT), a query name (id_q) and a ranking
    (rankingShots) and builds a binary relevance vector for computing Average Precision.
    Returns: a list of 1s and 0s for rankingShots and the number of relevant
    shots in the ground truth file for the given query.
    '''
a = np.loadtxt( fileGT, dtype='string' )
# Extract shots for the query
t_shot = a[ (a[:,0]==id_q) ]
# Extract relevant shots for the query
t_shot_rel = t_shot[ t_shot[:,3] == '1' ]
t_shot_notrel = t_shot[ t_shot[:,3] == '0' ]
# Total Number of relevant shots in the ground truth
nRelTot = np.shape( t_shot_rel )[0]
labelRankingShot = np.zeros((1, len(rankingShots)))
i = 0
for shotRanking in rankingShots:
if shotRanking in t_shot_rel:
labelRankingShot[0, i ] = 1
i +=1
return labelRankingShot, nRelTot
def AveragePrecision( relist,nRelTot):
    '''Takes a list of 1s and 0s and the number of relevant samples and computes the average precision over the top 1000 results'''
accu = 0
numRel = 0
for k in range(min(len(relist),1000)):
if relist[k] == 1:
numRel = numRel + 1
accu += float( numRel )/ float(k+1)
return (accu/nRelTot)
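# Worked example (assuming nRelTot > 0): relist = [1, 0, 1], nRelTot = 2
#   hit at rank 1 -> precision 1/1; hit at rank 3 -> precision 2/3
#   AP = (1/1 + 2/3) / 2 = 0.8333...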
def rerank(ranking,baseline):
new_ranking = []
for shot in ranking:
if shot in baseline:
new_ranking.append(shot)
return new_ranking
if __name__ == '__main__':
params = get_params()
if params['year'] == '2014':
queries = range(9099,9129)
else:
queries = range(9069,9099)
errors = []
for query in queries:
if query not in (9100,9113,9117):
params['query_name'] = str(query)
RANKING_FILE = os.path.join(params['root'],'7_rankings',params['net'],params['database'] + params['year'],params['distance_type'],params['query_name'] + '.rank')
if params['year'] == '2014':
GROUND_TRUTH_FILE = os.path.join(params['root'],'8_groundtruth','src','ins.search.qrels.tv14')
else:
GROUND_TRUTH_FILE = os.path.join(params['root'],'8_groundtruth','src','ins.search.qrels.tv13')
#print RANKING_FILE
if os.path.isfile(RANKING_FILE):
baseline_file = os.path.join(params['root'],'2_baseline', 'dcu_caffenet',params['query_name'] + '.rank')
#print baseline_file
f = open(RANKING_FILE)
ranking = pickle.load(f)
frames = pickle.load(f)
regions = pickle.load(f)
distances = pickle.load(f)
unsorted_distances = pickle.load(f)
if params['database'] =='gt_imgs':
baseline_ranking = pickle.load(open(baseline_file,'rb'))
baseline_ranking = baseline_ranking[0:1000]
ranking = rerank(ranking,baseline_ranking)
f.close()
labels, num_relevant = relnotrel(GROUND_TRUTH_FILE, params['query_name'], ranking)
ranking = np.reshape(ranking,(np.shape(ranking)[0],1))
distances = np.reshape(distances,(np.shape(distances)[0],1))
save_file = np.hstack((ranking,np.hstack((distances,regions))))
                #np.shape(save_file) # debug leftover; the bare call has no effect
save_txt_file = os.path.join(params
|
['root'],'9_other','score_txt',params['query_name'] + '.txt')
np.savetxt(save_txt_file,save_fi
|
le,delimiter='\t', fmt="%s")
ap = AveragePrecision(np.squeeze(labels),num_relevant)
print ap
else:
errors.append(query)
print "Done"
print errors
|
Azure/azure-sdk-for-python
|
sdk/formrecognizer/azure-ai-formrecognizer/azure/ai/formrecognizer/_generated/aio/_form_recognizer_client.py
|
Python
|
mit
| 5,812
| 0.002753
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from typing import Any, Optional, TYPE_CHECKING
from azure.core import AsyncPipelineClient
from azure.profiles import KnownProfiles, ProfileDefinition
from azure.profiles.multiapiclient import MultiApiClientMixin
from msrest import Deserializer, Serializer
from ._configuration import FormRecognizerClientConfiguration
from ._operations_mixin import FormRecognizerClientOperationsMixin
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
from azure.core.credentials_async import AsyncTokenCredential
class _SDKClient(object):
def __init__(self, *args, **kwargs):
"""This is a fake class to support current implemetation of MultiApiClientMixin."
Will be removed in final version of multiapi azure-core based client
"""
pass
class FormRecognizerClient(FormRecognizerClientOperationsMixin, MultiApiClientMixin, _SDKClient):
"""Extracts information from forms and images into structured data.
    This client contains multiple API versions, to help you deal with all of the Azure clouds
(Azure Stack, Azure Government, Azure China, etc.).
By default, it uses the latest API version available on public Azure.
For production, you should stick to a particular api-version and/or profile.
The profile sets a mapping between an operation group and its API version.
The api-version parameter sets the default API version if the operation
group is not described in the profile.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus2.api.cognitive.microsoft.com).
:type e
|
ndpoint: str
:param api_version: API version to use if no profile is provided, or if missing in profile.
:type api_version: str
:param profile: A profile definition, from KnownProfiles to dict.
:type profile: azure.profiles.KnownProfiles
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
"""
DEFAULT_API_VERSION = '2.1'
_PROFILE_TAG = "azure.ai.formrecognizer.Form
|
RecognizerClient"
LATEST_PROFILE = ProfileDefinition({
_PROFILE_TAG: {
None: DEFAULT_API_VERSION,
'authorize_copy_document_model': '2022-01-30-preview',
'begin_analyze_document': '2022-01-30-preview',
'begin_build_document_model': '2022-01-30-preview',
'begin_compose_document_model': '2022-01-30-preview',
'begin_copy_document_model_to': '2022-01-30-preview',
'delete_model': '2022-01-30-preview',
'get_analyze_document_result': '2022-01-30-preview',
'get_info': '2022-01-30-preview',
'get_model': '2022-01-30-preview',
'get_models': '2022-01-30-preview',
'get_operation': '2022-01-30-preview',
'get_operations': '2022-01-30-preview',
'train_custom_model_async': '2.0',
}},
_PROFILE_TAG + " latest"
)
def __init__(
self,
credential: "AsyncTokenCredential",
endpoint: str,
api_version: Optional[str] = None,
profile: KnownProfiles = KnownProfiles.default,
**kwargs # type: Any
) -> None:
if api_version == '2022-01-30-preview':
base_url = '{endpoint}/formrecognizer'
elif api_version == '2.0':
base_url = '{endpoint}/formrecognizer/v2.0'
elif api_version == '2.1':
base_url = '{endpoint}/formrecognizer/v2.1'
else:
raise ValueError("API version {} is not available".format(api_version))
self._config = FormRecognizerClientConfiguration(credential, endpoint, **kwargs)
self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs)
super(FormRecognizerClient, self).__init__(
api_version=api_version,
profile=profile
)
@classmethod
def _models_dict(cls, api_version):
return {k: v for k, v in cls.models(api_version).__dict__.items() if isinstance(v, type)}
@classmethod
def models(cls, api_version=DEFAULT_API_VERSION):
"""Module depends on the API version:
* 2022-01-30-preview: :mod:`v2022_01_30_preview.models<azure.ai.formrecognizer.v2022_01_30_preview.models>`
* 2.0: :mod:`v2_0.models<azure.ai.formrecognizer.v2_0.models>`
* 2.1: :mod:`v2_1.models<azure.ai.formrecognizer.v2_1.models>`
"""
if api_version == '2022-01-30-preview':
from ..v2022_01_30_preview import models
return models
elif api_version == '2.0':
from ..v2_0 import models
return models
elif api_version == '2.1':
from ..v2_1 import models
return models
raise ValueError("API version {} is not available".format(api_version))
async def close(self):
await self._client.close()
async def __aenter__(self):
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details):
await self._client.__aexit__(*exc_details)
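# Hedged usage sketch, not part of the generated client: it assumes the
# optional azure-identity package is installed and uses the docstring's
# example endpoint, not a real resource.
async def _example_usage():
    from azure.identity.aio import DefaultAzureCredential  # assumed extra dependency
    async with FormRecognizerClient(
        credential=DefaultAzureCredential(),
        endpoint="https://westus2.api.cognitive.microsoft.com",
        api_version="2.1",  # matches DEFAULT_API_VERSION
    ) as client:
        return client.models().__name__  # the v2_1 models module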
|
elzaggo/pydoop
|
examples/pydoop_submit/mr/map_only_python_writer.py
|
Python
|
apache-2.0
| 1,895
| 0.001583
|
#!/usr/bin/env python
# BEGIN_COPYRIGHT
#
# Copyright 2009-2018 CRS4.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# END_COPYRIGHT
import logging
logging.basicConfig()
LOGGER = logging.getLogger("MapOnly")
LOGGER.setLevel(logging.INFO)
import pydoop.mapreduce.api as api
import pydoop.mapreduce.pipes as pipes
import pydoop.hdfs as hdfs
class M
|
apper(api.Mapper):
def __init__(self, context):
self.name = hdfs.path.basename(context.input_split.filename)
def map(self, context):
context.emit((self.name, context.key), context.value.upper())
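# The job is map-only: every record is re-emitted with its source file
# name prepended to the key and the value upper-cased; the Writer below
# then serializes those pairs straight to HDFS.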
class Writer(api.RecordWriter):
def __init__(self, context):
super(Writer, self).__init__(context)
|
self.logger = LOGGER.getChild("Writer")
jc = context.job_conf
outfn = context.get_default_work_file()
self.logger.info("writing to %s", outfn)
hdfs_user = jc.get("pydoop.hdfs.user", None)
self.sep = jc.get("mapreduce.output.textoutputformat.separator", "\t")
self.file = hdfs.open(outfn, "wt", user=hdfs_user)
def close(self):
self.file.close()
self.file.fs.close()
    def emit(self, key, value):
        # key is a (filename, input_key) tuple; %r writes its repr,
        # followed by the configured separator, the value and a newline
        self.file.write("%r%s%s%s" % (key, self.sep, value, "\n"))
def __main__():
pipes.run_task(pipes.Factory(
mapper_class=Mapper,
record_writer_class=Writer,
))
if __name__ == "__main__":
__main__()
|