| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 2–1.05M | stringlengths 5–104 | stringlengths 4–251 | stringclasses 1 value | stringclasses 15 values | int32 2–1.05M |
"""
Tutorial - Object inheritance
You are free to derive your request handler classes from any base
class you wish. In most real-world applications, you will probably
want to create a central base class used for all your pages, which takes
care of things like printing a common page header and footer.
"""
import cherrypy
class Page:
    # Store the page title in a class attribute
    title = 'Untitled Page'

    def header(self):
        return '''
            <html>
            <head>
                <title>%s</title>
            </head>
            <body>
            <h2>%s</h2>
        ''' % (self.title, self.title)

    def footer(self):
        return '''
            </body>
            </html>
        '''

    # Note that header and footer don't get their exposed attributes
    # set to True. This isn't necessary since the user isn't supposed
    # to call header or footer directly; instead, we'll call them from
    # within the actually exposed handler methods defined in this
    # class' subclasses.


class HomePage(Page):
    # Different title for this page
    title = 'Tutorial 5'

    def __init__(self):
        # create a subpage
        self.another = AnotherPage()

    def index(self):
        # Note that we call the header and footer methods inherited
        # from the Page class!
        return self.header() + '''
            <p>
            Isn't this exciting? There's
            <a href="./another/">another page</a>, too!
            </p>
        ''' + self.footer()
    index.exposed = True


class AnotherPage(Page):
    title = 'Another Page'

    def index(self):
        return self.header() + '''
            <p>
            And this is the amazing second page!
            </p>
        ''' + self.footer()
    index.exposed = True


import os.path
tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')

if __name__ == '__main__':
    # CherryPy always starts with app.root when trying to map request URIs
    # to objects, so we need to mount a request handler root. A request
    # to '/' will be mapped to HomePage().index().
    cherrypy.quickstart(HomePage(), config=tutconf)
else:
    # This branch is for the test suite; you can ignore it.
    cherrypy.tree.mount(HomePage(), config=tutconf)
| ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/lib/cherrypy/cherrypy/tutorial/tut05_derived_objects.py | Python | bsd-3-clause | 2,291 |
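
The pattern above scales to any number of pages: each subclass supplies only a `title` and a body and inherits the shared chrome. A minimal sketch of a third page (a hypothetical addition, not part of the tutorial file; assumes the `Page` base class above is in scope):

```python
class ContactPage(Page):
    title = 'Contact'

    def index(self):
        # header()/footer() come from Page; only the body is new.
        return self.header() + '<p>Write to us!</p>' + self.footer()
    index.exposed = True
```
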
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Momentum for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.optimizer_v2 import optimizer_v2
from tensorflow.python.training import training_ops
class MomentumOptimizer(optimizer_v2.OptimizerV2):
  """Optimizer that implements the Momentum algorithm.

  Computes (if `use_nesterov = False`):

  ```
  accumulation = momentum * accumulation + gradient
  variable -= learning_rate * accumulation
  ```

  Note that in the dense version of this algorithm, `accumulation` is updated
  and applied regardless of a gradient's value, whereas the sparse version (when
  the gradient is an `IndexedSlices`, typically because of `tf.gather` or an
  embedding) only updates variable slices and corresponding `accumulation` terms
  when that part of the variable was used in the forward pass.
  """

  def __init__(self, learning_rate, momentum,
               use_locking=False, name="Momentum", use_nesterov=False):
    """Construct a new Momentum optimizer.

    Some of the args below are hyperparameters, where a hyperparameter is
    defined as a scalar Tensor, a regular Python value or a callable (which
    will be evaluated when `apply_gradients` is called) returning a scalar
    Tensor or a Python value.

    Args:
      learning_rate: A float hyperparameter. The learning rate.
      momentum: A float hyperparameter. The momentum.
      use_locking: If `True` use locks for update operations.
      name: Optional name prefix for the operations created when applying
        gradients. Defaults to "Momentum".
      use_nesterov: If `True` use Nesterov Momentum.
        See [Sutskever et al., 2013](
        http://jmlr.org/proceedings/papers/v28/sutskever13.pdf).
        This implementation always computes gradients at the value of the
        variable(s) passed to the optimizer. Using Nesterov Momentum makes the
        variable(s) track the values called `theta_t + mu*v_t` in the paper.

    @compatibility(eager)
    When eager execution is enabled, learning_rate and momentum can each be a
    callable that takes no arguments and returns the actual value to use. This
    can be useful for changing these values across different invocations of
    optimizer functions.
    @end_compatibility
    """
    super(MomentumOptimizer, self).__init__(use_locking, name)
    self._set_hyper("learning_rate", learning_rate)
    self._set_hyper("momentum", momentum)
    self._use_nesterov = use_nesterov

  def _create_vars(self, var_list, state):
    for v in var_list:
      state.zeros_slot(v, "momentum")

  def _apply_dense(self, grad, var, state):
    mom = state.get_slot(var, "momentum")
    return training_ops.apply_momentum(
        var,
        mom,
        state.get_hyper("learning_rate", var.dtype.base_dtype),
        grad,
        state.get_hyper("momentum", var.dtype.base_dtype),
        use_locking=self._use_locking,
        use_nesterov=self._use_nesterov).op

  def _resource_apply_dense(self, grad, var, state):
    mom = state.get_slot(var, "momentum")
    return training_ops.resource_apply_momentum(
        var.handle,
        mom.handle,
        state.get_hyper("learning_rate", var.dtype.base_dtype),
        grad,
        state.get_hyper("momentum", var.dtype.base_dtype),
        use_locking=self._use_locking,
        use_nesterov=self._use_nesterov)

  def _apply_sparse(self, grad, var, state):
    mom = state.get_slot(var, "momentum")
    return training_ops.sparse_apply_momentum(
        var,
        mom,
        state.get_hyper("learning_rate", var.dtype.base_dtype),
        grad.values,
        grad.indices,
        state.get_hyper("momentum", var.dtype.base_dtype),
        use_locking=self._use_locking,
        use_nesterov=self._use_nesterov).op

  def _resource_apply_sparse(self, grad, var, indices, state):
    mom = state.get_slot(var, "momentum")
    return training_ops.resource_sparse_apply_momentum(
        var.handle,
        mom.handle,
        state.get_hyper("learning_rate", var.dtype.base_dtype),
        grad,
        indices,
        state.get_hyper("momentum", var.dtype.base_dtype),
        use_locking=self._use_locking,
        use_nesterov=self._use_nesterov)
| kevin-coder/tensorflow-fork | tensorflow/contrib/optimizer_v2/momentum.py | Python | apache-2.0 | 4,934 |
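
For context, a rough usage sketch of this contrib optimizer under TF 1.x graph mode (the `loss` tensor is assumed to be defined elsewhere; this wiring is not part of the file above):

```python
from tensorflow.contrib.optimizer_v2 import momentum

# Hypothetical training setup; `loss` is a scalar Tensor assumed to exist.
opt = momentum.MomentumOptimizer(learning_rate=0.01, momentum=0.9,
                                 use_nesterov=True)
train_op = opt.minimize(loss)
```
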
import datetime
from django.db import DEFAULT_DB_ALIAS
from django.test import TestCase
from django.utils import tzinfo
from models import Donut, RumBaba
from django.conf import settings
class DataTypesTestCase(TestCase):

    def test_boolean_type(self):
        d = Donut(name='Apple Fritter')
        self.assertFalse(d.is_frosted)
        self.assertTrue(d.has_sprinkles is None)
        d.has_sprinkles = True
        self.assertTrue(d.has_sprinkles)
        d.save()
        d2 = Donut.objects.get(name='Apple Fritter')
        self.assertFalse(d2.is_frosted)
        self.assertTrue(d2.has_sprinkles)

    def test_date_type(self):
        d = Donut(name='Apple Fritter')
        d.baked_date = datetime.date(year=1938, month=6, day=4)
        d.baked_time = datetime.time(hour=5, minute=30)
        d.consumed_at = datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59)
        d.save()
        d2 = Donut.objects.get(name='Apple Fritter')
        self.assertEqual(d2.baked_date, datetime.date(1938, 6, 4))
        self.assertEqual(d2.baked_time, datetime.time(5, 30))
        self.assertEqual(d2.consumed_at, datetime.datetime(2007, 4, 20, 16, 19, 59))

    def test_time_field(self):
        # Test for ticket #12059: TimeField wrongly handling datetime.datetime object.
        d = Donut(name='Apple Fritter')
        d.baked_time = datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59)
        d.save()
        d2 = Donut.objects.get(name='Apple Fritter')
        self.assertEqual(d2.baked_time, datetime.time(16, 19, 59))

    def test_year_boundaries(self):
        """Year boundary tests (ticket #3689)"""
        d = Donut.objects.create(name='Date Test 2007',
            baked_date=datetime.datetime(year=2007, month=12, day=31),
            consumed_at=datetime.datetime(year=2007, month=12, day=31, hour=23, minute=59, second=59))
        d1 = Donut.objects.create(name='Date Test 2006',
            baked_date=datetime.datetime(year=2006, month=1, day=1),
            consumed_at=datetime.datetime(year=2006, month=1, day=1))
        self.assertEqual("Date Test 2007",
            Donut.objects.filter(baked_date__year=2007)[0].name)
        self.assertEqual("Date Test 2006",
            Donut.objects.filter(baked_date__year=2006)[0].name)
        d2 = Donut.objects.create(name='Apple Fritter',
            consumed_at=datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59))
        self.assertEqual([u'Apple Fritter', u'Date Test 2007'],
            list(Donut.objects.filter(consumed_at__year=2007).order_by('name').values_list('name', flat=True)))
        self.assertEqual(0, Donut.objects.filter(consumed_at__year=2005).count())
        self.assertEqual(0, Donut.objects.filter(consumed_at__year=2008).count())

    def test_textfields_unicode(self):
        """Regression test for #10238: TextField values returned from the
        database should be unicode."""
        d = Donut.objects.create(name=u'Jelly Donut', review=u'Outstanding')
        newd = Donut.objects.get(id=d.id)
        self.assert_(isinstance(newd.review, unicode))

    def test_tz_awareness_mysql(self):
        """Regression test for #8354: the MySQL backend should raise an error
        if given a timezone-aware datetime object."""
        if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] == 'django.db.backends.mysql':
            dt = datetime.datetime(2008, 8, 31, 16, 20, tzinfo=tzinfo.FixedOffset(0))
            d = Donut(name='Bear claw', consumed_at=dt)
            self.assertRaises(ValueError, d.save)
            # ValueError: MySQL backend does not support timezone-aware datetimes.

    def test_datefield_auto_now_add(self):
        """Regression test for #10970, auto_now_add for DateField should store
        a Python datetime.date, not a datetime.datetime"""
        b = RumBaba.objects.create()
        # Verify we didn't break DateTimeField behavior
        self.assert_(isinstance(b.baked_timestamp, datetime.datetime))
        # We need to test this this way because datetime.datetime inherits
        # from datetime.date:
        self.assert_(isinstance(b.baked_date, datetime.date) and not isinstance(b.baked_date, datetime.datetime))
| adviti/melange | thirdparty/google_appengine/lib/django_1_2/tests/regressiontests/datatypes/tests.py | Python | apache-2.0 | 4,274 |
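
The tests import a `models.py` that is not shown. A hypothetical sketch of the fields they imply (inferred from the assertions above, not taken from the actual Django test suite):

```python
from django.db import models

class Donut(models.Model):
    name = models.CharField(max_length=100)
    is_frosted = models.BooleanField(default=False)
    has_sprinkles = models.NullBooleanField()  # starts out None in the tests
    baked_date = models.DateField(null=True)
    baked_time = models.TimeField(null=True)
    consumed_at = models.DateTimeField(null=True)
    review = models.TextField(blank=True)

class RumBaba(models.Model):
    baked_date = models.DateField(auto_now_add=True)           # must stay a date
    baked_timestamp = models.DateTimeField(auto_now_add=True)  # stays a datetime
```
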
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'QueryHistory.server_name'
        db.add_column('beeswax_queryhistory', 'server_name', self.gf('django.db.models.fields.CharField')(default='', max_length=128), keep_default=False)

        # Adding field 'QueryHistory.server_host'
        db.add_column('beeswax_queryhistory', 'server_host', self.gf('django.db.models.fields.CharField')(default='', max_length=128), keep_default=False)

        # Adding field 'QueryHistory.server_port'
        db.add_column('beeswax_queryhistory', 'server_port', self.gf('django.db.models.fields.SmallIntegerField')(default=0), keep_default=False)

        # Changing field 'QueryHistory.query'
        db.alter_column('beeswax_queryhistory', 'query', self.gf('django.db.models.fields.TextField')())

    def backwards(self, orm):
        # Deleting field 'QueryHistory.server_name'
        db.delete_column('beeswax_queryhistory', 'server_name')

        # Deleting field 'QueryHistory.server_host'
        db.delete_column('beeswax_queryhistory', 'server_host')

        # Deleting field 'QueryHistory.server_port'
        db.delete_column('beeswax_queryhistory', 'server_port')

        # Changing field 'QueryHistory.query'
        db.alter_column('beeswax_queryhistory', 'query', self.gf('django.db.models.fields.CharField')(max_length=1024))

    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'beeswax.metainstall': {
            'Meta': {'object_name': 'MetaInstall'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'installed_example': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
        },
        'beeswax.queryhistory': {
            'Meta': {'object_name': 'QueryHistory'},
            'design': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['beeswax.SavedQuery']", 'null': 'True'}),
            'has_results': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_state': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
            'log_context': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
            'notify': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
            'query': ('django.db.models.fields.TextField', [], {}),
            'server_host': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128'}),
            'server_id': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
            'server_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128'}),
            'server_port': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'submission_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
        },
        'beeswax.savedquery': {
            'Meta': {'object_name': 'SavedQuery'},
            'data': ('django.db.models.fields.TextField', [], {'max_length': '65536'}),
            'desc': ('django.db.models.fields.TextField', [], {'max_length': '1024'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_auto': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
            'mtime': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
            'type': ('django.db.models.fields.IntegerField', [], {})
        },
        'contenttypes.contenttype': {
            'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['beeswax']
| vmanoria/bluemix-hue-filebrowser | hue-3.8.1-bluemix/apps/beeswax/src/beeswax/migrations/0003_auto__add_field_queryhistory_server_name__add_field_queryhistory_serve.py | Python | gpl-2.0 | 7,185 |
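
In model terms, `forwards()` above corresponds roughly to adding these fields to `QueryHistory` (a hypothetical sketch, not the actual beeswax `models.py`):

```python
from django.db import models

class QueryHistory(models.Model):
    server_name = models.CharField(max_length=128, default='')
    server_host = models.CharField(max_length=128, default='')
    server_port = models.SmallIntegerField(default=0)
    query = models.TextField()  # widened from CharField(max_length=1024)
```
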
#!/usr/bin/env python
# JDownloader/src/jd/controlling/LinkGrabberPackager.java
import re
from urlparse import urlparse
def matchFirst(string, *args):
    """Matches against a list of regexps and returns the first match."""
    for patternlist in args:
        for pattern in patternlist:
            r = pattern.search(string)
            if r is not None:
                name = r.group(1)
                return name

    return string


def parseNames(files):
    """ Generates package names from name, data lists

    :param files: list of (name, data)
    :return: package names mapped to data lists (eg. urls)
    """
    packs = {}

    endings = "\\.(3gp|7zip|7z|abr|ac3|aiff|aifc|aif|ai|au|avi|bin|bz2|cbr|cbz|ccf|cue|cvd|chm|dta|deb|divx|djvu|dlc|dmg|doc|docx|dot|eps|exe|ff|flv|f4v|gsd|gif|gz|iwd|iso|ipsw|java|jar|jpg|jpeg|jdeatme|load|mws|mw|m4v|m4a|mkv|mp2|mp3|mp4|mov|movie|mpeg|mpe|mpg|msi|msu|msp|nfo|npk|oga|ogg|ogv|otrkey|pkg|png|pdf|pptx|ppt|pps|ppz|pot|psd|qt|rmvb|rm|rar|ram|ra|rev|rnd|r\\d+|rpm|run|rsdf|rtf|sh(!?tml)|srt|snd|sfv|swf|tar|tif|tiff|ts|txt|viv|vivo|vob|wav|wmv|xla|xls|xpi|zeno|zip|z\\d+|_[_a-z]{2}|\\d+$)"

    rarPats = [re.compile("(.*)(\\.|_|-)pa?r?t?\\.?[0-9]+.(rar|exe)$", re.I),
               re.compile("(.*)(\\.|_|-)part\\.?[0]*[1].(rar|exe)$", re.I),
               re.compile("(.*)\\.rar$", re.I),
               re.compile("(.*)\\.r\\d+$", re.I),
               re.compile("(.*)(\\.|_|-)\\d+$", re.I)]

    zipPats = [re.compile("(.*)\\.zip$", re.I),
               re.compile("(.*)\\.z\\d+$", re.I),
               re.compile("(?is).*\\.7z\\.[\\d]+$", re.I),
               re.compile("(.*)\\.a.$", re.I)]

    ffsjPats = [re.compile("(.*)\\._((_[a-z])|([a-z]{2}))(\\.|$)"),
                re.compile("(.*)(\\.|_|-)[\\d]+(" + endings + "$)", re.I)]

    iszPats = [re.compile("(.*)\\.isz$", re.I),
               re.compile("(.*)\\.i\\d{2}$", re.I)]

    pat1 = re.compile("(\\.?CD\\d+)", re.I)
    pat2 = re.compile("(\\.?part\\d+)", re.I)
    pat3 = re.compile("(.+)[\\.\\-_]+$")
    pat4 = re.compile("(.+)\\.\\d+\\.xtm$")

    for file, url in files:
        patternMatch = False

        if file is None:
            continue

        # remove trailing /
        name = file.rstrip('/')

        # extract last path part .. if there is a path
        split = name.rsplit("/", 1)
        if len(split) > 1:
            name = split.pop(1)

        # check if an already existing package may be ok for this file
        # found = False
        # for pack in packs:
        #     if pack in file:
        #         packs[pack].append(url)
        #         found = True
        #         break
        #
        # if found: continue

        # unrar pattern, 7zip/zip and hjmerge pattern, isz pattern, FFSJ pattern
        before = name
        name = matchFirst(name, rarPats, zipPats, iszPats, ffsjPats)
        if before != name:
            patternMatch = True

        # xtremsplit pattern
        r = pat4.search(name)
        if r is not None:
            name = r.group(1)

        # remove part and cd pattern
        r = pat1.search(name)
        if r is not None:
            name = name.replace(r.group(0), "")
            patternMatch = True

        r = pat2.search(name)
        if r is not None:
            name = name.replace(r.group(0), "")
            patternMatch = True

        # additional checks if extension pattern matched
        if patternMatch:
            # remove extension
            index = name.rfind(".")
            if index <= 0:
                index = name.rfind("_")
            if index > 0:
                length = len(name) - index
                if length <= 4:
                    name = name[:-length]

            # remove endings like . _ -
            r = pat3.search(name)
            if r is not None:
                name = r.group(1)

            # replace . and _ with space
            name = name.replace(".", " ")
            name = name.replace("_", " ")

            name = name.strip()
        else:
            name = ""

        # fallback: package by hoster
        if not name:
            name = urlparse(file).hostname
            if name: name = name.replace("www.", "")

        # fallback : default name
        if not name:
            name = "unknown"

        # build mapping
        if name in packs:
            packs[name].append(url)
        else:
            packs[name] = [url]

    return packs


if __name__ == "__main__":
    from os.path import join
    from pprint import pprint

    f = open(join("..", "..", "testlinks2.txt"), "rb")
    urls = [(x.strip(), x.strip()) for x in f.readlines() if x.strip()]
    f.close()

    print "Having %d urls." % len(urls)

    packs = parseNames(urls)

    pprint(packs)

    print "Got %d urls." % sum([len(x) for x in packs.itervalues()])
| manuelm/pyload | module/common/packagetools.py | Python | gpl-3.0 | 4,869 |
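
An illustrative call under Python 2 (file names and URLs are made up; the grouping follows from the rar patterns above):

```python
links = [("My.Movie.part1.rar", "http://host/1"),
         ("My.Movie.part2.rar", "http://host/2"),
         ("readme.txt", "http://host/3")]
print parseNames(links)
# -> {'My Movie': ['http://host/1', 'http://host/2'],
#     'unknown': ['http://host/3']}  # no pattern match and no hostname fallback
```
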
# -*- coding:utf-8 -*-
#
#
# Copyright (C) 2013 Michael Telahun Makonnen <mmakonnen@gmail.com>.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from . import attendance_summary
| Endika/hr | hr_report_payroll_attendance_summary/wizard/__init__.py | Python | agpl-3.0 | 850 |
"""The ign_sismologia component."""
| fbradyirl/home-assistant | homeassistant/components/ign_sismologia/__init__.py | Python | apache-2.0 | 36 |
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
# test_records = frappe.get_test_records('Student')
class TestStudent(unittest.TestCase):
    pass
| hassanibi/erpnext | erpnext/schools/doctype/student/test_student.py | Python | gpl-3.0 | 270 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to manage mysql replication
(c) 2013, Balazs Pocze <banyek@gawker.com>
Certain parts are taken from Mark Theunissen's mysqldb module
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: mysql_replication
short_description: Manage MySQL replication
description:
- Manages MySQL server replication, slave, master status get and change master host.
version_added: "1.3"
author: "Balazs Pocze (@banyek)"
options:
mode:
description:
- module operating mode. Could be getslave (SHOW SLAVE STATUS), getmaster (SHOW MASTER STATUS), changemaster (CHANGE MASTER TO), startslave
(START SLAVE), stopslave (STOP SLAVE), resetslave (RESET SLAVE), resetslaveall (RESET SLAVE ALL)
required: False
choices:
- getslave
- getmaster
- changemaster
- stopslave
- startslave
- resetslave
- resetslaveall
default: getslave
master_host:
description:
- same as mysql variable
master_user:
description:
- same as mysql variable
master_password:
description:
- same as mysql variable
master_port:
description:
- same as mysql variable
master_connect_retry:
description:
- same as mysql variable
master_log_file:
description:
- same as mysql variable
master_log_pos:
description:
- same as mysql variable
relay_log_file:
description:
- same as mysql variable
relay_log_pos:
description:
- same as mysql variable
master_ssl:
description:
- same as mysql variable
choices: [ 0, 1 ]
master_ssl_ca:
description:
- same as mysql variable
master_ssl_capath:
description:
- same as mysql variable
master_ssl_cert:
description:
- same as mysql variable
master_ssl_key:
description:
- same as mysql variable
master_ssl_cipher:
description:
- same as mysql variable
master_auto_position:
description:
- does the host uses GTID based replication or not
required: false
default: null
version_added: "2.0"
extends_documentation_fragment: mysql
'''
EXAMPLES = '''
# Stop mysql slave thread
- mysql_replication:
mode: stopslave
# Get master binlog file name and binlog position
- mysql_replication:
mode: getmaster
# Change master to master server 192.0.2.1 and use binary log 'mysql-bin.000009' with position 4578
- mysql_replication:
mode: changemaster
master_host: 192.0.2.1
master_log_file: mysql-bin.000009
master_log_pos: 4578
# Check slave status using port 3308
- mysql_replication:
mode: getslave
login_host: ansible.example.com
login_port: 3308
'''
import os
import warnings
try:
import MySQLdb
except ImportError:
mysqldb_found = False
else:
mysqldb_found = True
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.mysql import mysql_connect
from ansible.module_utils.pycompat24 import get_exception
def get_master_status(cursor):
cursor.execute("SHOW MASTER STATUS")
masterstatus = cursor.fetchone()
return masterstatus
def get_slave_status(cursor):
cursor.execute("SHOW SLAVE STATUS")
slavestatus = cursor.fetchone()
return slavestatus
def stop_slave(cursor):
try:
cursor.execute("STOP SLAVE")
stopped = True
except:
stopped = False
return stopped
def reset_slave(cursor):
try:
cursor.execute("RESET SLAVE")
reset = True
except:
reset = False
return reset
def reset_slave_all(cursor):
try:
cursor.execute("RESET SLAVE ALL")
reset = True
except:
reset = False
return reset
def start_slave(cursor):
try:
cursor.execute("START SLAVE")
started = True
except:
started = False
return started
def changemaster(cursor, chm, chm_params):
sql_param = ",".join(chm)
query = 'CHANGE MASTER TO %s' % sql_param
cursor.execute(query, chm_params)
def main():
module = AnsibleModule(
argument_spec=dict(
login_user=dict(default=None),
login_password=dict(default=None, no_log=True),
login_host=dict(default="localhost"),
login_port=dict(default=3306, type='int'),
login_unix_socket=dict(default=None),
mode=dict(default="getslave", choices=["getmaster", "getslave", "changemaster", "stopslave", "startslave", "resetslave", "resetslaveall"]),
master_auto_position=dict(default=False, type='bool'),
master_host=dict(default=None),
master_user=dict(default=None),
master_password=dict(default=None, no_log=True),
master_port=dict(default=None, type='int'),
master_connect_retry=dict(default=None, type='int'),
master_log_file=dict(default=None),
master_log_pos=dict(default=None, type='int'),
relay_log_file=dict(default=None),
relay_log_pos=dict(default=None, type='int'),
master_ssl=dict(default=False, type='bool'),
master_ssl_ca=dict(default=None),
master_ssl_capath=dict(default=None),
master_ssl_cert=dict(default=None),
master_ssl_key=dict(default=None),
master_ssl_cipher=dict(default=None),
connect_timeout=dict(default=30, type='int'),
config_file=dict(default="~/.my.cnf", type='path'),
ssl_cert=dict(default=None),
ssl_key=dict(default=None),
ssl_ca=dict(default=None),
)
)
mode = module.params["mode"]
master_host = module.params["master_host"]
master_user = module.params["master_user"]
master_password = module.params["master_password"]
master_port = module.params["master_port"]
master_connect_retry = module.params["master_connect_retry"]
master_log_file = module.params["master_log_file"]
master_log_pos = module.params["master_log_pos"]
relay_log_file = module.params["relay_log_file"]
relay_log_pos = module.params["relay_log_pos"]
master_ssl = module.params["master_ssl"]
master_ssl_ca = module.params["master_ssl_ca"]
master_ssl_capath = module.params["master_ssl_capath"]
master_ssl_cert = module.params["master_ssl_cert"]
master_ssl_key = module.params["master_ssl_key"]
master_ssl_cipher = module.params["master_ssl_cipher"]
master_auto_position = module.params["master_auto_position"]
ssl_cert = module.params["ssl_cert"]
ssl_key = module.params["ssl_key"]
ssl_ca = module.params["ssl_ca"]
connect_timeout = module.params['connect_timeout']
config_file = module.params['config_file']
if not mysqldb_found:
module.fail_json(msg="the python mysqldb module is required")
else:
warnings.filterwarnings('error', category=MySQLdb.Warning)
login_password = module.params["login_password"]
login_user = module.params["login_user"]
try:
cursor = mysql_connect(module, login_user, login_password, config_file, ssl_cert, ssl_key, ssl_ca, None, 'MySQLdb.cursors.DictCursor',
connect_timeout=connect_timeout)
except Exception:
e = get_exception()
if os.path.exists(config_file):
module.fail_json(msg="unable to connect to database, check login_user and login_password are correct or %s has the credentials. "
"Exception message: %s" % (config_file, e))
else:
module.fail_json(msg="unable to find %s. Exception message: %s" % (config_file, e))
if mode in "getmaster":
status = get_master_status(cursor)
if not isinstance(status, dict):
status = dict(Is_Master=False, msg="Server is not configured as mysql master")
else:
status['Is_Master'] = True
module.exit_json(**status)
elif mode in "getslave":
status = get_slave_status(cursor)
if not isinstance(status, dict):
status = dict(Is_Slave=False, msg="Server is not configured as mysql slave")
else:
status['Is_Slave'] = True
module.exit_json(**status)
elif mode in "changemaster":
chm = []
chm_params = {}
result = {}
if master_host:
chm.append("MASTER_HOST=%(master_host)s")
chm_params['master_host'] = master_host
if master_user:
chm.append("MASTER_USER=%(master_user)s")
chm_params['master_user'] = master_user
if master_password:
chm.append("MASTER_PASSWORD=%(master_password)s")
chm_params['master_password'] = master_password
if master_port is not None:
chm.append("MASTER_PORT=%(master_port)s")
chm_params['master_port'] = master_port
if master_connect_retry is not None:
chm.append("MASTER_CONNECT_RETRY=%(master_connect_retry)s")
chm_params['master_connect_retry'] = master_connect_retry
if master_log_file:
chm.append("MASTER_LOG_FILE=%(master_log_file)s")
chm_params['master_log_file'] = master_log_file
if master_log_pos is not None:
chm.append("MASTER_LOG_POS=%(master_log_pos)s")
chm_params['master_log_pos'] = master_log_pos
if relay_log_file:
chm.append("RELAY_LOG_FILE=%(relay_log_file)s")
chm_params['relay_log_file'] = relay_log_file
if relay_log_pos is not None:
chm.append("RELAY_LOG_POS=%(relay_log_pos)s")
chm_params['relay_log_pos'] = relay_log_pos
if master_ssl:
chm.append("MASTER_SSL=1")
if master_ssl_ca:
chm.append("MASTER_SSL_CA=%(master_ssl_ca)s")
chm_params['master_ssl_ca'] = master_ssl_ca
if master_ssl_capath:
chm.append("MASTER_SSL_CAPATH=%(master_ssl_capath)s")
chm_params['master_ssl_capath'] = master_ssl_capath
if master_ssl_cert:
chm.append("MASTER_SSL_CERT=%(master_ssl_cert)s")
chm_params['master_ssl_cert'] = master_ssl_cert
if master_ssl_key:
chm.append("MASTER_SSL_KEY=%(master_ssl_key)s")
chm_params['master_ssl_key'] = master_ssl_key
if master_ssl_cipher:
chm.append("MASTER_SSL_CIPHER=%(master_ssl_cipher)s")
chm_params['master_ssl_cipher'] = master_ssl_cipher
if master_auto_position:
chm.append("MASTER_AUTO_POSITION = 1")
try:
changemaster(cursor, chm, chm_params)
except MySQLdb.Warning:
e = get_exception()
result['warning'] = str(e)
except Exception:
e = get_exception()
module.fail_json(msg='%s. Query == CHANGE MASTER TO %s' % (e, chm))
result['changed'] = True
module.exit_json(**result)
elif mode in "startslave":
started = start_slave(cursor)
if started is True:
module.exit_json(msg="Slave started ", changed=True)
else:
module.exit_json(msg="Slave already started (Or cannot be started)", changed=False)
elif mode in "stopslave":
stopped = stop_slave(cursor)
if stopped is True:
module.exit_json(msg="Slave stopped", changed=True)
else:
module.exit_json(msg="Slave already stopped", changed=False)
elif mode in "resetslave":
reset = reset_slave(cursor)
if reset is True:
module.exit_json(msg="Slave reset", changed=True)
else:
module.exit_json(msg="Slave already reset", changed=False)
elif mode in "resetslaveall":
reset = reset_slave_all(cursor)
if reset is True:
module.exit_json(msg="Slave reset", changed=True)
else:
module.exit_json(msg="Slave already reset", changed=False)
if __name__ == '__main__':
main()
warnings.simplefilter("ignore")
| andreaso/ansible | lib/ansible/modules/database/mysql/mysql_replication.py | Python | gpl-3.0 | 13,039 |
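
The `changemaster` branch assembles the statement incrementally and lets MySQLdb substitute the values. A standalone sketch of that string assembly (toy values, no database needed):

```python
chm = ["MASTER_HOST=%(master_host)s", "MASTER_LOG_POS=%(master_log_pos)s"]
chm_params = {"master_host": "192.0.2.1", "master_log_pos": 4578}
query = 'CHANGE MASTER TO %s' % ",".join(chm)
print(query)
# CHANGE MASTER TO MASTER_HOST=%(master_host)s,MASTER_LOG_POS=%(master_log_pos)s
# cursor.execute(query, chm_params) then escapes the values safely.
```
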
# stdlib
import threading
import time
from types import ListType
import unittest
# 3p
from nose.plugins.attrib import attr
# project
from aggregator import MetricsAggregator
from dogstatsd import Server
from jmxfetch import JMXFetch
from tests.checks.common import Fixtures
STATSD_PORT = 8121
class DummyReporter(threading.Thread):
    def __init__(self, metrics_aggregator):
        threading.Thread.__init__(self)
        self.finished = threading.Event()
        self.metrics_aggregator = metrics_aggregator
        self.interval = 10
        self.metrics = None
        self.finished = False
        self.start()

    def run(self):
        while not self.finished:
            time.sleep(self.interval)
            self.flush()

    def flush(self):
        metrics = self.metrics_aggregator.flush()
        if metrics:
            self.metrics = metrics


@attr(requires='cassandra')
class JMXTestCase(unittest.TestCase):
    def setUp(self):
        aggregator = MetricsAggregator("test_host")
        self.server = Server(aggregator, "localhost", STATSD_PORT)
        self.reporter = DummyReporter(aggregator)
        self.t1 = threading.Thread(target=self.server.start)
        self.t1.start()

        confd_path = Fixtures.directory()
        self.jmx_daemon = JMXFetch(confd_path, {'dogstatsd_port': STATSD_PORT})
        self.t2 = threading.Thread(target=self.jmx_daemon.run)
        self.t2.start()

    def tearDown(self):
        self.server.stop()
        self.reporter.finished = True
        self.jmx_daemon.terminate()

    def testCustomJMXMetric(self):
        count = 0
        while self.reporter.metrics is None:
            time.sleep(1)
            count += 1
            if count > 25:
                raise Exception("No metrics were received in 25 seconds")

        metrics = self.reporter.metrics
        self.assertTrue(isinstance(metrics, ListType))
        self.assertTrue(len(metrics) > 0)
        self.assertTrue(len([t for t in metrics if "cassandra.db." in t['metric'] and "instance:cassandra_instance" in t['tags']]) > 40, metrics)
| WPMedia/dd-agent | tests/checks/integration/test_cassandra.py | Python | bsd-3-clause | 2,080 |
#!/usr/bin/python
# Copyright 2003 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This tests correct handling of dependencies, specifically, on generated
# sources, and from generated sources.
import BoostBuild
import string
t = BoostBuild.Tester(pass_toolset=0)
t.write("core-dependency-helpers", """
rule hdrrule
{
INCLUDES $(1) : $(2) ;
}
actions copy
{
cp $(>) $(<)
}
""")
code = """include core-dependency-helpers ;
DEPENDS all : a ;
DEPENDS a : b ;
actions create-b
{
echo '#include <foo.h>' > $(<)
}
copy a : b ;
create-b b ;
HDRRULE on b foo.h bar.h = hdrrule ;
HDRSCAN on b foo.h bar.h = \"#include <(.*)>\" ;
"""
# This creates 'a' which depends on 'b', which is generated. The generated 'b'
# contains '#include <foo.h>' and no rules for foo.h are given. The system
# should error out on the first invocation.
t.run_build_system("-f-", stdin=code)
t.fail_test(string.find(t.stdout(), "...skipped a for lack of foo.h...") == -1)
t.rm('b')
# Now test that if target 'c' also depends on 'b', then it will not be built, as
# well.
t.run_build_system("-f-", stdin=code + " copy c : b ; DEPENDS c : b ; DEPENDS all : c ; ")
t.fail_test(string.find(t.stdout(), "...skipped c for lack of foo.h...") == -1)
t.rm('b')
# Now add a rule for creating foo.h.
code += """
actions create-foo
{
echo // > $(<)
}
create-foo foo.h ;
"""
t.run_build_system("-f-", stdin=code)
# Run two times, adding explicit dependency from all to foo.h at the beginning
# and at the end, to make sure that foo.h is generated before 'a' in all cases.
def mk_correct_order_func(s1, s2):
    def correct_order(s):
        n1 = string.find(s, s1)
        n2 = string.find(s, s2)
        return ( n1 != -1 ) and ( n2 != -1 ) and ( n1 < n2 )
    return correct_order
correct_order = mk_correct_order_func("create-foo", "copy a")
t.rm(["a", "b", "foo.h"])
t.run_build_system("-d+2 -f-", stdin=code + " DEPENDS all : foo.h ;")
t.fail_test(not correct_order(t.stdout()))
t.rm(["a", "b", "foo.h"])
t.run_build_system("-d+2 -f-", stdin=" DEPENDS all : foo.h ; " + code)
t.fail_test(not correct_order(t.stdout()))
# Now foo.h exists. Test include from b -> foo.h -> bar.h -> biz.h. b and foo.h
# already have updating actions.
t.rm(["a", "b"])
t.write("foo.h", "#include <bar.h>")
t.write("bar.h", "#include <biz.h>")
t.run_build_system("-d+2 -f-", stdin=code)
t.fail_test(string.find(t.stdout(), "...skipped a for lack of biz.h...") == -1)
# Add an action for biz.h.
code += """
actions create-biz
{
echo // > $(<)
}
create-biz biz.h ;
"""
t.rm(["b"])
correct_order = mk_correct_order_func("create-biz", "copy a")
t.run_build_system("-d+2 -f-", stdin=code + " DEPENDS all : biz.h ;")
t.fail_test(not correct_order(t.stdout()))
t.rm(["a", "biz.h"])
t.run_build_system("-d+2 -f-", stdin=" DEPENDS all : biz.h ; " + code)
t.fail_test(not correct_order(t.stdout()))
t.write("a", "")
code="""
DEPENDS all : main d ;
actions copy
{
cp $(>) $(<) ;
}
DEPENDS main : a ;
copy main : a ;
INCLUDES a : <1>c ;
NOCARE <1>c ;
SEARCH on <1>c = . ;
actions create-c
{
echo d > $(<)
}
actions create-d
{
echo // > $(<)
}
create-c <2>c ;
LOCATE on <2>c = . ;
create-d d ;
HDRSCAN on <1>c = (.*) ;
HDRRULE on <1>c = hdrrule ;
rule hdrrule
{
INCLUDES $(1) : d ;
}
"""
correct_order = mk_correct_order_func("create-d", "copy main")
t.run_build_system("-d2 -f-", stdin=code)
t.fail_test(not correct_order(t.stdout()))
t.cleanup()
| NixaSoftware/CVis | venv/bin/tools/build/v2/test/core_dependencies.py | Python | apache-2.0 | 3,575 |
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
# test_records = frappe.get_test_records('Student Batch Name')
class TestStudentBatchName(unittest.TestCase):
    pass
| manqala/erpnext | erpnext/schools/doctype/student_batch_name/test_student_batch_name.py | Python | gpl-3.0 | 300 |
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import sys
from .escsm import HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel, ISO2022KRSMModel
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
class EscCharSetProber(CharSetProber):
    def __init__(self):
        CharSetProber.__init__(self)
        self._mCodingSM = [
            CodingStateMachine(HZSMModel),
            CodingStateMachine(ISO2022CNSMModel),
            CodingStateMachine(ISO2022JPSMModel),
            CodingStateMachine(ISO2022KRSMModel)
        ]
        self.reset()

    def reset(self):
        CharSetProber.reset(self)
        for codingSM in self._mCodingSM:
            if not codingSM:
                continue
            codingSM.active = True
            codingSM.reset()
        self._mActiveSM = len(self._mCodingSM)
        self._mDetectedCharset = None

    def get_charset_name(self):
        return self._mDetectedCharset

    def get_confidence(self):
        if self._mDetectedCharset:
            return 0.99
        else:
            return 0.00

    def feed(self, aBuf):
        for c in aBuf:
            # PY3K: aBuf is a byte array, so c is an int, not a byte
            for codingSM in self._mCodingSM:
                if not codingSM:
                    continue
                if not codingSM.active:
                    continue
                codingState = codingSM.next_state(c)
                if codingState == constants.eError:
                    codingSM.active = False
                    self._mActiveSM -= 1
                    if self._mActiveSM <= 0:
                        self._mState = constants.eNotMe
                        return self.get_state()
                elif codingState == constants.eItsMe:
                    self._mState = constants.eFoundIt
                    self._mDetectedCharset = codingSM.get_coding_state_machine()
                    return self.get_state()

        return self.get_state()
| Branlala/docker-sickbeardfr | sickbeard/lib/requests/packages/chardet2/escprober.py | Python | mit | 3,094 |
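
A minimal usage sketch, assuming the prober class above is importable (the byte string is a made-up ISO-2022-JP escape sequence):

```python
prober = EscCharSetProber()
prober.feed(b"\x1b$B")             # ESC $ B opens ISO-2022-JP
print(prober.get_charset_name())   # e.g. "ISO-2022-JP" once a machine hits eItsMe
print(prober.get_confidence())     # 0.99 after detection, 0.0 otherwise
```
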
import numpy as np
from numpy.testing import assert_
import scipy.special.orthogonal as orth
from scipy.special._testutils import FuncData
def test_eval_chebyt():
    n = np.arange(0, 10000, 7)
    x = 2*np.random.rand() - 1
    v1 = np.cos(n*np.arccos(x))
    v2 = orth.eval_chebyt(n, x)
    assert_(np.allclose(v1, v2, rtol=1e-15))


def test_warnings():
    # ticket 1334
    olderr = np.seterr(all='raise')
    try:
        # these should raise no fp warnings
        orth.eval_legendre(1, 0)
        orth.eval_laguerre(1, 1)
        orth.eval_gegenbauer(1, 1, 0)
    finally:
        np.seterr(**olderr)


class TestPolys(object):
    """
    Check that the eval_* functions agree with the constructed polynomials
    """

    def check_poly(self, func, cls, param_ranges=[], x_range=[], nn=10,
                   nparam=10, nx=10, rtol=1e-8):
        np.random.seed(1234)

        dataset = []
        for n in np.arange(nn):
            params = [a + (b-a)*np.random.rand(nparam) for a,b in param_ranges]
            params = np.asarray(params).T
            if not param_ranges:
                params = [0]
            for p in params:
                if param_ranges:
                    p = (n,) + tuple(p)
                else:
                    p = (n,)
                x = x_range[0] + (x_range[1] - x_range[0])*np.random.rand(nx)
                x[0] = x_range[0]  # always include domain start point
                x[1] = x_range[1]  # always include domain end point
                poly = np.poly1d(cls(*p))
                z = np.c_[np.tile(p, (nx,1)), x, poly(x)]
                dataset.append(z)

        dataset = np.concatenate(dataset, axis=0)

        def polyfunc(*p):
            p = (p[0].astype(int),) + p[1:]
            return func(*p)

        olderr = np.seterr(all='raise')
        try:
            ds = FuncData(polyfunc, dataset, range(len(param_ranges)+2), -1,
                          rtol=rtol)
            ds.check()
        finally:
            np.seterr(**olderr)

    def test_jacobi(self):
        self.check_poly(orth.eval_jacobi, orth.jacobi,
                        param_ranges=[(-0.99, 10), (-0.99, 10)], x_range=[-1, 1],
                        rtol=1e-5)

    def test_sh_jacobi(self):
        self.check_poly(orth.eval_sh_jacobi, orth.sh_jacobi,
                        param_ranges=[(1, 10), (0, 1)], x_range=[0, 1],
                        rtol=1e-5)

    def test_gegenbauer(self):
        self.check_poly(orth.eval_gegenbauer, orth.gegenbauer,
                        param_ranges=[(-0.499, 10)], x_range=[-1, 1],
                        rtol=1e-7)

    def test_chebyt(self):
        self.check_poly(orth.eval_chebyt, orth.chebyt,
                        param_ranges=[], x_range=[-1, 1])

    def test_chebyu(self):
        self.check_poly(orth.eval_chebyu, orth.chebyu,
                        param_ranges=[], x_range=[-1, 1])

    def test_chebys(self):
        self.check_poly(orth.eval_chebys, orth.chebys,
                        param_ranges=[], x_range=[-2, 2])

    def test_chebyc(self):
        self.check_poly(orth.eval_chebyc, orth.chebyc,
                        param_ranges=[], x_range=[-2, 2])

    def test_sh_chebyt(self):
        olderr = np.seterr(all='ignore')
        try:
            self.check_poly(orth.eval_sh_chebyt, orth.sh_chebyt,
                            param_ranges=[], x_range=[0, 1])
        finally:
            np.seterr(**olderr)

    def test_sh_chebyu(self):
        self.check_poly(orth.eval_sh_chebyu, orth.sh_chebyu,
                        param_ranges=[], x_range=[0, 1])

    def test_legendre(self):
        self.check_poly(orth.eval_legendre, orth.legendre,
                        param_ranges=[], x_range=[-1, 1])

    def test_sh_legendre(self):
        olderr = np.seterr(all='ignore')
        try:
            self.check_poly(orth.eval_sh_legendre, orth.sh_legendre,
                            param_ranges=[], x_range=[0, 1])
        finally:
            np.seterr(**olderr)

    def test_genlaguerre(self):
        self.check_poly(orth.eval_genlaguerre, orth.genlaguerre,
                        param_ranges=[(-0.99, 10)], x_range=[0, 100])

    def test_laguerre(self):
        self.check_poly(orth.eval_laguerre, orth.laguerre,
                        param_ranges=[], x_range=[0, 100])

    def test_hermite(self):
        self.check_poly(orth.eval_hermite, orth.hermite,
                        param_ranges=[], x_range=[-100, 100])

    def test_hermitenorm(self):
        self.check_poly(orth.eval_hermitenorm, orth.hermitenorm,
                        param_ranges=[], x_range=[-100, 100])
| lesserwhirls/scipy-cwt | scipy/special/tests/test_orthogonal_eval.py | Python | bsd-3-clause | 4,547 |
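
A quick standalone check of the identity `test_eval_chebyt` relies on, T_n(x) = cos(n*arccos(x)):

```python
import numpy as np
import scipy.special.orthogonal as orth

x = 0.5
print(orth.eval_chebyt(3, x))    # -1.0, since T_3(x) = 4*x**3 - 3*x
print(np.cos(3 * np.arccos(x)))  # -1.0 as well
```
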
# -*- twisted.conch.test.test_mixin -*-
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
import time
from twisted.internet import reactor, protocol
from twisted.trial import unittest
from twisted.test.proto_helpers import StringTransport
from twisted.conch import mixin
class TestBufferingProto(mixin.BufferingMixin):
    scheduled = False
    rescheduled = 0

    def schedule(self):
        self.scheduled = True
        return object()

    def reschedule(self, token):
        self.rescheduled += 1


class BufferingTest(unittest.TestCase):
    def testBuffering(self):
        p = TestBufferingProto()
        t = p.transport = StringTransport()

        self.failIf(p.scheduled)

        L = ['foo', 'bar', 'baz', 'quux']

        p.write('foo')
        self.failUnless(p.scheduled)
        self.failIf(p.rescheduled)

        for s in L:
            n = p.rescheduled
            p.write(s)
            self.assertEquals(p.rescheduled, n + 1)
            self.assertEquals(t.value(), '')

        p.flush()
        self.assertEquals(t.value(), 'foo' + ''.join(L))
| sorenh/cc | vendor/Twisted-10.0.0/twisted/conch/test/test_mixin.py | Python | apache-2.0 | 1,110 |
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .iq import IqProtocolEntity
class PingIqProtocolEntity(IqProtocolEntity):
    '''
    Receive
    <iq type="get" xmlns="urn:xmpp:ping" from="s.whatsapp.net" id="1416174955-ping">
    </iq>

    Send
    <iq type="get" xmlns="w:p" to="s.whatsapp.net" id="1416174955-ping">
    </iq>
    '''

    def __init__(self, _from = None, to = None, _id = None):
        super(PingIqProtocolEntity, self).__init__("urn:xmpp:ping" if _from else "w:p", _id = _id, _type = "get", _from = _from, to = to)
| felix-dumit/campusbot | yowsup2/yowsup/layers/protocol_iq/protocolentities/iq_ping.py | Python | mit | 555 |
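
Constructing entities for both directions described in the docstring (the ids are made up):

```python
# Outgoing ping: no _from, so the xmlns falls back to "w:p".
out = PingIqProtocolEntity(to="s.whatsapp.net", _id="1416174955-ping")
# Incoming ping: a _from switches the xmlns to "urn:xmpp:ping".
inc = PingIqProtocolEntity(_from="s.whatsapp.net", _id="1416174955-ping")
```
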
#
# Python Imaging Library
# $Id$
#
# stuff to read simple, teragon-style palette files
#
# History:
# 97-08-23 fl Created
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1997.
#
# See the README file for information on usage and redistribution.
#
from PIL._binary import o8
##
# File handler for Teragon-style palette files.
class PaletteFile(object):

    rawmode = "RGB"

    def __init__(self, fp):

        # Start from a greyscale ramp; entries are raw RGB byte triples so
        # that the final join below always produces a bytes object.
        self.palette = [o8(i) * 3 for i in range(256)]

        while True:

            s = fp.readline()

            if not s:
                break
            if s[0:1] == b"#":
                continue
            if len(s) > 100:
                raise SyntaxError("bad palette file")

            v = [int(x) for x in s.split()]
            try:
                [i, r, g, b] = v
            except ValueError:
                [i, r] = v
                g = b = r

            if 0 <= i <= 255:
                self.palette[i] = o8(r) + o8(g) + o8(b)

        self.palette = b"".join(self.palette)

    def getpalette(self):
        return self.palette, self.rawmode
| Microvellum/Fluid-Designer | win64-vc/2.78/Python/lib/site-packages/PIL/PaletteFile.py | Python | gpl-3.0 | 1,113 |
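
A minimal usage sketch with a hypothetical palette file on disk:

```python
with open("example.pal", "rb") as fp:  # hypothetical path
    palette, rawmode = PaletteFile(fp).getpalette()
print(rawmode, len(palette))           # "RGB", 768 (256 entries * 3 bytes)
```
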
"""
Gnome keyring parser.
Sources:
- Gnome Keyring source code,
function generate_file() in keyrings/gkr-keyring.c,
Author: Victor Stinner
Creation date: 2008-04-09
"""
from hachoir_core.tools import paddingSize
from hachoir_parser import Parser
from hachoir_core.field import (FieldSet,
Bit, NullBits, NullBytes,
UInt8, UInt32, String, RawBytes, Enum,
TimestampUnix64, CompressedField,
SubFile)
from hachoir_core.endian import BIG_ENDIAN
try:
import hashlib
def sha256(data):
hash = hashlib.new('sha256')
hash.update(data)
return hash.digest()
except ImportError:
def sha256(data):
raise ImportError("hashlib module is missing")
try:
from Crypto.Cipher import AES
class DeflateStream:
def __init__(self, stream):
hash_iterations = 1234
password = "x" * 8
salt = "\0" * 8
key, iv = generate_key(password, salt, hash_iterations)
self.cipher = AES.new(key, AES.MODE_CBC, iv)
def __call__(self, size, data=None):
if data is None:
return ''
return self.cipher.decrypt(data)
def Deflate(field):
CompressedField(field, DeflateStream)
return field
except ImportError:
def Deflate(field):
return field
class KeyringString(FieldSet):
def createFields(self):
yield UInt32(self, "length")
length = self["length"].value
if length == 0xffffffff:
return
yield String(self, "text", length, charset="UTF-8")
def createValue(self):
if "text" in self:
return self["text"].value
else:
return u''
def createDescription(self):
if "text" in self:
return self["text"].value
else:
return u"(empty string)"
class Attribute(FieldSet):
def createFields(self):
yield KeyringString(self, "name")
yield UInt32(self, "type")
type = self["type"].value
if type == 0:
yield KeyringString(self, "value")
elif type == 1:
yield UInt32(self, "value")
else:
raise TypeError("Unknown attribute type (%s)" % type)
def createDescription(self):
return 'Attribute "%s"' % self["name"].value
class ACL(FieldSet):
def createFields(self):
yield UInt32(self, "types_allowed")
yield KeyringString(self, "display_name")
yield KeyringString(self, "pathname")
yield KeyringString(self, "reserved[]")
yield UInt32(self, "reserved[]")
class Item(FieldSet):
def createFields(self):
yield UInt32(self, "id")
yield UInt32(self, "type")
yield UInt32(self, "attr_count")
for index in xrange(self["attr_count"].value):
yield Attribute(self, "attr[]")
def createDescription(self):
return "Item #%s: %s attributes" % (self["id"].value, self["attr_count"].value)
class Items(FieldSet):
def createFields(self):
yield UInt32(self, "count")
for index in xrange(self["count"].value):
yield Item(self, "item[]")
class EncryptedItem(FieldSet):
def createFields(self):
yield KeyringString(self, "display_name")
yield KeyringString(self, "secret")
yield TimestampUnix64(self, "mtime")
yield TimestampUnix64(self, "ctime")
yield KeyringString(self, "reserved[]")
for index in xrange(4):
yield UInt32(self, "reserved[]")
yield UInt32(self, "attr_count")
for index in xrange(self["attr_count"].value):
yield Attribute(self, "attr[]")
yield UInt32(self, "acl_count")
for index in xrange(self["acl_count"].value):
yield ACL(self, "acl[]")
# size = 8 # paddingSize((self.stream.size - self.current_size) // 8, 16)
# if size:
# yield NullBytes(self, "hash_padding", size, "16 bytes alignment")
class EncryptedData(Parser):
PARSER_TAGS = {
"id": "gnomeencryptedkeyring",
"min_size": 16*8,
"description": u"Gnome encrypted keyring",
}
endian = BIG_ENDIAN
def validate(self):
return True
def createFields(self):
yield RawBytes(self, "md5", 16)
while True:
size = (self.size - self.current_size) // 8
if size < 77:
break
yield EncryptedItem(self, "item[]")
size = paddingSize(self.current_size // 8, 16)
if size:
yield NullBytes(self, "padding_align", size)
class GnomeKeyring(Parser):
MAGIC = "GnomeKeyring\n\r\0\n"
PARSER_TAGS = {
"id": "gnomekeyring",
"category": "misc",
"magic": ((MAGIC, 0),),
"min_size": 47*8,
"description": u"Gnome keyring",
}
CRYPTO_NAMES = {
0: u"AEL",
}
HASH_NAMES = {
0: u"MD5",
}
endian = BIG_ENDIAN
def validate(self):
if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC:
return u"Invalid magic string"
return True
def createFields(self):
yield String(self, "magic", len(self.MAGIC), 'Magic string (%r)' % self.MAGIC, charset="ASCII")
yield UInt8(self, "major_version")
yield UInt8(self, "minor_version")
yield Enum(UInt8(self, "crypto"), self.CRYPTO_NAMES)
yield Enum(UInt8(self, "hash"), self.HASH_NAMES)
yield KeyringString(self, "keyring_name")
yield TimestampUnix64(self, "mtime")
yield TimestampUnix64(self, "ctime")
yield Bit(self, "lock_on_idle")
yield NullBits(self, "reserved[]", 31, "Reserved for future flags")
yield UInt32(self, "lock_timeout")
yield UInt32(self, "hash_iterations")
yield RawBytes(self, "salt", 8)
yield NullBytes(self, "reserved[]", 16)
yield Items(self, "items")
yield UInt32(self, "encrypted_size")
yield Deflate(SubFile(self, "encrypted", self["encrypted_size"].value, "AES128 CBC", parser_class=EncryptedData))
def generate_key(password, salt, hash_iterations):
sha = sha256(password+salt)
for index in xrange(hash_iterations-1):
sha = sha256(sha)
return sha[:16], sha[16:]
| kreatorkodi/repository.torrentbr | plugin.video.yatp/site-packages/hachoir_parser/misc/gnome_keyring.py | Python | gpl-2.0 | 6,255 |
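
The key schedule in `generate_key` is iterated SHA-256 over `password + salt`, split into a 16-byte AES key and a 16-byte IV. A toy invocation (values mirror the hard-coded ones in `DeflateStream` above):

```python
key, iv = generate_key("x" * 8, "\0" * 8, 1234)
assert len(key) == 16 and len(iv) == 16
```
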
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import controllers
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| diogocs1/comps | web/addons/hw_scanner/__init__.py | Python | apache-2.0 | 1,075 |
"""Get log settings."""
import os
import platform
import sys
from logging.handlers import SysLogHandler
LOG_LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
def get_logger_config(log_dir,
logging_env="no_env",
tracking_filename="tracking.log",
edx_filename="edx.log",
dev_env=False,
syslog_addr=None,
debug=False,
local_loglevel='INFO',
console_loglevel=None,
service_variant=None):
"""
Return the appropriate logging config dictionary. You should assign the
result of this to the LOGGING var in your settings. The reason it's done
this way instead of registering directly is because I didn't want to worry
about resetting the logging state if this is called multiple times when
settings are extended.
If dev_env is set to true logging will not be done via local rsyslogd,
instead, tracking and application logs will be dropped in log_dir.
"tracking_filename" and "edx_filename" are ignored unless dev_env
is set to true since otherwise logging is handled by rsyslogd.
"""
# Revert to INFO if an invalid string is passed in
if local_loglevel not in LOG_LEVELS:
local_loglevel = 'INFO'
if console_loglevel is None or console_loglevel not in LOG_LEVELS:
console_loglevel = 'DEBUG' if debug else 'INFO'
if service_variant is None:
# default to a blank string so that if SERVICE_VARIANT is not
# set we will not log to a sub directory
service_variant = ''
hostname = platform.node().split(".")[0]
syslog_format = ("[service_variant={service_variant}]"
"[%(name)s][env:{logging_env}] %(levelname)s "
"[{hostname} %(process)d] [%(filename)s:%(lineno)d] "
"- %(message)s").format(service_variant=service_variant,
logging_env=logging_env,
hostname=hostname)
handlers = ['console', 'local']
if syslog_addr:
handlers.append('syslogger-remote')
logger_config = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': '%(asctime)s %(levelname)s %(process)d '
'[%(name)s] %(filename)s:%(lineno)d - %(message)s',
},
'syslog_format': {'format': syslog_format},
'raw': {'format': '%(message)s'},
},
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse',
}
},
'handlers': {
'console': {
'level': console_loglevel,
'class': 'logging.StreamHandler',
'formatter': 'standard',
'stream': sys.stderr,
},
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'newrelic': {
'level': 'ERROR',
'class': 'lms.lib.newrelic_logging.NewRelicHandler',
'formatter': 'raw',
}
},
'loggers': {
'tracking': {
'handlers': ['tracking'],
'level': 'DEBUG',
'propagate': False,
},
'': {
'handlers': handlers,
'level': 'DEBUG',
'propagate': False
},
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
if syslog_addr:
logger_config['handlers'].update({
'syslogger-remote': {
'level': 'INFO',
'class': 'logging.handlers.SysLogHandler',
'address': syslog_addr,
'formatter': 'syslog_format',
},
})
if dev_env:
tracking_file_loc = os.path.join(log_dir, tracking_filename)
edx_file_loc = os.path.join(log_dir, edx_filename)
logger_config['handlers'].update({
'local': {
'class': 'logging.handlers.RotatingFileHandler',
'level': local_loglevel,
'formatter': 'standard',
'filename': edx_file_loc,
'maxBytes': 1024 * 1024 * 2,
'backupCount': 5,
},
'tracking': {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': tracking_file_loc,
'formatter': 'raw',
'maxBytes': 1024 * 1024 * 2,
'backupCount': 5,
},
})
else:
# for production environments we will only
# log INFO and up
logger_config['loggers']['']['level'] = 'INFO'
logger_config['handlers'].update({
'local': {
'level': local_loglevel,
'class': 'logging.handlers.SysLogHandler',
'address': '/dev/log',
'formatter': 'syslog_format',
'facility': SysLogHandler.LOG_LOCAL0,
},
'tracking': {
'level': 'DEBUG',
'class': 'logging.handlers.SysLogHandler',
'address': '/dev/log',
'facility': SysLogHandler.LOG_LOCAL1,
'formatter': 'raw',
},
})
return logger_config
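if __name__ == '__main__':
    # Minimal usage sketch (an assumption-laden example, not part of the
    # module): it presumes an edx-platform environment where the Django and
    # New Relic handler classes referenced above are importable, and uses a
    # hypothetical '/tmp/edx-logs' directory for the dev-mode file handlers.
    import logging
    import logging.config
    log_dir = '/tmp/edx-logs'
    if not os.path.isdir(log_dir):
        os.makedirs(log_dir)
    logging.config.dictConfig(get_logger_config(log_dir, logging_env='dev',
                                                dev_env=True, debug=True))
    logging.getLogger(__name__).info('logging configured')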
|
zadgroup/edx-platform
|
openedx/core/lib/logsettings.py
|
Python
|
agpl-3.0
| 5,765
|
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
SQS Message
A Message represents the data stored in an SQS queue. The rules for what is allowed within an SQS
Message are here:
http://docs.amazonwebservices.com/AWSSimpleQueueService/2008-01-01/SQSDeveloperGuide/Query_QuerySendMessage.html
So, at its simplest level a Message just needs to allow a developer to store bytes in it and get the bytes
back out. However, to allow messages to have richer semantics, the Message class must support the
following interfaces:
The constructor for the Message class must accept a keyword parameter "queue" which is an instance of a
boto Queue object and represents the queue that the message will be stored in. The default value for
this parameter is None.
The constructor for the Message class must accept a keyword parameter "body" which represents the
content or body of the message. The format of this parameter will depend on the behavior of the
particular Message subclass. For example, if the Message subclass provides dictionary-like behavior to the
user the body passed to the constructor should be a dict-like object that can be used to populate
the initial state of the message.
The Message class must provide an encode method that accepts a value of the same type as the body
parameter of the constructor and returns a string of characters that are able to be stored in an
SQS message body (see rules above).
The Message class must provide a decode method that accepts a string of characters that can be
stored (and probably were stored!) in an SQS message and returns an object of a type that is consistent
with the "body" parameter accepted on the class constructor.
The Message class must provide a __len__ method that will return the size of the encoded message
that would be stored in SQS based on the current state of the Message object.
The Message class must provide a get_body method that will return the body of the message in the
same format accepted in the constructor of the class.
The Message class must provide a set_body method that accepts a message body in the same format
accepted by the constructor of the class. This method should alter the internal state of the
Message object to reflect the state represented in the message body parameter.
The Message class must provide a get_body_encoded method that returns the current body of the message
in the format in which it would be stored in SQS.
"""
import base64
import boto
from boto.compat import StringIO
from boto.compat import six
from boto.sqs.attributes import Attributes
from boto.sqs.messageattributes import MessageAttributes
from boto.exception import SQSDecodeError
class RawMessage(object):
"""
Base class for SQS messages. RawMessage does not encode the message
in any way. Whatever you store in the body of the message is what
will be written to SQS and whatever is returned from SQS is stored
directly into the body of the message.
"""
def __init__(self, queue=None, body=''):
self.queue = queue
self.set_body(body)
self.id = None
self.receipt_handle = None
self.md5 = None
self.attributes = Attributes(self)
self.message_attributes = MessageAttributes(self)
self.md5_message_attributes = None
def __len__(self):
return len(self.encode(self._body))
def startElement(self, name, attrs, connection):
if name == 'Attribute':
return self.attributes
if name == 'MessageAttribute':
return self.message_attributes
return None
def endElement(self, name, value, connection):
if name == 'Body':
self.set_body(value)
elif name == 'MessageId':
self.id = value
elif name == 'ReceiptHandle':
self.receipt_handle = value
elif name == 'MD5OfBody':
self.md5 = value
elif name == 'MD5OfMessageAttributes':
self.md5_message_attributes = value
else:
setattr(self, name, value)
def endNode(self, connection):
self.set_body(self.decode(self.get_body()))
def encode(self, value):
"""Transform body object into serialized byte array format."""
return value
    def decode(self, value):
        """Transform serialized byte array into any object."""
        return value
def set_body(self, body):
"""Override the current body for this object, using decoded format."""
self._body = body
def get_body(self):
return self._body
def get_body_encoded(self):
"""
This method is really a semi-private method used by the Queue.write
method when writing the contents of the message to SQS.
You probably shouldn't need to call this method in the normal course of events.
"""
return self.encode(self.get_body())
def delete(self):
if self.queue:
return self.queue.delete_message(self)
def change_visibility(self, visibility_timeout):
if self.queue:
self.queue.connection.change_message_visibility(self.queue,
self.receipt_handle,
visibility_timeout)
class Message(RawMessage):
"""
The default Message class used for SQS queues. This class automatically
encodes/decodes the message body using Base64 encoding to avoid any
illegal characters in the message body. See:
https://forums.aws.amazon.com/thread.jspa?threadID=13067
for details on why this is a good idea. The encode/decode is meant to
be transparent to the end-user.
"""
def encode(self, value):
if not isinstance(value, six.binary_type):
value = value.encode('utf-8')
return base64.b64encode(value).decode('utf-8')
def decode(self, value):
try:
value = base64.b64decode(value.encode('utf-8')).decode('utf-8')
        except Exception:
boto.log.warning('Unable to decode message')
return value
return value
class MHMessage(Message):
"""
The MHMessage class provides a message that provides RFC821-like
headers like this:
HeaderName: HeaderValue
The encoding/decoding of this is handled automatically and after
the message body has been read, the message instance can be treated
like a mapping object, i.e. m['HeaderName'] would return 'HeaderValue'.
"""
def __init__(self, queue=None, body=None, xml_attrs=None):
if body is None or body == '':
body = {}
super(MHMessage, self).__init__(queue, body)
def decode(self, value):
try:
msg = {}
fp = StringIO(value)
line = fp.readline()
while line:
delim = line.find(':')
key = line[0:delim]
value = line[delim+1:].strip()
msg[key.strip()] = value.strip()
line = fp.readline()
        except Exception:
raise SQSDecodeError('Unable to decode message', self)
return msg
def encode(self, value):
s = ''
for item in value.items():
s = s + '%s: %s\n' % (item[0], item[1])
return s
def __contains__(self, key):
return key in self._body
def __getitem__(self, key):
if key in self._body:
return self._body[key]
else:
raise KeyError(key)
def __setitem__(self, key, value):
self._body[key] = value
self.set_body(self._body)
def keys(self):
return self._body.keys()
def values(self):
return self._body.values()
def items(self):
return self._body.items()
def has_key(self, key):
return key in self._body
def update(self, d):
self._body.update(d)
self.set_body(self._body)
def get(self, key, default=None):
return self._body.get(key, default)
class EncodedMHMessage(MHMessage):
"""
The EncodedMHMessage class provides a message that provides RFC821-like
headers like this:
HeaderName: HeaderValue
This variation encodes/decodes the body of the message in base64 automatically.
The message instance can be treated like a mapping object,
i.e. m['HeaderName'] would return 'HeaderValue'.
"""
def decode(self, value):
try:
value = base64.b64decode(value.encode('utf-8')).decode('utf-8')
        except Exception:
raise SQSDecodeError('Unable to decode message', self)
return super(EncodedMHMessage, self).decode(value)
def encode(self, value):
value = super(EncodedMHMessage, self).encode(value)
return base64.b64encode(value.encode('utf-8')).decode('utf-8')
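if __name__ == '__main__':
    # Minimal round-trip sketch (assumes boto is importable; no AWS access
    # is needed): Message base64-encodes its body, while MHMessage maps
    # RFC821-style "Header: value" lines to a dict.
    m = Message(body='hello world')
    assert m.decode(m.get_body_encoded()) == 'hello world'
    mh = MHMessage(body={'Subject': 'test'})
    assert mh.decode(mh.encode(mh.get_body())) == {'Subject': 'test'}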
|
ubirch/aws-tools
|
virtual-env/lib/python2.7/site-packages/boto/sqs/message.py
|
Python
|
apache-2.0
| 9,892
|
# -*- coding: utf-8 -*-
try:
import simplejson as json
except ImportError:
import json
import logging
import werkzeug
from openerp import http
from openerp.http import request
_logger = logging.getLogger(__name__)
class SipsController(http.Controller):
_notify_url = '/payment/sips/ipn/'
_return_url = '/payment/sips/dpn/'
def _get_return_url(self, **post):
""" Extract the return URL from the data coming from sips. """
return_url = post.pop('return_url', '')
if not return_url:
tx_obj = request.registry['payment.transaction']
data = tx_obj._sips_data_to_object(post.get('Data'))
custom = json.loads(data.pop('returnContext', False) or '{}')
return_url = custom.get('return_url', '/')
return return_url
def sips_validate_data(self, **post):
res = False
env = request.env
tx_obj = env['payment.transaction']
acquirer_obj = env['payment.acquirer']
sips = acquirer_obj.search([('provider', '=', 'sips')], limit=1)
security = sips._sips_generate_shasign(post)
if security == post['Seal']:
_logger.debug('Sips: validated data')
res = tx_obj.sudo().form_feedback(post, 'sips')
else:
_logger.warning('Sips: data are corrupted')
return res
@http.route([
'/payment/sips/ipn/'],
type='http', auth='none', methods=['POST'])
def sips_ipn(self, **post):
""" Sips IPN. """
self.sips_validate_data(**post)
return ''
@http.route([
'/payment/sips/dpn'], type='http', auth="none", methods=['POST'])
def sips_dpn(self, **post):
""" Sips DPN """
return_url = self._get_return_url(**post)
self.sips_validate_data(**post)
return werkzeug.utils.redirect(return_url)
|
solintegra/addons
|
payment_sips/controllers/main.py
|
Python
|
agpl-3.0
| 1,864
|
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
parse_duration,
int_or_none,
)
class AnySexIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?anysex\.com/(?P<id>\d+)'
_TEST = {
'url': 'http://anysex.com/156592/',
'md5': '023e9fbb7f7987f5529a394c34ad3d3d',
'info_dict': {
'id': '156592',
'ext': 'mp4',
'title': 'Busty and sexy blondie in her bikini strips for you',
'description': 'md5:de9e418178e2931c10b62966474e1383',
'categories': ['Erotic'],
'duration': 270,
'age_limit': 18,
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
video_url = self._html_search_regex(r"video_url\s*:\s*'([^']+)'", webpage, 'video URL')
title = self._html_search_regex(r'<title>(.*?)</title>', webpage, 'title')
description = self._html_search_regex(
r'<div class="description"[^>]*>([^<]+)</div>', webpage, 'description', fatal=False)
thumbnail = self._html_search_regex(
r'preview_url\s*:\s*\'(.*?)\'', webpage, 'thumbnail', fatal=False)
categories = re.findall(
r'<a href="http://anysex\.com/categories/[^"]+" title="[^"]*">([^<]+)</a>', webpage)
duration = parse_duration(self._search_regex(
r'<b>Duration:</b> (?:<q itemprop="duration">)?(\d+:\d+)', webpage, 'duration', fatal=False))
view_count = int_or_none(self._html_search_regex(
r'<b>Views:</b> (\d+)', webpage, 'view count', fatal=False))
return {
'id': video_id,
'url': video_url,
'ext': 'mp4',
'title': title,
'description': description,
'thumbnail': thumbnail,
'categories': categories,
'duration': duration,
'view_count': view_count,
'age_limit': 18,
}
|
apllicationCOM/youtube-dl-api-server
|
youtube_dl_server/youtube_dl/extractor/anysex.py
|
Python
|
unlicense
| 2,085
|
__version__ = '0.4'
__author__ = 'Martin Natano <natano@natano.net>'
_repository = None
_branch = 'git-orm'
_remote = 'origin'
class GitError(Exception): pass
def set_repository(value):
from pygit2 import discover_repository, Repository
global _repository
if value is None:
_repository = None
return
try:
path = discover_repository(value)
except KeyError:
raise GitError('no repository found in "{}"'.format(value))
_repository = Repository(path)
def get_repository():
return _repository
def set_branch(value):
global _branch
_branch = value
def get_branch():
return _branch
def set_remote(value):
global _remote
_remote = value
def get_remote():
return _remote
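if __name__ == '__main__':
    # Minimal usage sketch (assumes pygit2 is installed and that '.' lies
    # inside a git repository; the path is illustrative only).
    set_repository('.')
    print(get_repository().path)
    print(get_branch(), get_remote())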
|
natano/python-git-orm
|
git_orm/__init__.py
|
Python
|
isc
| 758
|
"""
mod_customized Controllers
===================
In this module, users can test their fork branch with customized set of regression tests
"""
from flask import Blueprint, g, request, redirect, url_for, flash
from github import GitHub, ApiError
from datetime import datetime, timedelta
from decorators import template_renderer, get_menu_entries
from mod_auth.controllers import login_required, check_access_rights
from mod_auth.models import Role, User
from mod_test.models import Fork, Test, TestType, TestPlatform
from mod_customized.forms import TestForkForm
from mod_customized.models import TestFork, CustomizedTest
from mod_regression.models import Category, regressionTestLinkTable, RegressionTest
from mod_test.controllers import get_data_for_test, TestNotFoundException
from mod_auth.controllers import fetch_username_from_token
from sqlalchemy import and_
mod_customized = Blueprint('custom', __name__)
@mod_customized.before_app_request
def before_app_request():
if g.user is not None:
g.menu_entries['custom'] = {
'title': 'Customize Test',
'icon': 'code-fork',
'route': 'custom.index',
'access': [Role.tester, Role.contributor, Role.admin]
}
@mod_customized.route('/', methods=['GET', 'POST'])
@login_required
@check_access_rights([Role.tester, Role.contributor, Role.admin])
@template_renderer()
def index():
"""
    Display a form that allows users to run tests.
    Users can enter a commit hash or select a commit from their repo that is
    no more than 30 days old, and can customize the test by choosing the
    regression tests and platforms to run.
    Also displays the list of customized tests started by the user.
    Users are redirected to the same page on submit.
"""
fork_test_form = TestForkForm(request.form)
username = fetch_username_from_token()
commit_options = False
if username is not None:
gh = GitHub(access_token=g.github['bot_token'])
repository = gh.repos(username)(g.github['repository'])
# Only commits since last month
last_month = datetime.now() - timedelta(days=30)
commit_since = last_month.isoformat() + 'Z'
commits = repository.commits().get(since=commit_since)
commit_arr = []
for commit in commits:
commit_url = commit['html_url']
commit_sha = commit['sha']
commit_option = (
'<a href="{url}">{sha}</a>').format(url=commit_url, sha=commit_sha)
commit_arr.append((commit_sha, commit_option))
# If there are commits present, display it on webpage
if len(commit_arr) > 0:
fork_test_form.commit_select.choices = commit_arr
commit_options = True
fork_test_form.regression_test.choices = [(regression_test.id, regression_test)
for regression_test in RegressionTest.query.all()]
if fork_test_form.add.data and fork_test_form.validate_on_submit():
import requests
regression_tests = fork_test_form.regression_test.data
commit_hash = fork_test_form.commit_hash.data
repo = g.github['repository']
platforms = fork_test_form.platform.data
api_url = ('https://api.github.com/repos/{user}/{repo}/commits/{hash}').format(
user=username, repo=repo, hash=commit_hash
)
# Show error if github fails to recognize commit
response = requests.get(api_url)
if response.status_code == 500:
fork_test_form.commit_hash.errors.append('Error contacting Github')
elif response.status_code != 200:
fork_test_form.commit_hash.errors.append('Wrong Commit Hash')
else:
add_test_to_kvm(username, commit_hash, platforms, regression_tests)
return redirect(url_for('custom.index'))
populated_categories = g.db.query(regressionTestLinkTable.c.category_id).subquery()
categories = Category.query.filter(Category.id.in_(populated_categories)).order_by(Category.name.asc()).all()
tests = Test.query.filter(and_(TestFork.user_id == g.user.id, TestFork.test_id == Test.id)).order_by(
Test.id.desc()).limit(50).all()
return {
'addTestFork': fork_test_form,
'commit_options': commit_options,
'tests': tests,
'TestType': TestType,
'GitUser': username,
'categories': categories,
'customize': True
}
def add_test_to_kvm(username, commit_hash, platforms, regression_tests):
"""
Create new tests and add it to CustomizedTests based on parameters.
:param username: git username required to find fork
:type username: str
:param commit_hash: commit hash of the repo user selected to run test
:type commit_hash: str
:param platforms: platforms user selected to run test
:type platforms: list
:param regression_tests: regression tests user selected to run tests
:type regression_tests: list
"""
fork_url = ('https://github.com/{user}/{repo}.git').format(
user=username, repo=g.github['repository']
)
fork = Fork.query.filter(Fork.github == fork_url).first()
if fork is None:
fork = Fork(fork_url)
g.db.add(fork)
g.db.commit()
for platform in platforms:
platform = TestPlatform.from_string(platform)
test = Test(platform, TestType.commit, fork.id, 'master', commit_hash)
g.db.add(test)
g.db.commit()
for regression_test in regression_tests:
customized_test = CustomizedTest(test.id, regression_test)
g.db.add(customized_test)
test_fork = TestFork(g.user.id, test.id)
g.db.add(test_fork)
g.db.commit()
|
satyammittal/sample-platform
|
mod_customized/controllers.py
|
Python
|
isc
| 5,822
|
"""Identity related views."""
from reversion import revisions as reversion
from django.contrib.auth import mixins as auth_mixins
from django.contrib.auth.decorators import (
login_required, permission_required, user_passes_test
)
from django.shortcuts import render
from django.template.loader import render_to_string
from django.utils.translation import ugettext as _, ungettext
from django.views import generic
from django.views.decorators.csrf import ensure_csrf_cookie
from modoboa.core.models import User
from modoboa.lib.exceptions import BadRequest, PermDeniedException
from modoboa.lib.listing import get_listing_page, get_sort_order
from modoboa.lib.web_utils import render_to_json_response
from .. import signals
from ..forms import AccountForm, AccountWizard
from ..lib import get_identities
from ..models import Domain, Mailbox
@login_required
@user_passes_test(
lambda u: u.has_perm("core.add_user") or
u.has_perm("admin.add_alias")
)
def _identities(request):
filters = {
fname: request.GET.get(fname, None)
for fname in ["searchquery", "idtfilter", "grpfilter"]
}
request.session["identities_filters"] = filters
idents_list = get_identities(request.user, **filters)
sort_order, sort_dir = get_sort_order(request.GET, "identity",
["identity", "name_or_rcpt", "tags"])
if sort_order in ["identity", "name_or_rcpt"]:
objects = sorted(idents_list, key=lambda o: getattr(o, sort_order),
reverse=sort_dir == "-")
else:
objects = sorted(idents_list, key=lambda o: o.tags[0]["label"],
reverse=sort_dir == "-")
context = {
"handle_mailboxes": request.localconfig.parameters.get_value(
"handle_mailboxes", raise_exception=False)
}
page = get_listing_page(objects, request.GET.get("page", 1))
context["headers"] = render_to_string(
"admin/identity_headers.html", {}, request)
if page is None:
context["length"] = 0
else:
context["rows"] = render_to_string(
"admin/identities_table.html", {
"identities": page.object_list
}, request
)
context["pages"] = [page.number]
return render_to_json_response(context)
@login_required
@permission_required("admin.add_mailbox")
def list_quotas(request):
from modoboa.lib.db_utils import db_type
sort_order, sort_dir = get_sort_order(request.GET, "address")
mboxes = Mailbox.objects.get_for_admin(
request.user, request.GET.get("searchquery", None)
)
mboxes = mboxes.exclude(quota=0)
if sort_order in ["address", "quota"]:
mboxes = mboxes.order_by("%s%s" % (sort_dir, sort_order))
elif sort_order in ("quota_value__bytes", "quota_usage"):
        engine = db_type()
        if engine == "mysql":
where = "CONCAT(admin_mailbox.address,'@',admin_domain.name)"
else:
where = "admin_mailbox.address||'@'||admin_domain.name"
if sort_order == "quota_value__bytes":
mboxes = mboxes.extra(
select={"quota_value__bytes": "admin_quota.bytes"},
where=["admin_quota.username=%s" % where],
tables=["admin_quota", "admin_domain"],
order_by=["%s%s" % (sort_dir, sort_order)]
)
else:
            if engine == "postgres":
select = (
"(admin_quota.bytes::float / (CAST(admin_mailbox.quota "
"AS BIGINT) * 1048576)) * 100"
)
else:
select = (
"(admin_quota.bytes * 1.0 / (admin_mailbox.quota "
"* 1048576)) * 100"
)
mboxes = mboxes.extra(
select={"quota_usage": select},
where=["admin_quota.username=%s" % where],
tables=["admin_quota", "admin_domain"],
order_by=["%s%s" % (sort_dir, sort_order)]
)
else:
raise BadRequest(_("Invalid request"))
page = get_listing_page(mboxes, request.GET.get("page", 1))
context = {
"headers": render_to_string(
"admin/identities_quota_headers.html", {}, request
)
}
if page is None:
context["length"] = 0
else:
context["rows"] = render_to_string(
"admin/identities_quotas.html", {"mboxes": page}, request
)
context["pages"] = [page.number]
return render_to_json_response(context)
@login_required
@user_passes_test(
lambda u: u.has_perm("core.add_user") or
u.has_perm("admin.add_alias") or
u.has_perm("admin.add_mailbox")
)
def get_next_page(request):
"""Return the next page of the identity list."""
if request.GET.get("objtype", "identity") == "identity":
return _identities(request)
return list_quotas(request)
@login_required
@user_passes_test(
lambda u: u.has_perm("core.add_user") or
u.has_perm("admin.add_alias")
)
@ensure_csrf_cookie
def identities(request, tplname="admin/identities.html"):
return render(request, tplname, {
"selection": "identities",
"deflocation": "list/"
})
@login_required
@permission_required("core.add_user")
def accounts_list(request):
accs = User.objects.filter(is_superuser=False) \
.exclude(groups__name="SimpleUsers")
res = [a.username for a in accs.all()]
return render_to_json_response(res)
@login_required
@permission_required("core.add_user")
@reversion.create_revision()
def newaccount(request):
"""Create a new account."""
return AccountWizard(request).process()
@login_required
@permission_required("core.change_user")
@reversion.create_revision()
def editaccount(request, pk):
account = User.objects.get(pk=pk)
if not request.user.can_access(account):
raise PermDeniedException
mb = account.mailbox if hasattr(account, "mailbox") else None
instances = {
"general": account, "profile": account, "mail": mb, "perms": account
}
results = signals.get_account_form_instances.send(
sender="editaccount", user=request.user, account=account)
for result in results:
instances.update(result[1])
return AccountForm(request, instances=instances).process()
@login_required
@permission_required("core.delete_user")
def delaccount(request, pk):
User.objects.get(pk=pk).delete()
return render_to_json_response(
ungettext("Account deleted", "Accounts deleted", 1)
)
@login_required
@permission_required("admin.add_domain")
def remove_permission(request):
domid = request.GET.get("domid", None)
daid = request.GET.get("daid", None)
if domid is None or daid is None:
raise BadRequest(_("Invalid request"))
try:
account = User.objects.get(pk=daid)
domain = Domain.objects.get(pk=domid)
except (User.DoesNotExist, Domain.DoesNotExist):
raise BadRequest(_("Invalid request"))
if not request.user.can_access(account) or \
not request.user.can_access(domain):
raise PermDeniedException
domain.remove_admin(account)
return render_to_json_response({})
class AccountDetailView(
auth_mixins.PermissionRequiredMixin, generic.DetailView):
"""DetailView for Account."""
model = User
permission_required = "core.add_user"
template_name = "admin/account_detail.html"
def has_permission(self):
"""Check object-level access."""
result = super(AccountDetailView, self).has_permission()
if not result:
return result
return self.request.user.can_access(self.get_object())
def get_context_data(self, **kwargs):
"""Add information to context."""
context = super(AccountDetailView, self).get_context_data(**kwargs)
del context["user"]
result = signals.extra_account_dashboard_widgets.send(
self.__class__, user=self.request.user, account=self.object)
context["templates"] = {"left": [], "right": []}
for _receiver, widgets in result:
for widget in widgets:
context["templates"][widget["column"]].append(
widget["template"])
context.update(widget["context"])
if self.object.role in ["Resellers", "DomainAdmins"]:
context["domains"] = Domain.objects.get_for_admin(self.object)
context["selection"] = "identities"
return context
|
modoboa/modoboa
|
modoboa/admin/views/identity.py
|
Python
|
isc
| 8,529
|
#!/usr/bin/env python
from gi.repository import GObject, Gtk, Gedit, PeasGtk
import ConfigParser
UI_XML = '''<ui>
<menubar name="MenuBar">
<menu name="ToolsMenu" action="Tools">
<placeholder name="ToolsOps_2">
<menuitem name="Change Indent" action="ChangeIndentPlugin"/>
</placeholder>
</menu>
</menubar>
</ui>'''
class ChangeIndentPlugin(GObject.Object, Gedit.WindowActivatable, PeasGtk.Configurable):
__gtype_name__ = 'ChangeIndentPlugin'
window = GObject.property(type=Gedit.Window)
# config
config = ConfigParser.ConfigParser()
config_file = 'indent.cfg'
spaces = 2
tab = False
def __init__(self):
GObject.Object.__init__(self)
self._get_config()
def _add_ui(self):
manager = self.window.get_ui_manager()
self._actions = Gtk.ActionGroup('ChangeIndentActions')
self._actions.add_actions([
(
'ChangeIndentPlugin',
Gtk.STOCK_INFO,
'Change Indent',
'<control><alt>i',
'Change indent in current document',
self.on_change_indent
),
])
manager.insert_action_group(self._actions)
self._ui_merge_id = manager.add_ui_from_string(UI_XML)
manager.ensure_update()
def do_activate(self):
self._add_ui()
def do_deactivate(self):
self._remove_ui()
def do_update_state(self):
pass
def do_create_configure_widget(self):
box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=10)
box.set_border_width(20)
label = Gtk.Label('Change Indent Configuration (Tab to spaces).')
box.pack_start(label, False, False, 0)
section = Gtk.Box(spacing=10)
label = Gtk.Label('Spaces')
section.pack_start(label, False, False, 0)
adjustment = Gtk.Adjustment(self.spaces, 2, 10, 1, 10, 0)
spinbutton = Gtk.SpinButton()
spinbutton.set_adjustment(adjustment)
spinbutton.connect("value-changed", self.on_spaces_value_changed)
section.pack_start(spinbutton, False, False, 0)
change_tab = Gtk.CheckButton("Spaces to Tab")
change_tab.connect("toggled", self.on_change_tab_toggled)
change_tab.set_active(True if self.tab == 1 else False)
section.pack_start(change_tab, False, False, 0)
box.pack_start(section, False, False, 0)
return box
def on_spaces_value_changed(self, button):
self.spaces = int(button.get_value())
self._save_config()
def on_change_tab_toggled(self, button):
self.tab = button.get_active()
self._save_config()
def on_change_indent(self, action, data=None):
self._get_config()
doc = self.window.get_active_document()
text = ''
builded_spaces = ''
for i in range(self.spaces):
builded_spaces += ' '
if doc:
start, end = doc.get_bounds()
text = doc.get_text(start, end, False)
stripped_text = []
for line in text.split('\n'):
if self.tab:
stripped_text.append(line.replace(builded_spaces, '\t'))
else:
stripped_text.append(line.replace('\t', builded_spaces))
doc.set_text('\n'.join(stripped_text))
def _get_config(self):
self.config.read(self.config_file)
if self.config.has_option('settings', 'tab'):
self.tab = self.config.getint('settings', 'tab')
if self.config.has_option('settings', 'spaces'):
self.spaces = self.config.getint('settings', 'spaces')
    def _save_config(self):
        if not self.config.has_section('settings'):
            self.config.add_section('settings')
        self.config.set('settings', 'tab', 1 if self.tab else 0)
        self.config.set('settings', 'spaces', self.spaces)
        # Use a context manager so the file is closed even on error
        with open(self.config_file, 'w') as f:
            self.config.write(f)
def _remove_ui(self):
manager = self.window.get_ui_manager()
manager.remove_ui(self._ui_merge_id)
manager.remove_action_group(self._actions)
manager.ensure_update()
|
jongha/gedit-indent
|
src/gedit/plugins/indent.py
|
Python
|
mit
| 4,209
|
# Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.controller import Controller
from netforce.template import render
from netforce.model import get_model
from netforce.database import get_connection, get_active_db # XXX: move this
from netforce.locale import set_active_locale, get_active_locale
from .cms_base import BaseController
def list_parent(obj, lst):
if obj.parent_id:
lst = list_parent(obj.parent_id, lst)
lst.append(obj)
return lst
def get_categs(condition):
print("get_categs")
res=get_model("product").read_group(["categ_id"],condition=condition)
categ_nums={}
for r in res:
categ_id=r["categ_id"][0] if r["categ_id"] else None
categ_nums.setdefault(categ_id,0)
categ_nums[categ_id]+=r["_count"]
res=get_model("product.categ").search_read([],["code","name","parent_id"])
categ_ids={}
for r in res:
categ_ids[r["id"]]=r
top_categs=[]
for r in res:
parent_id=r["parent_id"][0] if r["parent_id"] else None
if parent_id:
parent=categ_ids[parent_id]
parent.setdefault("sub_categories",[]).append(r)
else:
top_categs.append(r)
for categ_id,num in categ_nums.items():
if not categ_id:
continue
categ=categ_ids[categ_id]
categ["num_products"]=num
def _set_total_num(c):
for s in c.get("sub_categories",[]):
_set_total_num(s)
if c.get("num_products") is None:
c["num_products"]=0
for s in c.get("sub_categories",[]):
c["num_products"]+=s["num_products"]
for c in top_categs:
_set_total_num(c)
return top_categs
def get_brands(condition):
print("get_brands")
res=get_model("product").read_group(["brand_id"],condition=condition)
brand_nums={}
for r in res:
brand_id=r["brand_id"][0] if r["brand_id"] else None
brand_nums.setdefault(brand_id,0)
brand_nums[brand_id]+=r["_count"]
res=get_model("product.brand").search_read([],["code","name","parent_id"])
brand_ids={}
for r in res:
brand_ids[r["id"]]=r
top_brands=[]
for r in res:
parent_id=r["parent_id"][0] if r["parent_id"] else None
if parent_id:
parent=brand_ids[parent_id]
parent.setdefault("sub_brands",[]).append(r)
else:
top_brands.append(r)
for brand_id,num in brand_nums.items():
if not brand_id:
continue
brand=brand_ids[brand_id]
brand["num_products"]=num
def _set_total_num(c):
for s in c.get("sub_brands",[]):
_set_total_num(s)
if c.get("num_products") is None:
c["num_products"]=0
for s in c.get("sub_brands",[]):
c["num_products"]+=s["num_products"]
for c in top_brands:
_set_total_num(c)
return top_brands
def get_price_range(products, checked):
if not products: return []
price_min = int(min(map(lambda product: product.type in ["stock", "master"] and product.sale_price or 0, products)))
price_max = int(max(map(lambda product: product.type in ["stock", "master"] and product.sale_price or 0, products)))
i, price_range = 0, list(range(price_min, price_max , 500))
for r in price_range:
try:
data = (str(price_range[i]), str(price_range[i + 1] - 1))
except IndexError:
data = (str(price_range[i]), str(price_max))
print(checked, list(data), checked == list(data))
price_range[i] = {"value": "%s-%s" % data, "text": "%s - %s" % data, "checked": "checked" if checked == list(data) else ""}
i = i + 1
return price_range
def get_supps(products=None):
root_company = ["All Companies"]
if products:
suppliers = []
products = filter(lambda product: product and product.company_id and product.type != "service", products)
companies = map(lambda product: product.company_id, products)
for company in companies:
while company.parent_id and company.parent_id.name not in root_company: company = company.parent_id
if company.name not in [s.name for s in set(suppliers)]: suppliers.append(company)
return set(suppliers)
else:
return None
def get_events():
res = get_model("product.group").search([["code","=","events"]])
if res:
return get_model("product.group").search_browse([["parent_id","=",res[0]]])
else:
return None
def get_last_level(categ):
while(get_model("product.categ").search_browse([["parent_id","=",categ.id]])):
categ = get_model("product.categ").search_browse([["parent_id","=",categ.id]],order="code")[0]
if categ.parent_id:
return get_model("product.categ").search_browse([["parent_id","=",categ.parent_id.id]])
else:
return None
class Products(BaseController):
_path = "/ecom_products"
def get(self):
db = get_connection()
try:
ctx = self.context
categ_id=self.get_argument("categ_id",None)
if categ_id:
categ_id=int(categ_id)
categ_code=self.get_argument("categ_code",None)
if categ_code and not categ_id:
res=get_model("product.categ").search([["code","=",categ_code]])
if not res:
raise Exception("Product categ not found: '%s'"%categ_code)
categ_id = res[0]
brand_id=self.get_argument("brand_id",[])
supp_id=self.get_argument("supp_id",[])
if brand_id:
bids=brand_id.split(",")
brand_id = []
for bid in bids:
bid=int(bid)
brand_id.append(bid)
if supp_id:
bids=supp_id.split(",")
supp_id = []
for bid in bids:
bid=int(bid)
supp_id.append(bid)
ctx["brand_id"] = brand_id
ctx["supp_id"] = supp_id
price=self.get_argument("price",None)
sort_by=self.get_argument("sort_by",None)
cond = [["parent_id","=",None],["is_published","=",True]]
cond_filter_categ = cond[:]
cond_filter_brand = cond[:]
if categ_id:
cond.append(["categ_id","child_of",categ_id])
ctx["list_parent_categ"] = list_parent(get_model("product.categ").browse(categ_id), lst=[]) # XXX
cond_filter_brand.append(["categ_id","child_of",categ_id])
categ = get_model("product.categ").browse(categ_id)
categ_ctx = {
"name": categ.name, "image": categ.image if categ.sub_categories else None,
"last_level_categs": get_last_level(categ),
}
while categ.parent_id:
categ = categ.parent_id
cond_filter_categ.append(["categ_id","child_of",categ.id])
ctx["categ"] = categ_ctx
if brand_id:
cond.append(["brand_id","child_of",brand_id])
cond_filter_categ.append(["brand_id","child_of",brand_id])
if supp_id:
cond.append(["company_id","child_of",supp_id])
cond_filter_categ.append(["company_id","child_of",supp_id])
cond_filter_brand.append(["company_id","child_of",supp_id])
prices = ["0", "0"]
if price:
prices = price.split("-")
if len(prices) != 2:
raise Exception("Incorrect Price format")
if not prices[0].isdigit() or not prices[1].isdigit():
                    raise Exception("Min/Max prices are not digits")
cond.append(["sale_price",">=",prices[0]])
cond.append(["sale_price","<=",prices[1]])
cond_filter_categ.append(["sale_price",">=",prices[0]])
cond_filter_categ.append(["sale_price","<=",prices[1]])
cond_filter_brand.append(["sale_price",">=",prices[0]])
cond_filter_brand.append(["sale_price","<=",prices[1]])
website=ctx["website"]
browse_ctx={
"pricelist_id": website.sale_channel_id.pricelist_id.id if website.sale_channel_id else None,
"product_filter": cond,
}
user_id=self.get_cookie("user_id",None)
if user_id:
user_id=int(user_id)
user=get_model("base.user").browse(user_id)
contact = user.contact_id
if contact.sale_price_list_id.id:
browse_ctx["pricelist_id"] =contact.sale_price_list_id.id
products = get_model("product").search_browse(condition=cond,order=sort_by,context=browse_ctx)
cond_filter_supp = cond[:]
if supp_id: cond_filter_supp.remove(["company_id","child_of", supp_id])
ctx["products"] = products
ctx["categs"] = get_categs(cond_filter_categ)
ctx["brands"] = get_brands(cond_filter_brand)
ctx["suppliers"] = get_supps(get_model("product").search_browse(condition=cond_filter_supp, order=sort_by, context=browse_ctx))
ctx["events"] = get_events()
ctx["pricerange"] = get_price_range(get_model("product").search_browse([],context=browse_ctx), prices)
ctx["filter_product_groups"]=get_model("product.group").search_browse([["code","=","recommended"]],context=browse_ctx)[0]
data={
"categ_id": categ_id,
"brand_id": brand_id,
"price": price,
"sort_by": sort_by,
"supp_id": supp_id,
}
content = render("ecom_products", ctx, data=data)
ctx["content"] = content
html = render("cms_layout", ctx, data=data)
self.write(html)
db.commit()
        except Exception:
            import traceback
            traceback.print_exc()
            db.rollback()
            self.redirect("/cms_page_not_found")
Products.register()
|
anastue/netforce
|
netforce_ecom/netforce_ecom/controllers/ecom_products.py
|
Python
|
mit
| 11,270
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bidders', '0002_bidder_photo'),
]
operations = [
migrations.AddField(
model_name='bidder',
name='address',
field=models.CharField(max_length=200, null=True),
),
]
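# Usage note: apply this migration with the standard Django workflow, e.g.
#   python manage.py migrate bidders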
|
kecheon/yablist
|
bidders/migrations/0003_bidder_address.py
|
Python
|
mit
| 407
|
import os
import signal
import subprocess
class Camera(object):
    # In order to run mjpg-streamer through Python, make sure
    # mjpg-streamer-experimental is installed so that the .so plugins
    # and the mjpg_streamer binary are on the default PATH and we don't
    # have to specify paths (we were getting a lot of errors from files
    # not being found). Resolution must also be specified as
    # "<width>x<height>" and not by name.
    # default layout for camera
def __init__(self, resolution='1280x720', framerate=30, device='/dev/video0', port=8080, brightness=16, contrast=32):
self.process = None
self.resolution = resolution
self.framerate = framerate
self.device = device
self.port = port
self.brightness = brightness
self.contrast = contrast
self.input = 'input_uvc.so -d {device}'.format(
device=self.device,
)
self.output = 'output_http.so -p {port} {web}'.format(
port=self.port,
web='-w /usr/local/www'
)
self.status = "killed"
    # framerate shouldn't be changed: keep it at 30, which allows a good
    # image while reserving processing power for other devices. Device is
    # formatted as the string /dev/videoNUM, where NUM reflects the order
    # in which the camera was plugged in, starting at 0. Port is the web
    # port the image is served on: change as needed.
# open video feed for an instance of Camera
def start(self):
self.process = subprocess.Popen(['mjpg_streamer', '-i', self.input, '-o', self.output])
if self.is_alive():
self.status = 'active'
# closes video feed for an instance of Camera: each instance of Camera must be killed
# using this method
def kill(self):
if self.is_alive():
self.process.kill()
self.status = 'killed'
def suspend(self):
os.kill(self.process.pid, signal.SIGSTOP)
self.status = 'suspended'
def unsuspend(self):
if self.status == 'suspended':
os.kill(self.process.pid, signal.SIGCONT)
self.status = 'active'
def is_alive(self):
if self.process is None:
return False
return (self.process.poll() is None)
def get_status(self):
if not self.is_alive():
self.status = 'killed'
return self.status
def set_status(self, status):
if status == 'active':
if self.status == 'suspended':
self.unsuspend()
elif self.status == 'killed':
self.start()
elif status == 'suspended':
if self.status == 'active':
self.suspend()
elif status == 'killed':
self.kill()
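if __name__ == '__main__':
    # Minimal sketch: stream the first V4L2 device briefly, then stop.
    # Assumes mjpg_streamer and its plugins are on the default PATH and
    # that a camera is present at /dev/video0.
    import time
    cam = Camera(resolution='640x480', port=8080)
    cam.start()
    print(cam.get_status())  # 'active' if the process came up
    time.sleep(10)
    cam.kill()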
|
purduerov/X9-Core
|
rov/camera/camera.py
|
Python
|
mit
| 2,803
|
import _plotly_utils.basevalidators
class VisibleValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(self, plotly_name="visible", parent_name="heatmapgl", **kwargs):
super(VisibleValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "info"),
values=kwargs.pop("values", [True, False, "legendonly"]),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/heatmapgl/_visible.py
|
Python
|
mit
| 517
|
from .user import User
from .phone_certificate import PhoneCertificate
|
yevgnenll/but
|
but/users/models/__init__.py
|
Python
|
mit
| 71
|
import logging
def initLogger():
# create logger
logger = logging.getLogger('cam')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
file_handler = logging.FileHandler('cam.log')
file_handler.setLevel(logging.DEBUG)
# create console handler with a higher log level
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
file_handler.setFormatter(formatter)
console_handler.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(file_handler)
logger.addHandler(console_handler)
def getLogger():
logger = logging.getLogger('cam')
return logger
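if __name__ == '__main__':
    # Minimal sketch: configure once at startup, then fetch the shared
    # 'cam' logger anywhere else in the program.
    initLogger()
    getLogger().info('logger configured; writing to console and cam.log')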
|
jucoba/IpCamMonitorSystem
|
src/utils/loggerConfig.py
|
Python
|
mit
| 772
|
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 2 10:56:34 2016
@author: jmjj (Jari Juopperi, jmjj@juopperi.org)
"""
from .main import *
|
jmjj/messages2json
|
messages2json/__init__.py
|
Python
|
mit
| 140
|
from util import app
import hashlib
import os
phase2_url = '/phase2-%s/' % os.environ.get('PHASE2_TOKEN')
admin_password = u'adminpass'
admin_hash = hashlib.sha1(admin_password.encode('utf-8')).hexdigest()
session_key = 'sessionkey'
admin_session_key = 'adminsessionkey'
def init_data(redis):
redis.set('user:test:password', hashlib.sha1(b'test').hexdigest())
redis.set('user:admin:password', admin_hash)
redis.set('user:test:1', 'Buy groceries')
redis.set('user:test:2', 'Clean the patio')
redis.set('user:test:3', 'Take over the world')
redis.rpush('items:test', 1, 2, 3)
redis.set('session:%s' % session_key, 'test')
redis.set('session:%s' % admin_session_key, 'admin')
return app
def test_home(app):
rv = app.get(phase2_url)
assert b'Sign In' in rv.data
assert rv.status_code == 200
def test_404(app):
rv = app.get(phase2_url + 'asdf')
assert rv.status_code == 404
def test_get_405(app):
rv = app.get(phase2_url + 'login/')
assert rv.status_code == 405
def test_403s(app):
"""These should return 403 instead of 404."""
for url in ('dashboard/', 'dashboard/test/1/', 'dashboard/abc/def/'):
rv = app.get(phase2_url + url)
assert rv.status_code == 403
rv = app.get(phase2_url + url, headers={'Cookie': 'session=asdf'})
assert rv.status_code == 403
def test_post_405(app):
"""Be sure this returns 405, instead of 404 or 403."""
for url in ('', 'dashboard/', 'dashboard/test/1/', 'dashboard/abc/def/'):
rv = app.post(phase2_url + url)
assert rv.status_code == 405
def test_bad_login(app):
url = phase2_url + 'login/'
init_data(app.application.redis)
rv = app.post(url)
assert 'dashboard' not in rv.headers.get('Location')
assert rv.status_code == 303
rv = app.post(url, data={'username': 'abcdef', 'password': 'abcdef'})
assert 'dashboard' not in rv.headers.get('Location')
assert rv.status_code == 303
rv = app.post(url, data={'username': 'test'})
assert 'dashboard' not in rv.headers.get('Location')
assert rv.status_code == 303
rv = app.post(url, data={'username': 'test', 'password': 'abcdef'})
assert 'dashboard' not in rv.headers.get('Location')
assert rv.status_code == 303
def test_good_login(app):
url = phase2_url + 'login/'
init_data(app.application.redis)
rv = app.post(url, data={'username': 'test', 'password': 'test'})
assert rv.status_code == 303
assert 'session=' in rv.headers.get('Set-Cookie')
assert 'dashboard' in rv.headers.get('Location')
rv = app.post(url, data={'username': 'admin', 'password': admin_password})
assert rv.status_code == 303
assert 'session=' in rv.headers.get('Set-Cookie')
assert 'dashboard' in rv.headers.get('Location')
def test_dashboard(app):
url = phase2_url + 'dashboard/'
init_data(app.application.redis)
rv = app.get(url, headers={'Cookie': 'session=%s' % session_key})
assert b'Buy groceries' in rv.data
assert b'Take over the world' in rv.data
assert rv.status_code == 200
def test_item_404(app):
url = phase2_url + 'dashboard/'
init_data(app.application.redis)
rv = app.get(url + 'abcdef/0/', headers={
'Cookie': 'session=%s' % session_key})
assert rv.status_code == 404
rv = app.get(url + 'test/0/', headers={
'Cookie': 'session=%s' % session_key})
assert rv.status_code == 404
rv = app.get(url + 'admin/1/', headers={
'Cookie': 'session=%s' % session_key})
assert rv.status_code == 404
def test_solution(app):
url = phase2_url + 'dashboard/admin/password/'
init_data(app.application.redis)
rv = app.get(url, headers={'Cookie': 'session=%s' % session_key})
assert admin_hash.encode('utf-8') in rv.data
assert rv.status_code == 200
def test_admin_dashboard(app):
url = phase2_url + 'dashboard/'
init_data(app.application.redis)
rv = app.get(url, headers={'Cookie': 'session=%s' % admin_session_key})
assert b'Challenge complete!' in rv.data
assert rv.status_code == 200
|
nickfrostatx/polyrents-challenge
|
tests/test_phase2.py
|
Python
|
mit
| 4,200
|
import click
from do_cli.contexts import CTX
from do_cli.commands.common import host_commands
@click.command('list')
@click.option('-f', '--force-refresh', is_flag=True, help='Pull data from the API')
@click.option('-h', '--host-names', help='Comma separated list of host names')
@CTX
def cli(ctx, force_refresh, host_names):
"""
    Show minimal data for droplets.
    Pass --host-names / -h with a comma-separated list of host names
    to limit output to specific droplets.
"""
if ctx.verbose:
click.echo("Show minimal data for droplets")
click.echo(host_commands(ctx, force_refresh, host_names))
if ctx.verbose:
click.echo('---- cmd_list done ----')
|
meganlkm/do-cli
|
do_cli/commands/cmd_list.py
|
Python
|
mit
| 689
|
class Request(object):
def __init__(self, value):
self.__value = value
def get_value(self):
return self.__value
|
danieldeveloper001/Learning
|
Python/02_DesignPatterns/Behavioral/Command/Request.py
|
Python
|
mit
| 137
|
# -*- coding: utf-8 -*-
import matplotlib
class SinglePeak():
    """A container class for a single spectral peak."""
def __init__(self,StandardPeak):
self.StandardPeak=StandardPeak
self.Indicator=False
self.RecordIntegrationTime=0
self.MaxCounts=0
self.PeakPixel=0
self.pixel=[]
self.CountsRange=[20000,50000]
def GetPixel(self,Wavelength):
# get pixel of the peak
Wavelength=list(Wavelength)
for i in range(0,len(Wavelength)-1):
if Wavelength[i]<self.StandardPeak-5:
a=i
if Wavelength[i]<self.StandardPeak+5:
b=i+1
self.pixel=[a,b]
self.Wavelength=Wavelength[a:b] # get wavelength of the peak
return self.pixel
def GetSingleSpectrum(self,Spectrum):
# get spectrum of the peak
Spectrum=list(Spectrum)
self.Spectrum=Spectrum[self.pixel[0]:self.pixel[1]]
return self.Spectrum
def GetMaxCounts(self):
# get max counts of the peak
        for i in range(len(self.Spectrum)):  # include the last point
if self.MaxCounts<self.Spectrum[i]:
self.MaxCounts=self.Spectrum[i]
self.MaxCountsPixel=i+self.pixel[0]
return self.MaxCounts
def UpdateIndicator(self,CountsRange):
# if MaxCounts is between CountsRange[0] and CountsRange[1]
# then Indicator = True pass the check
self.CountsRange=CountsRange
if self.MaxCounts>self.CountsRange[0] and self.MaxCounts<self.CountsRange[1]:
self.Indicator=True
return self.Indicator
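if __name__ == '__main__':
    # Minimal sketch with synthetic data: a peak near 546 nm sampled on a
    # linear wavelength axis. The numbers are illustrative only.
    wavelength = [540 + 0.1 * i for i in range(120)]
    spectrum = [30000 if abs(w - 546) < 0.5 else 1000 for w in wavelength]
    peak = SinglePeak(546)
    peak.GetPixel(wavelength)
    peak.GetSingleSpectrum(spectrum)
    print(peak.GetMaxCounts())                   # 30000
    print(peak.UpdateIndicator([20000, 50000]))  # True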
|
baojiwei/WavelengthCalibration
|
SinglePeak.py
|
Python
|
mit
| 1,344
|
from matplotlib import pylab as plt
# in_object is injected by the surrounding ShadowOui/OASYS script widget
(x, y, yEA) = in_object
print(in_object)
plt.plot(x,y/y.max(),label="Fully coherent")
plt.plot(x,yEA/yEA.max(),label="Partial coherent")
plt.xlabel("Z [um]")
plt.ylabel("Intensity [Arbitrary Units]")
plt.legend()
plt.show()
|
srio/ShadowOui-Tutorial
|
SCRIPTS/script26_plot.py
|
Python
|
mit
| 260
|
#!/usr/bin/env python
# -*- coding: utf-8 -8-
"""
By starting at the top of the triangle below and moving to adjacent numbers
on the row below, the maximum total from top to bottom is 23.
3
7 4
2 4 6
8 5 9 3
That is, 3 + 7 + 4 + 9 = 23.
Find the maximum total from top to bottom of the triangle below:
"""
triangle = """
75
95 64
17 47 82
18 35 87 10
20 04 82 47 65
19 01 23 75 03 34
88 02 77 73 07 63 67
99 65 04 28 06 16 70 92
41 41 26 56 83 40 80 70 33
41 48 72 33 47 32 37 16 94 29
53 71 44 65 25 43 91 52 97 51 14
70 11 33 28 77 73 17 78 39 68 17 57
91 71 52 38 17 14 91 43 58 50 27 29 48
63 66 04 68 89 53 67 30 73 16 69 87 40 31
04 62 98 27 23 09 70 98 73 93 38 53 60 04 23
"""
def largest_triangle_sum(values, row=0, column=0, sums=None):
    # Use a fresh memo dict per call: a mutable default argument would be
    # shared across calls and mix cached results from different triangles.
    if sums is None:
        sums = {}
    if (row, column) in sums:
        return sums[row, column]
s = values[row][column]
if row + 1 < len(values):
left = largest_triangle_sum(values, row + 1, column, sums)
right = largest_triangle_sum(values, row + 1, column + 1, sums)
s += max([left, right])
sums[row, column] = s
return s
def main():
values = [[int(j) for j in i.split()] for i in triangle.split('\n') if i]
print(largest_triangle_sum(values))
if __name__ == '__main__':
main()
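    # Quick self-check against the 4-row example from the docstring.
    assert largest_triangle_sum([[3], [7, 4], [2, 4, 6], [8, 5, 9, 3]]) == 23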
|
goldsborough/euler
|
18.py
|
Python
|
mit
| 1,380
|
# -*- coding: utf-8 -*-
"""
Copyright (c) 2014 l8orre
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from PyQt4 import Qt , QtCore # QtGui,
from PyQt4.QtCore import SIGNAL , QObject, pyqtSignal, pyqtSlot
# timer:
#import os
#import time
#from werkzeug.wrappers import Request, Response
#from werkzeug.serving import run_simple
#from jsonrpc import JSONRPCResponseManager, dispatcher
#from nxtPwt.nxtApiPrototypes import nxtQs
class Bridge1Ctrl(QObject):
""" Bridge Control - container and controller for useCase logic
"""
def __init__(self, app ):
        super(Bridge1Ctrl, self).__init__()
self.app = app
self.app.nxtBridge1 = self
        # Make this Bridge1Ctrl known in the app namespace. When it emits
        # signals, they can be received anywhere this reference is known.
#self.timer1 = Qt.QTimer()
#self.time1 = 10000
def init(self):
""" Here QT signals must be connected """
pass
#QObject.connect(self.timer1, SIGNAL("timeout()"), self.timer1_CB)
def timer1_CB(self):
pass #print("t1 CB!")
###########################
############################
############################
############################
########## Window Maintenance
def activate(self):
self.init()
self.app.sessMan.uc_bridge.mm.jsonServ_Slot()
def quitApp(self):
print("QUITTING - not!?!?!")
self.app.app.flush()
self.app.app.emit(SIGNAL("aboutToQuit()") )
self.app.app.exit(0)
|
l8orre/nxtBridge
|
nxtPwt/nxtBridgeCtrl.py
|
Python
|
mit
| 2,586
|
"""
Utility functions for atmospheric data wrangling / preparation.
- ndarrays
- netCDF files
- Lat-lon geophysical data
- Pressure level data and topography
"""
from __future__ import division
import numpy as np
import pandas as pd
import collections
import scipy.interpolate as interp
from mpl_toolkits import basemap
import xarray as xray
from xarray import Dataset
import time
from atmos.utils import print_if, disptime
import atmos.utils as utils
import atmos.xrhelper as xr
from atmos.constants import const as constants
# ======================================================================
# NDARRAYS AND XRAY.DATAARRAYS
# ======================================================================
# ----------------------------------------------------------------------
def biggify(small, big, tile=False):
"""Add dimensions or tile an array for broadcasting.
Parameters
----------
small : ndarray
Array which singleton dimensions will be added to. Its
dimensions must be a subset of big's dimensions.
big : ndarray
Array whose shape will be used to determine the shape of
the output.
tile : bool, optional
If True, tile the array along the additional dimensions.
If False, add singleton dimensions.
Returns
-------
biggified : ndarray
Array of data from small, with dimensions added
for any dimension that is in big but not in small.
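    Examples
    --------
    A minimal shape sketch:
    >>> small = np.arange(8).reshape(2, 4)
    >>> big = np.zeros((2, 4, 3))
    >>> biggify(small, big).shape
    (2, 4, 1)
    >>> biggify(small, big, tile=True).shape
    (2, 4, 3)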
"""
debug = False
dbig, dsmall = big.shape, small.shape
# Check that all of the dimensions of small are contained within big
check = [d in dbig or d == 1 for d in dsmall]
if not np.all(check):
msg = ('Dimensions of small ' + str(dsmall) +
' are not a subset of big ' + str(dbig))
raise ValueError(msg)
# Check that the dimensions appear in a compatible order
inds = list()
for d in dsmall:
try:
inds.append(dbig.index(d))
except ValueError:
inds.append(-1)
if not utils.non_decreasing(inds):
msg = ('Dimensions of small ' + str(dsmall) +
' are not in an order compatible with big ' + str(dbig))
raise ValueError(msg)
# Biggify the small array
biggified = small
ibig = big.ndim - 1
ismall = small.ndim - 1
n = -1
# First add singleton dimensions
while ismall >= 0 and ibig >= 0:
print_if('ibig %d, ismall %d, n %d' % (ibig, ismall, n), debug)
if dbig[ibig] == dsmall[ismall] or dsmall[ismall] == 1:
print_if(' Same %d' % dbig[ibig], debug)
ismall -= 1
else:
print_if(' Different. Big %d, small %d' %
(dbig[ibig], dsmall[ismall]), debug)
biggified = np.expand_dims(biggified, n)
n -= 1
ibig -= 1
# Expand with tiles if selected
if tile:
dims = list(biggified.shape)
        # First add any additional singleton dimensions needed to make
        # biggified the same dimension as big
for i in range(len(dims), len(dbig)):
dims.insert(0, 1)
# Tile the array
for i in range(-1, -1 - len(dims), -1):
if dims[i] == dbig[i]:
dims[i] = 1
else:
dims[i] = dbig[i]
biggified = np.tile(biggified, dims)
return biggified
# ----------------------------------------------------------------------
def collapse(arr, axis=-1):
"""Collapse singleton dimension (first or last) in an array.
Parameters
----------
arr : ndarray
Array to collapse.
axis : {0, -1}
Axis to collapse.
Returns
-------
output : ndarray
Array with singleton dimension at beginning or end removed.
"""
if axis not in [0, -1]:
raise ValueError('Invalid axis %d. Must be 0 or -1.' % axis)
dims = arr.shape
if dims[axis] > 1:
raise ValueError('Dimension %d of input array is not singleton.' % axis)
if axis == 0:
output = arr[0]
else:
output = arr[...,0]
return output
# ----------------------------------------------------------------------
def nantrapz(y, x=None, axis=-1):
"""
Integrate using the composite trapezoidal rule, ignoring NaNs
Integrate `ym` (`x`) along given axis, where `ym` is a masked
array of `y` with NaNs masked.
Parameters
----------
y : array_like
Input array to integrate.
    x : array_like, optional
        Sample points corresponding to the `y` values. If None, unit
        spacing between `y` elements is assumed.
    axis : int, optional
        Axis along which to integrate.
Returns
-------
    trapz : float or ndarray
        Definite integral as approximated by the trapezoidal rule.
"""
ym = np.ma.masked_array(y, np.isnan(y))
trapz = np.trapz(ym, x, axis=axis)
# Convert from masked array back to regular ndarray
if isinstance(trapz, np.ma.masked_array):
trapz = trapz.filled(np.nan)
return trapz
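# Illustrative sketch of nantrapz usage (hypothetical values): the NaN sample
# is masked out, so only the first trapezoid contributes.
# >>> nantrapz([1.0, 2.0, np.nan], x=[0.0, 1.0, 2.0])
# 1.5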
# ----------------------------------------------------------------------
def rolling_mean(data, nroll, axis=-1, center=True, **kwargs):
"""Return the rolling mean along an axis.
Parameters
----------
data : ndarray or xray.DataArray
Input data.
nroll : int
Size of window for rolling mean.
axis : int, optional
Axis to compute along.
center : bool, optional
Align to center of window.
**kwargs : other keyword arguments
See pandas.rolling_mean.
Returns
-------
rolling : ndarray or DataArray
Rolling mean data.
"""
# Maximum number of dimensions handled by this code
nmax = 5
ndim = data.ndim
    if ndim > nmax:
raise ValueError('Input data has too many dimensions. Max 5-D.')
if isinstance(data, xray.DataArray):
name, attrs, coords, dimnames = xr.meta(data)
vals = data.values.copy()
else:
vals = data
# Roll axis to end
vals = np.rollaxis(vals, axis, ndim)
# Add singleton dimensions for looping, if necessary
for i in range(ndim, nmax):
vals = np.expand_dims(vals, axis=0)
# Initialize output
rolling = np.ones(vals.shape, dtype=vals.dtype)
# Compute rolling mean, iterating over additional dimensions
dims = vals.shape[:-1]
for i in range(dims[0]):
for j in range(dims[1]):
for k in range(dims[2]):
for m in range(dims[3]):
rolling[i,j,k,m] = pd.rolling_mean(vals[i,j,k,m], nroll,
center=center, **kwargs)
# Collapse any additional dimensions that were added
for i in range(ndim, rolling.ndim):
rolling = rolling[0]
# Roll axis back to its original position
rolling = np.rollaxis(rolling, -1, axis)
if isinstance(data, xray.DataArray):
rolling = xray.DataArray(rolling, name=name, coords=coords,
dims=dimnames, attrs=attrs)
return rolling
# ----------------------------------------------------------------------
def gradient(data, vec, axis=-1):
"""Compute gradient along an axis.
Parameters
----------
data : np.ndarray or xray.DataArray
Input data.
vec : 1-dimensional np.ndarray
Array of coordinates corresponding to axis of differentiation.
axis : int, optional
Axis to differentiate along.
Returns
-------
grad : np.ndarray or xray.DataArray
"""
# Maximum number of dimensions handled by this code
nmax = 5
ndim = data.ndim
    if ndim > nmax:
raise ValueError('Input data has too many dimensions. Max 5-D.')
if isinstance(data, xray.DataArray):
name, attrs, coords, dimnames = xr.meta(data)
vals = data.values.copy()
else:
vals = data
# Roll axis to end
vals = np.rollaxis(vals, axis, ndim)
# Add singleton dimensions for looping, if necessary
for i in range(ndim, nmax):
vals = np.expand_dims(vals, axis=0)
# Initialize output
grad = np.ones(vals.shape, dtype=vals.dtype)
# Compute gradient, iterating over additional dimensions
dvec = np.gradient(vec)
dims = vals.shape[:-1]
for i in range(dims[0]):
for j in range(dims[1]):
for k in range(dims[2]):
for m in range(dims[3]):
grad[i,j,k,m] = np.gradient(vals[i,j,k,m], dvec)
# Collapse any additional dimensions that were added
for i in range(ndim, grad.ndim):
grad = grad[0]
# Roll axis back to its original position
grad = np.rollaxis(grad, -1, axis)
if isinstance(data, xray.DataArray):
grad = xray.DataArray(grad, coords=coords, dims=dimnames)
return grad
# ======================================================================
# UNIT CONVERSIONS
# ======================================================================
# ----------------------------------------------------------------------
def pres_units(units):
"""
Return a standardized name (hPa or Pa) for the input pressure units.
"""
hpa = ['mb', 'millibar', 'millibars', 'hpa', 'hectopascal', 'hectopascals']
pa = ['pascal', 'pascals', 'pa']
if units.lower() in hpa:
return 'hPa'
elif units.lower() in pa:
return 'Pa'
else:
raise ValueError('Unknown units ' + units)
# ----------------------------------------------------------------------
def pres_convert(pres, units_in, units_out):
"""Convert pressure array from units_in to units_out."""
if pres_units(units_in) == pres_units(units_out):
pres_out = pres
elif pres_units(units_in) == 'hPa' and pres_units(units_out) == 'Pa':
pres_out = pres * 100
elif pres_units(units_in) == 'Pa' and pres_units(units_out) == 'hPa':
pres_out = pres / 100
else:
raise ValueError('Problem with input/output units.')
return pres_out
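# Illustrative sketch of pres_convert usage (hypothetical values):
# >>> pres_convert(np.array([1000.0, 850.0]), 'mb', 'Pa')
# array([ 100000.,   85000.])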
# ----------------------------------------------------------------------
def precip_units(units):
"""
Return a standardized name for precip units.
"""
kgm2s = ['kg/m2/s', '(kg/m^2)/s', 'kg/m^2/s', 'kg m^-2 s^-1',
'kg/(m^2 s)', 'kg m-2 s-1']
mmday = ['mm/day', 'mm day^-1']
if units.lower() in kgm2s:
return 'kg m^-2 s^-1'
elif units.lower() in mmday:
return 'mm day^-1'
else:
raise ValueError('Unknown units ' + units)
# ----------------------------------------------------------------------
def precip_convert(precip, units_in, units_out):
"""Convert precipitation from units_in to units_out."""
if isinstance(precip, xray.DataArray):
name, attrs, coords, dims = xr.meta(precip)
attrs['units'] = units_out
i_DataArray = True
else:
i_DataArray = False
kgm2s = 'kg m^-2 s^-1'
mmday = 'mm day^-1'
# Convert between (kg/m^2)/s to mm/day
SCALE = 60 * 60 * 24
if precip_units(units_in) == precip_units(units_out):
precip_out = precip
elif precip_units(units_in) == kgm2s and precip_units(units_out) == mmday:
precip_out = precip * SCALE
elif precip_units(units_in) == mmday and precip_units(units_out) == kgm2s:
precip_out = precip / SCALE
else:
msg = "Don't know how to convert between %s and %s"
raise ValueError(msg % (units_in, units_out))
if i_DataArray:
precip_out = xray.DataArray(precip_out, name=name, dims=dims,
coords=coords, attrs=attrs)
return precip_out
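# Illustrative sketch of precip_convert usage (hypothetical value): 1 kg of
# water per m^2 per second corresponds to 86400 mm/day.
# >>> precip_convert(1.0, 'kg/m2/s', 'mm/day')
# 86400.0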
# ======================================================================
# COORDINATES AND SUBSETS
# ======================================================================
# ----------------------------------------------------------------------
def get_coord(data, coord_name, return_type='values'):
"""Return values, name or dimension of coordinate in DataArray.
Parameters
----------
data : xray.DataArray
Data array to search for latitude coords.
coord_name : str
Coordinate to extract. Can be the exact ID of the variable or
a generic ID ('lat', 'lon', 'plev', 'time', 'day', 'year').
If a generic ID is provided then lists of common names for that ID
will be searched for a match.
return_type : {'values', 'name', 'dim'}, optional
'values' : Return an array of coordinate values.
'name' : Return the name of the coordinate.
'dim' : Return the dimension of the coordinate.
Returns
-------
output : ndarray, string or int
The generic coordinate names searched through are:
'lat' : ['lats', 'latitude', 'YDim','Y', 'y']
'lon' : ['long', 'lons', 'longitude', 'XDim', 'X', 'x']
'plev' : ['plevel', 'plevels', 'lev', 'level',
'levels', 'Height']
as well as capitalization options for coord_name (.upper(),
.lower(), .capitalize())
"""
def name_options(nm):
opts = {'lat' : ['lats', 'latitude', 'YDim','Y', 'y'],
'lon' : ['long', 'lons', 'longitude', 'XDim', 'X', 'x'],
'plev' : ['plevel', 'plevels', 'lev', 'level', 'levels',
'Height']}
nms = list(set([nm, nm.lower(), nm.upper(), nm.capitalize()]))
if opts.get(nm) is not None:
nms = list(nms) + opts[nm]
return nms
names = name_options(coord_name)
# Look in list of common coordinate names
if coord_name not in data.coords:
found = [i for i, s in enumerate(names) if s in data.coords]
if len(found) == 0:
raise ValueError("Can't find coordinate name in data coords %s" %
data.coords.keys())
if len(found) > 1:
raise ValueError('Conflicting possible coord names in coords %s'
% data.coords.keys())
else:
coord_name = names[found[0]]
if return_type == 'values':
output = data[coord_name].values.copy()
elif return_type == 'name':
output = coord_name
elif return_type == 'dim':
output = data.dims.index(coord_name)
else:
raise ValueError('Invalid return_type ' + return_type)
return output
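# Illustrative sketch of get_coord usage (hypothetical DataArray `data` whose
# latitude coordinate is named 'YDim'): the generic ID 'lat' still finds it.
# >>> lats = get_coord(data, 'lat')              # coordinate values
# >>> latname = get_coord(data, 'lat', 'name')   # -> 'YDim'
# >>> latdim = get_coord(data, 'lat', 'dim')     # axis index of 'YDim'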
# ----------------------------------------------------------------------
def subset(data, subset_dict, incl_lower=True, incl_upper=True, search=True,
copy=True, squeeze=False):
"""Extract a subset of a DataArray or Dataset along named dimensions.
Returns a DataArray or Dataset sub extracted from input data,
such that:
sub[dim_name] >= lower_or_list & sub[dim_name] <= upper,
OR sub[dim_name] == lower_or_list (if lower_or_list is a list)
for each dim_name in subset_dict.
This function calls atmos.xrhelper.subset with the additional
feature of calling the get_coord function to find common
dimension names (e.g. 'XDim' for latitude)
Parameters
----------
data : xray.DataArray or xray.Dataset
Data source for extraction.
subset_dict : dict of 2-tuples
Dimensions and subsets to extract. Each entry in subset_dict
is in the form {dim_name : (lower_or_list, upper)}, where:
- dim_name : string
Name of dimension to extract from. If dim_name is not in
data.dims, then the get_coord() function is used
to search for a similar dimension name (if search is True).
- lower_or_list : scalar or list of int or float
If scalar, then used as the lower bound for the subset range.
If list, then the subset matching the list will be extracted.
- upper : int, float, or None
Upper bound for subset range. If lower_or_list is a list,
then upper is ignored and should be set to None.
incl_lower, incl_upper : bool, optional
If True lower / upper bound is inclusive, with >= or <=.
If False, lower / upper bound is exclusive with > or <.
If lower_or_list is a list, then the whole list is included
and these parameters are ignored.
search : bool, optional
If True, call the get_coord function if dim_name is not found
in the dimension names of data.
copy : bool, optional
If True, return a copy of the data, otherwise a pointer.
squeeze : bool, optional
If True, squeeze any singleton dimensions out.
Returns
-------
sub : xray.DataArray or xray.Dataset
"""
if search:
nms = ['lat', 'lon', 'plev']
for dim_name in subset_dict:
if dim_name in nms and dim_name not in data.dims:
dim_name_new = get_coord(data, dim_name, 'name')
subset_dict[dim_name_new] = subset_dict.pop(dim_name)
return xr.subset(data, subset_dict, incl_lower, incl_upper, copy, squeeze)
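# Illustrative sketch of subset usage (hypothetical DataArray `data`): extract
# 10S-30N, 60-100E, letting get_coord() resolve the actual coordinate names.
# >>> sub = subset(data, {'lat': (-10, 30), 'lon': (60, 100)})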
# ----------------------------------------------------------------------
def dim_mean(data, dimname, lower=None, upper=None, minfrac=0.5):
"""Return the mean of a DataArray along dimension, preserving attributes.
Parameters
----------
data : xray.DataArray or xray.Dataset
Data to average.
dimname : str
Dimension to average along. Can be a generic name (e.g. 'lon')
or exact ID (e.g. 'XDim').
lower, upper : float, optional
Lower and upper bounds (inclusive) of subset to extract along
the dimension before averaging.
minfrac : float, optional
        Minimum fraction of non-missing values required for non-NaN output.
Returns
-------
databar : xray.DataArray or xray.Dataset
"""
def one_variable(var, dimname, dimvals, minfrac):
try:
axis = get_coord(var, dimname, 'dim')
except ValueError:
# Dimension isn't in the data variable
return var
attrs = var.attrs
attrs['avg_over_' + dimname] = dimvals
attrs['minfrac'] = minfrac
# Create mask for any point where more than minfrac fraction is missing
missings = np.isnan(var)
missings = missings.sum(dim=dimname)
min_num = var.shape[axis] * minfrac
mask = missings > min_num
# Compute mean and apply mask
var = var.mean(dim=dimname)
name, _, coords, dims = xr.meta(var)
vals = np.ma.masked_array(var.values, mask).filled(np.nan)
var_out = xray.DataArray(vals, name=name, attrs=attrs, dims=dims,
coords=coords)
return var_out
if dimname not in data.dims:
try:
dimname = get_coord(data, dimname, 'name')
except ValueError:
# Dimension isn't in the data variable
return data
if lower is not None:
data = subset(data, {dimname : (lower, upper)}, copy=False)
dimvals = get_coord(data, coord_name=dimname)
if isinstance(data, xray.DataArray):
databar = one_variable(data, dimname, dimvals, minfrac)
elif isinstance(data, xray.Dataset):
databar = xray.Dataset()
databar.attrs = data.attrs
for nm in data.data_vars:
databar[nm] = one_variable(data[nm], dimname, dimvals, minfrac)
else:
raise ValueError('Input data must be xray.DataArray or xray.Dataset')
return databar
# ======================================================================
# NETCDF FILE I/O
# ======================================================================
# ----------------------------------------------------------------------
def ncdisp(filename, verbose=True, decode_cf=False, indent=2, width=None):
"""Display the attributes of data in a netcdf file."""
with xray.open_dataset(filename, decode_cf=decode_cf) as ds:
if verbose:
xr.ds_print(ds, indent, width)
else:
print(ds)
# ----------------------------------------------------------------------
def ncload(filename, verbose=True, unpack=True, missing_name=u'missing_value',
offset_name=u'add_offset', scale_name=u'scale_factor',
decode_cf=False):
"""
Read data from netcdf file into xray dataset.
If options are selected, unpacks from compressed form and/or replaces
missing values with NaN. Returns data as an xray.Dataset object.
"""
with xray.open_dataset(filename, decode_cf=decode_cf) as ds:
print_if('****** Reading file: ' + filename + '********', verbose)
print_if(ds, verbose, printfunc=xr.ds_print)
if unpack:
print_if('****** Unpacking data *********', verbose)
ds = xr.ds_unpack(ds, verbose=verbose, missing_name=missing_name,
offset_name=offset_name, scale_name=scale_name)
# Use the load() function so that the dataset is available after
# the file is closed
ds.load()
return ds
# ----------------------------------------------------------------------
def load_concat(paths, var_ids=None, concat_dim='TIME', subset_dict=None,
func=None, func_args=None, func_kw=None, squeeze=True, verbose=True):
"""Load a variable from multiple files and concatenate into one.
Especially useful for extracting variables split among multiple
OpenDAP files.
Parameters
----------
paths : list of strings
List of file paths or OpenDAP urls to process.
var_ids : str or list of str, optional
Name(s) of variable(s) to extract. If None then all variables
are extracted and a Dataset is returned.
concat_dim : str
Name of dimension to concatenate along. If this dimension
doesn't exist in the input data, a new one is created.
subset_dict : dict of 2-tuples, optional
Dimensions and subsets to extract. Each entry in subset_dict
is in the form {dim_name : (lower_or_list, upper)}, where:
- dim_name : string
Name of dimension to extract from.
The dimension name can be the actual dimension name
(e.g. 'XDim') or a generic name (e.g. 'lon') and get_coord()
is called to find the specific name.
- lower_or_list : scalar or list of int or float
If scalar, then used as the lower bound for the subset range.
If list, then the subset matching the list will be extracted.
- upper : int, float, or None
Upper bound for subset range. If lower_or_list is a list,
then upper is ignored and should be set to None.
func : function, optional
Function to apply to each variable in each file before concatenating.
e.g. compute zonal mean. Takes one DataArray as first input parameter.
func_args : list, optional
        List of positional arguments to pass to func.
func_kw : dict or list of dict, optional
Dict of keyword arguments to pass to func. To use different values for
different files, make func_kw a list of the same length as the list of
file paths, with func_kw[i] containing a dict of keyword args for
path[i]. Otherwise, make func_kw a single dict to use for all paths.
squeeze : bool, optional
If True, squeeze out extra dimensions and add info to attributes.
verbose : bool, optional
If True, print updates while processing files.
    Returns
    -------
data : xray.DataArray or xray.Dataset
Data extracted from input files.
"""
# Number of times to attempt opening file (in case of server problems)
NMAX = 3
# Wait time (seconds) between attempts
WAIT = 5
if var_ids is not None:
var_ids = utils.makelist(var_ids)
def get_data(path, var_ids, subset_dict, func, func_args, func_kw):
with xray.open_dataset(path) as ds:
if var_ids is None:
# All variables
data = ds
else:
# Extract specific variables
data = ds[var_ids]
if subset_dict is not None:
data = subset(data, subset_dict, copy=False)
if func is not None:
data_out = xray.Dataset()
if func_args is None:
func_args = []
if func_kw is None:
func_kw = {}
for nm in data.data_vars:
vars_out = func(data[nm], *func_args, **func_kw)
if not isinstance(vars_out, xray.Dataset):
vars_out = vars_out.to_dataset()
for nm2 in vars_out.data_vars:
data_out[nm2] = vars_out[nm2]
data = data_out
data.load()
return data
pieces = []
func_kw = utils.makelist(func_kw)
paths = utils.makelist(paths)
if len(func_kw) == 1:
func_kw *= len(paths)
for p, kw in zip(paths, func_kw):
print_if(None, verbose, printfunc=disptime)
print_if('Loading ' + p, verbose)
attempt = 0
while attempt < NMAX:
try:
piece = get_data(p, var_ids, subset_dict, func, func_args, kw)
print_if('Appending data', verbose)
pieces.append(piece)
attempt = NMAX
except RuntimeError as err:
attempt += 1
if attempt < NMAX:
print('Error reading file. Attempting again in %d s' %
WAIT)
time.sleep(WAIT)
else:
raise err
print_if('Concatenating data', verbose)
data = xray.concat(pieces, dim=concat_dim)
print_if(None, verbose, printfunc=disptime)
if squeeze:
data = xr.squeeze(data)
if len(data.data_vars) == 1:
# Convert from Dataset to DataArray for output
        data = data[list(data.data_vars.keys())[0]]
return data
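# Illustrative sketch of load_concat usage (hypothetical URLs and variable
# name): read one variable from several OpenDAP files, subset it, and
# concatenate along the time dimension.
# >>> paths = ['http://server/dataset_%d.nc' % yr for yr in (2000, 2001)]
# >>> u = load_concat(paths, var_ids='U', concat_dim='TIME',
# ...                 subset_dict={'lat': (-45, 45), 'plev': (200, 200)})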
# ----------------------------------------------------------------------
def save_nc(filename, *args):
"""Save xray.DataArray variables to a netcdf file.
Call Signatures
---------------
save_nc(filename, var1)
save_nc(filename, var1, var2)
save_nc(filename, var1, var2, var3)
etc...
Parameters
----------
filename : string
File path for saving.
var1, var2, ... : xray.DataArrays
List of xray.DataArrays with compatible coordinates.
"""
ds = xr.vars_to_dataset(*args)
ds.to_netcdf(filename)
return None
# ----------------------------------------------------------------------
def mean_over_files(files, nms=None):
"""Return data averaged over all input files.
Parameters
----------
files : list of str
Names of files to average over, e.g. yearly files.
nms : list of str, optional
Subset of data variables to include. If None, then all data
variables are included.
Returns
-------
ds_out : xray.Dataset
Dataset of variables averaged over all the input files.
"""
# Initialize with first file
print('Reading ' + files[0])
with xray.open_dataset(files[0]) as ds:
if nms is None:
            nms = list(ds.data_vars.keys())
ds_out = ds[nms].load()
# Sum the variables from each subsequent file
for i, filenm in enumerate(files[1:]):
print('Reading ' + filenm)
with xray.open_dataset(filenm) as ds:
ds_out = ds_out + ds[nms]
ds_out.load()
# Divide by number of files for mean
ds_out = ds_out / float(len(files))
return ds_out
# ======================================================================
# LAT-LON GEOPHYSICAL DATA
# ======================================================================
# ----------------------------------------------------------------------
def latlon_equal(data1, data2, latname1=None, lonname1=None,
latname2=None, lonname2=None):
"""Return True if input DataArrays have the same lat-lon coordinates."""
lat1 = get_coord(data1, 'lat', coord_name=latname1)
lon1 = get_coord(data1, 'lon', coord_name=lonname1)
lat2 = get_coord(data2, 'lat', coord_name=latname2)
lon2 = get_coord(data2, 'lon', coord_name=lonname2)
is_equal = np.array_equal(lat1, lat2) and np.array_equal(lon1, lon2)
return is_equal
# ----------------------------------------------------------------------
def lon_convention(lon):
"""Return 360 if longitudes are 0-360E, 180 if 180W-180E.
The output of this function can be used in the set_lon() function
to make two data arrays use a consistent longitude convention.
"""
if lon.min() < 0:
return 180
else:
return 360
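# Illustrative sketch of lon_convention usage (hypothetical values):
# >>> lon_convention(np.array([-180.0, -90.0, 0.0, 90.0]))
# 180
# >>> lon_convention(np.array([0.0, 90.0, 180.0, 270.0]))
# 360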
# ----------------------------------------------------------------------
def set_lon(data, lonmax=360, lon=None, lonname=None):
"""Set data longitudes to 0-360E or 180W-180E convention.
Parameters
----------
data : ndarray or xray.DataArray
Input data array with longitude as the last dimension
lonmax : int, optional
Maximum longitude for output data. Set to 360 for 0-360E,
or set to 180 for 180W-180E.
lon : 1-D ndarray or list, optional
Longitudes of input data. Only used if data is an ndarray.
If data is an xray.DataArray, then lon = data['lon']
lonname : string, optional
Name of longitude coordinate in data, if data is a DataArray
Returns
-------
If argument data is an ndarray:
data_out, lon_out : ndarray
The data and longitude arrays shifted to the selected
convention.
If argument data is an xray.DataArray:
data_out : xray.DataArray
DataArray object with data and longitude values shifted to
the selected convention.
"""
if isinstance(data, xray.DataArray):
lon = get_coord(data, 'lon')
if lonname is None:
lonname = get_coord(data, 'lon', 'name')
name, attrs, coords, dims_list = xr.meta(data)
vals = data.values
else:
vals = data
lonmin = lonmax - 360
if lonmin >= lon.min() and lonmin <= lon.max():
lon0 = lonmin
start = True
else:
lon0 = lonmax
start = False
vals_out, lon_out = basemap.shiftgrid(lon0, vals, lon, start=start)
if isinstance(data, xray.DataArray):
coords[lonname].values = lon_out
data_out = xray.DataArray(vals_out, name=name, dims=dims_list,
coords=coords, attrs=attrs)
return data_out
else:
return vals_out, lon_out
# ----------------------------------------------------------------------
def interp_latlon(data, lat_out, lon_out, lat_in=None, lon_in=None,
checkbounds=False, masked=False, order=1):
"""Interpolate data onto a new lat-lon grid.
Parameters
----------
data : ndarray or xray.DataArray
Data to interpolate, with latitude as second-last dimension,
longitude as last dimension. Maximum array dimensions: 5-D.
lat_out, lon_out : 1-D float or int array
Latitude and longitudes to interpolate onto.
lat_in, lon_in : 1-D float or int array, optional
Latitude and longitude arrays of input data. Only used if data
is an ndarray. If data is an xray.DataArray then
lat_in = data['lat'] and lon_in = data['lon']
checkbounds : bool, optional
If True, values of lat_out and lon_out are checked to see
that they lie within the range specified by lat_in, lon_in.
If False, and lat_out, lon_out are outside lat_in, lon_in,
interpolated values will be clipped to values on boundary
of input grid lat_in, lon_in
masked : bool or float, optional
If True, points outside the range of lat_in, lon_in are masked
(in a masked array).
If masked is set to a number, then points outside the range of
lat_in, lon_in will be set to that number.
order : int, optional
0 for nearest-neighbor interpolation,
1 for bilinear interpolation
        3 for cubic spline (requires scipy.ndimage).
Returns
-------
data_out : ndarray or xray.DataArray
Data interpolated onto lat_out, lon_out grid
"""
# Maximum number of dimensions handled by this code
nmax = 5
ndim = data.ndim
    if ndim > nmax:
raise ValueError('Input data has too many dimensions. Max 5-D.')
if isinstance(data, xray.DataArray):
lat_in = get_coord(data, 'lat')
latname = get_coord(data, 'lat', 'name')
lon_in = get_coord(data, 'lon')
lonname = get_coord(data, 'lon', 'name')
name, attrs, coords, dims_list = xr.meta(data)
coords[latname] = xray.DataArray(lat_out, coords={latname : lat_out},
dims=[latname], attrs=data[latname].attrs)
coords[lonname] = xray.DataArray(lon_out, coords={lonname : lon_out},
dims=[lonname], attrs=data[lonname].attrs)
vals = data.values.copy()
else:
vals = data
# Check for the common case that lat_in and/or lat_out are decreasing
# and flip if necessary to work with basemap.interp()
flip = False
if utils.strictly_decreasing(lat_in):
lat_in = lat_in[::-1]
vals = vals[...,::-1, :]
if utils.strictly_decreasing(lat_out):
flip = True
lat_out = lat_out[::-1]
x_out, y_out = np.meshgrid(lon_out, lat_out)
# Initialize output array
dims = vals.shape
dims = dims[:-2]
vals_out = np.empty(dims + x_out.shape)
# Add singleton dimensions for looping, if necessary
for i in range(ndim, nmax):
vals = np.expand_dims(vals, axis=0)
vals_out = np.expand_dims(vals_out, axis=0)
# Interp onto new lat-lon grid, iterating over all other dimensions
dims = vals_out.shape[:-2]
for i in range(dims[0]):
for j in range(dims[1]):
for k in range(dims[2]):
vals_out[i, j, k] = basemap.interp(
vals[i, j, k], lon_in, lat_in, x_out, y_out,
order=order, checkbounds=checkbounds, masked=masked)
# Collapse any additional dimensions that were added
for i in range(ndim, vals_out.ndim):
vals_out = vals_out[0]
if flip:
# Flip everything back to previous order
vals_out = vals_out[...,::-1, :]
lat_out = lat_out[::-1]
if isinstance(data, xray.DataArray):
data_out = xray.DataArray(vals_out, name=name, coords=coords,
dims=dims_list, attrs=attrs)
else:
data_out = vals_out
return data_out
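# Illustrative sketch of interp_latlon usage (hypothetical DataArray `data`):
# regrid onto a 2.5-degree global grid with bilinear interpolation.
# >>> lat_new = np.arange(-90, 90.1, 2.5)
# >>> lon_new = np.arange(0, 360, 2.5)
# >>> data2 = interp_latlon(data, lat_new, lon_new, order=1)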
# ----------------------------------------------------------------------
def mask_oceans(data, lat=None, lon=None, inlands=True, resolution='l',
grid=5):
"""Return the data with ocean grid points set to NaN.
Parameters
----------
data : ndarray or xray.DataArray
Data to mask, with latitude as second-last dimension,
longitude as last dimension. Maximum array dimensions: 5-D.
lat, lon : ndarray, optional
Latitude and longitude arrays. Only used if data is an
ndarray and not an xray.DataArray.
inlands : bool, optional
If False, mask only ocean points and not inland lakes.
resolution : {'c','l','i','h', 'f'}, optional
gshhs coastline resolution used to define land/sea mask.
grid : {1.25, 2.5, 5, 10}, optional
Land/sea mask grid spacing in minutes.
Returns
-------
data_out : ndarray or xray.DataArray
Data with ocean grid points set to NaN.
"""
# Maximum number of dimensions handled by this code
nmax = 5
ndim = data.ndim
    if ndim > nmax:
raise ValueError('Input data has too many dimensions. Max 5-D.')
if isinstance(data, xray.DataArray):
lat = get_coord(data, 'lat')
lon = get_coord(data, 'lon')
name, attrs, coords, dims_list = xr.meta(data)
vals = data.values.copy()
else:
vals = data
# Convert to 180W-180E convention that basemap.maskoceans requires
lonmax = lon_convention(lon)
if lonmax == 360:
vals, lon = set_lon(vals, lonmax=180, lon=lon)
# Add singleton dimensions for looping, if necessary
for i in range(ndim, nmax):
vals = np.expand_dims(vals, axis=0)
# Initialize output
vals_out = np.ones(vals.shape, dtype=float)
vals_out = np.ma.masked_array(vals_out, np.isnan(vals_out))
# Mask oceans, iterating over additional dimensions
x, y = np.meshgrid(lon, lat)
dims = vals_out.shape[:-2]
for i in range(dims[0]):
for j in range(dims[1]):
for k in range(dims[2]):
vals_out[i, j, k] = basemap.maskoceans(
x, y, vals[i, j, k], inlands=inlands,
resolution=resolution, grid=grid)
# Convert from masked array to regular array with NaNs
vals_out = vals_out.filled(np.nan)
# Collapse any additional dimensions that were added
for i in range(ndim, vals_out.ndim):
vals_out = vals_out[0]
# Convert back to original longitude convention
if lonmax == 360:
vals_out, lon = set_lon(vals_out, lonmax=lonmax, lon=lon)
if isinstance(data, xray.DataArray):
data_out = xray.DataArray(vals_out, name=name, coords=coords,
dims=dims_list, attrs=attrs)
else:
data_out = vals_out
return data_out
# ----------------------------------------------------------------------
def mean_over_geobox(data, lat1, lat2, lon1, lon2, lat=None, lon=None,
area_wtd=True, land_only=False):
"""Return the mean of an array over a lat-lon region.
Parameters
----------
data : ndarray or xray.DataArray
Data to average, with latitude as second-last dimension and
longitude as last dimension.
lat1, lat2, lon1, lon2 : float
Latitude and longitude limits for averaging region, with
lon1 <= lon2 and lat1 <= lat2.
lat, lon : ndarray, optional
Latitude and longitude arrays. Only used if data is an
ndarray and not an xray.DataArray.
area_wtd : bool, optional
Return the area-weighted average (weighted by cos(lat))
land_only : bool, optional
Mask out ocean grid points so that only data over land is
included in the mean.
Returns
-------
avg : ndarray or xray.DataArray
The data averaged over the lat-lon region.
"""
if not isinstance(data, xray.DataArray):
if lat is None or lon is None:
raise ValueError('Latitude and longitude arrays must be provided '
'if data is not an xray.DataArray.')
latname, lonname = 'lat', 'lon'
coords = xr.coords_init(data)
coords = xr.coords_assign(coords, -1, lonname, lon)
coords = xr.coords_assign(coords, -2, latname, lat)
data_out = xray.DataArray(data, coords=coords)
attrs = {}
else:
data_out = data
name, attrs, coords, _ = xr.meta(data)
latname = get_coord(data, 'lat', 'name')
lonname = get_coord(data, 'lon', 'name')
lon = get_coord(data, 'lon')
lat = get_coord(data, 'lat')
coords = utils.odict_delete(coords, latname)
coords = utils.odict_delete(coords, lonname)
attrs['description'] = 'Mean over lat-lon subset'
attrs['lon1'], attrs['lon2'] = lon1, lon2
attrs['lat1'], attrs['lat2'] = lat1, lat2
attrs['area_weighted'] = area_wtd
attrs['land_only'] = land_only
if land_only:
data_out = mask_oceans(data_out)
if lat1 == lat2:
        if lat1 not in lat:
raise ValueError('lat1=lat2=%f not in latitude grid' % lat1)
if lon1 == lon2:
        if lon1 not in lon:
raise ValueError('lon1=lon2=%f not in longitude grid' % lon1)
subset_dict = {latname : (lat1, lat2), lonname : (lon1, lon2)}
data_out = subset(data_out, subset_dict)
attrs['subset_lons'] = get_coord(data_out, 'lon')
attrs['subset_lats'] = get_coord(data_out, 'lat')
# Mean over longitudes
data_out = data_out.mean(axis=-1)
# Mean over latitudes
if lat1 == lat2:
# Eliminate singleton dimension
avg = data_out.mean(axis=-1)
avg.attrs = attrs
else:
# Array of latitudes with same NaN mask as the data so that the
# area calculation is correct
lat_rad = np.radians(get_coord(data_out, 'lat'))
lat_rad = biggify(lat_rad, data_out, tile=True)
mdat = np.ma.masked_array(data_out, np.isnan(data_out))
lat_rad = np.ma.masked_array(lat_rad, mdat.mask)
lat_rad = lat_rad.filled(np.nan)
if area_wtd:
# Weight by area with cos(lat)
coslat = np.cos(lat_rad)
data_out = data_out * coslat
area = nantrapz(coslat, lat_rad, axis=-1)
else:
area = nantrapz(np.ones(lat_rad.shape, dtype=float), lat_rad, axis=-1)
# Integrate with trapezoidal method
avg = nantrapz(data_out, lat_rad, axis=-1) / area
# Pack output into DataArray with the metadata that was lost in np.trapz
if isinstance(data, xray.DataArray) and not isinstance(avg, xray.DataArray):
avg = xray.DataArray(avg, name=name, coords=coords, attrs=attrs)
return avg
# ======================================================================
# PRESSURE LEVEL DATA AND TOPOGRAPHY
# ======================================================================
# ----------------------------------------------------------------------
def get_ps_clim(lat, lon, datafile='data/topo/ncep2_ps.nc'):
"""Return surface pressure climatology on selected lat-lon grid.
Parameters
----------
lat, lon : 1-D float array
Latitude and longitude grid to interpolate surface pressure
climatology onto.
datafile : string, optional
Name of file to read for surface pressure climatology.
Returns
-------
ps : xray.DataArray
DataArray of surface pressure climatology interpolated onto
lat-lon grid.
"""
ds = ncload(datafile)
ps = ds['ps']
ps.attrs = utils.odict_insert(ps.attrs, 'title', ds.attrs['title'], pos=0)
# Check what longitude convention is used in the surface pressure
# climatology and switch if necessary
lonmax = lon_convention(lon)
lon_ps = get_coord(ps, 'lon')
if lon_convention(lon_ps) != lonmax:
ps = set_lon(ps, lonmax)
# Interpolate ps onto lat-lon grid
ps = interp_latlon(ps, lat, lon)
return ps
# ----------------------------------------------------------------------
def correct_for_topography(data, topo_ps, plev=None, lat=None, lon=None):
"""Set pressure level data below topography to NaN.
Parameters
----------
data : ndarray or xray.DataArray
Data to correct, with pressure, latitude, longitude as the
last three dimensions.
topo_ps : ndarray or xray.DataArray
Climatological surface pressure to use for topography, on same
lat-lon grid as data.
plev, lat, lon : 1-D float array, optional
Pressure levels, latitudes and longitudes of input data.
Only used if data is an ndarray. If data is an xray.DataArray
then plev, lat and lon are extracted from data.coords.
Returns
-------
data_out : ndarray or xray.DataArray
Data with grid points below topography set to NaN.
"""
if isinstance(data, xray.DataArray):
lat = get_coord(data, 'lat')
lon = get_coord(data, 'lon')
name, attrs, coords, _ = xr.meta(data)
vals = data.values.copy()
# -- Pressure levels in Pascals
plev = get_coord(data, 'plev')
pname = get_coord(data, 'plev', 'name')
plev = pres_convert(plev, data[pname].units, 'Pa')
else:
vals = data
if isinstance(topo_ps, xray.DataArray):
if not latlon_equal(data, topo_ps):
msg = 'Inputs data and topo_ps are not on same latlon grid.'
raise ValueError(msg)
# Surface pressure values in Pascals:
ps_vals = topo_ps.values
ps_vals = pres_convert(ps_vals, topo_ps.units, 'Pa')
else:
ps_vals = topo_ps
# For each vertical level, set any point below topography to NaN
for k, p in enumerate(plev):
ibelow = ps_vals < p
vals[...,k,ibelow] = np.nan
if isinstance(data, xray.DataArray):
data_out = xray.DataArray(vals, name=name, coords=coords, attrs=attrs)
else:
data_out = vals
return data_out
# ----------------------------------------------------------------------
def near_surface(data, pdim=-3, return_inds=False):
"""Return the pressure-level data closest to surface.
At each grid point, the first non-NaN level is taken as the
near-surface level.
Parameters
----------
data : ndarray or xray.DataArray
Input data, maximum of 5 dimensions. Pressure levels must
be the last, second-last or third-last dimension.
pdim : {-3, -2, -1}, optional
Dimension of vertical levels in data.
return_inds : bool, optional
If True, return the pressure-level indices of the extracted
data in a tuple along with the near-surface data.
If False, return only the near-surface data.
Returns
-------
data_s[, ind_s] : ndarray or xray.DataArray[, ndarray]
Near-surface data [and indices of extracted data, if
return_inds is True]. If input data is an xray.DataArray,
data_s is returned as an xray.DataArray, otherwise as
an ndarray.
"""
# Maximum number of dimensions handled by this code
nmax = 5
ndim = data.ndim
if ndim > nmax:
raise ValueError('Input data has too many dimensions. Max 5-D.')
# Save metadata for output DataArray, if applicable
if isinstance(data, xray.DataArray):
i_DataArray = True
data = data.copy()
name, attrs, coords, _ = xr.meta(data)
title = 'Near-surface data extracted from pressure level data'
attrs = utils.odict_insert(attrs, 'title', title, pos=0)
pname = get_coord(data, 'plev', 'name')
        del coords[pname]
else:
i_DataArray = False
# Add singleton dimensions for looping, if necessary
for i in range(ndim, nmax):
data = np.expand_dims(data, axis=0)
# Make sure pdim is indexing from end
pdim_in = pdim
if pdim > 0:
pdim = pdim - nmax
# Iterate over all other dimensions
dims = list(data.shape)
dims.pop(pdim)
data_s = np.nan*np.ones(dims, dtype=float)
ind_s = np.ones(dims, dtype=int)
for i in range(dims[0]):
for j in range(dims[1]):
for k in range(dims[2]):
for m in range(dims[3]):
if pdim == -3:
sub = data[i,j,:,k,m]
elif pdim == -2:
sub = data[i,j,k,:,m]
elif pdim == -1:
sub = data[i,j,k,m,:]
else:
raise ValueError('Invalid p dimension ' + str(pdim_in))
ind = np.where(~np.isnan(sub))[0][0]
data_s[i,j,k,m] = sub[ind]
ind_s[i,j,k,m] = ind
# Collapse any additional dimensions that were added
for i in range(ndim - 1, data_s.ndim):
data_s = data_s[0]
ind_s = ind_s[0]
# Pack data_s into an xray.DataArray if input was in that form
if i_DataArray:
data_s = xray.DataArray(data_s, name=name, coords=coords, attrs=attrs)
# Return data only, or tuple of data plus array of indices extracted
if return_inds:
return data_s, ind_s
else:
return data_s
# ----------------------------------------------------------------------
def interp_plevels(data, plev_new, plev_in=None, pdim=-3, kind='linear'):
"""Return the data interpolated onto new pressure level grid.
Parameters
----------
data : ndarray or xray.DataArray
Input data, maximum of 5 dimensions. Pressure levels must
be the last, second-last or third-last dimension.
plev_new : ndarray
New pressure levels to interpolate onto.
plev_in : ndarray
Original pressure levels of data. If data is an xray.DataArray,
then the values from data.coords are used.
pdim : {-3, -2, -1}, optional
Dimension of vertical levels in data.
kind : string, optional
Type of interpolation, e.g. 'linear', 'cubic', 'nearest', etc.
See scipy.interpolate.interp1d for all options.
Returns
-------
data_i : ndarray or xray.DataArray
Interpolated data. If input data is an xray.DataArray,
data_i is returned as an xray.DataArray, otherwise as
an ndarray.
"""
# Maximum number of dimensions handled by this code
nmax = 5
ndim = data.ndim
    if ndim > nmax:
raise ValueError('Input data has too many dimensions. Max 5-D.')
if isinstance(data, xray.DataArray):
i_DataArray = True
data = data.copy()
name, attrs, coords, _ = xr.meta(data)
title = 'Pressure-level data interpolated onto new pressure grid'
attrs = utils.odict_insert(attrs, 'title', title, pos=0)
pname = get_coord(data, 'plev', 'name')
plev_in = get_coord(data, 'plev')
coords[pname] = xray.DataArray(plev_new, coords={pname : plev_new},
attrs=data.coords[pname].attrs)
else:
i_DataArray = False
    # Check that the output levels lie within the input range (this also
    # catches inconsistent pressure units)
if plev_new.min() < plev_in.min() or plev_new.max() > plev_in.max():
raise ValueError('Output pressure levels are not contained '
'within input pressure levels. Check units on each.')
# Add singleton dimensions for looping, if necessary
for i in range(ndim, nmax):
data = np.expand_dims(data, axis=0)
# Make sure pdim is indexing from end
pdim_in = pdim
if pdim > 0:
pdim = pdim - nmax
# Iterate over all other dimensions
dims = list(data.shape)
dims[pdim] = len(plev_new)
data_i = np.nan*np.ones(dims, dtype=float)
dims.pop(pdim)
for i in range(dims[0]):
for j in range(dims[1]):
for k in range(dims[2]):
for m in range(dims[3]):
if pdim == -3:
sub = data[i,j,:,k,m]
view = data_i[i,j,:,k,m]
elif pdim == -2:
sub = data[i,j,k,:,m]
view = data_i[i,j,k,:,m]
elif pdim == -1:
sub = data[i,j,k,m,:]
view = data_i[i,j,k,m,:]
else:
raise ValueError('Invalid p dimension ' + str(pdim_in))
vals_i = interp.interp1d(plev_in, sub, kind=kind)(plev_new)
view[:] = vals_i
# Collapse any additional dimensions that were added
for i in range(ndim, data_i.ndim):
data_i = data_i[0]
# Pack data_s into an xray.DataArray if input was in that form
if i_DataArray:
data_i = xray.DataArray(data_i, name=name, coords=coords,
attrs=attrs)
return data_i
# ----------------------------------------------------------------------
def int_pres(data, plev=None, pdim=-3, pmin=0, pmax=1e6):
"""Return the mass-weighted vertical integral of the data.
Parameters
----------
data : xray.DataArray or ndarray
Data to be integrated, on pressure levels.
plev : ndarray, optional
Vertical pressure levels in Pascals. Only used if data
is an ndarray. If data is a DataArray, plev is extracted
from data and converted to Pa if necessary.
pdim : int, optional
Dimension of vertical pressure levels in data.
pmin, pmax : float, optional
Lower and upper bounds (inclusive) of pressure levels (Pa)
to include in integration.
Returns
-------
data_int : xray.DataArray or ndarray
Mass-weighted vertical integral of data from pmin to pmax.
"""
if isinstance(data, xray.DataArray):
i_DataArray = True
data = data.copy()
name, _, coords, _ = xr.meta(data)
attrs = collections.OrderedDict()
title = 'Vertically integrated by dp/g'
attrs['title'] = title
if 'long_name' in data.attrs.keys():
attrs['long_name'] = data.attrs['long_name']
if 'units' in data.attrs.keys():
attrs['units'] = '(' + data.attrs['units'] + ') * kg'
pname = get_coord(data, 'plev', 'name')
        del coords[pname]
if plev is None:
# -- Make sure pressure levels are in Pa
plev = get_coord(data, 'plev')
plev = pres_convert(plev, data[pname].units, 'Pa')
data[pname].values = plev
else:
i_DataArray = False
# Pack into DataArray to easily extract pressure level subset
pname = 'plev'
coords = xr.coords_init(data)
coords = xr.coords_assign(coords, pdim, pname, plev)
data = xray.DataArray(data, coords=coords)
# Extract subset and integrate
data = subset(data, {pname : (pmin, pmax)})
vals_int = nantrapz(data.values, data[pname].values, axis=pdim)
vals_int /= constants.g.values
if utils.strictly_decreasing(plev):
vals_int = -vals_int
if i_DataArray:
data_int = xray.DataArray(vals_int, name=name, coords=coords,
attrs=attrs)
else:
data_int = vals_int
return data_int
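# Illustrative sketch of int_pres usage (hypothetical DataArray `q` of
# specific humidity in kg/kg on pressure levels): integrating dp/g from
# 300 hPa to 1000 hPa gives column water vapor in kg/m^2. Bounds are in Pa.
# >>> cwv = int_pres(q, pmin=300e2, pmax=1000e2)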
# ======================================================================
# TIME
# ======================================================================
# ----------------------------------------------------------------------
def split_timedim(data, n, slowfast=True, timename=None, time0_name='time0',
time0_vals=None, time1_name='time1', time1_vals=None):
"""Split time dimension into two dimensions.
Parameters
----------
data : ndarray or xray.DataArray
Data array with time as the first dimension.
n : int
Number of periods per split (e.g. 12 for months).
slowfast : bool, optional
If True, then the slowest changing time index is first, e.g.
year, month. If False, then the fastest changing time index is
first, e.g. month, year.
timename : str, optional
Name of time dimension. Only used if data is a DataArray.
If omitted, the name is extracted from data with get_coord().
time0_name, time1_name : str, optional
Names for new time dimensions. Only used if data is a
DataArray.
time0_vals, time1_vals : ndarray, optional
Values for new time dimensions. Defaults to array of
integers. Only used if data is a DataArray.
Returns
-------
data_out : ndarray or xray.DataArray
Data array with the first dimension split into two. If dims
is the shape of the input data, and nt = dims[0], then:
- If slowfast=True: data_out.shape is [nt/n, n] + dims[1:]
- If slowfast=False: data_out.shape is [n, nt/n] + dims[1:]
"""
if isinstance(data, xray.DataArray):
i_DataArray = True
if timename is None:
timename = get_coord(data, 'time', 'name')
name, attrs, coords, dim_names = xr.meta(data)
dim_names = list(dim_names)
dim_names.remove(timename)
coords = utils.odict_delete(coords, timename)
data = data.values.copy()
else:
i_DataArray = False
dims = list(data.shape)
nt = dims[0]
    nn = nt // n
data_out = np.reshape(data, [nn, n] + dims[1:])
if not slowfast:
data_out = np.swapaxes(data_out, 0, 1)
def time_coord(name, size, vals, coords):
if vals is None:
vals = np.arange(size)
time_arr = xray.DataArray(vals, coords={name : vals}, name=name)
return utils.odict_insert(coords, name, time_arr)
if i_DataArray:
coords = time_coord(time0_name, data_out.shape[0], time0_vals, coords)
coords = time_coord(time1_name, data_out.shape[1], time1_vals, coords)
dim_names = [time0_name, time1_name] + dim_names
data_out = xray.DataArray(data_out, name=name, dims=dim_names,
coords=coords, attrs=attrs)
return data_out
# ----------------------------------------------------------------------
def splitdays(days):
"""Return a list of each set of consecutive days within an array."""
daysets = []
consec = np.diff(days) == 1
while not consec.all():
isplit = consec.argmin() + 1
daysets.append(days[:isplit])
days = days[isplit:]
consec = np.diff(days) == 1
    daysets.append(days)
return daysets
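# Illustrative sketch of splitdays usage (hypothetical values):
# >>> splitdays(np.array([1, 2, 3, 10, 11, 30]))
# [array([1, 2, 3]), array([10, 11]), array([30])]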
# ----------------------------------------------------------------------
def daily_from_subdaily(data, n, method='mean', timename=None, dayname='day',
dayvals=None):
"""Return daily data from sub-daily data.
Parameters
----------
data : ndarray, xray.DataArray, or xray.Dataset
Data array (or set of data arrays) with time as the first dimension.
n : int
Number of values per day (e.g. n=8 for 3-hourly data).
method : {'mean'} or int, optional
Method for computing daily values from sub-daily values.
Default is the daily mean. If method is an integer in
range(n), then the daily value is the sub-sample at that
index (e.g. method=0 returns the first sub-daily value from
each day).
timename : str, optional
Name of time dimension in input. Only used if data is a DataArray.
If omitted, the name is extracted from data with get_coord().
dayname : str, optional
Name of time dimension in output. Only used if data is a DataArray.
dayvals : ndarray, optional
Values for time dimension in output, e.g. np.arange(1, 366).
Only used if data is a DataArray.
Returns
-------
    data_out : ndarray, xray.DataArray, or xray.Dataset
        Daily values of data (mean or subsample).
"""
def process_one(data, n, method, timename, dayname, dayvals):
"""Process one data array."""
# Split the time dimension
data_out = split_timedim(data, n, slowfast=False, timename=timename,
time1_name=dayname, time1_vals=dayvals)
if isinstance(method, int):
if method in range(n):
data_out = data_out[method]
else:
                msg = 'Subsample index %d exceeds valid range 0-%d.'
                raise ValueError(msg % (method, n - 1))
elif isinstance(method, str) and method.lower() == 'mean':
if isinstance(data, xray.DataArray):
_, attrs, _, _ = xr.meta(data)
data_out = data_out.mean(axis=0)
data_out.attrs = attrs
else:
data_out = np.nanmean(data_out, axis=0)
else:
raise ValueError('Invalid method ' + str(method))
return data_out
if isinstance(data, xray.Dataset):
data_out = xray.Dataset()
for nm in data.data_vars:
data_out[nm] = process_one(data[nm], n, method, timename, dayname,
dayvals)
else:
data_out = process_one(data, n, method, timename, dayname, dayvals)
return data_out
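# Illustrative sketch of daily_from_subdaily usage (hypothetical DataArray
# `data_3hrly` of 3-hourly values): collapse to daily means (n=8 values per
# day), or pick the first subsample of each day with method=0.
# >>> data_daily = daily_from_subdaily(data_3hrly, n=8, method='mean')
# >>> data_00z = daily_from_subdaily(data_3hrly, n=8, method=0)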
# ----------------------------------------------------------------------
def combine_daily_years(varnames, files, years, yearname='Year',
subset_dict=None):
"""Combine daily mean data from multiple files.
Parameters
----------
varnames : list of str
List of variables to extract. If None, then all variables
in the first file are used as varnames.
files : list of str
List of filenames to read. Each file should contain one year's
worth of daily data, with day of year as the first dimension
of each variable.
years : list of ints
List of years corresponding to each file.
yearname : str, optional
Name for year dimension in DataArrays.
subset_dict : dict of 2-tuples, optional
Dimensions and subsets to extract. Each entry in subset_dict
is in the form {dim_name : (lower_or_list, upper)}, where:
- dim_name : string
Name of dimension to extract from.
The dimension name can be the actual dimension name
(e.g. 'XDim') or a generic name (e.g. 'lon') and get_coord()
is called to find the specific name.
- lower_or_list : scalar or list of int or float
If scalar, then used as the lower bound for the subset range.
If list, then the subset matching the list will be extracted.
- upper : int, float, or None
Upper bound for subset range. If lower_or_list is a list,
then upper is ignored and should be set to None.
Returns
-------
data : xray.Dataset or xray.DataArray
Dataset with each variable as an array with year as the first
dimension, day of year as the second dimension. If a single
variable is selected, then the output is a DataArray rather
than a Dataset.
"""
# Read daily data from each year and concatenate
if varnames is None:
with xray.open_dataset(files[0]) as ds0:
            varlist = list(ds0.data_vars.keys())
else:
varlist = utils.makelist(varnames)
ds = xray.Dataset()
for y, filn in enumerate(files):
print('Loading ' + filn)
ds1 = xray.Dataset()
with xray.open_dataset(filn) as ds_in:
if subset_dict is not None:
ds_in = subset(ds_in, subset_dict)
for nm in varlist:
var = ds_in[nm].load()
var.coords[yearname] = years[y]
ds1[nm] = var
if y == 0:
ds = ds1
dayname = ds1[varlist[0]].dims[0]
days = ds1[dayname].values
else:
days = np.union1d(days, ds1[dayname].values)
ds = ds.reindex(**{dayname : days})
ds1 = ds1.reindex(**{dayname : days})
ds = xray.concat([ds, ds1], dim=yearname)
# Collapse to single DataArray if only one variable, otherwise
# return Dataset
if len(varlist) == 1:
data = ds[varlist[0]]
else:
data = ds
return data
|
jenfly/atmos-tools
|
atmos/data.py
|
Python
|
mit
| 62,848
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import logging
from pymatgen.core import Molecule
from pymatgen.io.qchem_io.inputs import QCInput
from pymatgen.io.qchem_io.utils import lower_and_check_unique
# Classes for reading/manipulating/writing QChem input files.
__author__ = "Samuel Blau, Brandon Wood, Shyam Dwaraknath"
__copyright__ = "Copyright 2018, The Materials Project"
__version__ = "0.1"
logger = logging.getLogger(__name__)
class QChemDictSet(QCInput):
"""
Build a QCInput given all the various input parameters. Can be extended by standard implementations below.
"""
def __init__(self,
molecule,
job_type,
basis_set,
scf_algorithm,
dft_rung=4,
pcm_dielectric=None,
max_scf_cycles=200,
geom_opt_max_cycles=200,
overwrite_inputs=None):
"""
Args:
molecule (Pymatgen molecule object)
job_type (str)
basis_set (str)
scf_algorithm (str)
dft_rung (int)
pcm_dielectric (str)
max_scf_cycles (int)
geom_opt_max_cycles (int)
            overwrite_inputs (dict): Dictionary of QChem input sections in which to add or
                overwrite variables. The currently available sections are rem, pcm, and
                solvent, so the accepted keys are "rem", "pcm", or "solvent", and each value
                is a dictionary of key-value pairs for that section. For example, to add a
                rem variable that turns symmetry off:
                ex. overwrite_inputs = {"rem": {"symmetry": "false"}}
                ***Note that if a key such as basis is added to the rem dict, it will
                overwrite the default basis.***
"""
self.molecule = molecule
self.job_type = job_type
self.basis_set = basis_set
self.scf_algorithm = scf_algorithm
self.dft_rung = dft_rung
self.pcm_dielectric = pcm_dielectric
self.max_scf_cycles = max_scf_cycles
self.geom_opt_max_cycles = geom_opt_max_cycles
self.overwrite_inputs = overwrite_inputs
pcm_defaults = {
"heavypoints": "194",
"hpoints": "194",
"radii": "uff",
"theory": "cpcm",
"vdwscale": "1.1"
}
mypcm = {}
mysolvent = {}
myrem = {}
myrem["job_type"] = job_type
myrem["basis"] = self.basis_set
myrem["max_scf_cycles"] = self.max_scf_cycles
myrem["gen_scfman"] = "true"
myrem["scf_algorithm"] = self.scf_algorithm
if self.dft_rung == 1:
myrem["exchange"] = "B3LYP"
elif self.dft_rung == 2:
myrem["method"] = "B97-D3"
myrem["dft_D"] = "D3_BJ"
elif self.dft_rung == 3:
myrem["method"] = "B97M-rV"
elif self.dft_rung == 4:
myrem["method"] = "wb97xd"
elif self.dft_rung == 5:
myrem["method"] = "wB97M-V"
else:
raise ValueError("dft_rung should be between 1 and 5!")
if self.job_type.lower() == "opt":
myrem["geom_opt_max_cycles"] = self.geom_opt_max_cycles
        if self.pcm_dielectric is not None:
mypcm = pcm_defaults
mysolvent["dielectric"] = self.pcm_dielectric
myrem["solvent_method"] = 'pcm'
if self.overwrite_inputs:
for sec, sec_dict in self.overwrite_inputs.items():
if sec == "rem":
temp_rem = lower_and_check_unique(sec_dict)
for k, v in temp_rem.items():
myrem[k] = v
if sec == "pcm":
temp_pcm = lower_and_check_unique(sec_dict)
for k, v in temp_pcm.items():
mypcm[k] = v
if sec == "solvent":
temp_solvent = lower_and_check_unique(sec_dict)
for k, v in temp_solvent.items():
mysolvent[k] = v
super(QChemDictSet, self).__init__(
self.molecule, rem=myrem, pcm=mypcm, solvent=mysolvent)
class OptSet(QChemDictSet):
"""
QChemDictSet for a geometry optimization
"""
def __init__(self,
molecule,
dft_rung=4,
basis_set="6-311++G*",
pcm_dielectric=None,
scf_algorithm="diis",
max_scf_cycles=200,
geom_opt_max_cycles=200,
overwrite_inputs=None):
self.basis_set = basis_set
self.scf_algorithm = scf_algorithm
self.max_scf_cycles = max_scf_cycles
self.geom_opt_max_cycles = geom_opt_max_cycles
super(OptSet, self).__init__(
molecule=molecule,
job_type="opt",
dft_rung=dft_rung,
pcm_dielectric=pcm_dielectric,
basis_set=self.basis_set,
scf_algorithm=self.scf_algorithm,
max_scf_cycles=self.max_scf_cycles,
geom_opt_max_cycles=self.geom_opt_max_cycles,
overwrite_inputs=overwrite_inputs)
class SinglePointSet(QChemDictSet):
"""
QChemDictSet for a single point calculation
"""
def __init__(self,
molecule,
dft_rung=4,
basis_set="6-311++G*",
pcm_dielectric=None,
scf_algorithm="diis",
max_scf_cycles=200,
overwrite_inputs=None):
self.basis_set = basis_set
self.scf_algorithm = scf_algorithm
self.max_scf_cycles = max_scf_cycles
super(SinglePointSet, self).__init__(
molecule=molecule,
job_type="sp",
dft_rung=dft_rung,
pcm_dielectric=pcm_dielectric,
basis_set=self.basis_set,
scf_algorithm=self.scf_algorithm,
max_scf_cycles=self.max_scf_cycles,
overwrite_inputs=overwrite_inputs)
class FreqSet(QChemDictSet):
"""
    QChemDictSet for a frequency calculation
"""
def __init__(self,
molecule,
dft_rung=4,
basis_set="6-311++G*",
pcm_dielectric=None,
scf_algorithm="diis",
max_scf_cycles=200,
overwrite_inputs=None):
self.basis_set = basis_set
self.scf_algorithm = scf_algorithm
self.max_scf_cycles = max_scf_cycles
super(FreqSet, self).__init__(
molecule=molecule,
job_type="freq",
dft_rung=dft_rung,
pcm_dielectric=pcm_dielectric,
basis_set=self.basis_set,
scf_algorithm=self.scf_algorithm,
max_scf_cycles=self.max_scf_cycles,
overwrite_inputs=overwrite_inputs)
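# Illustrative sketch of building an input set (hypothetical geometry; assumes
# QCInput provides a write_file() method, as pymatgen IO classes typically do):
# >>> mol = Molecule(["O", "H", "H"],
# ...                [[0.0, 0.0, 0.0], [0.0, 0.0, 0.96], [0.93, 0.0, -0.24]])
# >>> opt_set = OptSet(mol, pcm_dielectric=78.4)
# >>> opt_set.write_file("mol.qin")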
|
nisse3000/pymatgen
|
pymatgen/io/qchem_io/sets.py
|
Python
|
mit
| 7,003
|
from typing import Any
import pytest
from run_test_service_helper import start_service
def test_invalid_filename(monkeypatch: Any, capsys: Any, loop: Any) -> None:
with pytest.raises(SystemExit):
services, future = start_service("tests/services/no_service_existing.py", monkeypatch)
out, err = capsys.readouterr()
assert "Invalid service, no such service" in err
def test_invalid_service(monkeypatch: Any, capsys: Any, loop: Any) -> None:
with pytest.raises(NameError):
services, future = start_service("tests/services/invalid_service.py", monkeypatch)
out, err = capsys.readouterr()
assert "Unable to load service file" in err
def test_syntax_error_service(monkeypatch: Any, capsys: Any, loop: Any) -> None:
with pytest.raises(SyntaxError):
services, future = start_service("tests/services/syntax_error_service.py", monkeypatch)
out, err = capsys.readouterr()
assert "Unable to load service file" in err
def test_import_error(monkeypatch: Any, capsys: Any, loop: Any) -> None:
with pytest.raises(ImportError):
services, future = start_service("tests/services/import_error_service.py", monkeypatch)
out, err = capsys.readouterr()
assert "Invalid service, unable to load service file" in err
|
kalaspuff/tomodachi
|
tests/test_invalid_services.py
|
Python
|
mit
| 1,285
|
"""Test that arguments passed to a script Menu.main(loop=True) execute
properly."""
##==============================================================#
## SECTION: Imports #
##==============================================================#
from testlib import *
##==============================================================#
## SECTION: Global Definitions #
##==============================================================#
SCRIPT = "script_1.py"
##==============================================================#
## SECTION: Class Definitions #
##==============================================================#
class TestCase(unittest.TestCase):
def _cleanup(self):
rmfile("foo")
rmfile("bar")
rmfile("caz")
def setUp(self):
self._cleanup()
self.assertFalse(op.exists("foo"))
self.assertFalse(op.exists("bar"))
self.assertFalse(op.exists("caz"))
def tearDown(self):
self._cleanup()
def test_script_1(self):
result = os.system("python %s x" % SCRIPT)
self.assertEqual(0, result)
self.assertFalse(op.exists("foo"))
self.assertFalse(op.exists("bar"))
self.assertFalse(op.exists("caz"))
def test_script_2(self):
result = os.system("python %s f" % SCRIPT)
self.assertEqual(0, result)
self.assertTrue(op.exists("foo"))
self.assertFalse(op.exists("bar"))
self.assertFalse(op.exists("caz"))
def test_script_3(self):
result = os.system("python %s b" % SCRIPT)
self.assertEqual(0, result)
self.assertFalse(op.exists("foo"))
self.assertTrue(op.exists("bar"))
self.assertFalse(op.exists("caz"))
def test_script_4(self):
result = os.system("python %s f b" % SCRIPT)
self.assertEqual(0, result)
self.assertTrue(op.exists("foo"))
self.assertTrue(op.exists("bar"))
self.assertFalse(op.exists("caz"))
def test_script_5(self):
result = os.system("python %s c" % SCRIPT)
self.assertEqual(0, result)
self.assertFalse(op.exists("foo"))
self.assertFalse(op.exists("bar"))
self.assertTrue(op.exists("caz"))
def test_script_6(self):
result = os.system("python %s c f" % SCRIPT)
self.assertEqual(0, result)
self.assertTrue(op.exists("foo"))
self.assertFalse(op.exists("bar"))
self.assertTrue(op.exists("caz"))
def test_script_7(self):
result = os.system("python %s -d" % SCRIPT)
self.assertEqual(0, result)
self.assertFalse(op.exists("foo"))
self.assertTrue(op.exists("bar"))
self.assertFalse(op.exists("caz"))
##==============================================================#
## SECTION: Main Body #
##==============================================================#
if __name__ == '__main__':
unittest.main()
|
jeffrimko/Qprompt
|
tests/script_test_1.py
|
Python
|
mit
| 3,045
|
from app import db
from sqlalchemy import Column, String, Integer, ForeignKey
class VotingVariant(db.Model):
__tablename__ = 'voting_variants'
id = Column(Integer, primary_key=True)
voting_id = Column(Integer, ForeignKey('votings.id'))
title = Column(String(255))
description = Column(String(1000))
voting = db.relationship('Voting')
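# Hedged usage sketch (assumes the app's Flask-SQLAlchemy session and an
# existing Voting row with id=1; not part of the original module):
#
#     variant = VotingVariant(voting_id=1, title='Yes',
#                             description='Approve the proposal')
#     db.session.add(variant)
#     db.session.commit()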
|
sokil/VotingEngine
|
models/voting_variant.py
|
Python
|
mit
| 361
|
# Copyright (c) 2020, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/spherical_functions/blob/master/LICENSE>
### NOTE: The functions in this file are intended purely for inclusion in the Grid class. In
### particular, they assume that the first argument, `self` is an instance of Grid. They should
### probably not be used outside of that class.
def modes(self, ell_max=None, **kwargs):
"""Return mode weights of function decomposed into SWSHs
This method uses `spinsfast` to convert values on an equiangular grid to mode weights.
The output array has one less dimension than this object; rather than the last two axes giving
the values on the two-dimensional grid, the last axis gives the mode weights.
Parameters
==========
ell_max: None or int [defaults to None]
Maximum ell value in the output. If None, the result will have enough ell values to express
the data on the grid without aliasing: (max(n_phi, n_theta) - 1) // 2.
**kwargs: any types
Additional keyword arguments are passed through to the Modes constructor on output
"""
import copy
import numpy as np
import spinsfast
from .. import Modes
    ell_max = ell_max or (max(self.n_phi, self.n_theta) - 1) // 2
    metadata = copy.copy(kwargs)  # per the docstring, extra keyword arguments pass through to Modes
return Modes(spinsfast.map2salm(self.view(np.ndarray), self.s, ell_max),
spin_weight=self.s, ell_min=0, ell_max=ell_max, **metadata)
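# Hedged usage sketch for `modes` (assumes `g` is a Grid whose last two axes
# form an equiangular (n_theta, n_phi) grid, as the docstring above describes):
#
#     m = g.modes()  # decompose the gridded function into SWSH mode weights
#     assert m.ell_max == (max(g.n_phi, g.n_theta) - 1) // 2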
def _check_broadcasting(self, array, reverse=False):
"""Test whether or not the given array can broadcast against this object"""
import numpy as np
if isinstance(array, type(self)):
try:
if reverse:
np.broadcast(array, self)
else:
np.broadcast(self, array)
except ValueError:
return False
else:
return True
else:
if np.ndim(array) > np.ndim(self)-2:
raise ValueError(f"Cannot broadcast array of {np.ndim(array)} dimensions against {type(self).__name__} "
f"object of fewer ({np.ndim(self)-2}) non-grid dimensions.\n"
"This is to ensure that scalars do not operate on individual "
"grid values; they must operate on all simultaneously.\n"
"If that is the case and you still want to broadcast, add more "
"dimensions before this object's first dimension.")
try:
if reverse:
np.broadcast(array, self[..., 0, 0])
else:
np.broadcast(self[..., 0, 0], array)
except ValueError:
return False
else:
return True
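# Hedged sketch of the rule `_check_broadcasting` enforces: a plain array may
# have at most ndim(grid) - 2 dimensions, so it operates on whole grids and
# never on individual grid points:
#
#     import numpy as np
#     # against a grid `g` of shape (3, n_theta, n_phi), ndim(self) - 2 == 1:
#     g._check_broadcasting(np.zeros(3))          # True: broadcasts over grids
#     g._check_broadcasting(np.zeros((3, 5, 5)))  # raises ValueError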
|
moble/spherical_functions
|
spherical_functions/SWSH_grids/utilities.py
|
Python
|
mit
| 2,746
|
#!/usr/bin/env python
import os
import sys
sys.path.insert(0, os.pardir)
from testing_harness import TestHarness, PyAPITestHarness
import openmc
from openmc.stats import Box
from openmc.source import Source
class MultipoleTestHarness(PyAPITestHarness):
def _build_inputs(self):
####################
# Materials
####################
moderator = openmc.Material(material_id=1)
moderator.set_density('g/cc', 1.0)
moderator.add_nuclide('H1', 2.0)
moderator.add_nuclide('O16', 1.0)
moderator.add_s_alpha_beta('c_H_in_H2O')
dense_fuel = openmc.Material(material_id=2)
dense_fuel.set_density('g/cc', 4.5)
dense_fuel.add_nuclide('U235', 1.0)
mats_file = openmc.Materials([moderator, dense_fuel])
mats_file.export_to_xml()
####################
# Geometry
####################
c1 = openmc.Cell(cell_id=1, fill=moderator)
mod_univ = openmc.Universe(universe_id=1, cells=(c1,))
r0 = openmc.ZCylinder(R=0.3)
c11 = openmc.Cell(cell_id=11, fill=dense_fuel, region=-r0)
c11.temperature = [500, 0, 700, 800]
c12 = openmc.Cell(cell_id=12, fill=moderator, region=+r0)
fuel_univ = openmc.Universe(universe_id=11, cells=(c11, c12))
lat = openmc.RectLattice(lattice_id=101)
lat.dimension = [2, 2]
lat.lower_left = [-2.0, -2.0]
lat.pitch = [2.0, 2.0]
lat.universes = [[fuel_univ]*2]*2
lat.outer = mod_univ
x0 = openmc.XPlane(x0=-3.0)
x1 = openmc.XPlane(x0=3.0)
y0 = openmc.YPlane(y0=-3.0)
y1 = openmc.YPlane(y0=3.0)
for s in [x0, x1, y0, y1]:
s.boundary_type = 'reflective'
c101 = openmc.Cell(cell_id=101, fill=lat, region=+x0 & -x1 & +y0 & -y1)
root_univ = openmc.Universe(universe_id=0, cells=(c101,))
geometry = openmc.Geometry(root_univ)
geometry.export_to_xml()
####################
# Settings
####################
sets_file = openmc.Settings()
sets_file.batches = 5
sets_file.inactive = 0
sets_file.particles = 1000
sets_file.source = Source(space=Box([-1, -1, -1], [1, 1, 1]))
sets_file.output = {'summary': True}
sets_file.temperature = {'tolerance': 1000, 'multipole': True}
sets_file.export_to_xml()
####################
# Plots
####################
plots_file = openmc.Plots()
plot = openmc.Plot(plot_id=1)
plot.basis = 'xy'
plot.color_by = 'cell'
plot.filename = 'cellplot'
plot.origin = (0, 0, 0)
plot.width = (7, 7)
plot.pixels = (400, 400)
plots_file.append(plot)
plot = openmc.Plot(plot_id=2)
plot.basis = 'xy'
plot.color_by = 'material'
plot.filename = 'matplot'
plot.origin = (0, 0, 0)
plot.width = (7, 7)
plot.pixels = (400, 400)
plots_file.append(plot)
plots_file.export_to_xml()
def execute_test(self):
        if 'OPENMC_MULTIPOLE_LIBRARY' not in os.environ:
raise RuntimeError("The 'OPENMC_MULTIPOLE_LIBRARY' environment "
"variable must be specified for this test.")
else:
super(MultipoleTestHarness, self).execute_test()
def _get_results(self):
outstr = super(MultipoleTestHarness, self)._get_results()
su = openmc.Summary('summary.h5')
outstr += str(su.geometry.get_all_cells()[11])
return outstr
def _cleanup(self):
f = os.path.join(os.getcwd(), 'plots.xml')
if os.path.exists(f):
os.remove(f)
super(MultipoleTestHarness, self)._cleanup()
if __name__ == '__main__':
harness = MultipoleTestHarness('statepoint.5.h5')
harness.main()
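# Hedged usage note: per execute_test above, this harness only runs when the
# OPENMC_MULTIPOLE_LIBRARY environment variable points at windowed multipole
# data, e.g. (the path is an assumption):
#     OPENMC_MULTIPOLE_LIBRARY=/opt/openmc/wmp python test_multipole.py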
|
bhermanmit/openmc
|
tests/test_multipole/test_multipole.py
|
Python
|
mit
| 3,870
|
"""
Serialize data to/from JSON
"""
# Avoid shadowing the standard library json module
from __future__ import absolute_import, unicode_literals
import datetime
import decimal
import json
import sys
import uuid
from io import BytesIO
from django.core.serializers.base import DeserializationError
from django.core.serializers.python import (
Deserializer as PythonDeserializer, Serializer as PythonSerializer,
)
from django.core.serializers.json import DjangoJSONEncoder
from django.utils import six
from django.utils.timezone import is_aware
class Serializer(PythonSerializer):
"""
Convert a queryset to JSON.
"""
internal_use_only = False
def _init_options(self):
if json.__version__.split('.') >= ['2', '1', '3']:
# Use JS strings to represent Python Decimal instances (ticket #16850)
self.options.update({'use_decimal': False})
self._current = None
self.json_kwargs = self.options.copy()
self.json_kwargs.pop('stream', None)
self.json_kwargs.pop('fields', None)
def start_serialization(self):
self._init_options()
def end_serialization(self):
'''
Do nothing
'''
def end_object(self, obj):
# self._current has the field data
json.dump(self.get_dump_object(obj), self.stream,
cls=DjangoJSONEncoder, **self.json_kwargs)
self.stream.write('\n')
self._current = None
def getvalue(self):
# Grand-parent super
return super(PythonSerializer, self).getvalue()
def Deserializer(stream_or_string, **options):
"""
Deserialize a stream or string of JSON data.
"""
    if isinstance(stream_or_string, bytes):
        stream_or_string = BytesIO(stream_or_string)
    elif isinstance(stream_or_string, six.string_types):
        # BytesIO needs bytes, so encode text input before wrapping it
        stream_or_string = BytesIO(stream_or_string.encode('utf-8'))
try:
def line_generator():
for line in stream_or_string:
yield json.loads(line.strip())
for obj in PythonDeserializer(line_generator(), **options):
yield obj
except GeneratorExit:
raise
except Exception as e:
# Map to deserializer error
six.reraise(DeserializationError, DeserializationError(e), sys.exc_info()[2])
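# Hedged usage sketch (assumes the module is registered in settings.py, e.g.
# SERIALIZATION_MODULES = {'mljson': 'django_mljson.serializer'}):
#
#     from django.core import serializers
#     data = serializers.serialize('mljson', SomeModel.objects.all())
#     # `data` holds one JSON document per line; round-trip it with:
#     objs = list(serializers.deserialize('mljson', data))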
|
superisaac/django-mljson-serializer
|
django_mljson/serializer.py
|
Python
|
mit
| 2,206
|
#Caleb
#normalizes shapes in size and sets the lower left corner to 0
import math
def calc_dist(a,b):
ax=a[0]
ay=a[1]
bx=b[0]
by=b[1]
return math.sqrt((ax-bx)**2+(ay-by)**2)
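# Example: calc_dist((0, 0), (3, 4)) returns 5.0 (a 3-4-5 right triangle).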
def normalize(shapes):
"""Normalize shapes
    >>> normalize([[(0,2.5),(2.5,2.5),(0,0),(2.5,0)],[(0,1),(1,1),(0,0),(1,0)],[(0,1),(1,1),(0,0),(1,0)],[(0,1),(1,1),(0,0),(1,0)],[(0,1),(1,1),(0,0),(1,0)],[(0,1),(1,1),(0,0),(1,0)]])
    [[(0.0, 1.0), (1.0, 1.0), (0.0, 0.0), (1.0, 0.0)], [(0.0, 1.0), (1.0, 1.0), (0.0, 0.0), (1.0, 0.0)], [(0.0, 1.0), (1.0, 1.0), (0.0, 0.0), (1.0, 0.0)], [(0.0, 1.0), (1.0, 1.0), (0.0, 0.0), (1.0, 0.0)], [(0, 1), (1, 1), (0, 0), (1, 0)], [(0.0, 1.0), (1.0, 1.0), (0.0, 0.0), (1.0, 0.0)]]
"""
right=shapes[0]
left=shapes[1]
top=shapes[2]
back=shapes[3]
front=shapes[4]
bottom=shapes[5]
leftscale=calc_dist(front[0],front[2])/calc_dist(left[1],left[3])
topscale=calc_dist(front[0],front[1])/calc_dist(top[2],top[3])
rightscale=calc_dist(front[1],front[3])/calc_dist(right[0],right[2])
bottomscale=calc_dist(front[2],front[3])/calc_dist(bottom[0],bottom[1])
backscale=bottomscale*calc_dist(bottom[2],bottom[3])/calc_dist(back[0],back[1])
scaleFactors=[rightscale,leftscale,topscale,backscale,1,bottomscale]
#scale everything by a factor determined by adjacent sides
scaledShapes=[]
    for index, shape in enumerate(shapes):
        scaledShape=[]
        for point in shape:
            newpoint=tuple([i * scaleFactors[index] for i in point])
            scaledShape.append(newpoint)
        scaledShapes.append(scaledShape)
#normalize to 0 (sets the bottom left corner to 0,0)
shiftedShapes=[]
for shape in scaledShapes:
x=shape[2][0]
y=shape[2][1]
newShape=[]
for point in shape:
newpoint=tuple([point[0]-x,point[1]-y])
newShape.append(newpoint)
shiftedShapes.append(newShape)
return shiftedShapes
if __name__ == "__main__":
import doctest
doctest.testmod()
|
ctada/napCAD
|
revisions/basic_cube/normalizer.py
|
Python
|
mit
| 1,763
|
# Copyright (c) 2013-2016 Christian Geier et al.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""A collection of (reusable) urwid widgets
Widgets that are specific to calendaring/khal should go into __init__.py or,
if they are large, into their own files
"""
from datetime import date, datetime, timedelta
import re
import urwid
class DateConversionError(Exception):
pass
def delete_last_word(text, number=1):
"""delete last `number` of words from text"""
words = re.findall(r"[\w]+|[^\w\s]", text, re.UNICODE)
for one in range(1, number + 1):
text = text.rstrip()
if text == '':
return text
text = text[:len(text) - len(words[-one])]
return text
def delete_till_beginning_of_line(text):
"""delete till beginning of line"""
if text.rfind("\n") == -1:
return ''
return text[0:text.rfind("\n") + 1]
def delete_till_end_of_line(text):
"""delete till beginning of line"""
if text.find("\n") == -1:
return ''
return text[text.find("\n"):]
def goto_beginning_of_line(text):
if text.rfind("\n") == -1:
return 0
return text.rfind("\n") + 1
def goto_end_of_line(text):
if text.find("\n") == -1:
return len(text)
return text.find("\n")
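# Hedged examples for the text helpers above (plain Python, no urwid needed):
#     delete_last_word("foo bar baz")          -> "foo bar "
#     delete_till_beginning_of_line("ab\ncd")  -> "ab\n"
#     delete_till_end_of_line("ab\ncd")        -> "\ncd"
#     goto_beginning_of_line("ab\ncd")         -> 3
#     goto_end_of_line("ab\ncd")               -> 2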
class ExtendedEdit(urwid.Edit):
"""A text editing widget supporting some more editing commands"""
def keypress(self, size, key):
if key == 'ctrl w':
self._delete_word()
elif key == 'ctrl u':
self._delete_till_beginning_of_line()
elif key == 'ctrl k':
self._delete_till_end_of_line()
elif key == 'ctrl a':
self._goto_beginning_of_line()
elif key == 'ctrl e':
self._goto_end_of_line()
else:
return super(ExtendedEdit, self).keypress(size, key)
def _delete_word(self):
"""delete word before cursor"""
text = self.get_edit_text()
f_text = delete_last_word(text[:self.edit_pos])
self.set_edit_text(f_text + text[self.edit_pos:])
self.set_edit_pos(len(f_text))
def _delete_till_beginning_of_line(self):
"""delete till start of line before cursor"""
text = self.get_edit_text()
f_text = delete_till_beginning_of_line(text[:self.edit_pos])
self.set_edit_text(f_text + text[self.edit_pos:])
self.set_edit_pos(len(f_text))
def _delete_till_end_of_line(self):
"""delete till end of line before cursor"""
text = self.get_edit_text()
f_text = delete_till_end_of_line(text[self.edit_pos:])
self.set_edit_text(text[:self.edit_pos] + f_text)
def _goto_beginning_of_line(self):
text = self.get_edit_text()
self.set_edit_pos(goto_beginning_of_line(text[:self.edit_pos]))
def _goto_end_of_line(self):
text = self.get_edit_text()
self.set_edit_pos(goto_end_of_line(text[self.edit_pos:]) + self.edit_pos)
class DateTimeWidget(ExtendedEdit):
def __init__(self, dateformat, on_date_change=lambda x: None, **kwargs):
self.dateformat = dateformat
self.on_date_change = on_date_change
super().__init__(wrap='any', **kwargs)
def keypress(self, size, key):
if key == 'ctrl x':
self.decrease()
return None
elif key == 'ctrl a':
self.increase()
return None
if (
key in ['up', 'down', 'tab', 'shift tab'] or
(key in ['right'] and self.edit_pos >= len(self.edit_text)) or
(key in ['left'] and self.edit_pos == 0)):
# when leaving the current Widget we check if currently
# entered value is valid and if so pass the new value
try:
new_date = self._get_current_value()
except DateConversionError:
pass
else:
self.on_date_change(new_date)
return super(DateTimeWidget, self).keypress(size, key)
def increase(self):
"""call to increase the datefield by self.timedelta"""
self._crease(self.dtype.__add__)
def decrease(self):
"""call to decrease the datefield by self.timedelta"""
self._crease(self.dtype.__sub__)
def _crease(self, fun):
"""common implementation for `self.increase` and `self.decrease`"""
try:
new_date = fun(self._get_current_value(), self.timedelta)
self.on_date_change(new_date)
self.set_edit_text(new_date.strftime(self.dateformat))
except DateConversionError:
pass
def set_value(self, new_date):
"""set a new value for this widget
:type new_date: datetime.date
"""
self.set_edit_text(new_date.strftime(self.dateformat))
class DateWidget(DateTimeWidget):
dtype = date
timedelta = timedelta(days=1)
def _get_current_value(self):
try:
new_date = datetime.strptime(self.get_edit_text(), self.dateformat).date()
except ValueError:
raise DateConversionError
else:
return new_date
class TimeWidget(DateTimeWidget):
dtype = datetime
timedelta = timedelta(minutes=15)
def _get_current_value(self):
try:
new_datetime = datetime.strptime(self.get_edit_text(), self.dateformat)
except ValueError:
raise DateConversionError
else:
return new_datetime
class Choice(urwid.PopUpLauncher):
def __init__(self, choices, active, decorate_func=None):
self.choices = choices
self._decorate = decorate_func or (lambda x: x)
self.active = self._original = active
def create_pop_up(self):
pop_up = ChoiceList(self)
urwid.connect_signal(pop_up, 'close',
lambda button: self.close_pop_up())
return pop_up
def get_pop_up_parameters(self):
return {'left': 0,
'top': 1,
'overlay_width': 32,
'overlay_height': len(self.choices)}
@property
def changed(self):
return self._active != self._original
@property
def active(self):
return self._active
@active.setter
def active(self, val):
self._active = val
self.button = urwid.Button(self._decorate(self._active))
urwid.PopUpLauncher.__init__(self, self.button)
urwid.connect_signal(self.button, 'click',
lambda button: self.open_pop_up())
class ChoiceList(urwid.WidgetWrap):
signals = ['close']
def __init__(self, parent):
self.parent = parent
buttons = []
for c in parent.choices:
buttons.append(
urwid.Button(parent._decorate(c),
on_press=self.set_choice, user_data=c)
)
pile = NPile(buttons, outermost=True)
fill = urwid.Filler(pile)
urwid.WidgetWrap.__init__(self, urwid.AttrMap(fill, 'popupbg'))
def set_choice(self, button, account):
self.parent.active = account
self._emit("close")
class SupportsNext(object):
"""classes inheriting from SupportsNext must implement the following methods:
_select_first_selectable
_select_last_selectable
"""
def __init__(self, *args, **kwargs):
self.outermost = kwargs.get('outermost', False)
if 'outermost' in kwargs:
kwargs.pop('outermost')
super(SupportsNext, self).__init__(*args, **kwargs)
class NextMixin(SupportsNext):
"""Implements SupportsNext for urwid.Pile and urwid.Columns"""
def _select_first_selectable(self):
"""select our first selectable item (recursivly if that item SupportsNext)"""
i = self._first_selectable()
self.set_focus(i)
if isinstance(self.contents[i][0], SupportsNext):
self.contents[i][0]._select_first_selectable()
def _select_last_selectable(self):
"""select our last selectable item (recursivly if that item SupportsNext)"""
i = self._last_selectable()
self.set_focus(i)
if isinstance(self._contents[i][0], SupportsNext):
self.contents[i][0]._select_last_selectable()
def _first_selectable(self):
"""return sequence number of self.contents last selectable item"""
for j in range(0, len(self._contents)):
if self._contents[j][0].selectable():
return j
return False
def _last_selectable(self):
"""return sequence number of self._contents last selectable item"""
for j in range(len(self._contents) - 1, - 1, - 1):
if self._contents[j][0].selectable():
return j
return False
def keypress(self, size, key):
key = super(NextMixin, self).keypress(size, key)
if key == 'tab':
if self.outermost and self.focus_position == self._last_selectable():
self._select_first_selectable()
else:
for i in range(self.focus_position + 1, len(self._contents)):
if self._contents[i][0].selectable():
self.set_focus(i)
if isinstance(self._contents[i][0], SupportsNext):
self._contents[i][0]._select_first_selectable()
break
else: # no break
return key
elif key == 'shift tab':
if self.outermost and self.focus_position == self._first_selectable():
self._select_last_selectable()
else:
for i in range(self.focus_position - 1, 0 - 1, -1):
if self._contents[i][0].selectable():
self.set_focus(i)
if isinstance(self._contents[i][0], SupportsNext):
self._contents[i][0]._select_last_selectable()
break
else: # no break
return key
else:
return key
class NPile(NextMixin, urwid.Pile):
pass
class NColumns(NextMixin, urwid.Columns):
pass
class NListBox(SupportsNext, urwid.ListBox):
def _select_first_selectable(self):
"""select our first selectable item (recursivly if that item SupportsNext)"""
i = self._first_selectable()
self.set_focus(i)
if isinstance(self.body[i], SupportsNext):
self.body[i]._select_first_selectable()
def _select_last_selectable(self):
"""select our last selectable item (recursivly if that item SupportsNext)"""
i = self._last_selectable()
self.set_focus(i)
if isinstance(self.body[i], SupportsNext):
self.body[i]._select_last_selectable()
def _first_selectable(self):
"""return sequence number of self._contents last selectable item"""
for j in range(0, len(self.body)):
if self.body[j].selectable():
return j
return False
def _last_selectable(self):
"""return sequence number of self.contents last selectable item"""
for j in range(len(self.body) - 1, - 1, - 1):
if self.body[j].selectable():
return j
return False
def keypress(self, size, key):
key = super().keypress(size, key)
if key == 'tab':
if self.outermost and self.focus_position == self._last_selectable():
self._select_first_selectable()
else:
self._keypress_down(size)
elif key == 'shift tab':
if self.outermost and self.focus_position == self._first_selectable():
self._select_last_selectable()
else:
self._keypress_up(size)
else:
return key
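# Hedged usage sketch for the N* containers above: with `outermost=True`,
# tab/shift-tab wrap around the container instead of falling through:
#
#     import urwid
#     pile = NPile([urwid.Edit('a: '), urwid.Edit('b: ')], outermost=True)
#     urwid.MainLoop(urwid.Filler(pile)).run()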
class ValidatedEdit(urwid.WidgetWrap):
def __init__(self, *args, EditWidget=ExtendedEdit, validate=False, **kwargs):
assert validate
self._validate_func = validate
self._original_widget = urwid.AttrMap(EditWidget(*args, **kwargs), 'edit', 'editf')
super().__init__(self._original_widget)
@property
def _get_base_widget(self):
return self._original_widget
@property
def base_widget(self):
return self._original_widget.original_widget
def _validate(self):
text = self.base_widget.get_edit_text()
if self._validate_func(text):
self._original_widget.set_attr_map({None: 'edit'})
self._original_widget.set_focus_map({None: 'edit'})
return True
else:
self._original_widget.set_attr_map({None: 'alert'})
self._original_widget.set_focus_map({None: 'alert'})
return False
def get_edit_text(self):
self._validate()
return self.base_widget.get_edit_text()
@property
def edit_pos(self):
return self.base_widget.edit_pos
@property
def edit_text(self):
return self.base_widget.edit_text
def keypress(self, size, key):
if (
key in ['up', 'down', 'tab', 'shift tab'] or
(key in ['right'] and self.edit_pos >= len(self.edit_text)) or
(key in ['left'] and self.edit_pos == 0)):
if not self._validate():
return
return super().keypress(size, key)
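# Hedged usage sketch for ValidatedEdit (`validate` must be a callable that
# returns True for acceptable text; it is a required keyword argument):
#
#     port_edit = ValidatedEdit(caption='port: ',
#                               validate=lambda text: text.isdigit())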
|
dzoep/khal
|
khal/ui/widgets.py
|
Python
|
mit
| 14,425
|
class RCInput():
    CHANNEL_COUNT = 14
    def __init__(self):
        # build the list per instance; a class-level list would be shared
        # between all RCInput objects
        self.channels = []
        for i in range(0, self.CHANNEL_COUNT):
            try:
                f = open("/sys/kernel/rcio/rcin/ch%d" % i, "r")
                self.channels.append(f)
            except IOError:
                print("Can't open file /sys/kernel/rcio/rcin/ch%d" % i)
    def read(self, ch):
        value = self.channels[ch].read()
        self.channels[ch].seek(0, 0)  # rewind so the next read gets a fresh value
        return value[:-1]  # strip the trailing newline
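# Hedged usage sketch (assumes a board running the rcio kernel driver, e.g. a
# Navio/Navio2 HAT, so that the /sys/kernel/rcio/rcin/ch* files exist):
#
#     rcin = RCInput()
#     throttle = int(rcin.read(2))  # channel files yield strings like "1500"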
|
adrienemery/auv-control-pi
|
navio/rcinput.py
|
Python
|
mit
| 484
|
from channels.routing import route
from .consumer import party_connected, party_disconnected, party_message
karaoke_routing = [
route("websocket.connect", party_connected, path=r"^/party/(?P<party_id>[a-zA-Z0-9_-]+)"),
route("websocket.receive", party_message),
route("websocket.disconnect", party_disconnected)
]
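# Hedged usage note (channels 1.x routing): clients connect over a WebSocket
# whose path matches the regex, e.g. ws://<host>/party/my-party-1; the
# captured `party_id` group is passed to `party_connected` as a keyword
# argument.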
|
Katharine/ponytone
|
karaoke/routing.py
|
Python
|
mit
| 328
|
# Copyright (C) 2014-2016 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
# Originally contributed by Check Point Software Technologies, Ltd.
import ConfigParser
class Config:
def __init__(self, cfg):
"""@param cfg: configuration file."""
config = ConfigParser.ConfigParser(allow_no_value=True)
config.read(cfg)
for section in config.sections():
for name, raw_value in config.items(section):
try:
value = config.getboolean(section, name)
except ValueError:
try:
value = config.getint(section, name)
except ValueError:
value = config.get(section, name)
setattr(self, name, value)
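# Hedged usage sketch: values are coerced in the order bool -> int -> str, and
# every option becomes an attribute regardless of its section (the file name
# below is an assumption):
#
#     cfg = Config("analysis.conf")
#     # given "[analysis]\ntimeout = 120\ndebug = yes", this yields
#     # cfg.timeout == 120 and cfg.debug is True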
|
cuckoobox/cuckoo
|
cuckoo/data/analyzer/android/lib/core/config.py
|
Python
|
mit
| 881
|
# Generated by Django 3.0.5 on 2020-04-17 14:12
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import easy_thumbnails.fields
import userena.models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Organization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=250)),
('primary_contact', models.ForeignKey(help_text='Contact for org.', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'unique_together': {('name', 'primary_contact')},
},
),
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mugshot', easy_thumbnails.fields.ThumbnailerImageField(blank=True, help_text='A personal image displayed in your profile.', upload_to=userena.models.upload_to_mugshot, verbose_name='mugshot')),
('privacy', models.CharField(choices=[('open', 'Open'), ('registered', 'Registered'), ('closed', 'Closed')], default='registered', help_text='Designates who can view your profile.', max_length=15, verbose_name='privacy')),
('email', models.CharField(blank=True, max_length=250, null=True)),
('score', models.IntegerField(default=1)),
('last_activity', models.DateTimeField(auto_now_add=True)),
('openbadge_id', models.CharField(blank=True, max_length=250, null=True)),
('organization', models.ForeignKey(blank=True, help_text="If '------', no Organization records share the email domain.", null=True, on_delete=django.db.models.deletion.PROTECT, to='accounts.Organization')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='user')),
],
options={
'permissions': (('view_profile', 'Can view profile'),),
'abstract': False,
'default_permissions': ('add', 'change', 'delete'),
},
),
migrations.CreateModel(
name='UserAuthorization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('authorized', models.BooleanField(help_text='Check this to approve member access.')),
('permission_granted_on', models.DateTimeField(auto_now_add=True)),
('user_accepted_terms_on', models.DateTimeField(blank=True, null=True)),
('permissions_granted_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='permissions_granted_by', to=settings.AUTH_USER_MODEL)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('user_profile', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='accounts.UserProfile')),
],
),
migrations.CreateModel(
name='EmailDomain',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email_domain', models.CharField(max_length=50)),
('organization', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='accounts.Organization')),
],
),
]
|
ngageoint/geoq
|
geoq/accounts/migrations/0001_initial.py
|
Python
|
mit
| 3,881
|
from distutils.core import setup
setup(
name = 'moretext',
packages = ['moretext'],
version = '0.1',
    description = 'Get dummy Chinese text (lorem ipsum) with Handlino service.',
author = 'Pomin Wu',
author_email = 'pomin5@gmail.com',
url = 'https://github.com/pm5/python-moretext',
download_url = 'https://github.com/pm5/python-moretext/tarball/v0.1',
keywords = ['test', 'lorem', 'ipsum', 'placeholder'],
classifiers = [],
)
|
pm5/python-moretext
|
setup.py
|
Python
|
mit
| 466
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
class DictionaryOperations(object):
"""DictionaryOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def get_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get null dictionary value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/null'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{int}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
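    # Hedged usage sketch (the client wiring is generated elsewhere by
    # AutoRest; `client.dictionary` as the attribute name is an assumption):
    #
    #     ops = client.dictionary        # a DictionaryOperations instance
    #     assert ops.get_null() is None  # the service responds with JSON null
    #     empty = ops.get_empty()        # -> {}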
def get_empty(
self, custom_headers=None, raw=False, **operation_config):
"""Get empty dictionary value {}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/empty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{int}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_empty(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value empty {}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/empty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{str}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_null_value(
self, custom_headers=None, raw=False, **operation_config):
"""Get Dictionary with null value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/nullvalue'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{str}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_null_key(
self, custom_headers=None, raw=False, **operation_config):
"""Get Dictionary with null key.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/nullkey'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{str}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_empty_string_key(
self, custom_headers=None, raw=False, **operation_config):
"""Get Dictionary with key as empty string.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/keyemptystring'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{str}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_invalid(
self, custom_headers=None, raw=False, **operation_config):
"""Get invalid Dictionary value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/invalid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{str}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_boolean_tfft(
self, custom_headers=None, raw=False, **operation_config):
"""Get boolean dictionary value {"0": true, "1": false, "2": false, "3":
true }.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/boolean/tfft'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{bool}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_boolean_tfft(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value empty {"0": true, "1": false, "2": false, "3":
true }.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/boolean/tfft'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{bool}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_boolean_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get boolean dictionary value {"0": true, "1": null, "2": false }.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/boolean/true.null.false'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{bool}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_boolean_invalid_string(
self, custom_headers=None, raw=False, **operation_config):
"""Get boolean dictionary value '{"0": true, "1": "boolean", "2": false}'.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/boolean/true.boolean.false'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{bool}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_integer_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get integer dictionary value {"0": 1, "1": -1, "2": 3, "3": 300}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/integer/1.-1.3.300'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{int}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_integer_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value empty {"0": 1, "1": -1, "2": 3, "3": 300}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/integer/1.-1.3.300'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{int}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_int_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get integer dictionary value {"0": 1, "1": null, "2": 0}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/integer/1.null.zero'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{int}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_int_invalid_string(
self, custom_headers=None, raw=False, **operation_config):
"""Get integer dictionary value {"0": 1, "1": "integer", "2": 0}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/integer/1.integer.0'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{int}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_long_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get integer dictionary value {"0": 1, "1": -1, "2": 3, "3": 300}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/long/1.-1.3.300'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{long}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_long_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value empty {"0": 1, "1": -1, "2": 3, "3": 300}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/long/1.-1.3.300'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{long}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_long_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get long dictionary value {"0": 1, "1": null, "2": 0}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/long/1.null.zero'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{long}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_long_invalid_string(
self, custom_headers=None, raw=False, **operation_config):
"""Get long dictionary value {"0": 1, "1": "integer", "2": 0}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/long/1.integer.0'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{long}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_float_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get float dictionary value {"0": 0, "1": -0.01, "2": 1.2e20}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/float/0--0.01-1.2e20'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{float}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_float_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value {"0": 0, "1": -0.01, "2": 1.2e20}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/float/0--0.01-1.2e20'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{float}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_float_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get float dictionary value {"0": 0.0, "1": null, "2": 1.2e20}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/float/0.0-null-1.2e20'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{float}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_float_invalid_string(
self, custom_headers=None, raw=False, **operation_config):
"""Get boolean dictionary value {"0": 1.0, "1": "number", "2": 0.0}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/float/1.number.0'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{float}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_double_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get float dictionary value {"0": 0, "1": -0.01, "2": 1.2e20}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/double/0--0.01-1.2e20'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{float}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_double_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value {"0": 0, "1": -0.01, "2": 1.2e20}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/double/0--0.01-1.2e20'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{float}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_double_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get float dictionary value {"0": 0.0, "1": null, "2": 1.2e20}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/double/0.0-null-1.2e20'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{float}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_double_invalid_string(
self, custom_headers=None, raw=False, **operation_config):
"""Get boolean dictionary value {"0": 1.0, "1": "number", "2": 0.0}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/double/1.number.0'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{float}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_string_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get string dictionary value {"0": "foo1", "1": "foo2", "2": "foo3"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/string/foo1.foo2.foo3'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{str}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_string_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value {"0": "foo1", "1": "foo2", "2": "foo3"}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/string/foo1.foo2.foo3'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{str}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_string_with_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get string dictionary value {"0": "foo", "1": null, "2": "foo2"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/string/foo.null.foo2'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{str}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_string_with_invalid(
self, custom_headers=None, raw=False, **operation_config):
"""Get string dictionary value {"0": "foo", "1": 123, "2": "foo2"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/string/foo.123.foo2'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{str}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_date_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get integer dictionary value {"0": "2000-12-01", "1": "1980-01-02",
"2": "1492-10-12"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{date}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_date_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value {"0": "2000-12-01", "1": "1980-01-02", "2":
"1492-10-12"}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{date}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
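    # Sketch of the expected body for the date operations: msrest's '{date}'
    # signature serializes datetime.date values (assumed usage, not from
    # this file):
    #
    #     import datetime
    #     body = {"0": datetime.date(2000, 12, 1),
    #             "1": datetime.date(1980, 1, 2),
    #             "2": datetime.date(1492, 10, 12)}
    #     client.dictionary.put_date_valid(body)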
def get_date_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get date dictionary value {"0": "2012-01-01", "1": null, "2":
"1776-07-04"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date/invalidnull'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{date}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_date_invalid_chars(
self, custom_headers=None, raw=False, **operation_config):
"""Get date dictionary value {"0": "2011-03-22", "1": "date"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date/invalidchars'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{date}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_date_time_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get date-time dictionary value {"0": "2000-12-01t00:00:01z", "1":
"1980-01-02T00:11:35+01:00", "2": "1492-10-12T10:15:01-08:00"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date-time/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{iso-8601}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_date_time_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value {"0": "2000-12-01t00:00:01z", "1":
"1980-01-02T00:11:35+01:00", "2": "1492-10-12T10:15:01-08:00"}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date-time/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{iso-8601}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
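    # Sketch for the '{iso-8601}' body: timezone-aware datetimes round-trip
    # to strings like "1980-01-02T00:11:35+01:00" (assumes Python 3's
    # datetime.timezone; the `client` handle is hypothetical):
    #
    #     from datetime import datetime, timedelta, timezone
    #     body = {"0": datetime(2000, 12, 1, 0, 0, 1, tzinfo=timezone.utc),
    #             "1": datetime(1980, 1, 2, 0, 11, 35,
    #                           tzinfo=timezone(timedelta(hours=1)))}
    #     client.dictionary.put_date_time_valid(body)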
def get_date_time_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get date dictionary value {"0": "2000-12-01t00:00:01z", "1": null}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date-time/invalidnull'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{iso-8601}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_date_time_invalid_chars(
self, custom_headers=None, raw=False, **operation_config):
"""Get date dictionary value {"0": "2000-12-01t00:00:01z", "1":
"date-time"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date-time/invalidchars'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{iso-8601}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_date_time_rfc1123_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get date-time-rfc1123 dictionary value {"0": "Fri, 01 Dec 2000
00:00:01 GMT", "1": "Wed, 02 Jan 1980 00:11:35 GMT", "2": "Wed, 12
Oct 1492 10:15:01 GMT"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date-time-rfc1123/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{rfc-1123}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_date_time_rfc1123_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value empty {"0": "Fri, 01 Dec 2000 00:00:01 GMT", "1":
"Wed, 02 Jan 1980 00:11:35 GMT", "2": "Wed, 12 Oct 1492 10:15:01
GMT"}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date-time-rfc1123/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{rfc-1123}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
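    # The '{rfc-1123}' signature differs from '{iso-8601}' only in the wire
    # format ("Fri, 01 Dec 2000 00:00:01 GMT"); the body is still a dict of
    # datetime values (hypothetical usage):
    #
    #     from datetime import datetime, timezone
    #     body = {"0": datetime(2000, 12, 1, 0, 0, 1, tzinfo=timezone.utc)}
    #     client.dictionary.put_date_time_rfc1123_valid(body)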
def get_duration_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get duration dictionary value {"0": "P123DT22H14M12.011S", "1":
"P5DT1H0M0S"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/duration/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{duration}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_duration_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value {"0": "P123DT22H14M12.011S", "1": "P5DT1H0M0S"}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/duration/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{duration}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
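    # Durations map to datetime.timedelta; "P123DT22H14M12.011S" is the
    # ISO-8601 rendering of the first value below (assumed usage):
    #
    #     from datetime import timedelta
    #     body = {"0": timedelta(days=123, hours=22, minutes=14,
    #                            seconds=12, milliseconds=11),
    #             "1": timedelta(days=5, hours=1)}
    #     client.dictionary.put_duration_valid(body)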
def get_byte_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get byte dictionary value {"0": hex(FF FF FF FA), "1": hex(01 02 03),
"2": hex (25, 29, 43)} with each item encoded in base64.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/byte/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{bytearray}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_byte_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Put the dictionary value {"0": hex(FF FF FF FA), "1": hex(01 02 03),
"2": hex (25, 29, 43)} with each elementencoded in base 64.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/byte/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{bytearray}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
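    # The '{bytearray}' signature base64-encodes each value on the wire;
    # hex(FF FF FF FA) corresponds to bytearray([0xFF, 0xFF, 0xFF, 0xFA])
    # (hypothetical usage sketch):
    #
    #     body = {"0": bytearray([0xFF, 0xFF, 0xFF, 0xFA]),
    #             "1": bytearray([0x01, 0x02, 0x03]),
    #             "2": bytearray([0x25, 0x29, 0x43])}
    #     client.dictionary.put_byte_valid(body)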
def get_byte_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get byte dictionary value {"0": hex(FF FF FF FA), "1": null} with the
first item base64 encoded.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/byte/invalidnull'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{bytearray}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_base64_url(
self, custom_headers=None, raw=False, **operation_config):
"""Get base64url dictionary value {"0": "a string that gets encoded with
base64url", "1": "test string", "2": "Lorem ipsum"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/base64url/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{base64}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
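    # For '{base64}' the deserializer decodes base64url strings back to
    # bytes, so callers see raw byte values (assumed behaviour, sketched
    # with the hypothetical `client` handle):
    #
    #     result = client.dictionary.get_base64_url()
    #     # e.g. result["1"] == b"test string"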
def get_complex_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get dictionary of complex type null value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/complex/null'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{Widget}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_complex_empty(
self, custom_headers=None, raw=False, **operation_config):
"""Get empty dictionary of complex type {}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/complex/empty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{Widget}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_complex_item_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get dictionary of complex type with null item {"0": {"integer": 1,
"string": "2"}, "1": null, "2": {"integer": 5, "string": "6"}}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/complex/itemnull'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{Widget}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_complex_item_empty(
self, custom_headers=None, raw=False, **operation_config):
"""Get dictionary of complex type with empty item {"0": {"integer": 1,
"string": "2"}, "1:" {}, "2": {"integer": 5, "string": "6"}}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/complex/itemempty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{Widget}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_complex_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get dictionary of complex type with {"0": {"integer": 1, "string":
"2"}, "1": {"integer": 3, "string": "4"}, "2": {"integer": 5,
"string": "6"}}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/complex/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{Widget}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_complex_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Put an dictionary of complex type with values {"0": {"integer": 1,
"string": "2"}, "1": {"integer": 3, "string": "4"}, "2": {"integer":
5, "string": "6"}}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/complex/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{Widget}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
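    # Complex values use the generated Widget model rather than plain
    # dicts; the import path and keyword arguments below are assumptions
    # based on the docstring's {"integer": ..., "string": ...} shape:
    #
    #     from ..models import Widget
    #     body = {"0": Widget(integer=1, string="2"),
    #             "1": Widget(integer=3, string="4"),
    #             "2": Widget(integer=5, string="6")}
    #     client.dictionary.put_complex_valid(body)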
def get_array_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get a null array.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/array/null'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{[str]}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_array_empty(
self, custom_headers=None, raw=False, **operation_config):
"""Get an empty dictionary {}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/array/empty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{[str]}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_array_item_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get an dictionary of array of strings {"0": ["1", "2", "3"], "1":
null, "2": ["7", "8", "9"]}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/array/itemnull'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{[str]}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_array_item_empty(
self, custom_headers=None, raw=False, **operation_config):
"""Get an array of array of strings [{"0": ["1", "2", "3"], "1": [], "2":
["7", "8", "9"]}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/array/itemempty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{[str]}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_array_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get an array of array of strings {"0": ["1", "2", "3"], "1": ["4",
"5", "6"], "2": ["7", "8", "9"]}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/array/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{[str]}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_array_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Put An array of array of strings {"0": ["1", "2", "3"], "1": ["4",
"5", "6"], "2": ["7", "8", "9"]}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/array/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{[str]}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
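    # '{[str]}' bodies are dictionaries whose values are lists of strings
    # (hypothetical usage with the assumed `client.dictionary` handle):
    #
    #     body = {"0": ["1", "2", "3"],
    #             "1": ["4", "5", "6"],
    #             "2": ["7", "8", "9"]}
    #     client.dictionary.put_array_valid(body)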
def get_dictionary_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get an dictionaries of dictionaries with value null.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/dictionary/null'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{{str}}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_dictionary_empty(
self, custom_headers=None, raw=False, **operation_config):
"""Get an dictionaries of dictionaries of type <string, string> with
value {}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/dictionary/empty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{{str}}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_dictionary_item_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get an dictionaries of dictionaries of type <string, string> with
value {"0": {"1": "one", "2": "two", "3": "three"}, "1": null, "2":
{"7": "seven", "8": "eight", "9": "nine"}}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/dictionary/itemnull'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{{str}}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_dictionary_item_empty(
self, custom_headers=None, raw=False, **operation_config):
"""Get an dictionaries of dictionaries of type <string, string> with
value {"0": {"1": "one", "2": "two", "3": "three"}, "1": {}, "2":
{"7": "seven", "8": "eight", "9": "nine"}}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/dictionary/itemempty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{{str}}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_dictionary_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get an dictionaries of dictionaries of type <string, string> with
value {"0": {"1": "one", "2": "two", "3": "three"}, "1": {"4":
"four", "5": "five", "6": "six"}, "2": {"7": "seven", "8": "eight",
"9": "nine"}}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/dictionary/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{{str}}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_dictionary_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Get an dictionaries of dictionaries of type <string, string> with
value {"0": {"1": "one", "2": "two", "3": "three"}, "1": {"4":
"four", "5": "five", "6": "six"}, "2": {"7": "seven", "8": "eight",
"9": "nine"}}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/dictionary/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{{str}}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
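# Illustrative round-trip over the two operations above (a hedged sketch; the
# service-client construction and the `dictionary` attribute name are assumed,
# not shown in this file):
#
#     value = client.dictionary.get_dictionary_valid()
#     client.dictionary.put_dictionary_valid(value)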
|
fhoring/autorest
|
src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyDictionary/autorestswaggerbatdictionaryservice/operations/dictionary_operations.py
|
Python
|
mit
| 110,391
|
from django.contrib import admin
# Register your models here.
from Aplicacio.models import Movie, Character, Location, Team, Power
admin.site.register(Movie)
admin.site.register(Character)
admin.site.register(Location)
admin.site.register(Team)
admin.site.register(Power)
|
danic96/Practica1
|
Practica1/Aplicacio/admin.py
|
Python
|
mit
| 274
|
from decorator import decorator
from inspect import getargspec
# ------------------------------------------------------------------------
# decorators
# ------------------------------------------------------------------------
def lazy_property(func):
"""Decorator that makes a property lazy-evaluated.
"""
attr_name = '_lazy_' + func.__name__
@property
def _lazy_property(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, func(self))
return getattr(self, attr_name)
return _lazy_property
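# Illustrative use of lazy_property (a sketch; the class and the expensive
# computation are made up):
#
#     class Dataset(object):
#         @lazy_property
#         def summary(self):
#             return expensive_computation()  # runs once, then cached on self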
def verify(variables=None, categorical=None, text_keys=None, axis=None, is_str=None):
"""
Decorator to verify arguments.
"""
@decorator
def _var_in_ds(func, *args, **kwargs):
all_args = getargspec(func)[0]
ds = args[0]
for variable, collection in variables.items():
nested = False
if collection.endswith('_nested'):
nested = True
collection = collection.split('_')[0]
# get collection for argument
if collection == 'both':
collection = ['columns', 'masks']
else:
collection = [collection]
c = [key for col in collection for key in ds._meta[col].keys()]
# get the variable argument to check
v_index = all_args.index(variable)
var = kwargs.get(variable, args[v_index])
if var is None:
return func(*args, **kwargs)
if not isinstance(var, list):
var = [var]
if nested:
valid = []
for v in var:
if ' > ' in v:
valid.extend(v.replace(' ', '').split('>'))
else:
valid.append(v)
else:
valid = var
# check the variable
not_valid = [v for v in valid if not v in c + ['@']]
if not_valid:
msg = "'{}' argument for {}() must be in {}.\n"
msg += '{} is not in {}.'
msg = msg.format(variable, func.func_name, collection,
not_valid, collection)
raise KeyError(msg)
return func(*args, **kwargs)
@decorator
def _var_is_cat(func, *args, **kwargs):
all_args = getargspec(func)[0]
ds = args[0]
for cat in categorical:
# get the variable argument to check if it is categorical
v_index = all_args.index(cat)
var = kwargs.get(cat, args[v_index])
if var is None: return func(*args, **kwargs)
if not isinstance(var, list): var = [var]
valid = []
for v in var:
if ' > ' in v:
valid.extend(v.replace(' ', '').split('>'))
elif not '@' == v:
valid.append(v)
            # check if variables are categorical
not_cat = [v for v in valid if not ds._has_categorical_data(v)]
if not_cat:
msg = "'{}' argument for {}() must reference categorical "
msg += 'variable.\n {} is not categorical.'
msg = msg.format(cat, func.func_name, not_cat)
raise ValueError(msg)
return func(*args, **kwargs)
@decorator
def _verify_text_key(func, *args, **kwargs):
all_args = getargspec(func)[0]
ds = args[0]
for text_key in text_keys:
# get the text_key argument to check
tk_index = all_args.index(text_key)
tks = kwargs.get(text_key, args[tk_index])
if tks is None: return func(*args, **kwargs)
if not isinstance(tks, list): tks = [tks]
            # check the text_key
valid_tks = ds.valid_tks
not_supported = [tk for tk in tks if not tk in valid_tks]
if not_supported:
msg = "{} is not a valid text_key! Supported are: \n {}"
raise ValueError(msg.format(not_supported, valid_tks))
return func(*args, **kwargs)
@decorator
def _verify_axis(func, *args, **kwargs):
# get the axis argument to check
all_args = getargspec(func)[0]
ax_index = all_args.index(axis)
a_edit = kwargs.get(axis, args[ax_index])
if a_edit is None: return func(*args, **kwargs)
if not isinstance(a_edit, list): a_edit = [a_edit]
        # check the axis
valid_ax = ['x', 'y']
not_supported = [ax for ax in a_edit if not ax in valid_ax]
if not_supported:
msg = "{} is not a valid axis! Supported are: {}"
raise ValueError(msg.format(not_supported, valid_ax))
return func(*args, **kwargs)
@decorator
def _is_str(func, *args, **kwargs):
all_args = getargspec(func)[0]
for val in is_str:
# get the arguments to modify
val_index = all_args.index(val)
v = kwargs.get(val, args[val_index])
if not isinstance(v, (list, tuple)): v = [v]
if not all(isinstance(text, (str, unicode)) for text in v):
raise ValueError('Included value must be str or list of str.')
return func(*args, **kwargs)
@decorator
def _deco(func, *args, **kwargs):
p = [variables, categorical, text_keys, axis, is_str]
d = [_var_in_ds, _var_is_cat, _verify_text_key, _verify_axis, _is_str]
for arg, dec in reversed(zip(p, d)):
if arg is None: continue
func = dec(func)
return func(*args, **kwargs)
if categorical and not isinstance(categorical, list): categorical = [categorical]
if text_keys and not isinstance(text_keys, list): text_keys = [text_keys]
if is_str and not isinstance(is_str, list): is_str = [is_str]
return _deco
def modify(to_list=None):
"""
Decorator to modify arguments.
"""
@decorator
def _to_list(func, *args, **kwargs):
all_args = getargspec(func)[0]
for val in to_list:
# get the arguments to modify
val_index = all_args.index(val)
v = kwargs.get(val, args[val_index])
if v is None: v = []
if not isinstance(v, list): v = [v]
if kwargs.get(val):
kwargs[val] = v
else:
args = tuple(a if not x == val_index else v
for x, a in enumerate(args))
return func(*args, **kwargs)
if to_list:
if not isinstance(to_list, list): to_list = [to_list]
return _to_list
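# Illustrative combined use of verify() and modify() (a sketch; the function
# and its argument names are made up):
#
#     @modify(to_list=['name'])
#     @verify(variables={'name': 'both'}, axis='axis_edit')
#     def set_property(ds, name, axis_edit=None):
#         ...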
|
Quantipy/quantipy
|
quantipy/core/tools/qp_decorators.py
|
Python
|
mit
| 6,656
|
# naive is a datetime with no timezone.
# Create a new timezone for US/Pacific, which is 8 hours behind UTC (UTC-08:00).
# Then make a new variable named hill_valley that is naive with its tzinfo attribute replaced with the US/Pacific timezone you made.
import datetime
naive = datetime.datetime(2015, 10, 21, 4, 29)
pacific = datetime.timezone(datetime.timedelta(hours=-8))
hill_valley = naive.replace(tzinfo=pacific)
# Great, but replace just sets the timezone, it doesn't move the datetime to the new timezone. Let's move one.
# Make a new timezone that is UTC+01:00.
# Create a new variable named paris that uses your new timezone and the astimezone method to change hill_valley to the new timezone.
new_timezone = datetime.timezone(datetime.timedelta(hours=1))
paris = hill_valley.astimezone(new_timezone)
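# Quick check (illustrative, not part of the original exercise): astimezone()
# converts the instant, so both values compare equal while showing different
# wall-clock times.
print(hill_valley)           # 2015-10-21 04:29:00-08:00
print(paris)                 # 2015-10-21 13:29:00+01:00
print(hill_valley == paris)  # True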
|
CaseyNord/Treehouse
|
Dates and Times in Python/aware.py
|
Python
|
mit
| 862
|
#! /usr/bin/env python3
import math, sys
import shtns
import numpy as np
class shtnsfiledata:
#
# Adopted from https://bitbucket.org/nschaeff/shtns/src/master/examples/shallow_water.py
#
def __init__(
self,
rsphere = 1.0
):
self.rsphere = rsphere
def setup(self, file_info, anti_aliasing=False):
import shtns
import numpy as np
        if file_info['modes_m_max'] != file_info['modes_n_max']:
raise Exception("Only num_lon == num_lat supported")
ntrunc = file_info['modes_n_max']
self._shtns = shtns.sht(ntrunc, ntrunc, 1, shtns.sht_orthonormal+shtns.SHT_NO_CS_PHASE)
nlons = (ntrunc + 1) * 2
nlats = (ntrunc + 1)
if anti_aliasing:
if nlons & 1:
raise Exception("Only even numbers of longitudinal coordinates allowed for anti-aliasing")
if nlats & 1:
raise Exception("Only even numbers of latitudinal coordinates allowed for anti-aliasing")
print("Anti-aliasing:")
print(" + old lon/lat: ", nlons, nlats)
nlons += nlons//2
nlats += nlats//2
print(" + new lon/lat: ", nlons, nlats)
if file_info['grid_type'] == 'GAUSSIAN':
#self._shtns.set_grid(nlats,nlons,shtns.sht_gauss_fly|shtns.SHT_PHI_CONTIGUOUS, 1.e-10)
self._shtns.set_grid(nlats, nlons, shtns.sht_quick_init|shtns.SHT_PHI_CONTIGUOUS, 0)
elif file_info['grid_type'] == 'REGULAR':
#self._shtns.set_grid(nlats,nlons,shtns.sht_reg_dct|shtns.SHT_PHI_CONTIGUOUS, 1.e-10)
self._shtns.set_grid(nlats, nlons, shtns.sht_reg_dct|shtns.SHT_PHI_CONTIGUOUS, 0)
else:
raise Exception("Grid type '"+file_info['grid_type']+"' not supported!")
self.lats = np.arcsin(self._shtns.cos_theta)
self.lons = (2.*np.pi/nlons)*np.arange(nlons)
self.nlons = nlons
self.nlats = nlats
self.ntrunc = ntrunc
self.nlm = self._shtns.nlm
self.degree = self._shtns.l
self.lap = -self.degree*(self.degree+1.0).astype(np.complex)
self.invlap = np.zeros(self.lap.shape, self.lap.dtype)
self.invlap[1:] = 1./self.lap[1:]
self.lap = self.lap/self.rsphere**2
self.invlap = self.invlap*self.rsphere**2
def phys2spec(self, data):
return self._shtns.analys(data)
def spec2phys(self, dataspec):
return self._shtns.synth(dataspec)
def vrtdiv2uv(self, vrtspec, divspec):
return self._shtns.synth((self.invlap/self.rsphere)*vrtspec, (self.invlap/self.rsphere)*divspec)
def uv2vrtdiv(self,u,v):
vrtspec, divspec = self._shtns.analys(u, v)
return self.lap*self.rsphere*vrtspec, self.lap*self.rsphere*divspec
def getuv(self,divspec):
vrtspec = np.zeros(divspec.shape, dtype=np.complex)
u,v = self._shtns.synth(vrtspec,divspec)
return u, v
def rotateX90(self, i_field):
return self._shtns.Xrotate90(i_field)
def rotateY90(self, i_field):
return self._shtns.Yrotate90(i_field)
def rotateZ90(self, i_field, angle):
return self._shtns.Zrotate(i_field, angle)
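# Illustrative usage (a sketch; the file_info layout is inferred from setup()
# above and `phys_field` is an assumed physical-grid numpy array):
#
#     sd = shtnsfiledata(rsphere=6371220.0)
#     sd.setup({'modes_m_max': 85, 'modes_n_max': 85, 'grid_type': 'GAUSSIAN'})
#     spec = sd.phys2spec(phys_field)   # physical grid -> spectral coefficients
#     phys = sd.spec2phys(spec)         # spectral coefficients -> physical grid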
|
schreiberx/sweet
|
mule_local/python/mule_local/postprocessing/shtnsfiledata.py
|
Python
|
mit
| 3,227
|
# -*- encoding:utf8 -*-
"""
Use MongoDB as the cache backend.
Test the local cache.
"""
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import json
from pymongo import MongoClient
from datetime import datetime, timedelta
from bson.binary import Binary
import zlib
import time
class MongoCache:
def __init__(self, client=None, expires=timedelta(days=30)):
self.client = client or MongoClient(connect=False)
        # use 'cache' as the caching collection
self.db = self.client.cache
        # cached entries are deleted automatically after they expire
self.db.webpage.create_index('timestamp', expireAfterSeconds=expires.total_seconds())
def __contains__(self, url):
try:
self[url]
except KeyError:
return False
else:
return True
def __getitem__(self, url):
result = self.db.webpage.find_one({'_id': url})
if result:
result['html'] = zlib.decompress(result['html'])
return result
else:
            raise KeyError(url + ' does not exist')
def __setitem__(self, url, result):
result['html'] = Binary(zlib.compress(result['html']))
self.db.webpage.replace_one({'_id': url}, result, upsert=True)
result['html'] = zlib.decompress(result['html'])
def clear(self):
self.db.webpage.drop()
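# Illustrative usage (a sketch mirroring test() below; the url and html are
# placeholders):
#
#     cache = MongoCache()
#     cache['http://example.com'] = {'html': '<p>hi</p>',
#                                    'timestamp': datetime.utcnow()}
#     print cache['http://example.com']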
def test(timesleep=60):
cache = MongoCache(expires=timedelta())
time.sleep(timesleep)
cache['http://www.baidu.com'] = {'html': '<p>asd</p>', 'timestamp': str(datetime.utcnow())}
print cache['http://www.baidu.com']
if __name__ == '__main__':
from link_crawler import link_crawler
link_crawler('http://example.webscraping.com/', delay=3, link_regex='/(index|view)',
max_urls=-1, cache=MongoCache())
|
basicworld/pycrawler
|
mongo_cache.py
|
Python
|
mit
| 1,791
|
#!../../../../virtualenv/bin/python3
# -*- coding: utf-8 -*-
# NB: The shebang line above assumes you've installed a python virtual environment alongside your working copy of the
# <4most-4gp-scripts> git repository. It also only works if you invoke this python script from the directory where it
# is located. If these two assumptions are incorrect (e.g. you're using Conda), you can still use this script by typing
# <python run_pipeline_on_spectrum_library.py>, but <./run_pipeline_on_spectrum_library.py> will not work.
"""
Run the contents of a spectrum library through the 4GP pipeline.
"""
import argparse
import logging
from os import path as os_path
from fourgp_pipeline import PipelineFGK
from fourgp_pipeline import PipelineManager
from fourgp_speclib import SpectrumLibrarySqlite
# Implement a pipeline manager which loads spectra for analysis from disk
class PipelineManagerReadFromSpectrumLibrary(PipelineManager):
"""
A pipeline manager which loads spectra for analysis from a spectrum library.
"""
def __init__(self, spectrum_library_to_analyse, workspace, pipeline):
"""
Open the spectrum library containing the spectra that we are to analyse.
:param spectrum_library_to_analyse:
The name of the spectrum library we are to analyse
:type spectrum_library_to_analyse:
str
:param workspace:
Directory where we expect to find spectrum libraries.
:type workspace:
str
:param pipeline:
The Pipeline we are to run spectra through.
:type pipeline:
Pipeline
"""
# Initialise pipeline manager
super(PipelineManagerReadFromSpectrumLibrary, self).__init__(pipeline=pipeline)
# Open the spectrum library we are reading from
self.spectrum_library_to_analyse = spectrum_library_to_analyse
spectra = SpectrumLibrarySqlite.open_and_search(
library_spec=spectrum_library_to_analyse,
workspace=workspace,
extra_constraints={}
)
input_library, input_library_items = [spectra[i] for i in ("library", "items")]
input_library_ids = [i["specId"] for i in input_library_items]
self.input_library = input_library
self.input_library_items = input_library_items
self.input_library_ids = input_library_ids
self.spectrum_counter = 0
def fetch_work(self):
"""
Check to see if we have any spectra to analyse. If yes, return the next Spectrum object which needs
analysing. If we have nothing to do, return None.
:return:
Spectrum object to be analysed
"""
# If we have done all the spectra already, return None
if self.spectrum_counter >= len(self.input_library_items):
return None
# Fetch next spectrum to analyse
        test_spectrum_array = self.input_library.open(ids=self.input_library_ids[self.spectrum_counter])
        self.spectrum_counter += 1
        spectrum = test_spectrum_array.extract_item(0)
        spectrum_id = spectrum.metadata['uid']
return {
'spectrum': spectrum,
'spectrum_identifier': spectrum_id
}
def post_result(self, spectrum_analysis):
"""
Post the results from analysing a spectrum back to whatever database you are using to store the pipeline's
output
:param spectrum_analysis:
A SpectrumAnalysis object containing the results of the pipeline's analysis of the spectrum.
:type spectrum_analysis:
SpectrumAnalysis
:return:
True for success; False for failure.
"""
# This method should save the output of the pipeline somewhere
# Don't bother doing anything with the output for now
return True
def main(logger, input_library, workspace, fourmost_mode, reload_cannon):
"""
Main entry point for running the pipeline.
:param logger:
Logging object used to update user on progress of pipeline.
:param input_library:
The name of the spectrum library whose contents we are to run the pipeline on.
:param workspace:
The path of the workspace directory containing the spectrum libraries we are to use.
:param fourmost_mode:
The name of the 4MOST mode we are operating, either hrs or lrs
:param reload_cannon:
The filename of the output files containing the trained Cannon that we are to reload.
:return:
None
"""
# Let user know we're up and running
logger.info("Running FGK pipeline on the spectrum library <{}>".format(input_library))
# Set path to workspace where we expect to find libraries of spectra
our_path = os_path.split(os_path.abspath(__file__))[0]
workspace = workspace if workspace else os_path.join(our_path, "../../../workspace")
# Instantiate the pipeline
pipeline = PipelineFGK(
workspace=workspace,
fourmost_mode=fourmost_mode,
reload_cannon_from_file=reload_cannon
)
# Instantiate the pipeline manager
pipeline_manager = PipelineManagerReadFromSpectrumLibrary(
spectrum_library_to_analyse=input_library,
workspace=workspace,
pipeline=pipeline
)
# Do the work
while pipeline_manager.poll_server_for_work():
pass
# Do it right away if we're run as a script
if __name__ == "__main__":
# Configure format for logging messages
logging.basicConfig(level=logging.INFO, format='[%(asctime)s] %(levelname)s:%(filename)s:%(message)s',
datefmt='%d/%m/%Y %H:%M:%S')
logger = logging.getLogger(__name__)
# Read input parameters
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--library', required=True, dest='input_library',
help="Library of spectra to pipeline is to work on. Stars may be filtered by parameters by "
"placing a comma-separated list of constraints in [] brackets after the name of the "
"library. Use the syntax [Teff=3000] to demand equality, or [0<[Fe/H]<0.2] to specify a "
"range.")
parser.add_argument('--workspace', dest='workspace', default="",
help="Directory where we expect to find spectrum libraries.")
parser.add_argument('--reload-cannon', required=True, dest='reload_cannon',
help="Skip training step, and reload a Cannon that we've previously trained. Specify the full "
"path to the .cannon file containing the trained Cannon, but without the .cannon suffix.")
parser.add_argument('--output-file', default="./test_cannon.out", dest='output_file',
help="Data file to write output to.")
parser.add_argument('--mode', required=True, dest='fourmost_mode',
choices=("hrs", "lrs"),
help="The 4MOST mode we are operating on.")
args = parser.parse_args()
main(logger=logger,
input_library=args.input_library,
workspace=args.workspace,
reload_cannon=args.reload_cannon,
fourmost_mode=args.fourmost_mode
)
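# Illustrative invocation (library name and Cannon path are placeholders):
#
#     ./run_pipeline_on_spectrum_library.py --library demo_stars --mode hrs \
#         --reload-cannon /path/to/trained_cannon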
|
dcf21/4most-4gp-scripts
|
src/scripts/pipeline/run_pipeline_on_spectrum_library.py
|
Python
|
mit
| 7,282
|
from api import Api
def full_url(resource):
return Api.url_base.format(resource=resource)
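# Illustrative behaviour (assumes Api.url_base is something like
# 'http://localhost/api/{resource}'):
#
#     full_url('users')  # -> 'http://localhost/api/users'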
|
uhavin/pubbd
|
tests/api/__init__.py
|
Python
|
mit
| 96
|
#!/usr/bin/python
###
### in poiFlagEmail() below, write code that returns a boolean
### indicating if a given email is from a POI
###
import sys
import reader
import poi_emails
def getToFromStrings(f):
f.seek(0)
to_string, from_string, cc_string = reader.getAddresses(f)
to_emails = reader.parseAddresses( to_string )
from_emails = reader.parseAddresses( from_string )
cc_emails = reader.parseAddresses( cc_string )
return to_emails, from_emails, cc_emails
### POI flag an email
def poiFlagEmail(f):
""" given an email file f,
return a trio of booleans for whether that email is
to, from, or cc'ing a poi """
to_emails, from_emails, cc_emails = getToFromStrings(f)
### list of email addresses of all the POIs
poi_email_list = poi_emails.poiEmails()
to_poi = False
from_poi = False
cc_poi = False
### to_poi and cc_poi are related functions, which flag whether
### the email under inspection is addressed to a POI, or if a POI is in cc
### you don't have to change this code at all
### there can be many "to" emails, but only one "from", so the
### "to" processing needs to be a little more complicated
if to_emails:
ctr = 0
while not to_poi and ctr < len(to_emails):
if to_emails[ctr] in poi_email_list:
to_poi = True
ctr += 1
if cc_emails:
ctr = 0
        while not cc_poi and ctr < len(cc_emails):
if cc_emails[ctr] in poi_email_list:
cc_poi = True
ctr += 1
#################################
######## your code below ########
### set from_poi to True if #####
### the email is from a POI #####
#################################
if from_emails and from_emails[0] in poi_email_list:
from_poi = True
#################################
return to_poi, from_poi, cc_poi
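### Illustrative call (a sketch; f is assumed to be an open email file handle):
###     to_poi, from_poi, cc_poi = poiFlagEmail(f)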
|
napjon/moocs_solution
|
ml-udacity/feature_selection/poi_flag_email.py
|
Python
|
mit
| 1,941
|
from __future__ import print_function
from django.core.management.base import BaseCommand, CommandError
from alapage.models import Page
class Command(BaseCommand):
help = 'Creates a page'
def add_arguments(self, parser):
parser.add_argument('name', nargs='+', type=str)
parser.add_argument('url', nargs='+', type=str)
def handle(self, *args, **options):
name = options['name'][0]
url = options['url'][0]
exists = Page.objects.filter(url=url).exists()
        # create page
if not exists:
Page.objects.create(url=url, title=name)
print("Page "+name+" created")
else:
print("The page already exists at "+url)
|
synw/django-alapage
|
alapage/management/commands/create_page.py
|
Python
|
mit
| 718
|
from django.views.decorators.csrf import csrf_exempt
from django.utils import timezone
from .models import UrlShrinked
from .forms import UrlShrinkedForm
from django.shortcuts import render, get_object_or_404,redirect
from django.contrib.sites.shortcuts import get_current_site
from django.http import JsonResponse,HttpResponse
from hashids import Hashids
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
import requests
# View used on the site: creates a shortened code from the POSTed form, or renders the empty form in the html view
def create_shorturl(request):
if request.method == "POST":
form=UrlShrinkedForm(request.POST)
if form.is_valid():
url=form.save(commit=False)
url.save()
url.publish()
tmp="/"+url.shrinked_code+"/"
full_url = ''.join(['http://', get_current_site(request).domain,tmp])
return render(request, 'redirector/shrink_detail.html', {'url_skrink': full_url})
else:
form=UrlShrinkedForm()
return render(request, 'redirector/create.html',{'form':form})
# View that redirects to the original url
def url_redirection(request, code):
url_retrieved=get_url(code)
if(url_retrieved!=None):
return redirect(url_retrieved.url)
else:
return render(request,'redirector/error.html')
# Get the url for a shortened code (JSON)
# CSRF is disabled to allow POST requests from external sites without cookies or sessions
@csrf_exempt
def api_url_response(request,code):
result=get_url(code)
if(result!= None):
object_response={'url':result.url,'shrink':result.shrinked_code}
response=HttpResponse(JsonResponse(object_response), content_type="application/json")
else:
response=HttpResponse(JsonResponse({'url':'null'}), content_type="application/json")
return response
# Get the shortened code for a url (JSON)
# CSRF is disabled to allow POST requests from external sites without cookies or sessions
@csrf_exempt
def api_url_request(request):
if request.method=="POST":
url_verification=URLValidator()
        post_url = request.POST.get('url')
result=UrlShrinked(url="")
if post_url:
try:
url_verification(post_url)
result.url=post_url
result.save()
result.publish()
except Exception as e:
result.url="url_invalid"
result.shrinked_code="url_invalid"
else:
result.url="url_empty"
result.shrinked_code="url_empty"
object_response={'url':result.url,'shrink':result.shrinked_code}
return HttpResponse(JsonResponse(object_response), content_type="application/json")
# Look up a url by its shortened code; private helper, TO MOVE
def get_url(code):
try:
return UrlShrinked.objects.get(shrinked_code=code)
except UrlShrinked.DoesNotExist:
return None
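# Illustrative JSON exchange (a hedged sketch; the url patterns live elsewhere
# and the routes shown here are assumptions):
#
#     GET  <api_url_response route>/<code>/  ->  {"url": "<original>", "shrink": "<code>"}
#     POST <api_url_request route>  url=...  ->  {"url": "<original>", "shrink": "<new code>"}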
|
Remeic/Squozy
|
redirector/views.py
|
Python
|
mit
| 2,951
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Ref: http://doc.qt.io/qt-5/modelview.html#2-1-a-read-only-table
import sys
from PyQt5.QtCore import Qt, QAbstractTableModel, QVariant
from PyQt5.QtWidgets import QApplication, QTableView
class MyData:
def __init__(self):
self._num_rows = 3
self._num_columns = 2
self._data = [["hello" for j in range(self._num_columns)] for i in range(self._num_rows)]
def get_num_rows(self):
return self._num_rows
def get_num_columns(self):
return self._num_columns
def get_data(self, row_index, column_index):
value = self._data[row_index][column_index]
return value
def set_data(self, row_index, column_index, value):
self._data[row_index][column_index] = value
###############################################################################
class MyModel(QAbstractTableModel):
def __init__(self, data, parent=None):
super().__init__(parent)
        self._data = data  # DON'T CALL THIS ATTRIBUTE "data": A QAbstractItemModel METHOD ALREADY HAS THIS NAME (model.data(index, role))!
def rowCount(self, parent):
return self._data.get_num_rows()
def columnCount(self, parent):
return self._data.get_num_columns()
def data(self, index, role):
if role == Qt.DisplayRole:
return self._data.get_data(index.row(), index.column())
return QVariant()
def setData(self, index, value, role):
if role == Qt.EditRole:
try:
self._data.set_data(index.row(), index.column(), value)
                # The following line is necessary e.g. to dynamically update the QSortFilterProxyModel
self.dataChanged.emit(index, index, [Qt.EditRole])
except Exception as e:
print(e)
return False
return True
def flags(self, index):
return Qt.ItemIsSelectable | Qt.ItemIsEditable | Qt.ItemIsEnabled
def changedCallback():
print("changed")
if __name__ == '__main__':
app = QApplication(sys.argv)
data = MyData()
table_view = QTableView()
my_model = MyModel(data)
my_model.dataChanged.connect(changedCallback)
my_model.rowsInserted.connect(changedCallback)
my_model.rowsRemoved.connect(changedCallback)
table_view.setModel(my_model)
table_view.show()
# The mainloop of the application. The event handling starts from this point.
    # The exec_() method has a trailing underscore because exec is a Python keyword, so exec_() is used instead.
exit_code = app.exec_()
# The sys.exit() method ensures a clean exit.
# The environment will be informed, how the application ended.
sys.exit(exit_code)
|
jeremiedecock/snippets
|
python/pyqt/pyqt5/widget_QTableView_edit_print_signal_when_data_changed.py
|
Python
|
mit
| 2,784
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='WebsiteContact',
fields=[
('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
('subject', models.CharField(max_length=100)),
('email', models.EmailField(max_length=254)),
('message', models.CharField(max_length=100)),
],
),
]
|
omerturner/manakinproducts
|
contact/migrations/0001_initial.py
|
Python
|
mit
| 618
|
import requests
import os
print('\n*** Data and Algorithms GitHub auto-upload script ***\n')
username = input('Enter your GitHub username, e.g. Robert Ying: ')
email = input('Enter the email you registered with on GitHub: ')
print('\nConfiguring Git...')
os.system('git config --global user.name "' + username + '"')
os.system('git config --global user.email ' + email)
print('\nThe information you entered is:')
os.system('git config user.name')
os.system('git config user.email')
if input('\nEnter y to continue if this is correct, or press Enter to re-enter your username and email: ') != 'y':
    username = input('Enter your GitHub username, e.g. Robert Ying: ')
    email = input('Enter the email you registered with on GitHub: ')
    os.system('git config --global user.name "' + username + '"')
    os.system('git config --global user.email ' + email)
    print('\nThe information you entered is:')
    os.system('git config user.name')
    os.system('git config user.email')
url = input(
    '\nEnter the url of your forked repository, e.g. https://github.com/robertying/data-and-algorithms.git. Note the trailing ".git": ')
name = input('\nEnter your real name: ')
payload = {"username": input('Enter your OJ username: '), "password": input('Enter your OJ password: ')}
print()
response = requests.post(
'http://lambda.ee.tsinghua.edu.cn/api/auth/login/', data=payload)
answer = requests.get('http://lambda.ee.tsinghua.edu.cn/api/my/submits/', headers={
'Authorization': 'TOKEN ' + response.json()['auth_token']}, params={'page': 1, 'page_size': 1})
count = answer.json()['count']
answer = requests.get('http://lambda.ee.tsinghua.edu.cn/api/my/submits/', headers={
'Authorization': 'TOKEN ' + response.json()['auth_token']}, params={'page': 1, 'page_size': count})
results = answer.json()['results']
if not os.path.exists('data-and-algorithms'):
os.system('git clone ' + url)
else:
os.system('cd data-and-algorithms & git pull')
os.system('cd data-and-algorithms & git remote add upstream https://github.com/ee64/data-and-algorithms.git & git fetch upstream & git checkout master & git merge upstream/master')
problem_dir = os.listdir('./data-and-algorithms')
already_walked_results = []
for result in results:
if result['problem_title'] not in already_walked_results and result['problem_title'] in problem_dir:
if result['score'] == 100:
if not os.path.exists('./data-and-algorithms/' + result['problem_title'] + '/' + name):
os.mkdir('./data-and-algorithms/' +
result['problem_title'] + '/' + name)
with open('./data-and-algorithms/' + result['problem_title'] + '/' + name + '/README.md', 'w', encoding='utf-8') as md:
md.write('# ' + result['problem_title'] + '\n\n')
md.write(
                    '| # | Time | Memory |\n')
md.write(
'|:----------:|:------------------------------:|:------------------------------:|\n')
                run_results = result['run_results']
for i in range(len(run_results)):
md.write(
'|' + str(i + 1) + '|' + str(run_results[i][1]) + ' ms|' + str(run_results[i][2]) + ' KB|\n')
with open('./data-and-algorithms/' + result['problem_title'] + '/' + name + '/source.cpp', 'w', encoding='utf-8') as source:
source.write(result['code'] + '\n')
os.system('cd data-and-algorithms & git add .')
            os.system('cd data-and-algorithms & git commit -m "Initial commit to ' +
                      result['problem_title'] + ' by ' + name + '"')
already_walked_results.append(result['problem_title'])
continue
os.system('cd data-and-algorithms & git push origin master')
input('\nUpload complete! If nothing seems to have changed, please run the script again. Press Enter to exit: ')
|
ee64/data-and-algorithms
|
upload.py
|
Python
|
mit
| 3,932
|
"""
scitran.data.meeg
=================
Data format for M/EEG data using mne-python.
"""
import logging
import tempfile
import zipfile
import warnings
import os
from os import path as op
from datetime import datetime, date
import shutil
from mne.io import read_raw_fif
from .. import data
log = logging.getLogger(__name__) # root logger already configured
# see data.py for expected project properties
project_properties = data.project_properties
# add additional session properties, which should be added as attributes of
# the Reader object
_session_properties = {
'subject': {
'type': 'object',
'properties': {
'firstname': {
'field': 'subj_firstname',
'title': 'First Name',
'type': 'string',
},
'lastname': {
'field': 'subj_lastname',
'title': 'Last Name',
'type': 'string',
},
'dob': {
'field': 'subj_dob',
'title': 'Date of Birth',
'type': 'string',
'format': 'date', # i.e., datetime object
},
'sex': {
'field': 'subj_sex',
'title': 'Sex',
'type': 'string',
'enum': ['male', 'female'],
},
'hand': {
'field': 'subj_hand',
'title': 'Handedness',
'type': 'string',
'enum': ['right', 'left'],
},
},
},
}
session_properties = data.dict_merge(data.session_properties,
_session_properties)
_acquisition_properties = { # add custom acquisition properties
}
acquisition_properties = data.dict_merge(data.acquisition_properties,
_acquisition_properties)
class MEEGError(data.DataError):
pass
class MEEGReader(data.Reader):
"""
Parameters
----------
path : str
Path to input file.
load_data : boolean
Indicate if a reader should attempt to immediately load all data.
Default False.
"""
project_properties = project_properties
session_properties = session_properties
acquisition_properties = acquisition_properties
domain = u'meeg'
filetype = u'meeg'
state = ['orig']
def __init__(self, path, load_data=False, timezone=None):
super(MEEGReader, self).__init__(path, load_data, timezone)
#
# Process the incoming data
#
self._temp_dir = tempfile.mkdtemp()
os.mkdir(op.join(self._temp_dir, 'reap'))
try:
with zipfile.ZipFile(self.filepath, 'r') as zip_file:
zip_fnames = [op.join('reap', op.basename(fname))
for fname in zip_file.namelist()]
fnames = [zip_file.extract(fname, self._temp_dir)
for fname in zip_fnames if fname.endswith('.fif')]
except Exception as e:
raise MEEGError(e)
# load information and optionally data from the files
with warnings.catch_warnings(record=True):
self._raws = [read_raw_fif(fname, allow_maxshield=True,
preload=load_data)
for fname in fnames]
info = self._raws[0].info
subject_info = info['subject_info']
hand_dict = {1: 'right', 2: 'left'}
sex_dict = {1: 'male', 2: 'female'}
#
# Parameters required by NIMS
#
# pick a unique filename
meas_date = datetime.fromtimestamp(info['meas_date'][0])
fname = meas_date.strftime('%Y_%m_%d_%H_%M_%S')
self.filename = fname
self.group_id = info['experimenter'] # XXX always "neuromag", !useful
self.project_name = info['proj_name']
self.session_id = meas_date.strftime('%Y%m%d')
self.acquisition = info['description']
self.session_subject = subject_info['his_id']
#
# Additional session properties
#
self.subj_firstname = subject_info['first_name']
self.subj_lastname = subject_info['last_name']
self.subj_dob = \
datetime.combine(date(*subject_info['birthday']),
datetime.min.time())
self.subj_hand = hand_dict[subject_info['hand']]
self.subj_sex = sex_dict[subject_info['sex']]
# Parsing is complete
self.metadata_status = 'complete'
def __del__(self):
shutil.rmtree(self._temp_dir)
def load_data(self):
super(MEEGReader, self).load_data()
for raw in self._raws:
raw.preload_data()
@property
def nims_group_id(self):
return self.group_id
@property
def nims_project(self):
return self.project_name
@property
def nims_session_id(self):
return self.session_id
@property
def nims_session_label(self):
return self.session_id
@property
def nims_session_subject(self):
return self.session_subject
@property
def nims_acquisition_id(self):
return self.acquisition
@property
def nims_acquisition_label(self):
return self.acquisition
@property
def nims_acquisition_description(self):
return self.acquisition
@property
def nims_file_name(self):
return self.filename
@property
def nims_file_kinds(self):
return ['FIF']
# the following are all handled by the super class Reader
@property
def nims_metadata_status(self):
return super(MEEGReader, self).nims_metadata_status
@property
def nims_file_ext(self):
return super(MEEGReader, self).nims_file_ext
@property
def nims_file_domain(self):
return super(MEEGReader, self).nims_file_domain
@property
def nims_file_type(self):
return super(MEEGReader, self).nims_file_type
@property
def nims_file_state(self):
return super(MEEGReader, self).nims_file_state
@property
def nims_timestamp(self):
return super(MEEGReader, self).nims_timestamp
@property
def nims_timezone(self):
return super(MEEGReader, self).nims_timezone
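# Illustrative usage (a sketch; the path is a placeholder for a zip archive of
# .fif files):
#
#     reader = MEEGReader('/path/to/meeg_acquisition.zip')
#     print(reader.nims_session_id, reader.nims_file_kinds)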
|
scitran/data
|
scitran/data/meeg/meeg.py
|
Python
|
mit
| 6,305
|
from collections import namedtuple
from pybliometrics.scopus.superclasses import Retrieval
from pybliometrics.scopus.utils import check_parameter_value, get_link
class SerialTitle(Retrieval):
@property
def aggregation_type(self):
"""The type of the source."""
return self._entry['prism:aggregationType']
@property
def citescoreyearinfolist(self):
"""A list of two tuples of the form (year, cite-score). The first
tuple represents the current cite-score, the second tuple
represents the tracker cite-score."""
try:
d = self._entry['citeScoreYearInfoList']
except KeyError:
return None
current = (d['citeScoreCurrentMetricYear'], d['citeScoreCurrentMetric'])
tracker = (d['citeScoreTrackerYear'], d['citeScoreTracker'])
return [current, tracker]
@property
def eissn(self):
"""The electronic ISSN of the source."""
return self._entry.get('prism:eIssn')
@property
def issn(self):
"""The ISSN of the source."""
return self._entry.get('prism:issn')
@property
def oaallowsauthorpaid(self):
"""Whether under the Open-Access policy authors are allowed to pay."""
return self._entry.get('oaAllowsAuthorPaid')
@property
def openaccess(self):
"""Open Access status (0 or 1)."""
return self._entry.get('openaccess')
@property
def openaccessstartdate(self):
"""Starting availability date."""
return self._entry.get('openaccessStartDate')
@property
def openaccesstype(self):
"""Open Archive status (full or partial)."""
return self._entry.get('openaccessType')
@property
def openaccessarticle(self):
"""Open Access status (boolean)."""
return self._entry.get('openaccessArticle')
@property
def openarchivearticle(self):
"""Open Archive status (boolean)."""
return self._entry.get('openArchiveArticle')
@property
def openaccesssponsorname(self):
"""The name of the Open Access sponsor."""
return self._entry.get('openaccessSponsorName')
@property
def openaccesssponsortype(self):
"""The type of the Open Access sponsor."""
return self._entry.get('openaccessSponsorType')
@property
def openaccessuserlicense(self):
"""The User license."""
return self._entry.get('openaccessUserLicense')
@property
def publisher(self):
"""The publisher of the source."""
return self._entry['dc:publisher']
@property
def scopus_source_link(self):
"""URL to info site on scopus.com."""
return get_link(self._entry, 0, ["link"])
@property
def self_link(self):
"""URL to the source's API page."""
return get_link(self._json, 0, ["link"])
@property
def sjrlist(self):
"""The SCImago Journal Rank (SJR) indicator as list of
(year, indicator)-tuples. See
https://www.scimagojr.com/journalrank.php.
"""
return _parse_list(self._entry, "SJR")
@property
def sniplist(self):
"""The Source-Normalized Impact per Paper (SNIP) as list of
(year, indicator)-tuples. See
https://blog.scopus.com/posts/journal-metrics-in-scopus-source-normalized-impact-per-paper-snip.
"""
return _parse_list(self._entry, "SNIP")
@property
def source_id(self):
"""The Scopus ID of the source."""
return self._entry['source-id']
@property
def subject_area(self):
"""List of named tuples of subject areas in the form
(area, abbreviation, code) of the source.
"""
area = namedtuple('Subjectarea', 'area abbreviation code')
areas = [area(area=item['$'], code=item['@code'],
abbreviation=item['@abbrev'])
for item in self._entry["subject-area"]]
return areas or None
@property
def title(self):
"""The title of the source."""
return self._entry['dc:title']
def __init__(self, issn, refresh=False, view="ENHANCED", years=None):
"""Interaction with the Serial Title API.
Parameters
----------
issn : str or int
The ISSN or the E-ISSN of the source.
refresh : bool or int (optional, default=False)
Whether to refresh the cached file if it exists or not. If int
is passed, cached file will be refreshed if the number of days
since last modification exceeds that value.
view : str (optional, default="ENHANCED")
The view of the file that should be downloaded. Allowed values:
BASIC, STANDARD, ENHANCED. For details see
https://dev.elsevier.com/sc_serial_title_views.html.
years : str (optional, default=None)
A string specifying a year or range of years (combining two
years with a hyphen) for which yearly metric data (SJR, SNIP,
yearly-data) should be looked up for. If None, only the
most recent metric data values are provided.
Note: If not None, refresh will always be True.
Examples
--------
See https://pybliometrics.readthedocs.io/en/stable/examples/SerialTitle.html.
Notes
-----
The directory for cached results is `{path}/{view}/{source_id}`,
where `path` is specified in `~/.scopus/config.ini`.
"""
# Checks
check_parameter_value(view, ('BASIC', 'STANDARD', 'ENHANCED'), "view")
# Load json
self._id = str(issn)
self._years = years
# Force refresh when years is specified
if years:
refresh = True
Retrieval.__init__(self, identifier=self._id, view=view, date=years,
api='SerialTitle', refresh=refresh)
self._json = self._json['serial-metadata-response']
self._entry = self._json['entry'][0]
def __str__(self):
"""Print a summary string."""
date = self.get_cache_file_mdate().split()[0]
areas = [e.area for e in self.subject_area]
if len(areas) == 1:
areas = areas[0]
else:
areas = " and ".join([", ".join(areas[:-1]), areas[-1]])
s = f"'{self.title}', {self.aggregation_type} published by "\
f"'{self.publisher}', is active in {areas}\n"
metrics = []
if self.sjrlist:
metrics.append(f"SJR: year value")
for rec in self.sjrlist:
metrics.append(f" {rec[0]} {rec[1]}")
if self.sniplist:
metrics.append(f"SNIP: year value")
for rec in self.sniplist:
metrics.append(f" {rec[0]} {rec[1]}")
if metrics:
s += f"Metrics as of {date}:\n " + "\n ".join(metrics) + "\n"
s += f" ISSN: {self.issn or '-'}, E-ISSN: {self.eissn or '-'}, "\
f"Scopus ID: {self.source_id}"
return s
def _parse_list(d, metric):
"""Auxiliary function to parse SNIP and SJR lists."""
try:
values = [(r['@year'], r['$']) for r in d[metric + "List"][metric]]
return sorted(set(values))
except (KeyError, TypeError):
return None
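# Illustrative usage (a sketch; requires network access, a configured API key,
# and a valid ISSN -- the one below is a placeholder):
#
#     source = SerialTitle("00368075")
#     print(source.title, source.publisher)
#     print(source.sjrlist)   # list of (year, value) tuples, or None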
|
jkitchin/scopus
|
pybliometrics/scopus/serial_title.py
|
Python
|
mit
| 7,337
|
#!/usr/bin/python3 -B
exec(open("../index.py").read())
from waitress import serve
serve(application, host='0.0.0.0', port=8080, threads=1, channel_timeout=1)
|
shark555/websnake_demo
|
scripts/serve.py
|
Python
|
mit
| 161
|
import sys
import numpy as np
if __name__ == '__main__':
print 'Loading word vectors...'
wordvecs = None
wordlist = []
for i, line in enumerate(sys.stdin):
word, vec = line.strip().split(' ', 1)
vec = map(float, vec.split())
if wordvecs is None:
wordvecs = np.ones((400000, len(vec)), dtype=np.float)
wordvecs[i] = vec
wordlist.append(word)
words = dict((k, wordvecs[v]) for v, k in enumerate(wordlist))
tests = [('he', words['he']), ('she', words['she'])]
tests = [
('athens-greece+berlin', words['athens'] - words['greece'] + words['berlin']),
('sydney-australia+berlin', words['sydney'] - words['australia'] + words['berlin']),
        ('australia-sydney+germany', words['australia'] - words['sydney'] + words['germany']),
('king-male+female', words['king'] - words['male'] + words['female']),
('king-man+woman', words['king'] - words['man'] + words['woman']),
('queen-female+male', words['queen'] - words['female'] + words['male']),
('queen-woman+man', words['queen'] - words['woman'] + words['man']),
        ('plane-air+rail', words['plane'] - words['air'] + words['rail']),
]
for test, tvec in tests:
results = []
print '=-=-' * 10
print 'Testing {}'.format(test)
res = np.dot(wordvecs, tvec) / np.linalg.norm(tvec) / np.linalg.norm(wordvecs, axis=1)
results = zip(res, wordlist)
print '\n'.join([w for _, w in sorted(results, reverse=True)[:20]])
|
Smerity/glove-guante
|
cosine_similarity.py
|
Python
|
mit
| 1,457
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutTuples(Koan):
def test_creating_a_tuple(self):
count_of_three = (1, 2, 5)
self.assertEqual(5, count_of_three[2])
def test_tuples_are_immutable_so_item_assignment_is_not_possible(self):
count_of_three = (1, 2, 5)
try:
count_of_three[2] = "three"
except TypeError as ex:
self.assertMatch("'tuple' object does not support item assignment", ex[0])
def test_tuples_are_immutable_so_appending_is_not_possible(self):
count_of_three = (1, 2, 5)
try:
count_of_three.append("boom")
except Exception as ex:
self.assertEqual(AttributeError, type(ex))
# Note, assertMatch() uses regular expression pattern matching,
# so you don't have to copy the whole message.
self.assertMatch("'tuple' object has no attribute 'append'", ex[0])
# Tuples are less flexible than lists, but faster.
def test_tuples_can_only_be_changed_through_replacement(self):
count_of_three = (1, 2, 5)
list_count = list(count_of_three)
list_count.append("boom")
count_of_three = tuple(list_count)
self.assertEqual((1, 2, 5, 'boom'), count_of_three)
def test_tuples_of_one_look_peculiar(self):
self.assertEqual(int, (1).__class__)
self.assertEqual(tuple, (1,).__class__)
self.assertEqual(("Hello comma!", ), ("Hello comma!", ))
def test_tuple_constructor_can_be_surprising(self):
self.assertEqual(('S', 'u', 'r', 'p', 'r', 'i', 's', 'e', '!'), tuple("Surprise!"))
def test_creating_empty_tuples(self):
self.assertEqual((), ())
self.assertEqual((), tuple()) # Sometimes less confusing
def test_tuples_can_be_embedded(self):
lat = (37, 14, 6, 'N')
lon = (115, 48, 40, 'W')
place = ('Area 51', lat, lon)
self.assertEqual(('Area 51', (37, 14, 6, 'N'), (115, 48, 40, 'W')), place)
def test_tuples_are_good_for_representing_records(self):
locations = [
("Illuminati HQ", (38, 52, 15.56, 'N'), (77, 3, 21.46, 'W')),
("Stargate B", (41, 10, 43.92, 'N'), (1, 49, 34.29, 'W')),
]
locations.append(
("Cthulhu", (26, 40, 1, 'N'), (70, 45, 7, 'W'))
)
self.assertEqual("Cthulhu", locations[2][0])
self.assertEqual(15.56, locations[0][1][2])
|
codetestcode/pyintdemo
|
python2/koans/about_tuples.py
|
Python
|
mit
| 2,480
|
from axiom.test.historic.stubloader import StubbedTest
from xquotient.compose import Composer, Drafts
class ComposerUpgradeTestCase(StubbedTest):
"""
Test that the Composer no longer has a 'drafts' attribute, that no Drafts
items have been created and that the other attributes have been copied.
"""
def test_upgrade(self):
composer = self.store.findUnique(Composer)
self.failIf(hasattr(composer, 'drafts'), "Still has 'drafts' attribute")
self.assertNotEqual(composer.privateApplication, None)
self.assertEqual(self.store.count(Drafts), 0)
|
twisted/quotient
|
xquotient/test/historic/test_composer4to5.py
|
Python
|
mit
| 598
|
import astra
def gpu_fp(pg, vg, v):
v_id = astra.data2d.create('-vol', vg, v)
rt_id = astra.data2d.create('-sino', pg)
fp_cfg = astra.astra_dict('FP_CUDA')
fp_cfg['VolumeDataId'] = v_id
fp_cfg['ProjectionDataId'] = rt_id
fp_id = astra.algorithm.create(fp_cfg)
astra.algorithm.run(fp_id)
out = astra.data2d.get(rt_id)
astra.algorithm.delete(fp_id)
astra.data2d.delete(rt_id)
astra.data2d.delete(v_id)
return out
def gpu_bp(pg, vg, rt, supersampling=1):
v_id = astra.data2d.create('-vol', vg)
rt_id = astra.data2d.create('-sino', pg, data=rt)
bp_cfg = astra.astra_dict('BP_CUDA')
bp_cfg['ReconstructionDataId'] = v_id
bp_cfg['ProjectionDataId'] = rt_id
bp_id = astra.algorithm.create(bp_cfg)
astra.algorithm.run(bp_id)
out = astra.data2d.get(v_id)
astra.algorithm.delete(bp_id)
astra.data2d.delete(rt_id)
astra.data2d.delete(v_id)
return out
def gpu_fbp(pg, vg, rt):
rt_id = astra.data2d.create('-sino', pg, data=rt)
v_id = astra.data2d.create('-vol', vg)
fbp_cfg = astra.astra_dict('FBP_CUDA')
fbp_cfg['ReconstructionDataId'] = v_id
fbp_cfg['ProjectionDataId'] = rt_id
#fbp_cfg['FilterType'] = 'none'
fbp_id = astra.algorithm.create(fbp_cfg)
astra.algorithm.run(fbp_id, 100)
out = astra.data2d.get(v_id)
astra.algorithm.delete(fbp_id)
astra.data2d.delete(rt_id)
astra.data2d.delete(v_id)
return out
def gpu_sirt(pg, vg, rt, n_iters=100):
rt_id = astra.data2d.create('-sino', pg, data=rt)
v_id = astra.data2d.create('-vol', vg)
sirt_cfg = astra.astra_dict('SIRT_CUDA')
sirt_cfg['ReconstructionDataId'] = v_id
sirt_cfg['ProjectionDataId'] = rt_id
#sirt_cfg['option'] = {}
#sirt_cfg['option']['MinConstraint'] = 0
sirt_id = astra.algorithm.create(sirt_cfg)
astra.algorithm.run(sirt_id, n_iters)
out = astra.data2d.get(v_id)
astra.algorithm.delete(sirt_id)
astra.data2d.delete(rt_id)
astra.data2d.delete(v_id)
return out
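# Illustrative round-trip (a sketch; assumes a CUDA-capable GPU and a square
# 2-D numpy array `vol`):
#
#     import numpy as np
#     vg = astra.create_vol_geom(vol.shape[0], vol.shape[1])
#     pg = astra.create_proj_geom('parallel', 1.0, vol.shape[0],
#                                 np.linspace(0, np.pi, 180, False))
#     sino = gpu_fp(pg, vg, vol)
#     rec = gpu_fbp(pg, vg, sino)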
|
buzmakov/tomography_scripts
|
tomo/yaivan/dispersion/alg.py
|
Python
|
mit
| 1,930
|
from __future__ import absolute_import
import json
from django import forms
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ObjectDoesNotExist
try:
from django.urls import reverse, resolve
except:
from django.core.urlresolvers import reverse, resolve
from django.core.validators import EMPTY_VALUES
from django.db import models
from django.http.request import HttpRequest
from django.utils.encoding import smart_str, force_text
from .form_fields import ChainedChoiceField, ChainedModelChoiceField, ChainedModelMultipleChoiceField
class ChainedChoicesMixin(object):
"""
Form Mixin to be used with ChainedChoicesForm and ChainedChoicesModelForm.
It loads the options when there is already an instance or initial data.
"""
user = None
prefix = None
fields = []
chained_fields_names = []
chained_model_fields_names = []
def init_chained_choices(self, *args, **kwargs):
self.chained_fields_names = self.get_fields_names_by_type(ChainedChoiceField)
self.chained_model_fields_names = self.get_fields_names_by_type(ChainedModelChoiceField) + self.get_fields_names_by_type(ChainedModelMultipleChoiceField)
self.user = kwargs.get('user', self.user)
if kwargs.get('data', None) is not None:
self.set_choices_via_ajax(kwargs['data'])
elif len(args) > 0 and args[0] not in EMPTY_VALUES:
self.set_choices_via_ajax(args[0])
elif kwargs.get('instance', None) is not None:
oldest_parent_field_names = list(set(self.get_oldest_parent_field_names()))
youngest_child_names = list(set(self.get_youngest_children_field_names()))
for youngest_child_name in youngest_child_names:
self.find_instance_attr(kwargs['instance'], youngest_child_name)
for oldest_parent_field_name in oldest_parent_field_names:
try:
self.fields[oldest_parent_field_name].initial = getattr(self, '%s' % oldest_parent_field_name)
except AttributeError:
pass
self.set_choices_via_ajax()
elif 'initial' in kwargs and kwargs['initial'] not in EMPTY_VALUES:
self.set_choices_via_ajax(kwargs['initial'], is_initial=True)
else:
for field_name in self.chained_fields_names + self.chained_model_fields_names:
empty_label = self.fields[field_name].empty_label
self.fields[field_name].choices = [('', empty_label)]
def set_choices_via_ajax(self, kwargs=None, is_initial=False):
for field_name in self.chained_fields_names + self.chained_model_fields_names:
field = self.fields[field_name]
try:
if kwargs is not None:
# initial data do not have any prefix
if self.prefix in EMPTY_VALUES or is_initial:
parent_value = kwargs.get(field.parent_field, None)
field_value = kwargs.get(field_name, None)
else:
parent_value = kwargs.get('%s-%s' % (self.prefix, field.parent_field), None)
field_value = kwargs.get('%s-%s' % (self.prefix, field_name), None)
else:
parent_value = self.initial.get(field.parent_field, None)
field_value = self.initial.get(field_name, None)
if parent_value is None:
parent_value = getattr(self, '%s' % field.parent_field, None)
if field_value is None:
field_value = getattr(self, '%s' % field_name, None)
field.choices = [('', field.empty_label)]
# check that parent_value is valid
if parent_value:
parent_value = getattr(parent_value, 'pk', parent_value)
url = force_text(field.ajax_url)
params = {
'field_name': field_name,
'parent_value': parent_value,
'field_value': field_value
}
# This will get the callable from the url.
# All we need to do is pass in a 'request'
url_callable = resolve(url).func
# Build the fake request
fake_request = HttpRequest()
fake_request.META["SERVER_NAME"] = "localhost"
fake_request.META["SERVER_PORT"] = '80'
# Add parameters and user if supplied
fake_request.method = "GET"
for key, value in params.items():
fake_request.GET[key] = value
if hasattr(self, "user") and self.user:
fake_request.user = self.user
else:
fake_request.user = AnonymousUser()
# Get the response
response = url_callable(fake_request)
# Apply the data (if it's returned)
if smart_str(response.content):
try:
field.choices += json.loads(smart_str(response.content))
except ValueError:
raise ValueError('Data returned from request (url={url}, params={params}) could not be deserialized to Python object: {data}'.format(
url=url,
params=params,
data=response.content
))
field.initial = field_value
except ObjectDoesNotExist:
field.choices = ()
def get_fields_names_by_type(self, type_):
result = []
for field_name in self.fields:
field = self.fields[field_name]
if isinstance(field, type_):
result.append(field_name)
return result
def get_parent_fields_names(self):
result = []
for field_name in self.fields:
field = self.fields[field_name]
if hasattr(field, 'parent_field'):
result.append(field.parent_field)
return result
def get_children_field_names(self, parent_name):
if parent_name in EMPTY_VALUES:
return []
result = []
for field_name in self.fields:
field = self.fields[field_name]
if getattr(field, 'parent_field', None) == parent_name:
result.append(field_name)
return result
def get_chained_fields_names(self):
chained_fields_names = self.get_fields_names_by_type(ChainedChoiceField)
chained_model_fields_names = self.get_fields_names_by_type(ChainedModelChoiceField)
return chained_fields_names + chained_model_fields_names
def get_oldest_parent_field_names(self):
chained_fields_names = self.get_fields_names_by_type(ChainedChoiceField)
chained_model_fields_names = self.get_fields_names_by_type(ChainedModelChoiceField)
oldest_parent_field_names = []
for field_name in self.get_parent_fields_names():
if field_name not in chained_fields_names and field_name not in chained_model_fields_names:
oldest_parent_field_names.append(field_name)
return oldest_parent_field_names
def get_youngest_children_field_names(self):
result = []
chained_fields_names = self.get_fields_names_by_type(ChainedChoiceField)
chained_model_fields_names = self.get_fields_names_by_type(ChainedModelChoiceField)
for field_name in chained_fields_names + chained_model_fields_names:
if field_name not in self.get_parent_fields_names():
result.append(field_name)
return result
def find_instance_attr(self, instance, attr_name):
field = self.fields[attr_name]
if hasattr(instance, attr_name):
attribute = getattr(instance, attr_name)
attr_value = getattr(attribute, 'pk', smart_str(attribute)) if attribute else None
setattr(self, '%s' % attr_name, attr_value)
if hasattr(field, 'parent_field'):
parent_instance = attribute if isinstance(attribute, models.Model) else instance
self.find_instance_attr(parent_instance, field.parent_field)
class ChainedChoicesForm(forms.Form, ChainedChoicesMixin):
"""
Form class to be used with ChainedChoiceField and ChainedSelect widget
If there is request POST data in *args (i.e. form validation was invalid)
then the options will be loaded when the form is built.
"""
def __init__(self, language_code=None, *args, **kwargs):
if kwargs.get('user'):
self.user = kwargs.pop('user') # To get request.user. Do not use kwargs.pop('user', None) due to potential security hole
super(ChainedChoicesForm, self).__init__(*args, **kwargs)
self.language_code = language_code
self.init_chained_choices(*args, **kwargs)
def is_valid(self):
if self.language_code:
# response is not translated to requested language code :/
# so translation is triggered manually
from django.utils.translation import activate
activate(self.language_code)
return super(ChainedChoicesForm, self).is_valid()
class ChainedChoicesModelForm(forms.ModelForm, ChainedChoicesMixin):
"""
Form class to be used with ChainedChoiceField and ChainedSelect widget
If there is already an instance (i.e. editing)
then the options will be loaded when the form is built.
"""
def __init__(self, *args, **kwargs):
if kwargs.get('user'):
self.user = kwargs.pop('user') # To get request.user. Do not use kwargs.pop('user', None) due to potential security hole
super(ChainedChoicesModelForm, self).__init__(*args, **kwargs)
self.language_code = kwargs.get('language_code', None)
self.init_chained_choices(*args, **kwargs)
def is_valid(self):
if self.language_code:
# response is not translated to requested language code :/
# so translation is triggered manually
from django.utils.translation import activate
activate(self.language_code)
return super(ChainedChoicesModelForm, self).is_valid()
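# Illustrative form definition (a sketch; the field names, choices and the ajax
# url name are assumptions, not part of this module):
#
#     class AddressForm(ChainedChoicesForm):
#         country = forms.ChoiceField(choices=COUNTRY_CHOICES)
#         city = ChainedChoiceField(parent_field='country',
#                                   ajax_url=reverse_lazy('ajax_chained_cities'))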
|
PragmaticMates/django-clever-selects
|
clever_selects/forms.py
|
Python
|
mit
| 10,535
|
# Atomic covalent radius data
# http://www.periodictable.com/Properties/A/CovalentRadius.an.html
# Updated Jun. 9th, 2016
class Covalent(object):
x = {
"H": 0.37, "He": 0.32, "Li": 1.34, "Be": 0.90, "B": 0.82, "C": 0.77,
"N": 0.75, "O": 0.73, "F": 0.71, "Ne": 0.69, "Na": 1.54, "Mg": 1.30,
"Al": 1.18, "Si": 1.11, "P": 1.06, "S": 1.02, "Cl": 0.99, "Ar": 0.97,
"K": 1.96, "Ca": 1.74, "Sc": 1.44, "Ti": 1.36, "V": 1.25, "Cr": 1.27,
"Mn": 1.39, "Fe": 1.25, "Co": 1.26, "Ni": 1.21, "Cu": 1.38, "Zn": 1.31,
"Ga": 1.26, "Ge": 1.22, "As": 1.19, "Se": 1.16, "Br": 1.14, "Kr": 1.10,
"Rb": 2.11, "Sr": 1.92, "Y": 1.62, "Zr": 1.48, "Nb": 1.37, "Mo": 1.45,
"Tc": 1.56, "Ru": 1.26, "Rh": 1.35, "Pd": 1.31, "Ag": 1.53, "Cd": 1.48,
"In": 1.44, "Sn": 1.41, "Sb": 1.38, "Te": 1.35, "I": 1.33, "Xe": 1.30,
"Cs": 2.25, "Ba": 1.98, "La": 1.69, "Ce": 0.00, "Pr": 0.00, "Nd": 0.00,
"Pm": 0.00, "Sm": 0.00, "Eu": 0.00, "Gd": 0.00, "Tb": 0.00, "Dy": 0.00,
"Ho": 0.00, "Er": 0.00, "Tm": 0.00, "Yb": 0.00, "Lu": 1.60, "Hf": 1.50,
"Ta": 1.38, "W": 1.46, "Re": 1.59, "Os": 1.28, "Ir": 1.37, "Pt": 1.28,
"Au": 1.44, "Hg": 1.49, "Tl": 1.48, "Pb": 1.47, "Bi": 1.46, "Po": 0.00,
"At": 0.00, "Rn": 1.45, "Fr": 0.00, "Ra": 0.00, "Ac": 0.00, "Th": 0.00,
"Pa": 0.00, "U": 0.00, "Np": 0.00, "Pu": 0.00, "Am": 0.00, "Cm": 0.00,
"Bk": 0.00, "Cf": 0.00, "Es": 0.00, "Fm": 0.00, "Md": 0.00, "No": 0.00,
"Lr": 0.00, "Rf": 0.00, "Db": 0.00, "Sg": 0.00, "Bh": 0.00, "Hs": 0.00,
"Mt": 0.00, "Ds": 0.00, "Rg": 0.00, "Uub": 0.00, "Uut": 0.00, "Uuq": 0.00,
"Uup": 0.00, "Uuh": 0.00, "Uus": 0.00, "Uuo": 0.00
}
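# A minimal usage sketch grounded in the table above: look up two covalent
# radii (in angstroms) and estimate a bond length as their sum. The
# sum-of-radii bond-length heuristic is a common approximation, not part of
# this file.
if __name__ == "__main__":
    r_c, r_h = Covalent.x["C"], Covalent.x["H"]
    print("C-H bond length estimate: %.2f A" % (r_c + r_h))  # 0.77 + 0.37 = 1.14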
|
stczhc/neupy
|
tests/fitting/coval.py
|
Python
|
mit
| 1,681
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-06-16 22:52
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('transcripts', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='transcriptpage',
name='updated_at',
field=models.DateTimeField(auto_now=True, default=datetime.datetime(2016, 6, 16, 22, 52, 58, 616986, tzinfo=utc)),
preserve_default=False,
),
]
|
harvard-lil/nuremberg
|
nuremberg/transcripts/migrations/0002_transcriptpage_updated_at.py
|
Python
|
mit
| 619
|
"""
Load pp, plot and save
8km difference
"""
import os, sys
#%matplotlib inline
#%pylab inline
import matplotlib
matplotlib.use('Agg')
# Must be before importing matplotlib.pyplot or pylab!
from matplotlib import rc
from matplotlib.font_manager import FontProperties
from matplotlib import rcParams
from mpl_toolkits.basemap import Basemap
rc('font', family = 'serif', serif = 'cmr10')
rc('text', usetex=True)
rcParams['text.usetex']=True
rcParams['text.latex.unicode']=True
rcParams['font.family']='serif'
rcParams['font.serif']='cmr10'
import matplotlib.pyplot as plt
#from matplotlib import figure
import matplotlib as mpl
import matplotlib.cm as mpl_cm
import numpy as np
import iris
import iris.coords as coords
import iris.quickplot as qplt
import iris.plot as iplt
import iris.coord_categorisation
import iris.unit as unit
import cartopy.crs as ccrs
import cartopy.io.img_tiles as cimgt
import matplotlib.ticker as mticker
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
import datetime
from mpl_toolkits.basemap import cm
import imp
from textwrap import wrap
import re
import iris.analysis.cartography
import math
from dateutil import tz
#import multiprocessing as mp
import gc
import types
import pdb
save_path='/nfs/a90/eepdw/Figures/EMBRACE/'
model_name_convert_title = imp.load_source('util', '/nfs/see-fs-01_users/eepdw/python_scripts/modules/model_name_convert_title.py')
unrotate = imp.load_source('util', '/nfs/see-fs-01_users/eepdw/python_scripts/modules/unrotate_pole.py')
#pp_file = ''
plot_diags=['408']
#plot_diags=['sp_hum']
plot_levels = [925, 850, 700, 500]
#experiment_ids = ['dkmbq', 'dklyu']
experiment_ids = ['djznw', 'djzny', 'djznq', 'djzns', 'dklwu', 'dklzq'] # All minus large 2
#Experiment_ids = ['djzny', 'djznq', 'djzns', 'dkjxq', 'dklyu', 'dkmbq', 'dklwu', 'dklzq', 'dkbhu', 'djznu', 'dkhgu' ] # All 12
#experiment_ids = ['djzny', 'djznq', 'djzns', 'dkjxq', 'dklwu', 'dklzq', 'dkbhu',] # All 12
#experiment_ids = ['dkbhu', 'dkjxq']
experiment_ids = ['dkmbq', 'dklyu', 'djznw', 'djzny', 'djznq', 'djzns', 'dklwu', 'dklzq'] # All minus large 2
experiment_ids = ['dkbhu']
pp_file_path = '/nfs/a90/eepdw/Data/EMBRACE/'
degs_crop_top = 1.7
degs_crop_bottom = 2.5
from iris.coord_categorisation import add_categorised_coord
# def add_hour_of_day(cube, coord, name='hour'):
# add_categorised_coord(cube, name, coord,
# lambda coord, x: coord.units.num2date(x).hour)
figprops = dict(figsize=(8,8), dpi=100)
#cmap=cm.s3pcpn_l
u = unit.Unit('hours since 1970-01-01 00:00:00',calendar='gregorian')
dx, dy = 10, 10
divisor=10 # for lat/lon rounding
lon_high = 101.866
lon_low = 64.115
lat_high = 33.
lat_low =-6.79
lon_low_tick=lon_low -(lon_low%divisor)
lon_high_tick=math.ceil(lon_high/divisor)*divisor
lat_low_tick=lat_low - (lat_low%divisor)
lat_high_tick=math.ceil(lat_high/divisor)*divisor
def main():
for p_level in plot_levels:
# Set pressure height contour min/max
if p_level == 925:
clev_min = 0.
clev_max = 810.
elif p_level == 850:
clev_min = 1435.
clev_max = 1530.
elif p_level == 700:
clev_min = 3090.
clev_max = 3155.
elif p_level == 500:
clev_min = 5800.
clev_max = 5890.
else:
print 'Contour min/max not set for this pressure level'
# Set potential temperature min/max
if p_level == 925:
clevpt_min = 300.
clevpt_max = 312.
elif p_level == 850:
clevpt_min = 302.
clevpt_max = 310.
elif p_level == 700:
clevpt_min = 312.
clevpt_max = 320.
elif p_level == 500:
clevpt_min = 325.
clevpt_max = 332.
else:
print 'Potential temperature min/max not set for this pressure level'
# Set specific humidity min/max
if p_level == 925:
clevsh_min = 0.012
clevsh_max = 0.020
elif p_level == 850:
clevsh_min = 0.007
clevsh_max = 0.017
elif p_level == 700:
clevsh_min = 0.002
clevsh_max = 0.010
elif p_level == 500:
clevsh_min = 0.001
clevsh_max = 0.005
else:
print 'Specific humidity min/max not set for this pressure level'
#clevs_col = np.arange(clev_min, clev_max)
clevs_lin = np.arange(clev_min, clev_max, 256)
p_level_constraint = iris.Constraint(pressure=p_level)
for plot_diag in plot_diags:
for experiment_id in experiment_ids:
expmin1 = experiment_id[:-1]
# For each day in cube
height_pp_file = '%s_408_on_p_levs_mean_by_day.pp' % (experiment_id)
height_pfile = '%s%s/%s/%s' % (pp_file_path, expmin1, experiment_id, height_pp_file)
cube = iris.load_cube(height_pfile, p_level_constraint)
#print pcube
#print height_cube
time_coords = cube.coord('time')
#add_hour_of_day(pcube, pcube.coord('time'))
#add_hour_of_day(height_cube, height_cube.coord('time'))
iris.coord_categorisation.add_day_of_year(cube, time_coords, name='day_of_year')
#pcube.remove_coord('time')
#cube_diff.remove_coord('time')
#height_cube.remove_coord('time')
#height_cube_diff.remove_coord('time')
#p_cube_difference = iris.analysis.maths.subtract(pcube, cube_diff, dim='hour')
#height_cube_difference = iris.analysis.maths.subtract(height_cube, height_cube_diff, dim='hour')
#pdb.set_trace()
#del height_cube, pcube, height_cube_diff, cube_diff
for t, time_cube in enumerate(cube.slices(['grid_latitude', 'grid_longitude'])):
#pdb.set_trace()
                # Get time of averages for plot title
h = u.num2date(np.array(time_cube.coord('time').points, dtype=float)[0]).strftime('%d%b')
#Convert to India time
from_zone = tz.gettz('UTC')
to_zone = tz.gettz('Asia/Kolkata')
h_utc = u.num2date(np.array(time_cube.coord('day_of_year').points, dtype=float)[0]).replace(tzinfo=from_zone)
h_local = h_utc.astimezone(to_zone).strftime('%H%M')
fig = plt.figure(**figprops)
cmap=plt.cm.RdBu_r
ax = plt.axes(projection=ccrs.PlateCarree(), extent=(lon_low,lon_high,lat_low+degs_crop_bottom,lat_high-degs_crop_top))
m =\
Basemap(llcrnrlon=lon_low,llcrnrlat=lat_low,urcrnrlon=lon_high,urcrnrlat=lat_high, rsphere = 6371229)
#pdb.set_trace()
lat = cube.coord('grid_latitude').points
lon = cube.coord('grid_longitude').points
cs = cube.coord_system('CoordSystem')
lons, lats = np.meshgrid(lon, lat)
lons, lats = iris.analysis.cartography.unrotate_pole\
(lons,lats, cs.grid_north_pole_longitude, cs.grid_north_pole_latitude)
x,y = m(lons,lats)
# if plot_diag=='temp':
# min_contour = clevpt_min
# max_contour = clevpt_max
# cb_label='K'
# main_title='8km Explicit model (dklyu) minus 8km parametrised model geopotential height (grey contours), potential temperature (colours),\
# and wind (vectors) %s UTC %s IST' % (h, h_local)
# tick_interval=2
# clev_number=max_contour-min_contour+1
# elif plot_diag=='sp_hum':
# min_contour = clevsh_min
# max_contour = clevsh_max
# cb_label='kg/kg'
# main_title='8km Explicit model (dklyu) minus 8km parametrised model geopotential height (grey contours), specific humidity (colours),\
# and wind (vectors) %s UTC %s IST' % (h, h_local)
# tick_interval=0.002
# clev_number=max_contour-min_contour+0.001
# clevs = np.linspace(min_contour, max_contour, clev_number)
# #clevs = np.linspace(-3, 3, 32)
# cont = plt.contourf(x,y,time_cube.data, clevs, cmap=cmap, extend='both')
#cont = iplt.contourf(time_cube, clevs, cmap=cmap, extend='both')
cs_lin = iplt.contour(time_cube, clevs_lin,colors='#262626',linewidths=1.)
plt.clabel(cs_lin, fontsize=14, fmt='%d', color='black')
#del time_cube
#plt.clabel(cont, fmt='%d')
#ax.stock_img()
ax.coastlines(resolution='110m', color='#262626')
gl = ax.gridlines(draw_labels=True,linewidth=0.5, color='#262626', alpha=0.5, linestyle='--')
gl.xlabels_top = False
gl.ylabels_right = False
#gl.xlines = False
dx, dy = 10, 10
gl.xlocator = mticker.FixedLocator(range(int(lon_low_tick),int(lon_high_tick)+dx,dx))
gl.ylocator = mticker.FixedLocator(range(int(lat_low_tick),int(lat_high_tick)+dy,dy))
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
gl.xlabel_style = {'size': 12, 'color':'#262626'}
#gl.xlabel_style = {'color': '#262626', 'weight': 'bold'}
gl.ylabel_style = {'size': 12, 'color':'#262626'}
# cbar = fig.colorbar(cont, orientation='horizontal', pad=0.05, extend='both')
# cbar.set_label('%s' % cb_label, fontsize=10, color='#262626')
# #cbar.set_label(time_cube.units, fontsize=10, color='#262626')
# cbar.set_ticks(np.arange(min_contour, max_contour+tick_interval,tick_interval))
# ticks = (np.arange(min_contour, max_contour+tick_interval,tick_interval))
# cbar.set_ticklabels(['${%.1f}$' % i for i in ticks])
# cbar.ax.tick_params(labelsize=10, color='#262626')
#main_title='Mean Rainfall for EMBRACE Period -%s UTC (%s IST)' % (h, h_local)
#main_title=time_cube.standard_name.title().replace('_',' ')
#model_info = re.sub(r'[(\']', ' ', model_info)
#model_info = re.sub(r'[\',)]', ' ', model_info)
#print model_info
file_save_name = '%s_%s_%s_hPa_and_geop_height_%s' % (experiment_id, plot_diag, p_level, h)
save_dir = '%s%s/%s' % (save_path, experiment_id, plot_diag)
if not os.path.exists('%s' % save_dir): os.makedirs('%s' % (save_dir))
#plt.show()
#fig.savefig('%s/%s_notitle.png' % (save_dir, file_save_name), format='png', bbox_inches='tight')
plt.title('%s UTC' % (h))
fig.savefig('%s/%s_short_title.png' % (save_dir, file_save_name) , format='png', bbox_inches='tight')
#model_info=re.sub('(.{68} )', '\\1\n', str(model_name_convert_title.main(experiment_id)), 0, re.DOTALL)
#plt.title('\n'.join(wrap('%s\n%s' % (main_title, model_info), 1000,replace_whitespace=False)), fontsize=16)
#fig.savefig('%s/%s.png' % (save_dir, file_save_name), format='png', bbox_inches='tight')
fig.clf()
plt.close()
#del time_cube
gc.collect()
if __name__ == '__main__':
main()
#proc=mp.Process(target=worker)
#proc.daemon=True
#proc.start()
#proc.join()
|
peterwilletts24/Python-Scripts
|
plot_scripts/EMBRACE/plot_from_pp_geop_height_by_day_dkbhu.py
|
Python
|
mit
| 12,998
|
from __future__ import division
from pyvx import *
from array import array
class TestDiv(object):
def test_div(self):
g = Graph()
with g:
img = Image(3, 4, DF_IMAGE_U8, array('B', range(12)))
sa1 = img / 2
sa2 = img // 2
sa1.force()
sa2.force()
g.process()
assert [sa1.data[i] for i in range(6)] == [0, 0.5, 1.0, 1.5, 2.0, 2.5]
assert [sa2.data[i] for i in range(6)] == [0, 0, 1, 1, 2, 2]
|
hakanardo/pyvx
|
old/test/test_truediv.py
|
Python
|
mit
| 494
|
from bundleprocessing import interpolateMetric
import pandas as pd
import nibabel as nib
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-templateBundle', '--templateBundle', required = True)
parser.add_argument('-subjectBundle', '--subjectBundle', required = True)
parser.add_argument('-metric', '--metric', required = True)
parser.add_argument('-numPoints', '--numPoints', type=int, required = True)
parser.add_argument('-flip', '--flip', type=bool, required = True)
parser.add_argument('-outTracks', '--outTracks', required = True)
parser.add_argument('-outMetrics', '--outMetrics', required = True)
args = parser.parse_args()
tracks, hdr = nib.trackvis.read(args.templateBundle)
templateBundle = [trk[0] for trk in tracks]
tracks, hdr = nib.trackvis.read(args.subjectBundle)
subjectBundle = [trk[0] for trk in tracks]
image = nib.load(args.metric)
metric = image.get_data()
subjectTracks,scalars = interpolateMetric(templateBundle,
subjectBundle,
metric,
hdr['voxel_size'],
args.numPoints,
args.flip)
nib.trackvis.write(args.outTracks,subjectTracks,hdr)
data = pd.DataFrame(scalars,columns=["Metrics"])
data.to_csv(args.outMetrics,index=False)
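# Example invocation (hypothetical file names, for illustration only):
#
#   python call_interpolate.py -templateBundle template.trk \
#       -subjectBundle subject.trk -metric fa.nii.gz -numPoints 100 \
#       -flip '' -outTracks out.trk -outMetrics metrics.csv
#
# Note that because -flip is declared with type=bool, argparse converts any
# non-empty string to True; pass an empty string to get False.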
|
conorkcorbin/tractometry
|
call_interpolate.py
|
Python
|
mit
| 1,231
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
import os
def path(*path_segments):
return os.path.join(os.getcwd(), *path_segments)
def open_file(*path_segments):
file_path = path(*path_segments)
open(file_path, 'w').close()
return open(file_path, 'a')
header = open(path('README_header.rst'), 'r')
readme = open_file('README.rst')
sphinx = open_file('doc', 'source', 'cli.rst')
sphinx_header = (
    'Command line interface\n',
'======================\n',
'\n',
'.. code-block:: text\n',
'\n',
)
for line in sphinx_header:
sphinx.write(str(line))
footer = open(path('README_footer.rst'), 'r')
for line in header:
readme.write(line)
audiorenamer = subprocess.Popen('audiorenamer --help', shell=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
readme.write('\n')
for line in audiorenamer.stdout:
indented_line = ' ' + line.decode('utf-8')
readme.write(indented_line)
sphinx.write(indented_line)
audiorenamer.wait()
for line in footer:
readme.write(line)
readme.close()
sphinx.close()
|
Josef-Friedrich/audiorename
|
_generate-readme.py
|
Python
|
mit
| 1,114
|
# Copyright (c) 2014 Katsuya Noguchi
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import http_client
def get_by_species(species):
path = '/info/analysis/%s' % species
return http_client.get(path)
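# Minimal usage sketch; assumes http_client.get() issues the request against
# the Ensembl REST endpoint and returns the decoded response, as implied above.
#
# from ensembl.info import analysis
# result = analysis.get_by_species('homo_sapiens')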
|
kn/ensembl
|
ensembl/info/analysis.py
|
Python
|
mit
| 1,205
|
import sys
from pdb import Pdb
from ipdb.__main__ import update_stdout, def_colors, wrap_sys_excepthook
from bdb import BdbQuit  # raised in dispatch_call when quitting
from functools import wraps  # used by the with_trace decorator below
class Tdb(Pdb):
def __init__(self, *args, **kwargs):
Pdb.__init__(self, *args, **kwargs)
self.botframe = None
self.quitting = False
self.stopframe = None
self.codemap = {}
self.entered = False
def add_trace(self, func):
code = func.__code__
self.codemap[code] = 0
def trace_dispatch(self, frame, event, arg):
if self.quitting:
return # None
if event == 'line':
return self.dispatch_line(frame)
if event == 'call':
return self.dispatch_call(frame, arg)
if event == 'return':
return self.dispatch_return(frame, arg)
if event == 'exception':
return self.dispatch_exception(frame, arg)
if event == 'c_call':
return self.trace_dispatch
if event == 'c_exception':
return self.trace_dispatch
if event == 'c_return':
return self.trace_dispatch
print(('bdb.Bdb.dispatch: unknown debugging event:', repr(event)))
return self.trace_dispatch
def dispatch_call(self, frame, arg):
if not self.entered:
f_code = frame.f_code
if f_code in self.codemap:
self.entered = True
self.codemap[f_code] += 1
self._set_stopinfo(frame, None)
return self.trace_dispatch
else:
return None
        # XXX 'arg' is no longer used
        # NOTE: the code below mirrors bdb.Bdb.dispatch_call but is
        # unreachable here, since both branches above already return.
if self.botframe is None:
# First call of dispatch since reset()
self.botframe = frame.f_back # (CT) Note that this may also be None!
return self.trace_dispatch
if not (self.stop_here(frame) or self.break_anywhere(frame)):
# No need to trace this function
return # None
self.user_call(frame, arg)
if self.quitting: raise BdbQuit
return self.trace_dispatch
def set_trace(self, frame=None):
"""
"""
update_stdout()
wrap_sys_excepthook()
if frame is None:
frame = sys._getframe().f_back
#pdb = Tdb(def_colors)
self.reset()
self.set_step()
sys.settrace(self.trace_dispatch)
def with_trace(f):
    @wraps(f)
    def tracing(*args, **kwargs):
        # `set_trace` was previously called as a bare name, which does not
        # exist at module level; arm a fresh Tdb registered on the wrapped
        # function instead.
        tdb = Tdb(def_colors)
        tdb.add_trace(f)
        tdb.set_trace()
        return f(*args, **kwargs)
    return tracing
class Trace(object):
def __init__(self, *args):
self.tdb = Tdb(def_colors)
funcs = list(filter(callable, args))
for func in funcs:
self.add_function(func)
def add_function(self, func):
self.tdb.add_trace(func)
def __enter__(self):
self.tdb.set_trace()
return self
def __exit__(self, type, value, traceback):
sys.settrace(None)
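# A minimal usage sketch for the Trace context manager defined above. Entering
# the block arms tracing for the registered functions and drops into the
# interactive debugger when one of them is called; the sample function is
# illustrative only.
#
# def sample(n):
#     return n * 2
#
# with Trace(sample):
#     sample(21)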
|
dalejung/trtools
|
trtools/tools/trace.py
|
Python
|
mit
| 2,904
|
# ----------------------------------------------------------------------
# Copyright (c) 2014 Rafael Gonzalez.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ----------------------------------------------------------------------
# ========================== DESIGN NOTES ==============================
#
# The EMA hardware is quite smart. It does many things on its own:
# if any of its various thresholds are reached it opens or closes relays
# Threshold caluclations are done inside the EMA.
# But there may be other actions that could be triggered
# when the roof or aux relay or change state, like sending an SMS.
#
# Also, in my current setup, the battery voltage should be carefully
# controlled. If voltage falls below a threshold, we should start
# switching off all devices except EMA itself.
#
# This module is the real added value of this EMA Python server
# to this excellent hardware.
#
# It allows you to trigger scripts that do interesting things like
# sending SMS or switching off its own computer (a Raspberry Pi)
#
# Scripts can be written in any language you like, of course. This
# project includes scripts to send SMS using the python-gammu
# binding to gammu project.
#
# I have preferred to trigger arbitrary scripts rather than a tight
# integration with this EMA server. Loosely coupled modules evolve better
# over time.
#
# Scripts are forked in the background and can be triggered each time
# the event takes place or just once. There will be only one script
# process running. If a recurring event takes place and the script is
# still active, the new script is not launched.
#
# Notifier Class responsibilities:
# 1) capture Voltage Low, Roof Relay on/off
# and Aux Relay off/on events
# 2) Hold Script objects to run
# 3) Run them when the event comes.
#
# Script Class responsibilities
# 1) Hold a path to the external script file
# 2) Knows what its execution mode is (run once, run many times)
# 3) Forks the script in background and does not wait for its completion
#
# We use an exception to signal the notifier about a process already being
# executed. I think this is cleaner than carrying return information
# across two levels.
#
# In V2.0, the notifier is generic, allowing registration and execution of any
# event
# ======================================================================
import logging
import subprocess
import os
log = logging.getLogger('notifier')
class ExecutedScript(Exception):
'''Signals a script has executed'''
def __init__(self, name, *args):
self.name = name
self.args = args
def __str__(self):
'''Prints useful information'''
tmp = ''
for arg in self.args:
tmp += ' ' + arg
return self.name + ' ' + tmp
class Script(object):
'''Notifier creates Script wrapper objects, representing
scripts to be launched'''
# modes as constants
NEVER = 0
ONCE = 1
MANY = 2
# mappping from strings to numbers
MODES = { 'Never' : NEVER, 'Once' : ONCE, 'Many' : MANY }
def __init__(self, cfg):
self.mode = Script.MODES[cfg[1]]
self.path = cfg[0]
self.name = os.path.basename(self.path)
self.child = None
self.executed = False
def runOnce(self, *args):
'''run only once in the whole server lifetime'''
# skip if already run
# otherwise, spawn it
if self.executed:
return False
# If not running, spawn it
try:
self.child = subprocess.Popen((self.path,) + args)
except (OSError, ValueError) as e:
log.error("runOnce(%s): %s", self.path, e)
else:
self.executed = True
raise ExecutedScript(self.name, *args)
def runMany(self, *args):
'''Run one more time, if previous run completed'''
# Check existing script already running
# If running we don't go any further and return.
# otherwise, spawn it.
if self.child:
self.child.poll()
if self.child.returncode is None:
log.warning("script %s has not finished. Can't launch it again", self.name)
return
try:
self.child = subprocess.Popen((self.path,) + args)
except (OSError, ValueError) as e:
log.error("runMany(%s): %s", self.path, e)
else:
raise ExecutedScript(self.name, *args)
return
def run(self, *args):
'''Launch a script, depending on the launch mode'''
# Skip if no script is configured
if not self.path:
return
if self.mode == Script.ONCE:
self.runOnce(*args)
elif self.mode == Script.MANY:
self.runMany(*args)
class Notifier(object):
'''Notifies EMA events to third parties by executing scripts'''
    # Modes as a set of text strings to be used in the config file
MODES = {'Never', 'Once', 'Many'}
    def __init__(self):
        self.scripts = {}
# ---------------------------
# Adding scripts to notifier
# ---------------------------
def addScript(self, event, mode, path):
''' *_script are tuples of (path, mode)'''
aList = self.scripts.get(event,[])
aList.append(Script((path, mode)))
self.scripts[event] = aList
# ---------------------------
# Event handler from Devices
# ---------------------------
def onEventExecute(self, event, *args):
try:
for script in self.scripts[event]:
script.run(*args)
except ExecutedScript as e:
log.warning("On event %s executed script => %s ", event, e)
|
astrorafael/ema
|
ema/notifier.py
|
Python
|
mit
| 6,211
|
# -*- coding: utf-8 -*-
"""
Collection of raw lexer test cases and class constructor.
"""
from __future__ import unicode_literals
import textwrap
swapquotes = {
39: 34, 34: 39,
    # note the following are interim error messages
96: 39,
}
# The structure and some test cases are taken
# from https://bitbucket.org/ned/jslex
es5_cases = [
(
# Identifiers
'identifiers_ascii',
('i my_variable_name c17 _dummy $str $ _ CamelCase class2type',
['ID i', 'ID my_variable_name', 'ID c17', 'ID _dummy',
'ID $str', 'ID $', 'ID _', 'ID CamelCase', 'ID class2type']
),
), (
'identifiers_unicode',
(u'\u03c0 \u03c0_tail var\ua67c',
[u'ID \u03c0', u'ID \u03c0_tail', u'ID var\ua67c']),
), (
# https://github.com/rspivak/slimit/issues/2
'slimit_issue_2',
('nullify truelie falsepositive',
['ID nullify', 'ID truelie', 'ID falsepositive']),
), (
'keywords_break',
('break Break BREAK', ['BREAK break', 'ID Break', 'ID BREAK']),
), (
# Literals
'literals',
('null true false Null True False',
['NULL null', 'TRUE true', 'FALSE false',
'ID Null', 'ID True', 'ID False']
),
), (
# Punctuators
'punctuators_simple',
('a /= b', ['ID a', 'DIVEQUAL /=', 'ID b']),
), (
'punctuators_various_equality',
(('= == != === !== < > <= >= || && ++ -- << >> '
'>>> += -= *= <<= >>= >>>= &= %= ^= |='),
['EQ =', 'EQEQ ==', 'NE !=', 'STREQ ===', 'STRNEQ !==', 'LT <',
'GT >', 'LE <=', 'GE >=', 'OR ||', 'AND &&', 'PLUSPLUS ++',
'MINUSMINUS --', 'LSHIFT <<', 'RSHIFT >>', 'URSHIFT >>>',
'PLUSEQUAL +=', 'MINUSEQUAL -=', 'MULTEQUAL *=', 'LSHIFTEQUAL <<=',
'RSHIFTEQUAL >>=', 'URSHIFTEQUAL >>>=', 'ANDEQUAL &=', 'MODEQUAL %=',
'XOREQUAL ^=', 'OREQUAL |=',
]
),
), (
'punctuators_various_others',
('. , ; : + - * % & | ^ ~ ? ! ( ) { } [ ]',
['PERIOD .', 'COMMA ,', 'SEMI ;', 'COLON :', 'PLUS +', 'MINUS -',
'MULT *', 'MOD %', 'BAND &', 'BOR |', 'BXOR ^', 'BNOT ~',
'CONDOP ?', 'NOT !', 'LPAREN (', 'RPAREN )', 'LBRACE {', 'RBRACE }',
'LBRACKET [', 'RBRACKET ]']
),
), (
'division_simple',
('a / b', ['ID a', 'DIV /', 'ID b']),
), (
'numbers',
(('3 3.3 0 0. 0.0 0.001 010 3.e2 3.e-2 3.e+2 3E2 3E+2 3E-2 '
'0.5e2 0.5e+2 0.5e-2 33 128.15 0x001 0X12ABCDEF 0xabcdef'),
['NUMBER 3', 'NUMBER 3.3', 'NUMBER 0', 'NUMBER 0.', 'NUMBER 0.0',
'NUMBER 0.001', 'NUMBER 010', 'NUMBER 3.e2', 'NUMBER 3.e-2',
'NUMBER 3.e+2', 'NUMBER 3E2', 'NUMBER 3E+2', 'NUMBER 3E-2',
'NUMBER 0.5e2', 'NUMBER 0.5e+2', 'NUMBER 0.5e-2', 'NUMBER 33',
'NUMBER 128.15', 'NUMBER 0x001', 'NUMBER 0X12ABCDEF',
'NUMBER 0xabcdef']
),
), (
'strings_simple_quote',
(""" '"' """, ["""STRING '"'"""]),
), (
'strings_escape_quote_tab',
(r'''"foo" 'foo' "x\";" 'x\';' "foo\tbar"''',
['STRING "foo"', """STRING 'foo'""", r'STRING "x\";"',
r"STRING 'x\';'", r'STRING "foo\tbar"']
),
), (
'strings_escape_ascii',
(r"""'\x55' "\x12ABCDEF" '!@#$%^&*()_+{}[]\";?'""",
[r"STRING '\x55'", r'STRING "\x12ABCDEF"',
r"STRING '!@#$%^&*()_+{}[]\";?'"]
),
), (
'strings_escape_unicode',
(r"""'\u0001' "\uFCEF" 'a\\\b\n'""",
[r"STRING '\u0001'", r'STRING "\uFCEF"', r"STRING 'a\\\b\n'"]
),
), (
'strings_unicode',
(u'"тест строки\\""', [u'STRING "тест строки\\""']),
), (
'strings_escape_octal',
(r"""'\251'""", [r"""STRING '\251'"""]),
), (
# Bug - https://github.com/rspivak/slimit/issues/5
'slimit_issue_5',
(r"var tagRegExp = new RegExp('<(\/*)(FooBar)', 'gi');",
['VAR var', 'ID tagRegExp', 'EQ =',
'NEW new', 'ID RegExp', 'LPAREN (',
r"STRING '<(\/*)(FooBar)'", 'COMMA ,', "STRING 'gi'",
'RPAREN )', 'SEMI ;']),
), (
# same as above but inside double quotes
'slimit_issue_5_double_quote',
(r'"<(\/*)(FooBar)"', [r'STRING "<(\/*)(FooBar)"']),
), (
# multiline string (string written across multiple lines
# of code) https://github.com/rspivak/slimit/issues/24
'slimit_issue_24_multi_line_code_double',
("var a = 'hello \\\n world'",
['VAR var', 'ID a', 'EQ =', "STRING 'hello \\\n world'"]),
), (
'slimit_issue_24_multi_line_code_single',
('var a = "hello \\\r world"',
['VAR var', 'ID a', 'EQ =', 'STRING "hello \\\r world"']),
), (
# regex
'regex_1',
(r'a=/a*/,1', ['ID a', 'EQ =', 'REGEX /a*/', 'COMMA ,', 'NUMBER 1']),
), (
'regex_2',
(r'a=/a*[^/]+/,1',
['ID a', 'EQ =', 'REGEX /a*[^/]+/', 'COMMA ,', 'NUMBER 1']
),
), (
'regex_3',
(r'a=/a*\[^/,1',
['ID a', 'EQ =', r'REGEX /a*\[^/', 'COMMA ,', 'NUMBER 1']
),
), (
'regex_4',
(r'a=/\//,1', ['ID a', 'EQ =', r'REGEX /\//', 'COMMA ,', 'NUMBER 1']),
), (
# not a regex, just a division
# https://github.com/rspivak/slimit/issues/6
'slimit_issue_6_not_regex_but_division',
(r'x = this / y;',
['ID x', 'EQ =', 'THIS this', r'DIV /', r'ID y', r'SEMI ;']),
), (
'regex_mozilla_example_1',
# next two are from
# http://www.mozilla.org/js/language/js20-2002-04/rationale/syntax.html#regular-expressions
('for (var x = a in foo && "</x>" || mot ? z:/x:3;x<5;y</g/i) '
'{xyz(x++);}',
["FOR for", "LPAREN (", "VAR var", "ID x", "EQ =", "ID a", "IN in",
"ID foo", "AND &&", 'STRING "</x>"', "OR ||", "ID mot", "CONDOP ?",
"ID z", "COLON :", "REGEX /x:3;x<5;y</g", "DIV /", "ID i",
"RPAREN )", "LBRACE {", "ID xyz", "LPAREN (", "ID x", "PLUSPLUS ++",
"RPAREN )", "SEMI ;", "RBRACE }"]
),
), (
'regex_mozilla_example_2',
('for (var x = a in foo && "</x>" || mot ? z/x:3;x<5;y</g/i) '
'{xyz(x++);}',
["FOR for", "LPAREN (", "VAR var", "ID x", "EQ =", "ID a", "IN in",
"ID foo", "AND &&", 'STRING "</x>"', "OR ||", "ID mot", "CONDOP ?",
"ID z", "DIV /", "ID x", "COLON :", "NUMBER 3", "SEMI ;", "ID x",
"LT <", "NUMBER 5", "SEMI ;", "ID y", "LT <", "REGEX /g/i",
"RPAREN )", "LBRACE {", "ID xyz", "LPAREN (", "ID x", "PLUSPLUS ++",
"RPAREN )", "SEMI ;", "RBRACE }"]
),
), (
'regex_illegal_1',
# Various "illegal" regexes that are valid according to the std.
(r"""/????/, /++++/, /[----]/ """,
['REGEX /????/', 'COMMA ,',
'REGEX /++++/', 'COMMA ,', 'REGEX /[----]/']
),
), (
'regex_stress_test_1',
# Stress cases from
# http://stackoverflow.com/questions/5533925/
# what-javascript-constructs-does-jslex-incorrectly-lex/5573409#5573409
(r"""/\[/""", [r"""REGEX /\[/"""]),
), (
'regex_stress_test_2',
(r"""/[i]/""", [r"""REGEX /[i]/"""]),
), (
'regex_stress_test_3',
(r"""/[\]]/""", [r"""REGEX /[\]]/"""]),
), (
'regex_stress_test_4',
(r"""/a[\]]/""", [r"""REGEX /a[\]]/"""]),
), (
'regex_stress_test_5',
(r"""/a[\]]b/""", [r"""REGEX /a[\]]b/"""]),
), (
'regex_stress_test_6',
(r"""/[\]/]/gi""", [r"""REGEX /[\]/]/gi"""]),
), (
'regex_stress_test_7',
(r"""/\[[^\]]+\]/gi""", [r"""REGEX /\[[^\]]+\]/gi"""]),
), (
'regex_stress_test_8',
(r"""
rexl.re = {
NAME: /^(?!\d)(?:\w)+|^"(?:[^"]|"")+"/,
UNQUOTED_LITERAL: /^@(?:(?!\d)(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/,
QUOTED_LITERAL: /^'(?:[^']|'')*'/,
NUMERIC_LITERAL: /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/,
SYMBOL: /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/
};
""", [
"ID rexl", "PERIOD .", "ID re", "EQ =", "LBRACE {",
"ID NAME", "COLON :",
r"""REGEX /^(?!\d)(?:\w)+|^"(?:[^"]|"")+"/""", "COMMA ,",
"ID UNQUOTED_LITERAL", "COLON :",
r"""REGEX /^@(?:(?!\d)(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/""",
"COMMA ,", "ID QUOTED_LITERAL", "COLON :",
r"""REGEX /^'(?:[^']|'')*'/""", "COMMA ,", "ID NUMERIC_LITERAL",
"COLON :",
r"""REGEX /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/""", "COMMA ,",
"ID SYMBOL", "COLON :",
r"""REGEX /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/""",
"RBRACE }", "SEMI ;"]),
), (
'regex_stress_test_9',
(r"""
rexl.re = {
NAME: /^(?!\d)(?:\w)+|^"(?:[^"]|"")+"/,
UNQUOTED_LITERAL: /^@(?:(?!\d)(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/,
QUOTED_LITERAL: /^'(?:[^']|'')*'/,
NUMERIC_LITERAL: /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/,
SYMBOL: /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/
};
str = '"';
""", [
"ID rexl", "PERIOD .", "ID re", "EQ =", "LBRACE {",
"ID NAME", "COLON :", r"""REGEX /^(?!\d)(?:\w)+|^"(?:[^"]|"")+"/""",
"COMMA ,", "ID UNQUOTED_LITERAL", "COLON :",
r"""REGEX /^@(?:(?!\d)(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/""",
"COMMA ,", "ID QUOTED_LITERAL", "COLON :",
r"""REGEX /^'(?:[^']|'')*'/""", "COMMA ,",
"ID NUMERIC_LITERAL", "COLON :",
r"""REGEX /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/""", "COMMA ,",
"ID SYMBOL", "COLON :",
r"""REGEX /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/""",
"RBRACE }", "SEMI ;",
"ID str", "EQ =", """STRING '"'""", "SEMI ;",
]),
), (
'regex_stress_test_10',
(r""" this._js = "e.str(\"" + this.value.replace(/\\/g, "\\\\").replace(/"/g, "\\\"") + "\")"; """,
["THIS this", "PERIOD .", "ID _js", "EQ =",
r'''STRING "e.str(\""''', "PLUS +", "THIS this", "PERIOD .",
"ID value", "PERIOD .", "ID replace", "LPAREN (", r"REGEX /\\/g",
"COMMA ,", r'STRING "\\\\"', "RPAREN )", "PERIOD .", "ID replace",
"LPAREN (", r'REGEX /"/g', "COMMA ,", r'STRING "\\\""', "RPAREN )",
"PLUS +", r'STRING "\")"', "SEMI ;"]),
), (
'regex_division_check',
('a = /a/ / /b/',
['ID a', 'EQ =', 'REGEX /a/', 'DIV /', 'REGEX /b/']),
), (
'regex_after_plus_brace',
('+{}/a/g',
['PLUS +', 'LBRACE {', 'RBRACE }', 'DIV /', 'ID a', 'DIV /', 'ID g']),
# The following pathological cases cannot be tested using the
# lexer alone, as the rules can only be addressed in conjunction
# with a parser
#
# 'regex_after_brace',
# ('{}/a/g',
# ['LBRACE {', 'RBRACE }', 'REGEX /a/g']),
# 'regex_after_if_brace',
# ('if (a) { } /a/.test(a)',
# ['IF if', 'LPAREN (', 'ID a', 'RPAREN )', 'LBRACE {', 'RBRACE }',
# 'REGEX /a/', "PERIOD .", "ID test", 'LPAREN (', 'ID a',
# 'RPAREN )']),
), (
'regex_case',
('switch(0){case /a/:}',
['SWITCH switch', 'LPAREN (', 'NUMBER 0', 'RPAREN )', 'LBRACE {',
'CASE case', 'REGEX /a/', 'COLON :', 'RBRACE }']),
), (
'div_after_valid_statement_function_call',
('if(){} f(a) / f(b)',
['IF if', 'LPAREN (', 'RPAREN )', 'LBRACE {', 'RBRACE }',
'ID f', 'LPAREN (', 'ID a', 'RPAREN )', 'DIV /',
'ID f', 'LPAREN (', 'ID b', 'RPAREN )']),
), (
'for_regex_slimit_issue_54',
('for (;;) /r/;',
['FOR for', 'LPAREN (', 'SEMI ;', 'SEMI ;', 'RPAREN )',
'REGEX /r/', 'SEMI ;']),
), (
'for_regex_slimit_issue_54_not_break_division',
('for (;;) { x / y }',
['FOR for', 'LPAREN (', 'SEMI ;', 'SEMI ;', 'RPAREN )',
'LBRACE {', 'ID x', 'DIV /', 'ID y', 'RBRACE }']),
), (
'for_regex_slimit_issue_54_bracket_accessor_check',
('s = {a:1} + s[2] / 1',
['ID s', 'EQ =', 'LBRACE {', 'ID a', 'COLON :', 'NUMBER 1',
'RBRACE }', 'PLUS +', 'ID s', 'LBRACKET [', 'NUMBER 2', 'RBRACKET ]',
'DIV /', 'NUMBER 1'])
), (
'for_regex_slimit_issue_54_function_parentheses_check',
('s = {a:1} + f(2) / 1',
['ID s', 'EQ =', 'LBRACE {', 'ID a', 'COLON :', 'NUMBER 1',
'RBRACE }', 'PLUS +', 'ID f', 'LPAREN (', 'NUMBER 2', 'RPAREN )',
'DIV /', 'NUMBER 1'])
), (
'for_regex_slimit_issue_54_math_parentheses_check',
('s = {a:1} + (2) / 1',
['ID s', 'EQ =', 'LBRACE {', 'ID a', 'COLON :', 'NUMBER 1',
'RBRACE }', 'PLUS +', 'LPAREN (', 'NUMBER 2', 'RPAREN )',
'DIV /', 'NUMBER 1'])
), (
'for_regex_slimit_issue_54_math_bracket_check',
('s = {a:1} + [2] / 1',
['ID s', 'EQ =', 'LBRACE {', 'ID a', 'COLON :', 'NUMBER 1',
'RBRACE }', 'PLUS +', 'LBRACKET [', 'NUMBER 2', 'RBRACKET ]',
'DIV /', 'NUMBER 1'])
), (
'for_regex_slimit_issue_54_math_braces_check',
('s = {a:2} / 166 / 9',
['ID s', 'EQ =', 'LBRACE {', 'ID a', 'COLON :', 'NUMBER 2',
'RBRACE }', 'DIV /', 'NUMBER 166', 'DIV /', 'NUMBER 9'])
), (
'do_while_regex',
('do {} while (0) /s/',
['DO do', 'LBRACE {', 'RBRACE }', 'WHILE while', 'LPAREN (',
'NUMBER 0', 'RPAREN )', 'REGEX /s/'])
), (
'if_regex',
('if (thing) /s/',
['IF if', 'LPAREN (', 'ID thing', 'RPAREN )', 'REGEX /s/'])
), (
'identifier_math',
('f (v) /s/g',
['ID f', 'LPAREN (', 'ID v', 'RPAREN )', 'DIV /', 'ID s', 'DIV /',
'ID g'])
), (
'section_7',
("a = b\n/hi/s",
['ID a', 'EQ =', 'ID b', 'DIV /', 'ID hi', 'DIV /', 'ID s'])
), (
'section_7_extras',
("a = b\n\n\n/hi/s",
['ID a', 'EQ =', 'ID b', 'DIV /', 'ID hi', 'DIV /', 'ID s'])
), (
'slimit_issue_39_and_57',
(r"f(a, 'hi\01').split('\1').split('\0');",
['ID f', 'LPAREN (', 'ID a', 'COMMA ,', r"STRING 'hi\01'", 'RPAREN )',
'PERIOD .', 'ID split', 'LPAREN (', r"STRING '\1'", 'RPAREN )',
'PERIOD .', 'ID split', 'LPAREN (', r"STRING '\0'", 'RPAREN )',
'SEMI ;'])
), (
'section_7_8_4_string_literal_with_7_3_conformance',
("'<LF>\\\n<CR>\\\r<LS>\\\u2028<PS>\\\u2029<CR><LF>\\\r\n'",
["STRING '<LF>\\\n<CR>\\\r<LS>\\\u2028<PS>\\\u2029<CR><LF>\\\r\n'"])
), (
# okay this is getting ridiculous how bad ECMA is.
'section_7_comments',
("a = b\n/** **/\n\n/hi/s",
['ID a', 'EQ =', 'ID b', 'DIV /', 'ID hi', 'DIV /', 'ID s'])
),
]
# various string related syntax errors
es5_error_cases_str = [
(
'unterminated_string_eof',
"var foo = 'test",
'Unterminated string literal "\'test" at 1:11',
), (
'naked_line_separator_in_string',
"vaf foo = 'test\u2028foo'",
'Unterminated string literal "\'test" at 1:11',
), (
'naked_line_feed_in_string',
"var foo = 'test\u2029foo'",
'Unterminated string literal "\'test" at 1:11',
), (
'naked_crnl_in_string',
"var foo = 'test\r\nfoo'",
'Unterminated string literal "\'test" at 1:11',
), (
'naked_cr_in_string',
"var foo = 'test\\\n\rfoo'",
# FIXME Note that the \\ is double escaped
'Unterminated string literal "\'test\\\\" at 1:11',
), (
'invalid_hex_sequence',
"var foo = 'fail\\x1'",
# backticks are converted to single quotes
"Invalid hexadecimal escape sequence `\\x1` at 1:16",
), (
'invalid_unicode_sequence',
"var foo = 'fail\\u12'",
"Invalid unicode escape sequence `\\u12` at 1:16",
), (
'invalid_hex_sequence_multiline',
"var foo = 'foobar\\\r\nfail\\x1'",
# backticks are converted to single quotes
"Invalid hexadecimal escape sequence `\\x1` at 2:5",
), (
'invalid_unicode_sequence_multiline',
"var foo = 'foobar\\\nfail\\u12'",
"Invalid unicode escape sequence `\\u12` at 2:5",
), (
'long_invalid_string_truncated',
"var foo = '1234567890abcdetruncated",
'Unterminated string literal "\'1234567890abcde..." at 1:11',
)
]
es5_comment_cases = [
(
'line_comment_whole',
('//comment\na = 5;\n',
['LINE_COMMENT //comment', 'ID a', 'EQ =', 'NUMBER 5', 'SEMI ;']),
), (
'line_comment_trail',
('a//comment', ['ID a', 'LINE_COMMENT //comment']),
), (
'block_comment_single',
('/***/b/=3//line',
['BLOCK_COMMENT /***/', 'ID b', 'DIVEQUAL /=',
'NUMBER 3', 'LINE_COMMENT //line']),
), (
'block_comment_multiline',
('/*\n * Copyright LGPL 2011 \n*/\na = 1;',
['BLOCK_COMMENT /*\n * Copyright LGPL 2011 \n*/',
'ID a', 'EQ =', 'NUMBER 1', 'SEMI ;']),
), (
# this will replace the standard test cases
'section_7_comments',
("a = b\n/** **/\n\n/hi/s",
['ID a', 'EQ =', 'ID b', 'BLOCK_COMMENT /** **/', 'DIV /', 'ID hi',
'DIV /', 'ID s'])
)
]
# replace the section_7_comments test case
es5_all_cases = es5_cases[:-1] + es5_comment_cases
# double quote version
es5_error_cases_str_dq = [
(n, arg.translate(swapquotes), msg.translate(swapquotes))
for n, arg, msg in es5_error_cases_str
]
# single quote version
es5_error_cases_str_sq = [
(n, arg, msg.translate({96: 39}))
for n, arg, msg in es5_error_cases_str
]
es5_pos_cases = [
(
'single_line',
"""
var foo = bar; // line 1
""", ([
'var 1:0', 'foo 1:4', '= 1:8', 'bar 1:10', '; 1:13'
], [
'var 1:1', 'foo 1:5', '= 1:9', 'bar 1:11', '; 1:14',
])
), (
'multi_line',
"""
var foo = bar; // line 1
var bar = baz; // line 4
""", ([
'var 1:0', 'foo 1:4', '= 1:8', 'bar 1:10', '; 1:13',
'var 4:28', 'bar 4:32', '= 4:36', 'baz 4:38', '; 4:41',
], [
'var 1:1', 'foo 1:5', '= 1:9', 'bar 1:11', '; 1:14',
'var 4:1', 'bar 4:5', '= 4:9', 'baz 4:11', '; 4:14',
])
), (
'inline_comment',
"""
// this is a comment // line 1
var foo = bar; // line 2
// another one // line 4
var bar = baz; // line 5
""", ([
'var 2:32', 'foo 2:36', '= 2:40', 'bar 2:42', '; 2:45',
'var 5:85', 'bar 5:89', '= 5:93', 'baz 5:95', '; 5:98',
], [
'var 2:1', 'foo 2:5', '= 2:9', 'bar 2:11', '; 2:14',
'var 5:1', 'bar 5:5', '= 5:9', 'baz 5:11', '; 5:14',
])
), (
'block_comment',
"""
/*
This is a block comment
*/
var foo = bar; // line 4
/* block single line */ // line 6
var bar = baz; // line 7
/* oops */bar(); // line 9
foo();
""", ([
'var 4:30', 'foo 4:34', '= 4:38', 'bar 4:40', '; 4:43',
'var 7:91', 'bar 7:95', '= 7:99', 'baz 7:101', '; 7:104',
'bar 9:128', '( 9:131', ') 9:132', '; 9:133',
'foo 11:149', '( 11:152', ') 11:153', '; 11:154',
], [
'var 4:1', 'foo 4:5', '= 4:9', 'bar 4:11', '; 4:14',
'var 7:1', 'bar 7:5', '= 7:9', 'baz 7:11', '; 7:14',
'bar 9:11', '( 9:14', ') 9:15', '; 9:16',
'foo 11:3', '( 11:6', ') 11:7', '; 11:8',
])
), (
'syntax_error_heading_comma',
"""
var a;
, b;
""", ([
'var 1:0', 'a 1:4', '; 1:5',
', 2:7', 'b 2:9', '; 2:10'
], [
'var 1:1', 'a 1:5', '; 1:6',
', 2:1', 'b 2:3', '; 2:4'
])
)
]
def run_lexer(value, lexer_cls):
lexer = lexer_cls()
lexer.input(value)
return ['%s %s' % (token.type, token.value) for token in lexer]
def run_lexer_pos(value, lexer_cls):
lexer = lexer_cls()
lexer.input(textwrap.dedent(value).strip())
tokens = list(lexer)
return ([
'%s %d:%d' % (token.value, token.lineno, token.lexpos)
for token in tokens
], [
'%s %d:%d' % (token.value, token.lineno, token.colno)
for token in tokens
])
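# A minimal sketch of driving the helpers above; the lexer import path is an
# assumption based on this package's layout.
#
# from calmjs.parse.lexers.es5 import Lexer
# assert run_lexer('a = 1;', Lexer) == ['ID a', 'EQ =', 'NUMBER 1', 'SEMI ;']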
|
calmjs/calmjs.parse
|
src/calmjs/parse/tests/lexer.py
|
Python
|
mit
| 21,010
|
#!/usr/bin/env python
"""Provide the standard Python string.Template engine."""
from __future__ import absolute_import
from __future__ import print_function
from string import Template
from . import Engine
class StringTemplate(Engine):
"""String.Template engine."""
handle = 'string.Template'
def __init__(self, template, tolerant=False, **kwargs):
"""Initialize string.Template."""
super(StringTemplate, self).__init__(**kwargs)
self.template = Template(template)
self.tolerant = tolerant
def apply(self, mapping):
"""Apply a mapping of name-value-pairs to a template."""
mapping = {name: self.str(value, tolerant=self.tolerant)
for name, value in mapping.items()
if value is not None or self.tolerant}
if self.tolerant:
return self.template.safe_substitute(mapping)
return self.template.substitute(mapping)
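# A minimal usage sketch for the engine above; the template text and mapping
# are illustrative, and any extra keyword arguments required by the Engine
# base class are assumed to be optional here.
#
# engine = StringTemplate('Hello, $name!', tolerant=True)
# print(engine.apply({'name': 'world'}))  # -> 'Hello, world!'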
|
blubberdiblub/eztemplate
|
eztemplate/engines/string_template_engine.py
|
Python
|
mit
| 951
|
from django.conf.urls import include
from django.contrib import admin
from django.urls import path
app_name = "transmission"
urlpatterns = [
path("admin/", admin.site.urls),
path("torrents/", include("torrents.urls", namespace="torrents")),
path("shows/", include("shows.urls", namespace="shows")),
]
|
onepesu/django_transmission
|
core/urls.py
|
Python
|
mit
| 315
|
"""Added a table for timed commands
Revision ID: 4db5dc4bc98
Revises: 514f4b9bc74
Create Date: 2015-12-23 00:00:59.156496
"""
# revision identifiers, used by Alembic.
revision = '4db5dc4bc98'
down_revision = '514f4b9bc74'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tb_timer',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=256), nullable=False),
sa.Column('action', mysql.TEXT(), nullable=False),
sa.Column('interval_online', sa.Integer(), nullable=False),
sa.Column('interval_offline', sa.Integer(), nullable=False),
sa.Column('enabled', sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('tb_timer')
### end Alembic commands ###
|
gigglearrows/anniesbot
|
alembic/versions/4db5dc4bc98_added_a_table_for_timed_commands.py
|
Python
|
mit
| 1,034
|
from colorama import Fore
from rtxlib import info, error, debug
from rtxlib.preprocessors.SparkPreProcessor import SparkPreProcessor
def init_pre_processors(wf):
""" we look into the workflows definition and run the required preprocessors """
if hasattr(wf, "pre_processors"):
pp = wf.pre_processors
for p in pp:
if p["type"] == "spark":
p["instance"] = SparkPreProcessor(wf, p)
else:
info("> Preprocessor | None", Fore.CYAN)
def kill_pre_processors(wf):
""" after the experiment, we stop all preprocessors """
try:
for p in wf.pre_processors:
p["instance"].shutdown()
info("> Shutting down Spark preprocessor")
except AttributeError:
pass
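# A minimal sketch of the workflow object these helpers expect, grounded in
# the attribute and keys read above; any further configuration required by
# SparkPreProcessor itself is omitted here.
#
# class Workflow(object):
#     pre_processors = [{"type": "spark"}]
#
# wf = Workflow()
# init_pre_processors(wf)   # stores a SparkPreProcessor under p["instance"]
# ...                       # run the experiment
# kill_pre_processors(wf)   # calls p["instance"].shutdown()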
|
Starofall/RTX
|
rtxlib/preprocessors/__init__.py
|
Python
|
mit
| 779
|
import ctypes
import os
STD_INPUT_HANDLE = -10
STD_OUTPUT_HANDLE= -11
STD_ERROR_HANDLE = -12
FOREGROUND_BLACK = 0x0
FOREGROUND_BLUE = 0x01 # text color contains blue.
FOREGROUND_GREEN= 0x02 # text color contains green.
FOREGROUND_RED = 0x04 # text color contains red.
FOREGROUND_INTENSITY = 0x08 # text color is intensified.
BACKGROUND_BLUE = 0x10 # background color contains blue.
BACKGROUND_GREEN= 0x20 # background color contains green.
BACKGROUND_RED = 0x40 # background color contains red.
BACKGROUND_INTENSITY = 0x80 # background color is intensified.
class CmdFormat(object):
"""docstring for CmdFormat"""
std_out_handle = ctypes.windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
def __init__(self, WinTitle="Console Window",\
color=FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | FOREGROUND_INTENSITY,\
):
super( CmdFormat, self).__init__()
self.WinTitle = WinTitle
os.system("title " + WinTitle)
def set_cmd_color(self, color, handle=std_out_handle):
        # avoid shadowing the builtin name `bool`
        ok = ctypes.windll.kernel32.SetConsoleTextAttribute(handle, color)
        return ok
def reset_color(self):
self.set_cmd_color(FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | FOREGROUND_INTENSITY)
def print_white_text(self,print_text,end='\n'):
self.reset_color()
print(print_text,end=end)
def print_red_text(self, print_text,end='\n'):
        self.set_cmd_color(FOREGROUND_RED | FOREGROUND_INTENSITY)
print(print_text,end=end)
self.reset_color()
def print_green_input_text(self, print_text):
self.set_cmd_color(FOREGROUND_GREEN | FOREGROUND_INTENSITY)
c = input(print_text)
self.reset_color()
return c
def print_green_text(self, print_text,end='\n'):
self.set_cmd_color(FOREGROUND_GREEN | FOREGROUND_INTENSITY)
print(print_text,end=end)
self.reset_color()
def print_yellow_text(self, print_text,end='\n'):
        self.set_cmd_color(FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_INTENSITY)  # yellow
print(print_text,end=end)
self.reset_color()
def print_blue_text(self, print_text,end='\n'):
        self.set_cmd_color(FOREGROUND_BLUE | FOREGROUND_INTENSITY)  # the original 1 | 10 rendered cyan, not blue
print(print_text,end=end)
self.reset_color()
if __name__ == '__main__':
clr = CmdFormat("Window Title")
clr.set_cmd_color(FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | FOREGROUND_INTENSITY)
clr.print_red_text('red')
clr.print_green_text("green")
clr.print_green_input_text("input: ")
clr.print_blue_text('blue')
clr.print_yellow_text('yellow')
input()
|
vicyangworld/AutoOfficer
|
CmdFormat.py
|
Python
|
mit
| 2,619
|
"""
Handling signals of the `core` app
"""
from django.dispatch import receiver
from core import signals
from reader import actions
@receiver(signals.app_link_ready)
def app_link_ready(sender, **kwargs):
actions.create_app_link()
|
signaldetect/messity
|
reader/receivers/core.py
|
Python
|
mit
| 238
|
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# spec/fixtures/responses/whois.registry.om/status_reserved
#
# and regenerate the tests with the following script
#
# $ scripts/generate_tests.py
#
from nose.tools import *
from dateutil.parser import parse as time_parse
import yawhois
class TestWhoisRegistryOmStatusReserved(object):
def setUp(self):
fixture_path = "spec/fixtures/responses/whois.registry.om/status_reserved.txt"
host = "whois.registry.om"
part = yawhois.record.Part(open(fixture_path, "r").read(), host)
self.record = yawhois.record.Record(None, [part])
def test_status(self):
eq_(self.record.status, 'reserved')
def test_available(self):
eq_(self.record.available, False)
def test_registered(self):
eq_(self.record.registered, False)
def test_reserved(self):
eq_(self.record.reserved, True)
|
huyphan/pyyawhois
|
test/record/parser/test_response_whois_registry_om_status_reserved.py
|
Python
|
mit
| 984
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This module helps you upload local files to
QiNiu cloud storage; I use it to share files and
pictures on my blog.
"""
import argparse
# Compatible for Py2 and Py3
try:
import ConfigParser
except ImportError:
import configparser
import os
import qiniu.config
import sys
from qiniu import Auth
from qiniu import BucketManager
conf_file = "/etc/qnupload/qnupload.conf"
def getAuth(accessKey, secretKey):
"""Get the auth object by access key and secret key."""
auth = Auth(accessKey, secretKey)
return auth
def uploadFile(bucketName, filePath, auth, domain, full_name):
"""Upload file to your bucket on qiniu server."""
# Compatible for Py2 and Py3
if full_name:
file_basename = filePath
else:
file_basename = os.path.basename(filePath)
try:
fileName = file_basename.decode("utf-8")
except AttributeError:
fileName = file_basename
up_token = auth.upload_token(bucketName)
ret, resp = qiniu.put_file(up_token, fileName, filePath)
if ret:
print("Upload file: %s" % (filePath))
print("Link: %s" % (domain + fileName))
else:
print("Failed to upload file.")
print(resp)
def getBucket(uploadAuth):
"""Get the bucket object."""
return BucketManager(uploadAuth)
def checkFile(bucket, filePath, bucketName):
"""Check the file path is right and if it is exist in the bucket."""
if not os.path.exists(filePath):
print("Wrong file path: %s" % (filePath))
return False
ret, info = bucket.stat(bucketName, filePath)
if ret:
print("File exists in Qiniu cloud: %s" % (filePath))
return ret is None
def check_conf(conf_file):
"""Check the configure file is existed."""
if not os.path.exists(conf_file):
print("ERROR: Cannot find configure file.")
print("Please create configure file: %s" % (conf_file))
print("[DEFAULT]")
print("default_bucket_name =")
print("access_key =")
print("secret_key =")
print("domain =")
sys.exit(1)
def main():
# Check the configure file
check_conf(conf_file)
# Read configure file
# Compatible for Py2 and Py3
try:
cf = ConfigParser.ConfigParser()
except NameError:
cf = configparser.ConfigParser()
cf.read(conf_file)
parser = argparse.ArgumentParser(
prog="quupload",
description="This is a tool to upload file to Qiniu cloud.")
parser.add_argument("file",
metavar="filepath",
nargs='+',
help="Specify a file to upload to Qiniu cloud.")
parser.add_argument("-b", "--bucket",
help="A bucket under your Qiniu account.")
parser.add_argument("-a", "--access-key",
help="Your access key.")
parser.add_argument("-s", "--secret-key",
help="Your secret key.")
parser.add_argument("-d", "--domain",
help="The domain of your Qiniu account to share \
the file you upload to Qiniu cloud.")
parser.add_argument("--full-name",
action='store_true',
help="The file will be named with the path as \
its prefix when specify this option. ")
args = parser.parse_args()
if args.bucket is None:
bucketName = cf.get("DEFAULT", "default_bucket_name")
else:
bucketName = args.bucket
if args.access_key is None:
access_key = cf.get("DEFAULT", "access_key")
else:
access_key = args.access_key
if args.secret_key is None:
secret_key = cf.get("DEFAULT", "secret_key")
else:
secret_key = args.secret_key
if args.domain is None:
domain = cf.get(bucketName, "domain")
else:
domain = args.domain
full_name = args.full_name
# Parse domain
domain = domain + "/"
if not domain.startswith("http"):
domain = "http://" + domain
# Get the full file list from the command line
fileList = []
for item in args.file:
if os.path.isdir(item):
fileList.extend([item+'/'+f for f in os.listdir(item)])
elif os.path.isfile(item):
fileList.append(item)
uploadAuth = getAuth(access_key, secret_key)
bucket = getBucket(uploadAuth)
for filePath in fileList:
if checkFile(bucket, filePath, bucketName):
uploadFile(bucketName, filePath, uploadAuth, domain, full_name)
if __name__ == '__main__':
main()
|
cheneydc/qnupload
|
src/qnupload/qnupload.py
|
Python
|
mit
| 4,668
|
# This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask import redirect, request
from indico.modules.categories.compat import compat_category
from indico.modules.categories.controllers.admin import RHManageUpcomingEvents
from indico.modules.categories.controllers.display import (RHCategoryCalendarView, RHCategoryIcon, RHCategoryInfo,
RHCategoryLogo, RHCategoryOverview, RHCategorySearch,
RHCategoryStatistics, RHCategoryStatisticsJSON,
RHCategoryUpcomingEvent, RHDisplayCategory, RHEventList,
RHExportCategoryAtom, RHExportCategoryICAL,
RHReachableCategoriesInfo, RHShowFutureEventsInCategory,
RHShowPastEventsInCategory, RHSubcatInfo)
from indico.modules.categories.controllers.management import (RHAddCategoryRole, RHAddCategoryRoleMembers,
RHAPIEventMoveRequests, RHCategoryRoleMembersExportCSV,
RHCategoryRoleMembersImportCSV, RHCategoryRoles,
RHCreateCategory, RHDeleteCategory, RHDeleteCategoryRole,
RHDeleteEvents, RHDeleteSubcategories, RHEditCategoryRole,
RHManageCategoryContent, RHManageCategoryIcon,
RHManageCategoryLogo, RHManageCategoryModeration,
RHManageCategoryProtection, RHManageCategorySettings,
RHMoveCategory, RHMoveEvents, RHMoveSubcategories,
RHRemoveCategoryRoleMember, RHSortSubcategories,
RHSplitCategory)
from indico.modules.users import User
from indico.web.flask.util import make_compat_redirect_func, redirect_view, url_for
from indico.web.flask.wrappers import IndicoBlueprint
def _redirect_event_creation(category_id, event_type):
anchor = f'create-event:{event_type}:{category_id}'
return redirect(url_for('.display', category_id=category_id, _anchor=anchor))
_bp = IndicoBlueprint('categories', __name__, template_folder='templates', virtual_template_folder='categories',
url_prefix='/category/<int:category_id>')
# Category management
_bp.add_url_rule('/manage/', 'manage_content', RHManageCategoryContent)
_bp.add_url_rule('/manage/delete', 'delete', RHDeleteCategory, methods=('POST',))
_bp.add_url_rule('/manage/icon', 'manage_icon', RHManageCategoryIcon, methods=('POST', 'DELETE'))
_bp.add_url_rule('/manage/logo', 'manage_logo', RHManageCategoryLogo, methods=('POST', 'DELETE'))
_bp.add_url_rule('/manage/move', 'move', RHMoveCategory, methods=('POST',))
_bp.add_url_rule('/manage/protection', 'manage_protection', RHManageCategoryProtection, methods=('GET', 'POST'))
_bp.add_url_rule('/manage/settings', 'manage_settings', RHManageCategorySettings, methods=('POST', 'GET'))
_bp.add_url_rule('/manage/moderation', 'manage_moderation', RHManageCategoryModeration)
# Role management
_bp.add_url_rule('/manage/roles', 'manage_roles', RHCategoryRoles, methods=('POST', 'GET'))
_bp.add_url_rule('/manage/roles/create', 'add_role', RHAddCategoryRole, methods=('GET', 'POST'))
_bp.add_url_rule('/manage/roles/<int:role_id>/edit', 'edit_role', RHEditCategoryRole, methods=('GET', 'POST'))
_bp.add_url_rule('/manage/roles/<int:role_id>', 'delete_role', RHDeleteCategoryRole, methods=('DELETE',))
_bp.add_url_rule('/manage/roles/<int:role_id>/members', 'add_role_members', RHAddCategoryRoleMembers, methods=('POST',))
_bp.add_url_rule('/manage/roles/<int:role_id>/members/<int:user_id>', 'remove_role_member', RHRemoveCategoryRoleMember,
methods=('DELETE',))
_bp.add_url_rule('/manage/roles/<int:role_id>/members/import', 'add_members_import_csv',
RHCategoryRoleMembersImportCSV, methods=('GET', 'POST'))
_bp.add_url_rule('/manage/roles/<int:role_id>/members.csv', 'members_export_csv', RHCategoryRoleMembersExportCSV)
# Event management
_bp.add_url_rule('/manage/events/delete', 'delete_events', RHDeleteEvents, methods=('GET', 'POST'))
_bp.add_url_rule('/manage/events/move', 'move_events', RHMoveEvents, methods=('POST',))
_bp.add_url_rule('/manage/events/split', 'split_category', RHSplitCategory, methods=('GET', 'POST'))
# Subcategory management
_bp.add_url_rule('/manage/subcategories/create', 'create_subcategory', RHCreateCategory, methods=('GET', 'POST'))
_bp.add_url_rule('/manage/subcategories/delete', 'delete_subcategories', RHDeleteSubcategories, methods=('POST',))
_bp.add_url_rule('/manage/subcategories/move', 'move_subcategories', RHMoveSubcategories, methods=('POST',))
_bp.add_url_rule('/manage/subcategories/sort', 'sort_subcategories', RHSortSubcategories, methods=('POST',))
# Display
_bp.add_url_rule('!/', 'display', RHDisplayCategory, defaults={'category_id': 0})
_bp.add_url_rule('/', 'display', RHDisplayCategory)
_bp.add_url_rule('/event-list', 'event_list', RHEventList)
_bp.add_url_rule('/events.atom', 'export_atom', RHExportCategoryAtom)
_bp.add_url_rule('/events.ics', 'export_ical', RHExportCategoryICAL)
_bp.add_url_rule('/events.rss', 'export_rss', make_compat_redirect_func(_bp, 'export_atom'))
_bp.add_url_rule('/icon-<slug>.png', 'display_icon', RHCategoryIcon)
_bp.add_url_rule('/info', 'info', RHCategoryInfo)
_bp.add_url_rule('/info-from', 'info_from', RHReachableCategoriesInfo, methods=('GET', 'POST'))
_bp.add_url_rule('/logo-<slug>.png', 'display_logo', RHCategoryLogo)
_bp.add_url_rule('/overview', 'overview', RHCategoryOverview)
_bp.add_url_rule('/show-future-events', 'show_future_events', RHShowFutureEventsInCategory, methods=('DELETE', 'PUT'))
_bp.add_url_rule('/show-past-events', 'show_past_events', RHShowPastEventsInCategory, methods=('DELETE', 'PUT'))
_bp.add_url_rule('/statistics', 'statistics', RHCategoryStatistics)
_bp.add_url_rule('/statistics.json', 'statistics_json', RHCategoryStatisticsJSON)
_bp.add_url_rule('/subcat-info', 'subcat_info', RHSubcatInfo)
_bp.add_url_rule('/calendar', 'calendar', RHCategoryCalendarView)
_bp.add_url_rule('/upcoming', 'upcoming_event', RHCategoryUpcomingEvent)
# Event creation - redirect to anchor page opening the dialog
_bp.add_url_rule('/create/event/<any(lecture,meeting,conference):event_type>', view_func=_redirect_event_creation)
# Short URLs
_bp.add_url_rule('!/categ/<int:category_id>', view_func=redirect_view('.display'), strict_slashes=False)
_bp.add_url_rule('!/c/<int:category_id>', view_func=redirect_view('.display'), strict_slashes=False)
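# e.g. (illustrative) a request to /c/123 or /categ/123 is redirected to the
# canonical category URL handled by the '.display' endpoint above.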
# Internal API
_bp.add_url_rule('!/category/search', 'search', RHCategorySearch)
_bp.add_url_rule('/api/event-move-requests', 'api_event_move_requests', RHAPIEventMoveRequests, methods=('GET', 'POST'))
# Administration
_bp.add_url_rule('!/admin/upcoming-events', 'manage_upcoming', RHManageUpcomingEvents, methods=('GET', 'POST'))
@_bp.before_request
def _redirect_to_bootstrap():
# No users in Indico yet? Redirect from index page to bootstrap form
if (request.endpoint == 'categories.display' and not request.view_args['category_id'] and
not User.query.filter_by(is_system=False).has_rows()):
return redirect(url_for('bootstrap.index'))
_compat_bp = IndicoBlueprint('compat_categories', __name__)
_compat_bp.add_url_rule('/category/<legacy_category_id>/<path:path>', 'legacy_id', compat_category)
_compat_bp.add_url_rule('/category/<legacy_category_id>/', 'legacy_id', compat_category)
_compat_bp.add_url_rule('!/categoryDisplay.py', 'display_modpython',
make_compat_redirect_func(_compat_bp, 'legacy_id',
view_args_conv={'categId': 'legacy_category_id'}))
|
indico/indico
|
indico/modules/categories/blueprint.py
|
Python
|
mit
| 8,380
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-usuario',
version='0.4',
packages=['usuario'],
include_package_data=True,
license='MIT License',
    description='Extension to the User model.',
long_description=README,
    keywords='django user',
url='https://github.com/dyachan/django-usuario',
author='Diego Yachan',
author_email='diego.yachan@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
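# Typical usage from the project root (standard setuptools/pip commands,
# shown for illustration): `python setup.py sdist` builds a source
# distribution, and `pip install .` installs the package locally.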
|
dyachan/django-usuario
|
setup.py
|
Python
|
mit
| 1,204
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Helpful routines for regression testing."""
from base64 import b64encode
from binascii import hexlify, unhexlify
from decimal import Decimal, ROUND_DOWN
import json
import logging
import os
import random
import re
import time
from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
logger = logging.getLogger("TestFramework.utils")
# Assert functions
##################
def assert_fee_amount(fee, tx_size, fee_per_kB):
"""Assert the fee was in range"""
target_fee = tx_size * fee_per_kB / 1000
if fee < target_fee:
raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(target_fee)))
# allow the wallet's estimation to be at most 2 bytes off
if fee > (tx_size + 2) * fee_per_kB / 1000:
raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(target_fee)))
def assert_equal(thing1, thing2, *args):
if thing1 != thing2 or any(thing1 != arg for arg in args):
raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args))
def assert_greater_than(thing1, thing2):
if thing1 <= thing2:
raise AssertionError("%s <= %s" % (str(thing1), str(thing2)))
def assert_greater_than_or_equal(thing1, thing2):
if thing1 < thing2:
raise AssertionError("%s < %s" % (str(thing1), str(thing2)))
def assert_raises(exc, fun, *args, **kwds):
assert_raises_message(exc, None, fun, *args, **kwds)
def assert_raises_message(exc, message, fun, *args, **kwds):
try:
fun(*args, **kwds)
except exc as e:
if message is not None and message not in e.error['message']:
raise AssertionError("Expected substring not found:" + e.error['message'])
except Exception as e:
raise AssertionError("Unexpected exception raised: " + type(e).__name__)
else:
raise AssertionError("No exception raised")
def assert_raises_jsonrpc(code, message, fun, *args, **kwds):
"""Run an RPC and verify that a specific JSONRPC exception code and message is raised.
Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
and verifies that the error code and message are as expected. Throws AssertionError if
    no JSONRPCException was raised or if the error code/message are not as expected.
Args:
code (int), optional: the error code returned by the RPC call (defined
in src/rpc/protocol.h). Set to None if checking the error code is not required.
message (string), optional: [a substring of] the error string returned by the
RPC call. Set to None if checking the error string is not required
fun (function): the function to call. This should be the name of an RPC.
        *args: positional arguments for the function.
        **kwds: keyword arguments for the function.
"""
try:
fun(*args, **kwds)
except JSONRPCException as e:
# JSONRPCException was thrown as expected. Check the code and message values are correct.
if (code is not None) and (code != e.error["code"]):
raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
if (message is not None) and (message not in e.error['message']):
raise AssertionError("Expected substring not found:" + e.error['message'])
except Exception as e:
raise AssertionError("Unexpected exception raised: " + type(e).__name__)
else:
raise AssertionError("No exception raised")
def assert_is_hex_string(string):
try:
int(string, 16)
except Exception as e:
raise AssertionError(
"Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))
def assert_is_hash_string(string, length=64):
if not isinstance(string, str):
raise AssertionError("Expected a string, got type %r" % type(string))
elif length and len(string) != length:
raise AssertionError(
"String of length %d expected; got %d" % (length, len(string)))
elif not re.match('[abcdef0-9]+$', string):
raise AssertionError(
"String %r contains invalid characters for a hash." % string)
def assert_array_result(object_array, to_match, expected, should_not_find=False):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
If the should_not_find flag is true, to_match should not be found
in object_array
"""
if should_not_find:
assert_equal(expected, {})
num_matched = 0
for item in object_array:
all_match = True
for key, value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
elif should_not_find:
num_matched = num_matched + 1
for key, value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s" % (str(item), str(key), str(value)))
num_matched = num_matched + 1
if num_matched == 0 and not should_not_find:
raise AssertionError("No objects matched %s" % (str(to_match)))
if num_matched > 0 and should_not_find:
raise AssertionError("Objects were found %s" % (str(to_match)))
# Utility functions
###################
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n))) * 1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def count_bytes(hex_string):
return len(bytearray.fromhex(hex_string))
def bytes_to_hex_str(byte_str):
return hexlify(byte_str).decode('ascii')
def hex_str_to_bytes(hex_str):
return unhexlify(hex_str.encode('ascii'))
def str_to_b64str(string):
return b64encode(string.encode('utf-8')).decode('ascii')
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
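# e.g. (illustrative) satoshi_round('0.123456789') == Decimal('0.12345678'):
# amounts are truncated toward zero (ROUND_DOWN) to 8 decimal places.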
# RPC/P2P connection constants and functions
############################################
# The maximum number of nodes a single test can spawn
MAX_NODES = 8
# Don't assign rpc or p2p ports lower than this
PORT_MIN = 11000
# The number of ports to "reserve" for p2p and rpc, each
PORT_RANGE = 5000
class PortSeed:
# Must be initialized with a unique integer for each process
n = None
def get_rpc_proxy(url, node_number, timeout=None, coveragedir=None):
"""
Args:
url (str): URL of the RPC server to call
        node_number (int): the node number (or id) this proxy connects to
Kwargs:
timeout (int): HTTP timeout in seconds
Returns:
        AuthServiceProxy: a convenience object for making RPC calls.
"""
proxy_kwargs = {}
if timeout is not None:
proxy_kwargs['timeout'] = timeout
proxy = AuthServiceProxy(url, **proxy_kwargs)
proxy.url = url # store URL on proxy for info
coverage_logfile = coverage.get_filename(
coveragedir, node_number) if coveragedir else None
return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
def p2p_port(n):
assert(n <= MAX_NODES)
return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def rpc_port(n):
return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
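# Worked example (assuming PortSeed.n == 1): p2p_port(0) == 11000 + 8 % 4991
# == 11008 and rpc_port(0) == 11000 + 5000 + 8 == 16008, so concurrent test
# processes get disjoint port assignments keyed on their PortSeed.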
def rpc_url(datadir, i, rpchost=None):
rpc_u, rpc_p = get_auth_cookie(datadir, i)
host = '127.0.0.1'
port = rpc_port(i)
if rpchost:
parts = rpchost.split(':')
if len(parts) == 2:
host, port = parts
else:
host = rpchost
return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port))
# Node functions
################
def initialize_datadir(dirname, n):
datadir = os.path.join(dirname, "node" + str(n))
if not os.path.isdir(datadir):
os.makedirs(datadir)
with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f:
f.write("regtest=1\n")
f.write("port=" + str(p2p_port(n)) + "\n")
f.write("rpcport=" + str(rpc_port(n)) + "\n")
f.write("listenonion=0\n")
return datadir
def get_datadir_path(dirname, n):
return os.path.join(dirname, "node" + str(n))
def get_auth_cookie(datadir, n):
user = None
password = None
if os.path.isfile(os.path.join(datadir, "bitcoin.conf")):
with open(os.path.join(datadir, "bitcoin.conf"), 'r') as f:
for line in f:
if line.startswith("rpcuser="):
assert user is None # Ensure that there is only one rpcuser line
user = line.split("=")[1].strip("\n")
if line.startswith("rpcpassword="):
assert password is None # Ensure that there is only one rpcpassword line
password = line.split("=")[1].strip("\n")
if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")):
with open(os.path.join(datadir, "regtest", ".cookie"), 'r') as f:
userpass = f.read()
split_userpass = userpass.split(':')
user = split_userpass[0]
password = split_userpass[1]
if user is None or password is None:
raise ValueError("No RPC credentials")
return user, password
def log_filename(dirname, n_node, logname):
return os.path.join(dirname, "node" + str(n_node), "regtest", logname)
def get_bip9_status(node, key):
info = node.getblockchaininfo()
return info['bip9_softforks'][key]
def set_node_times(nodes, t):
for node in nodes:
node.setmocktime(t)
def disconnect_nodes(from_connection, node_num):
for peer_id in [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']]:
from_connection.disconnectnode(nodeid=peer_id)
for _ in range(50):
if [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == []:
break
time.sleep(0.1)
else:
raise AssertionError("timed out waiting for disconnect")
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:" + str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
time.sleep(0.1)
def connect_nodes_bi(nodes, a, b):
connect_nodes(nodes[a], b)
connect_nodes(nodes[b], a)
def sync_blocks(rpc_connections, *, wait=1, timeout=60):
"""
Wait until everybody has the same tip.
    sync_blocks needs to be called with an rpc_connections set that has at
    least one node already synced to the latest, stable tip, otherwise there's
    a chance it might return before all nodes are stably synced.
"""
# Use getblockcount() instead of waitforblockheight() to determine the
# initial max height because the two RPCs look at different internal global
# variables (chainActive vs latestBlock) and the former gets updated
# earlier.
maxheight = max(x.getblockcount() for x in rpc_connections)
start_time = cur_time = time.time()
while cur_time <= start_time + timeout:
tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections]
if all(t["height"] == maxheight for t in tips):
if all(t["hash"] == tips[0]["hash"] for t in tips):
return
raise AssertionError("Block sync failed, mismatched block hashes:{}".format(
"".join("\n {!r}".format(tip) for tip in tips)))
cur_time = time.time()
raise AssertionError("Block sync to height {} timed out:{}".format(
maxheight, "".join("\n {!r}".format(tip) for tip in tips)))
def sync_chain(rpc_connections, *, wait=1, timeout=60):
"""
Wait until everybody has the same best block
"""
while timeout > 0:
best_hash = [x.getbestblockhash() for x in rpc_connections]
if best_hash == [best_hash[0]] * len(best_hash):
return
time.sleep(wait)
timeout -= wait
raise AssertionError("Chain sync failed: Best block hashes don't match")
def sync_mempools(rpc_connections, *, wait=1, timeout=60):
"""
Wait until everybody has the same transactions in their memory
pools
"""
while timeout > 0:
pool = set(rpc_connections[0].getrawmempool())
num_match = 1
for i in range(1, len(rpc_connections)):
if set(rpc_connections[i].getrawmempool()) == pool:
num_match = num_match + 1
if num_match == len(rpc_connections):
return
time.sleep(wait)
timeout -= wait
raise AssertionError("Mempool sync failed")
# Transaction/Block functions
#############################
def find_output(node, txid, amount):
"""
Return index to output of txid with value amount
Raises exception if there is none.
"""
txdata = node.getrawtransaction(txid, 1)
for i in range(len(txdata["vout"])):
if txdata["vout"][i]["value"] == amount:
return i
raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
"""
Return a random set of unspent txouts that are enough to pay amount_needed
"""
assert(confirmations_required >= 0)
utxo = from_node.listunspent(confirmations_required)
random.shuffle(utxo)
inputs = []
total_in = Decimal("0.00000000")
while total_in < amount_needed and len(utxo) > 0:
t = utxo.pop()
total_in += t["amount"]
inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]})
if total_in < amount_needed:
raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in))
return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
"""
Create change output(s), return them
"""
outputs = {}
amount = amount_out + fee
change = amount_in - amount
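    # Worked example (illustrative numbers): amount_in=10, amount_out=2 and
    # fee=0.01 give change=7.99 > 2 * 2.01, so the change is split across two
    # fresh addresses to break up large inputs for later spends.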
if change > amount * 2:
# Create an extra change output to break up big inputs
change_address = from_node.getnewaddress()
# Split change in two, being careful of rounding:
outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
change = amount_in - amount - outputs[change_address]
if change > 0:
outputs[from_node.getnewaddress()] = change
return outputs
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment * random.randint(0, fee_variants)
(total_in, inputs) = gather_inputs(from_node, amount + fee)
outputs = make_change(from_node, total_in, amount, fee)
outputs[to_node.getnewaddress()] = float(amount)
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"], fee)
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
node.generate(int(0.5 * count) + 101)
utxos = node.listunspent()
iterations = count - len(utxos)
addr1 = node.getnewaddress()
addr2 = node.getnewaddress()
if iterations <= 0:
return utxos
for i in range(iterations):
t = utxos.pop()
inputs = []
inputs.append({"txid": t["txid"], "vout": t["vout"]})
outputs = {}
send_value = t['amount'] - fee
outputs[addr1] = satoshi_round(send_value / 2)
outputs[addr2] = satoshi_round(send_value / 2)
raw_tx = node.createrawtransaction(inputs, outputs)
signed_tx = node.signrawtransaction(raw_tx)["hex"]
node.sendrawtransaction(signed_tx)
    while node.getmempoolinfo()['size'] > 0:
node.generate(1)
utxos = node.listunspent()
assert(len(utxos) >= count)
return utxos
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
# Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
# So we have big transactions (and therefore can't fit very many into each block)
# create one script_pubkey
script_pubkey = "6a4d0200" # OP_RETURN OP_PUSH2 512 bytes
for i in range(512):
script_pubkey = script_pubkey + "01"
# concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
txouts = "81"
for k in range(128):
# add txout value
txouts = txouts + "0000000000000000"
# add length of script_pubkey
txouts = txouts + "fd0402"
# add script_pubkey
txouts = txouts + script_pubkey
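    # Layout note: "fd0402" is a CompactSize length of 0x0204 = 516 bytes
    # (the 4-byte OP_RETURN/OP_PUSHDATA2 prefix plus the 512 pushed bytes),
    # and the leading "81" encodes 0x81 = 129 outputs: these 128 OP_RETURN
    # txouts plus the change output spliced in by the caller.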
return txouts
def create_tx(node, coinbase, to_address, amount):
inputs = [{"txid": coinbase, "vout": 0}]
outputs = {to_address: amount}
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
assert_equal(signresult["complete"], True)
return signresult["hex"]
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
addr = node.getnewaddress()
txids = []
for _ in range(num):
t = utxos.pop()
inputs = [{"txid": t["txid"], "vout": t["vout"]}]
outputs = {}
change = t['amount'] - fee
outputs[addr] = satoshi_round(change)
rawtx = node.createrawtransaction(inputs, outputs)
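        # Splice note (derived from the 1-input layout above): hex chars
        # [0:92] cover the version, input count and the single input, so
        # rawtx[92:94] is the one-byte output count ("01"); it is dropped and
        # replaced by the "81" (0x81 = 129 outputs) that leads `txouts`.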
newtx = rawtx[0:92]
newtx = newtx + txouts
newtx = newtx + rawtx[94:]
signresult = node.signrawtransaction(newtx, None, None, "NONE")
txid = node.sendrawtransaction(signresult["hex"], True)
txids.append(txid)
return txids
def mine_large_block(node, utxos=None):
    # generate fourteen ~66k transactions, which together come close to
    # the 1MB block limit
num = 14
txouts = gen_return_txouts()
utxos = utxos if utxos is not None else []
if len(utxos) < num:
utxos.clear()
utxos.extend(node.listunspent())
fee = 100 * node.getnetworkinfo()["relayfee"]
create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
node.generate(1)
|
btcgroup2/bitcoin
|
test/functional/test_framework/util.py
|
Python
|
mit
| 19,182
|
# Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def isBalanced(self, root: TreeNode) -> bool:
def dfs(root):
if root is None:
return (True, 0)
lb, lh = dfs(root.left)
rb, rh = dfs(root.right)
h = max(lh, rh) + 1
return (lb and rb and abs(lh - rh) <= 1, h)
return dfs(root)[0]
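# Minimal sanity check (illustrative tree, not part of the original solution):
if __name__ == '__main__':
    root = TreeNode(1)
    root.left = TreeNode(2)
    root.left.left = TreeNode(3)
    # left height 2 vs right height 0 -> depth difference 2, so unbalanced
    print(Solution().isBalanced(root))  # False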
|
jiadaizhao/LeetCode
|
0101-0200/0110-Balanced Binary Tree/0110-Balanced Binary Tree.py
|
Python
|
mit
| 493
|