hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ace493329cecaea3e436ff039a918e81340bbf24 | 4,811 | py | Python | Spider_Address.py | zhguliangsheng/dzdp_scrapy | 061e1dc8e8d60a12be2c6278bd6c2ba0ab0d92e8 | [
"Apache-2.0"
] | 3 | 2018-08-11T09:36:40.000Z | 2019-02-28T12:59:27.000Z | Spider_Address.py | zhguliangsheng/dzdp_scrapy | 061e1dc8e8d60a12be2c6278bd6c2ba0ab0d92e8 | [
"Apache-2.0"
] | null | null | null | Spider_Address.py | zhguliangsheng/dzdp_scrapy | 061e1dc8e8d60a12be2c6278bd6c2ba0ab0d92e8 | [
"Apache-2.0"
] | null | null | null | # coding: UTF-8
import xlwt
'''
爬取网页时直接出现403,意思是没有访问权限
'''
from bs4 import BeautifulSoup
import urllib
# 入口网页
start_url = 'https://www.dianping.com/changsha/ch10' #长沙美食
def get_content(url):
    """Fetch *url* with browser-like headers and return the decoded HTML.

    Sends a desktop Chrome User-Agent plus a session Cookie so dianping.com
    does not immediately answer 403 Forbidden.

    :param url: absolute URL of the page to download.
    :return: page body decoded as UTF-8 text.
    """
    # The module header only does `import urllib`, which on Python 3 does
    # NOT make the `request` submodule available; import it explicitly here
    # so the attribute access below cannot fail with AttributeError.
    import urllib.request
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
        'Cookie': 'cy=1; cye=shanghai; _lx_utm=utm_source%3DBaidu%26utm_medium%3Dorganic; _lxsdk_cuid=164c9e2cad2c8-0881b9c09552e6-5b193413-100200-164c9e2cad4c8; _lxsdk=164c9e2cad2c8-0881b9c09552e6-5b193413-100200-164c9e2cad4c8; _hc.v=b4246e94-470f-1aa8-cf98-df323c97ad13.1532395442; s_ViewType=10; _lxsdk_s=164c9e2cad4-387-e39-c15%7C%7C112'
    }
    req = urllib.request.Request(url=url, headers=headers)
    return urllib.request.urlopen(req).read().decode("utf-8")
'''
获取所有行政区的url
'''
def region_url(html):
    """Return the href of every administrative-district link in the page.

    Looks inside ``<div id="region-nav">`` (e.g.
    ``<a href="/search/category/344/10/r299"><span>芙蓉区</span></a>``).
    """
    nav = BeautifulSoup(html, 'lxml').find('div', id="region-nav")
    return [anchor['href'] for anchor in nav.find_all('a')]
# 获取商户的详情页的url地址
# find:取第一个(返回一个具体的元素,没有为null) find_all:匹配所有(返回列表,没有返回[])
def get_shop_url(html):
    """Return the detail-page URL of every shop on a listing page.

    Each shop tile is a ``<div class="tit">`` whose first ``<a>`` links to
    the detail page.  ``find`` returns the first match (or None),
    ``find_all`` returns a (possibly empty) list.
    """
    listing = BeautifulSoup(html, 'lxml')
    return [tile.find('a')['href']
            for tile in listing.find_all('div', class_='tit')]
# 获取所得信息(店名,价格,评分)。。。解析页面
def get_detail(html):
    """Parse one shop detail page.

    Returns a 7-tuple:
    (shop name, taste score, ambience score, service score,
     average price, comment count, street address).

    Relevant markup, for reference:
      <h1 class="shop-name">...</h1>
      <span id="avgPriceTitle" class="item">人均:-</span>
      <span id="comment_score"><span class="item">口味:7.6</span>...</span>
      <span id="reviewCount" class="item">3条评论</span>
      <span class="item" itemprop="street-address" title="...">...</span>
    """
    page = BeautifulSoup(html, 'lxml')
    name = page.find('div', class_='breadcrumb').find('span').text
    price = page.find('span', id="avgPriceTitle").text
    # Three score spans: taste, ambience, service (in that order).
    scores = page.find('span', id="comment_score").find_all('span',
                                                            class_="item")
    comments = page.find('span', id="reviewCount").text
    address = page.find('span', class_="item",
                        itemprop="street-address").text.strip()
    return (name, scores[0].text, scores[1].text, scores[2].text,
            price, comments, address)
# 文件作为脚本直接执行,而import到其他脚本中是不会被执行的。
# Runs only when executed as a script; importing this module does nothing.
if __name__ == '__main__':
    items = []
    html = get_content(start_url)
    region_url_list = region_url(html)
    # Walk every administrative district, then every listing page (the site
    # paginates to at most 50 pages), then every shop detail page.
    for url in region_url_list:
        # Best-effort scraping: skip the whole district on any error.
        # NOTE: the original used a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; catch Exception instead.
        try:
            for n in range(1, 51):
                html = get_content(url + 'p' + str(n))
                shop_url_list = get_shop_url(html)
                for shop_url in shop_url_list:
                    # 403 Forbidden can appear mid-crawl; sustained crawling
                    # would need a proxy pool (Referer/Host/Cookie headers
                    # also help against hotlink protection).
                    detail_html = get_content(shop_url)
                    items.append(get_detail(detail_html))
        except Exception:
            continue
    # Dump the collected rows into an Excel workbook.
    new_table = 'dzdp.xls'
    wb = xlwt.Workbook(encoding='utf-8')
    ws = wb.add_sheet('test1')
    headData = ['商户名字', '口味评分', '环境评分', '服务评分', '人均价格', '评论数量', '地址']
    for colnum in range(0, 7):
        ws.write(0, colnum, headData[colnum], xlwt.easyxf('font:bold on'))
    # Data rows start at row 1, directly below the bold header row.
    for row, item in enumerate(items, start=1):
        for col in range(0, 7):
            ws.write(row, col, item[col])
    wb.save(new_table)
| 40.428571 | 449 | 0.61401 |
ace4936664a7424138d498a258a8b4c0b6ddff4b | 12,304 | py | Python | pyramid_routehelper/__init__.py | Pylons/pyramid_routehelper | 040306a3ca3309cd2b7a79549d52aed2410447eb | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1 | 2016-02-11T14:41:01.000Z | 2016-02-11T14:41:01.000Z | pyramid_routehelper/__init__.py | Pylons/pyramid_routehelper | 040306a3ca3309cd2b7a79549d52aed2410447eb | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1 | 2017-02-18T08:00:08.000Z | 2017-02-18T08:00:08.000Z | pyramid_routehelper/__init__.py | Pylons/pyramid_routehelper | 040306a3ca3309cd2b7a79549d52aed2410447eb | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | from pyramid.config import ConfigurationError
import inspect
__all__ = ['includeme', 'add_resource', 'action']
def includeme(config):
    """Pyramid include hook: expose :func:`add_resource` as a directive.

    After ``config.include('pyramid_routehelper')`` the configurator gains
    a ``config.add_resource(...)`` method.
    """
    config.add_directive('add_resource', add_resource)
def strip_slashes(name):
    """Trim at most one leading and one trailing slash from *name*."""
    trimmed = name[1:] if name.startswith('/') else name
    return trimmed[:-1] if trimmed.endswith('/') else trimmed
class action(object):
    """Mark a handler method for registration by
    :func:`~pyramid_routehelper.add_resource`.

    Keyword arguments mirror :class:`~pyramid.view.view_config`, except for
    how the action name is chosen:

    ``alt_for``
        Register the decorated method as an additional view for the named
        action, instead of deriving the action name from the method itself.

    ``format``
        Restrict the view described by this decorator to the given format.
    """
    def __init__(self, **kw):
        # Keep the raw settings; they are consumed later by add_resource.
        self.kw = kw

    def __call__(self, wrapped):
        exposed = getattr(wrapped, '__exposed__', None)
        if exposed is None:
            wrapped.__exposed__ = [self.kw]
        else:
            exposed.append(self.kw)
        return wrapped
# map.resource port
def add_resource(self, handler, member_name, collection_name, **kwargs):
    """ Add some RESTful routes for a resource handler.

    This function should never be called directly; instead the
    ``pyramid_routehelper.includeme`` function should be used to include this
    function into an application; the function will thereafter be available
    as a method of the resulting configurator.

    The concept of a web resource maps somewhat directly to 'CRUD'
    operations. The overlying things to keep in mind is that
    adding a resource handler is about handling creating, viewing, and
    editing that resource.

    ``handler`` is a dotted name of (or direct reference to) a
    Python handler class, e.g. ``'my.package.handlers.MyHandler'``.

    ``member_name`` should be the appropriate singular version of the resource
    given your locale and used with members of the collection.

    ``collection_name`` will be used to refer to the resource collection
    methods and should be a plural version of the member_name argument.

    All keyword arguments are optional.

    ``collection``
        Additional action mappings used to manipulate/view the
        entire set of resources provided by the handler.

        Example::

            config.add_resource('myproject.handlers:MessageHandler', 'message', 'messages', collection={'rss':'GET'})
            # GET /messages/rss (maps to the rss action)
            # also adds named route "rss_message"

    ``member``
        Additional action mappings used to access an individual
        'member' of this handler's resources.

        Example::

            config.add_resource('myproject.handlers:MessageHandler', 'message', 'messages', member={'mark':'POST'})
            # POST /messages/1/mark (maps to the mark action)
            # also adds named route "mark_message"

    ``new``
        Action mappings that involve dealing with a new member in
        the controller resources.

        Example::

            config.add_resource('myproject.handlers:MessageHandler', 'message', 'messages', new={'preview':'POST'})
            # POST /messages/new/preview (maps to the preview action)
            # also adds a url named "preview_new_message"

    ``path_prefix``
        Prepends the URL path for the Route with the path_prefix
        given. This is most useful for cases where you want to mix
        resources or relations between resources.

    ``name_prefix``
        Prepends the route names that are generated with the
        name_prefix given. Combined with the path_prefix option,
        it's easy to generate route names and paths that represent
        resources that are in relations.

        Example::

            config.add_resource('myproject.handlers:CategoryHandler', 'message', 'messages',
                                path_prefix='/category/:category_id',
                                name_prefix="category_")
            # GET /category/7/messages/1
            # has named route "category_message"

    ``parent_resource``
        A ``dict`` containing information about the parent
        resource, for creating a nested resource. It should contain
        the ``member_name`` and ``collection_name`` of the parent
        resource.

        If ``parent_resource`` is supplied and ``path_prefix``
        isn't, ``path_prefix`` will be generated from
        ``parent_resource`` as
        "<parent collection name>/:<parent member name>_id".

        If ``parent_resource`` is supplied and ``name_prefix``
        isn't, ``name_prefix`` will be generated from
        ``parent_resource`` as "<parent member name>_".

        Example::

            >>> from pyramid.url import route_path
            >>> config.add_resource('myproject.handlers:LocationHandler', 'location', 'locations',
            ...                     parent_resource=dict(member_name='region',
            ...                                          collection_name='regions'))
            >>> # path_prefix is "regions/:region_id"
            >>> # name prefix is "region_"
            >>> route_path('region_locations', region_id=13)
            '/regions/13/locations'
            >>> route_path('region_new_location', region_id=13)
            '/regions/13/locations/new'
            >>> route_path('region_location', region_id=13, id=60)
            '/regions/13/locations/60'
            >>> route_path('region_edit_location', region_id=13, id=60)
            '/regions/13/locations/60/edit'

        Overriding generated ``path_prefix``::

            >>> config.add_resource('myproject.handlers:LocationHandler', 'location', 'locations',
            ...                     parent_resource=dict(member_name='region',
            ...                                          collection_name='regions'),
            ...                     path_prefix='areas/:area_id')
            >>> # name prefix is "region_"
            >>> route_path('region_locations', area_id=51)
            '/areas/51/locations'

        Overriding generated ``name_prefix``::

            >>> config.add_resource('myproject.handlers:LocationHandler', 'location', 'locations',
            ...                     parent_resource=dict(member_name='region',
            ...                                          collection_name='regions'),
            ...                     name_prefix='')
            >>> # path_prefix is "regions/:region_id"
            >>> route_path('locations', region_id=51)
            '/regions/51/locations'
    """
    handler = self.maybe_dotted(handler)
    # Gather view settings declared via the @action decorator on the
    # handler's methods, keyed by action name.  Settings without a 'format'
    # become the single default view for the action; formatted settings
    # accumulate in a 'formatted' list.
    action_kwargs = {}
    for name,meth in inspect.getmembers(handler, inspect.ismethod):
        if hasattr(meth, '__exposed__'):
            for settings in meth.__exposed__:
                config_settings = settings.copy()
                action_name = config_settings.pop('alt_for', name)
                # If format is not set, use the route that doesn't specify a format
                if 'format' not in config_settings:
                    if 'default' in action_kwargs.get(action_name,{}):
                        raise ConfigurationError("Two methods have been decorated without specifying a format.")
                    else:
                        action_kwargs.setdefault(action_name, {})['default'] = config_settings
                # Otherwise, append to the list of view config settings for formatted views
                else:
                    config_settings['attr'] = name
                    action_kwargs.setdefault(action_name, {}).setdefault('formatted',[]).append(config_settings)
    collection = kwargs.pop('collection', {})
    member = kwargs.pop('member', {})
    new = kwargs.pop('new', {})
    path_prefix = kwargs.pop('path_prefix', None)
    name_prefix = kwargs.pop('name_prefix', None)
    parent_resource = kwargs.pop('parent_resource', None)
    # Derive path/name prefixes from the parent resource when not given
    # explicitly (see the docstring examples above).
    if parent_resource is not None:
        if path_prefix is None:
            path_prefix = '%s/:%s_id' % (parent_resource['collection_name'], parent_resource['member_name'])
        if name_prefix is None:
            name_prefix = '%s_' % parent_resource['member_name']
    else:
        if path_prefix is None: path_prefix = ''
        if name_prefix is None: name_prefix = ''
    # Every resource gets 'edit' (member) and 'new' routes by default.
    member['edit'] = 'GET'
    new['new'] = 'GET'
    # Invert {action: METHOD} into {METHOD: [actions]}.
    # NOTE(review): `lambda (key,value): ...` tuple-parameter unpacking and
    # the `iteritems()` calls below are Python-2-only syntax; this module
    # will not import on Python 3 as written.
    def swap(dct, newdct):
        map(lambda (key,value): newdct.setdefault(value.upper(), []).append(key), dct.items())
        return newdct
    collection_methods = swap(collection, {})
    member_methods = swap(member, {})
    new_methods = swap(new, {})
    # Insert the standard CRUD actions ahead of any user-supplied ones so
    # they are treated as the "primary" action for their HTTP method.
    collection_methods.setdefault('POST', []).insert(0, 'create')
    member_methods.setdefault('PUT', []).insert(0, 'update')
    member_methods.setdefault('DELETE', []).insert(0, 'delete')
    # Continue porting code
    controller = strip_slashes(collection_name)
    path_prefix = strip_slashes(path_prefix)
    path_prefix = '/' + path_prefix
    if path_prefix and path_prefix != '/':
        path = path_prefix + '/' + controller
    else:
        path = '/' + controller
    collection_path = path
    new_path = path + '/new'
    member_path = path + '/:id'
    # Route names must be unique per configurator; remember what we added.
    added_route_names = {}
    def add_route_if_new(self, route_name, path, **kwargs):
        # Register a route only once; repeated actions reuse the first one.
        if route_name not in added_route_names:
            self.add_route(route_name, path, **kwargs)
            added_route_names[route_name] = path
    def add_route_and_view(self, action, route_name, path, request_method='any'):
        # 'any' means "no method predicate" for both the route and the view.
        if request_method != 'any':
            request_method = request_method.upper()
        else:
            request_method = None
        # NOTE(review): `kwargs` here is the leftover **kwargs of
        # add_resource (after the known options were popped) passed through
        # to add_route — presumably route predicates; confirm with callers.
        add_route_if_new(self, route_name, path, **kwargs)
        self.add_view(view=handler, attr=action, route_name=route_name, request_method=request_method, **action_kwargs.get(action, {}).get('default', {}))
        # One extra "<fmt>_formatted_<name>" route/view pair per declared
        # format (e.g. "json_formatted_message" matching "/messages/1.json").
        for format_kwargs in action_kwargs.get(action, {}).get('formatted', []):
            format = format_kwargs.pop('format')
            formatted_route_name = "%s_formatted_%s" % (format, route_name)
            add_route_if_new(self, formatted_route_name, "%s.%s" % (path, format), **kwargs)
            self.add_view(view=handler, attr=format_kwargs.pop('attr'), request_method=request_method,
                          route_name = "%s_formatted_%s" % (format, route_name), **format_kwargs)
    # Collection routes: non-GET methods treat their first action as the
    # primary one mapped onto the bare collection path.
    for method, lst in collection_methods.iteritems():
        primary = (method != 'GET' and lst.pop(0)) or None
        for action in lst:
            add_route_and_view(self, action, "%s%s_%s" % (name_prefix, action, collection_name), "%s/%s" % (collection_path,action))
        if primary:
            add_route_and_view(self, primary, name_prefix + collection_name, collection_path, method)
    # Add route and view for collection
    add_route_and_view(self, 'index', name_prefix + collection_name, collection_path, 'GET')
    # "new" routes: /collection/new plus /collection/new/<action>.
    for method, lst in new_methods.iteritems():
        for action in lst:
            path = (action == 'new' and new_path) or "%s/%s" % (new_path, action)
            name = "new_" + member_name
            if action != 'new':
                name = action + "_" + name
            # NOTE(review): formatted_path is computed but never used —
            # looks like dead code left over from the routes port.
            formatted_path = (action == 'new' and new_path + '.:format') or "%s/%s.:format" % (new_path, action)
            add_route_and_view(self, action, name_prefix + name, path, method)
    # Member routes: PUT/DELETE pop their primary action onto /collection/:id.
    for method, lst in member_methods.iteritems():
        if method not in ['POST', 'GET', 'any']:
            primary = lst.pop(0)
        else:
            primary = None
        for action in lst:
            add_route_and_view(self, action, '%s%s_%s' % (name_prefix, action, member_name), '%s/%s' % (member_path, action))
        if primary:
            add_route_and_view(self, primary, name_prefix + member_name, member_path, method)
    # NOTE(review): `method` here leaks from the last loop iteration above,
    # so the request method of the 'show' view depends on dict iteration
    # order — this was very likely meant to be 'GET'; confirm upstream.
    add_route_and_view(self, 'show', name_prefix + member_name, member_path, method)
# Submapper support
# Sub_domain option
# Converters?? | 41.708475 | 154 | 0.610696 |
ace4951121bb20d02e964382f9f2c2540550198b | 90,734 | py | Python | mkt/webapps/tests/test_models.py | jasonthomas/zamboni | 948247609cb4b2ed72e6daa4da5257927bfe0c17 | [
"BSD-3-Clause"
] | null | null | null | mkt/webapps/tests/test_models.py | jasonthomas/zamboni | 948247609cb4b2ed72e6daa4da5257927bfe0c17 | [
"BSD-3-Clause"
] | null | null | null | mkt/webapps/tests/test_models.py | jasonthomas/zamboni | 948247609cb4b2ed72e6daa4da5257927bfe0c17 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
import functools
import json
import os
import tempfile
import unittest
import uuid
import zipfile
from contextlib import nested
from datetime import datetime, timedelta
from decimal import Decimal
from django import forms
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core import mail
from django.core.urlresolvers import reverse
from django.db.models.signals import post_delete, post_save
from django.test.utils import override_settings
from django.utils import translation
import elasticsearch
import mock
from mock import patch
from nose.tools import eq_, ok_, raises
import mkt
from lib.utils import static_url
from mkt.constants import apps, MANIFEST_CONTENT_TYPE
from mkt.constants.applications import DEVICE_TYPES
from mkt.constants.iarc_mappings import (DESCS, INTERACTIVES,
REVERSE_DESCS, REVERSE_INTERACTIVES)
from mkt.constants.payments import PROVIDER_BANGO, PROVIDER_REFERENCE
from mkt.constants.regions import RESTOFWORLD
from mkt.developers.models import (AddonPaymentAccount, PaymentAccount,
SolitudeSeller)
from mkt.developers.providers import ALL_PROVIDERS
from mkt.files.models import File
from mkt.files.tests.test_models import UploadTest as BaseUploadTest
from mkt.files.utils import WebAppParser
from mkt.prices.models import AddonPremium, Price, PriceCurrency
from mkt.reviewers.models import EscalationQueue, RereviewQueue
from mkt.site.fixtures import fixture
from mkt.site.helpers import absolutify
from mkt.site.storage_utils import (public_storage, private_storage,
storage_is_remote)
from mkt.site.tests import (DynamicBoolFieldsTestMixin, ESTestCase,
TestCase, WebappTestCase, user_factory)
from mkt.site.utils import app_factory, version_factory
from mkt.submit.tests.test_views import BasePackagedAppTest, BaseWebAppTest
from mkt.translations.models import Translation
from mkt.users.models import UserProfile
from mkt.versions.models import update_status, Version
from mkt.webapps.indexers import WebappIndexer
from mkt.webapps.models import (AddonDeviceType, AddonExcludedRegion,
AddonUpsell, AppFeatures, AppManifest,
BlockedSlug, ContentRating, Geodata,
get_excluded_in, IARCCert, Installed,
Preview, RatingDescriptors, RatingInteractives,
version_changed, Webapp)
from mkt.webapps.signals import version_changed as version_changed_signal
class TestWebapp(WebappTestCase):
    """Unit tests for the ``Webapp`` model: icon/promo-image URLs, payment
    accounts, (soft) deletion, pricing tiers, region exclusion, supported
    locales, offline detection and file sizes."""

    def add_payment_account(self, app, provider_id, user=None):
        # Helper: wire up the full payment chain (SolitudeSeller ->
        # PaymentAccount -> AddonPaymentAccount) for `app`.
        if not user:
            user = user_factory()
        payment = PaymentAccount.objects.create(
            solitude_seller=SolitudeSeller.objects.create(user=user,
                                                          uuid=uuid.uuid4()),
            provider=provider_id,
            user=user,
            seller_uri=uuid.uuid4(),
            uri=uuid.uuid4())
        return AddonPaymentAccount.objects.create(
            addon=app, payment_account=payment, product_uri=uuid.uuid4())

    def test_get_icon_url(self):
        # Icon URL shape differs between remote and local storage backends.
        app = self.get_app()
        if storage_is_remote():
            path = '%s/%s-%s.png' % (app.get_icon_dir(), app.pk, 32)
            expected = '%s?modified=never' % public_storage.url(path)
        else:
            expected = (static_url('ADDON_ICON_URL')
                        % (str(app.id)[0:3], app.id, 32, 'never'))
        assert app.get_icon_url(32).endswith(expected), (
            'Expected %s, got %s' % (expected, app.icon_url))
        app.icon_hash = 'abcdef'
        assert app.get_icon_url(32).endswith('?modified=abcdef')
        app.icon_type = None
        assert app.get_icon_url(32).endswith('hub/default-32.png')

    def test_get_promo_img_url(self):
        app = self.get_app()
        # Without a hash there is no promo image URL at all.
        eq_(app.get_promo_img_url('640'), '')
        eq_(app.get_promo_img_url('1050'), '')
        app.promo_img_hash = 'chicken'
        ok_('webapp_promo_imgs/337/337141-640.png?modified=chicken' in
            app.get_promo_img_url('640'))
        ok_('webapp_promo_imgs/337/337141-1050.png?modified=chicken' in
            app.get_promo_img_url('1050'))

    def test_has_payment_account(self):
        app = self.get_app()
        assert not app.has_payment_account()
        self.add_payment_account(app, PROVIDER_BANGO)
        assert app.has_payment_account()

    def test_has_multiple_payment_accounts(self):
        app = self.get_app()
        assert not app.has_multiple_payment_accounts(), 'no accounts'
        account = self.add_payment_account(app, PROVIDER_BANGO)
        assert not app.has_multiple_payment_accounts(), 'one account'
        self.add_payment_account(app, PROVIDER_REFERENCE, user=account.user)
        ok_(app.has_multiple_payment_accounts(), 'two accounts')

    def test_no_payment_account(self):
        app = self.get_app()
        assert not app.has_payment_account()
        with self.assertRaises(app.PayAccountDoesNotExist):
            app.payment_account(PROVIDER_BANGO)

    def test_get_payment_account(self):
        app = self.get_app()
        acct = self.add_payment_account(app, PROVIDER_BANGO)
        fetched_acct = app.payment_account(PROVIDER_BANGO)
        eq_(acct, fetched_acct)

    def test_delete_reason(self):
        """Test deleting with a reason gives the reason in the mail."""
        app = self.get_app()
        reason = u'trêason'
        eq_(len(mail.outbox), 0)
        app.delete(msg='bye', reason=reason)
        eq_(len(mail.outbox), 1)
        assert reason in mail.outbox[0].body

    def test_delete_popularity(self):
        # Deleting an app reports its popularity in the notification mail.
        app = self.get_app()
        pop = 47
        app.popularity.create(region=0, value=pop)
        eq_(len(mail.outbox), 0)
        app.delete(msg='bye')
        eq_(len(mail.outbox), 1)
        assert ('POPULARITY: %s' % (pop,)) in mail.outbox[0].body

    def test_soft_deleted(self):
        app = self.get_app()
        eq_(len(Webapp.objects.all()), 1)
        eq_(len(Webapp.with_deleted.all()), 1)
        app.delete('boom shakalakalaka')
        eq_(len(Webapp.objects.all()), 0)
        eq_(len(Webapp.with_deleted.all()), 1)
        # When an app is deleted its slugs and domain should get relinquished.
        post_mortem = Webapp.with_deleted.filter(id=app.id)
        eq_(post_mortem.count(), 1)
        eq_(getattr(post_mortem[0], 'app_domain'), None)
        eq_(getattr(post_mortem[0], 'app_slug'), '337141')

    def test_soft_deleted_valid(self):
        app = self.get_app()
        Webapp.objects.create(status=mkt.STATUS_DELETED)
        eq_(list(Webapp.objects.valid()), [app])
        eq_(list(Webapp.with_deleted.valid()), [app])

    def test_delete_incomplete_with_deleted_version(self):
        """Test deleting incomplete add-ons with no public version attached."""
        app = self.get_app()
        app.current_version.delete()
        eq_(Version.objects.count(), 0)
        eq_(Version.with_deleted.count(), 1)
        app.update(status=0, highest_status=0)
        # We want to be in the worst possible situation, no direct foreign key
        # to the deleted versions, do we call update_version() now that we have
        # an incomplete app.
        app.update_version()
        eq_(app.latest_version, None)
        eq_(app.current_version, None)
        app.delete()
        # The app should have been soft-deleted.
        eq_(len(mail.outbox), 1)
        eq_(Webapp.objects.count(), 0)
        eq_(Webapp.with_deleted.count(), 1)

    def test_undelete(self):
        # Undelete restores the status the app had before deletion.
        app = self.get_app()
        app.update(status=mkt.STATUS_PUBLIC)
        app.delete()
        eq_(app.status, mkt.STATUS_DELETED)
        app.undelete()
        app.reload()
        eq_(app.status, mkt.STATUS_PUBLIC)

    def test_get_price(self):
        app = self.get_app()
        self.make_premium(app)
        eq_(app.get_price(region=mkt.regions.USA.id), 1)

    def test_get_price_tier(self):
        app = self.get_app()
        self.make_premium(app)
        eq_(str(app.get_tier().price), '1.00')
        ok_(app.get_tier_name())

    def test_get_price_tier_no_charge(self):
        app = self.get_app()
        self.make_premium(app, 0)
        eq_(str(app.get_tier().price), '0')
        ok_(app.get_tier_name())

    @mock.patch('mkt.versions.models.Version.is_privileged', True)
    def test_app_type_privileged(self):
        # A packaged app with a privileged version reports type 'privileged'.
        app = self.get_app()
        app.update(is_packaged=True)
        eq_(app.app_type, 'privileged')

    def test_excluded_in(self):
        app = self.get_app()
        region = mkt.regions.BRA
        AddonExcludedRegion.objects.create(addon=app, region=region.id)
        self.assertSetEqual(get_excluded_in(region.id), [app.id])

    def test_supported_locale_property(self):
        app = self.get_app()
        eq_(app.supported_locales,
            (u'English (US)', [u'English (US)', u'Espa\xf1ol',
                               u'Portugu\xeas (do\xa0Brasil)']))

    def test_supported_locale_property_empty(self):
        app = self.get_app()
        app.current_version.update(supported_locales='')
        eq_(app.supported_locales, (u'English (US)', []))

    def test_supported_locale_property_bad(self):
        # Unknown locale codes ('xx') are silently dropped from the list.
        app = self.get_app()
        app.current_version.update(supported_locales='de,xx', _signal=False)
        eq_(app.supported_locales, (u'English (US)', [u'Deutsch']))

    def test_supported_locale_app_non_public(self):
        """
        Test supported locales falls back to latest_version when not public.
        """
        app = self.get_app()
        app.update(status=mkt.STATUS_PENDING)
        app.latest_version.files.update(status=mkt.STATUS_PENDING)
        app.update_version()
        eq_(app.supported_locales,
            (u'English (US)',
             [u'English (US)', u'Espa\xf1ol', u'Portugu\xeas (do\xa0Brasil)']))

    def test_guess_is_offline_when_appcache_path(self):
        app = self.get_app()
        # If there's no appcache_path defined, ain't an offline-capable app.
        am = AppManifest.objects.get(version=app.current_version)
        eq_(app.guess_is_offline(), False)
        # If there's an appcache_path defined, this is an offline-capable app.
        manifest = json.loads(am.manifest)
        manifest['appcache_path'] = '/manifest.appcache'
        am.update(manifest=json.dumps(manifest))
        # reload isn't enough, it doesn't clear cached_property.
        app = self.get_app()
        eq_(app.guess_is_offline(), True)

    def test_guess_is_offline_no_manifest(self):
        app = Webapp()
        eq_(app.guess_is_offline(), False)

    @mock.patch('mkt.webapps.models.cache.get')
    def test_is_offline_when_packaged(self, mock_get):
        # Packaged apps are considered offline regardless of cache contents.
        mock_get.return_value = ''
        eq_(Webapp(is_packaged=True).guess_is_offline(), True)
        eq_(Webapp(is_packaged=False).guess_is_offline(), False)

    def test_guess_is_offline_no_version(self):
        app = Webapp()
        with mock.patch.object(Webapp, 'latest_version', None):
            eq_(app.guess_is_offline(), False)

    def test_guess_is_offline_no_files(self):
        app = Webapp()
        version = mock.MagicMock(all_files=[])
        with mock.patch.object(Webapp, 'latest_version', version):
            eq_(app.guess_is_offline(), False)

    @mock.patch('mkt.webapps.models.Webapp.has_payment_account')
    def test_payments_complete(self, pay_mock):
        # Default to complete if it's not needed.
        pay_mock.return_value = False
        app = self.get_app()
        assert app.payments_complete()
        self.make_premium(app)
        assert not app.payments_complete()
        pay_mock.return_value = True
        assert app.payments_complete()

    def test_get_region_ids_no_exclusions(self):
        # This returns IDs for the *included* regions.
        eq_(self.get_app().get_region_ids(), mkt.regions.REGION_IDS)

    def test_get_regions_no_exclusions(self):
        # This returns the class definitions for the *included* regions.
        eq_(sorted(self.get_app().get_regions()),
            sorted(mkt.regions.REGIONS_CHOICES_ID_DICT.values()))

    def test_get_regions_sort(self):
        # Regions sort by slug by default; 'name' and 'id' are also accepted.
        eq_(self.get_app().get_regions(),
            sorted(mkt.regions.REGIONS_CHOICES_ID_DICT.values(),
                   key=lambda x: x.slug))
        eq_(self.get_app().get_regions(sort_by='name'),
            sorted(mkt.regions.REGIONS_CHOICES_ID_DICT.values(),
                   key=lambda x: x.name))
        eq_(self.get_app().get_regions(sort_by='id'),
            sorted(mkt.regions.REGIONS_CHOICES_ID_DICT.values(),
                   key=lambda x: x.id))

    def test_file_size(self):
        # file_size follows current_version, falling back to latest_version.
        app = self.get_app()
        ok_(app.file_size)
        f = app.current_version.all_files[0]
        f.update(size=12345)
        eq_(app.file_size, 12345)
        app.update(_current_version=None)
        f = app.latest_version.all_files[0]
        f.update(size=54321)
        eq_(app.file_size, 54321)
class TestCleanSlug(TestCase):
    """Tests for ``Webapp.clean_slug``: clash resolution, id fallbacks,
    blocked slugs, long-slug truncation and slugification."""

    def test_clean_slug_new_object(self):
        # Make sure there's at least an addon with the "webapp" slug,
        # subsequent ones should be "webapp-1", "webapp-2", etc.
        a = Webapp.objects.create()
        eq_(a.app_slug, 'webapp')
        # Start with a first clash. This should give us "webapp-1".
        # We're not saving yet, we're testing the slug creation without an id.
        b = Webapp()
        b.clean_slug()
        eq_(b.app_slug, 'webapp-1')
        # Now save the instance to the database for future clashes.
        b.save()
        # Test on another object without an id.
        c = Webapp()
        c.clean_slug()
        eq_(c.app_slug, 'webapp-2')
        # Even if an addon is deleted, don't clash with its slug.
        c.status = mkt.STATUS_DELETED
        # Now save the instance to the database for future clashes.
        c.save()
        # And yet another object without an id. Make sure we're not trying to
        # assign the 'webapp-2' slug from the deleted addon.
        d = Webapp()
        d.clean_slug()
        eq_(d.app_slug, 'webapp-3')

    def test_clean_slug_with_id(self):
        # Create an addon and save it to have an id.
        a = Webapp.objects.create()
        # Start over: don't use the name nor the id to generate the slug.
        a.app_slug = a.name = ""
        a.clean_slug()
        # Slugs created from an id are of the form "id~", eg "123~" to avoid
        # clashing with URLs.
        eq_(a.app_slug, "%s~" % a.id)
        # And again, this time make it clash.
        b = Webapp.objects.create()
        # Set a's slug to be what should be created for b from its id.
        a.app_slug = "%s~" % b.id
        a.save()
        # Now start over for b.
        b.app_slug = b.name = ""
        b.clean_slug()
        eq_(b.app_slug, "%s~-1" % b.id)

    def test_clean_slug_with_name(self):
        # Make sure there's at least an addon with the "fooname" slug,
        # subsequent ones should be "fooname-1", "fooname-2" ...
        a = Webapp.objects.create(name="fooname")
        eq_(a.app_slug, "fooname")
        b = Webapp(name="fooname")
        b.clean_slug()
        eq_(b.app_slug, "fooname-1")

    def test_clean_slug_with_slug(self):
        # Make sure there's at least an addon with the "fooslug" slug,
        # subsequent ones should be "fooslug-1", "fooslug-2" ...
        a = Webapp.objects.create(name="fooslug")
        eq_(a.app_slug, "fooslug")
        b = Webapp(name="fooslug")
        b.clean_slug()
        eq_(b.app_slug, "fooslug-1")

    def test_clean_slug_blocked_slug(self):
        blocked_slug = 'fooblocked'
        BlockedSlug.objects.create(name=blocked_slug)
        a = Webapp(app_slug=blocked_slug)
        a.clean_slug()
        # Blocked slugs (like "activate" or IDs) have a "~" appended to
        # avoid clashing with URLs.
        eq_(a.app_slug, "%s~" % blocked_slug)
        # Now save the instance to the database for future clashes.
        a.save()
        b = Webapp(app_slug=blocked_slug)
        b.clean_slug()
        eq_(b.app_slug, "%s~-1" % blocked_slug)

    def test_clean_slug_blocked_slug_long_slug(self):
        long_slug = "this_is_a_very_long_slug_that_is_longer_than_thirty_chars"
        BlockedSlug.objects.create(name=long_slug[:30])
        # If there's no clashing slug, just append a "~".
        a = Webapp.objects.create(app_slug=long_slug[:30])
        eq_(a.app_slug, "%s~" % long_slug[:29])
        # If there's a clash, use the standard clash resolution.
        a = Webapp.objects.create(app_slug=long_slug[:30])
        eq_(a.app_slug, "%s-1" % long_slug[:27])

    def test_clean_slug_long_slug(self):
        long_slug = "this_is_a_very_long_slug_that_is_longer_than_thirty_chars"
        # If there's no clashing slug, don't over-shorten it.
        a = Webapp.objects.create(app_slug=long_slug)
        eq_(a.app_slug, long_slug[:30])
        # Now that there is a clash, test the clash resolution.
        b = Webapp(app_slug=long_slug)
        b.clean_slug()
        eq_(b.app_slug, "%s-1" % long_slug[:27])

    def test_clean_slug_always_slugify(self):
        illegal_chars = "some spaces and !?@"
        # Slugify if there's a slug provided.
        a = Webapp(app_slug=illegal_chars)
        a.clean_slug()
        assert a.app_slug.startswith("some-spaces-and"), a.app_slug
        # Also slugify if there's no slug provided.
        b = Webapp(name=illegal_chars)
        b.clean_slug()
        assert b.app_slug.startswith("some-spaces-and"), b.app_slug

    def test_clean_slug_worst_case_scenario(self):
        long_slug = "this_is_a_very_long_slug_that_is_longer_than_thirty_chars"
        # Generate 100 addons with this very long slug. We should encounter the
        # worst case scenario where all the available clashes have been
        # avoided. Check the comment in addons.models.clean_slug, in the "else"
        # part of the "for" loop checking for available slugs not yet assigned.
        for i in range(100):
            Webapp.objects.create(app_slug=long_slug)
        with self.assertRaises(RuntimeError):  # Fail on the 100th clash.
            Webapp.objects.create(app_slug=long_slug)
class TestPreviewModel(mkt.site.tests.TestCase):
    """Serialization and file-extension handling of the Preview model."""

    def setUp(self):
        app = Webapp.objects.create()
        self.preview = Preview.objects.create(addon=app, filetype='image/png',
                                              caption='my preview')

    def test_as_dict(self):
        expected_keys = ['caption', 'full', 'thumbnail']
        actual_keys = sorted(Preview.objects.all()[0].as_dict().keys())
        eq_(expected_keys, actual_keys)

    def test_filename(self):
        # The extension follows the filetype; an empty filetype falls
        # back to png.
        eq_(self.preview.file_extension, 'png')
        self.preview.update(filetype='')
        eq_(self.preview.file_extension, 'png')
        self.preview.update(filetype='video/webm')
        eq_(self.preview.file_extension, 'webm')

    def test_filename_in_url(self):
        self.preview.update(filetype='video/webm')
        # Thumbnails stay png even when the preview itself is a video.
        assert 'png' in self.preview.thumbnail_path
        assert 'webm' in self.preview.image_path
class TestRemoveLocale(mkt.site.tests.TestCase):
    """remove_locale() drops the translations for the given locale only."""

    def test_remove(self):
        app = Webapp.objects.create()
        app.name = {'en-US': 'woo', 'el': 'yeah'}
        app.description = {'en-US': 'woo', 'el': 'yeah', 'ja': 'ola'}
        app.save()
        app.remove_locale('el')
        remaining = (Translation.objects.filter(localized_string__isnull=False)
                     .values_list('locale', flat=True))
        # 'el' is gone from both fields; other locales are untouched.
        eq_(sorted(remaining.filter(id=app.name_id)), ['en-us'])
        eq_(sorted(remaining.filter(id=app.description_id)), ['en-us', 'ja'])

    def test_remove_version_locale(self):
        app = app_factory()
        version = app.latest_version
        version.releasenotes = {'fr': 'oui'}
        version.save()
        # Version-level translations are removed too.
        app.remove_locale('fr')
        remaining = (Translation.objects.filter(localized_string__isnull=False)
                     .values_list('locale', flat=True))
        eq_(sorted(remaining), [u'en-us'])
class TestUpdateNames(mkt.site.tests.TestCase):
    """update_names() syncs the app's translated names with a dict,
    adding, updating and removing locales as needed."""

    def setUp(self):
        self.addon = Webapp.objects.create()
        self.addon.name = self.names = {'en-US': 'woo'}
        self.addon.save()

    def get_name(self, app, locale='en-US'):
        # Fetch the raw Translation row backing the app's name.
        return Translation.objects.get(id=app.name_id, locale=locale)

    def check_names(self, names):
        """`names` in {locale: name} format."""
        for locale, localized_string in names.iteritems():
            eq_(self.get_name(self.addon, locale).localized_string,
                localized_string)

    def test_new_name(self):
        names = dict(self.names, **{'de': u'frü'})
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)

    def test_new_names(self):
        names = dict(self.names, **{'de': u'frü', 'es': u'eso'})
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)

    def test_remove_name_missing(self):
        names = dict(self.names, **{'de': u'frü', 'es': u'eso'})
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)
        # Now update without de to remove it.
        del names['de']
        self.addon.update_names(names)
        self.addon.save()
        # Leaving a locale out of the dict deletes its translation.
        names['de'] = None
        self.check_names(names)

    def test_remove_name_with_none(self):
        names = dict(self.names, **{'de': u'frü', 'es': u'eso'})
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)
        # Now update without de to remove it.
        # Passing None explicitly also deletes the translation.
        names['de'] = None
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)

    def test_add_and_remove(self):
        names = dict(self.names, **{'de': u'frü', 'es': u'eso'})
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)
        # Now add a new locale and remove an existing one.
        names['de'] = None
        names['fr'] = u'oui'
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)

    def test_default_locale_change(self):
        names = dict(self.names, **{'de': u'frü', 'es': u'eso'})
        self.addon.default_locale = 'de'
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)
        addon = self.addon.reload()
        eq_(addon.default_locale, 'de')

    def test_default_locale_change_remove_old(self):
        # Removing the old default locale's name while switching the
        # default locale must still persist the new default.
        names = dict(self.names, **{'de': u'frü', 'es': u'eso', 'en-US': None})
        self.addon.default_locale = 'de'
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(names)
        eq_(self.addon.reload().default_locale, 'de')

    def test_default_locale_removal_not_deleted(self):
        # Removing the name in the default locale is a no-op: the
        # original name must survive.
        names = {'en-US': None}
        self.addon.update_names(names)
        self.addon.save()
        self.check_names(self.names)
class TestAddonWatchDisabled(mkt.site.tests.TestCase):
    """Saving an app should hide/unhide its files when its disabled
    state changes, via the File.hide/unhide_disabled_file hooks."""

    fixtures = fixture('webapp_337141')

    def setUp(self):
        self.app = Webapp.objects.get(pk=337141)

    # Note: with stacked @patch decorators the bottom-most decorator is
    # applied first, so its mock is the first argument: (unhide, hide).
    @patch('mkt.webapps.models.File.hide_disabled_file')
    @patch('mkt.webapps.models.File.unhide_disabled_file')
    def test_no_disabled_change(self, unhide, hide):
        # A plain save with no state change must touch nothing.
        self.app.save()
        assert not unhide.called
        assert not hide.called

    @patch('mkt.webapps.models.File.hide_disabled_file')
    @patch('mkt.webapps.models.File.unhide_disabled_file')
    def test_disable_addon(self, unhide, hide):
        # Developer-disabling hides the files.
        self.app.update(disabled_by_user=True)
        assert not unhide.called
        assert hide.called

    @patch('mkt.webapps.models.File.hide_disabled_file')
    @patch('mkt.webapps.models.File.unhide_disabled_file')
    def test_admin_disable_addon(self, unhide, hide):
        # Admin-disabling (status change) hides the files too.
        self.app.update(status=mkt.STATUS_DISABLED)
        assert not unhide.called
        assert hide.called

    @patch('mkt.webapps.models.File.hide_disabled_file')
    @patch('mkt.webapps.models.File.unhide_disabled_file')
    def test_enable_addon(self, unhide, hide):
        self.app.update(status=mkt.STATUS_DISABLED)
        unhide.reset_mock()
        hide.reset_mock()
        # Returning to public unhides the files.
        self.app.update(status=mkt.STATUS_PUBLIC)
        assert unhide.called
        assert not hide.called
class TestAddonUpsell(mkt.site.tests.TestCase):
    """Upsell relationship between a free app and its premium version."""

    def setUp(self):
        self.one = Webapp.objects.create(name='free')
        self.two = Webapp.objects.create(name='premium')
        self.upsell = AddonUpsell.objects.create(free=self.one,
                                                 premium=self.two)

    def test_create_upsell(self):
        # `upsell` resolves from the free side; the premium side has none.
        eq_(self.one.upsell.free, self.one)
        eq_(self.one.upsell.premium, self.two)
        eq_(self.two.upsell, None)

    def test_delete(self):
        # Create the reverse relation too, so both rows reference self.one.
        self.upsell = AddonUpsell.objects.create(free=self.two,
                                                 premium=self.one)
        # Note: delete ignores if status 0.
        self.one.update(status=mkt.STATUS_PUBLIC)
        self.one.delete()
        # Deleting the app removes every upsell row involving it.
        eq_(AddonUpsell.objects.count(), 0)
class TestAddonPurchase(mkt.site.tests.TestCase):
    """Purchase / refund / chargeback state tracking on a premium app."""

    fixtures = fixture('user_999')

    def setUp(self):
        self.user = UserProfile.objects.get(pk=999)
        self.addon = Webapp.objects.create(premium_type=mkt.ADDON_PREMIUM,
                                           name='premium')

    def test_no_premium(self):
        # If you've purchased something, the fact that its now free
        # doesn't change the fact that you purchased it.
        self.addon.addonpurchase_set.create(user=self.user)
        self.addon.update(premium_type=mkt.ADDON_FREE)
        assert self.addon.has_purchased(self.user)

    def test_has_purchased(self):
        self.addon.addonpurchase_set.create(user=self.user)
        assert self.addon.has_purchased(self.user)

    def test_not_purchased(self):
        assert not self.addon.has_purchased(self.user)

    def test_anonymous(self):
        # Anonymous (or missing) users can never have purchases.
        assert not self.addon.has_purchased(None)
        assert not self.addon.has_purchased(AnonymousUser)

    def test_is_refunded(self):
        self.addon.addonpurchase_set.create(user=self.user,
                                            type=mkt.CONTRIB_REFUND)
        assert self.addon.is_refunded(self.user)

    def test_is_chargeback(self):
        self.addon.addonpurchase_set.create(user=self.user,
                                            type=mkt.CONTRIB_CHARGEBACK)
        assert self.addon.is_chargeback(self.user)

    def test_purchase_state(self):
        # get_purchase_type reflects whatever state the purchase row holds.
        purchase = self.addon.addonpurchase_set.create(user=self.user)
        for state in [mkt.CONTRIB_PURCHASE, mkt.CONTRIB_REFUND,
                      mkt.CONTRIB_CHARGEBACK]:
            purchase.update(type=state)
            eq_(state, self.addon.get_purchase_type(self.user))
class TestWebappLight(mkt.site.tests.TestCase):
    """
    Tests that don't require saving a Webapp to the database or want an empty
    database with no existing apps.
    """
    fixtures = fixture('prices')

    def test_is_public(self):
        app = Webapp(status=mkt.STATUS_UNLISTED)
        assert app.is_public(), 'STATUS_UNLISTED app should be is_public()'
        app.status = mkt.STATUS_PUBLIC
        assert app.is_public(), 'STATUS_PUBLIC app should be is_public()'
        # Any non-public status
        app.status = mkt.STATUS_PENDING
        assert not app.is_public(), (
            'STATUS_PENDING app should not be is_public()')
        # Public, disabled.
        app.status = mkt.STATUS_PUBLIC
        app.disabled_by_user = True
        assert not app.is_public(), (
            'STATUS_PUBLIC, disabled app should not be is_public()')

    def test_app_slug_collision(self):
        # Clashing slugs get "-N" suffixes in creation order.
        Webapp(app_slug='slug').save()
        w2 = Webapp(app_slug='slug')
        w2.save()
        eq_(w2.app_slug, 'slug-1')
        w3 = Webapp(app_slug='slug')
        w3.save()
        eq_(w3.app_slug, 'slug-2')

    def test_app_slug_blocklist(self):
        # Blocked slugs get "~" appended on save.
        BlockedSlug.objects.create(name='slug')
        w = Webapp(app_slug='slug')
        w.save()
        eq_(w.app_slug, 'slug~')

    def test_geodata_upon_app_creation(self):
        app = Webapp.objects.create()
        assert app.geodata, (
            'Geodata was not created with Webapp.')

    def test_get_url_path(self):
        webapp = Webapp(app_slug='woo')
        eq_(webapp.get_url_path(), '/app/woo/')

    def test_get_api_url(self):
        webapp = Webapp(app_slug='woo', pk=1)
        self.assertApiUrlEqual(webapp.get_api_url(), '/apps/app/woo/')

    def test_get_api_url_pk(self):
        webapp = Webapp(pk=1)
        self.assertApiUrlEqual(webapp.get_api_url(pk=True), '/apps/app/1/')

    def test_get_stats_url(self):
        webapp = Webapp(app_slug='woo')
        eq_(webapp.get_stats_url(), '/statistics/app/woo')

    def test_get_comm_thread_url(self):
        app = Webapp(app_slug='foo')
        eq_(app.get_comm_thread_url(), '/comm/app/foo')

    def test_get_origin(self):
        # Origin is scheme + host + port, with the path stripped.
        url = 'http://www.xx.com:4000/randompath/manifest.webapp'
        webapp = Webapp(manifest_url=url)
        eq_(webapp.origin, 'http://www.xx.com:4000')

    def test_get_packaged_origin(self):
        # Packaged apps take their origin from app_domain instead.
        webapp = Webapp(app_domain='app://foo.com', is_packaged=True,
                        manifest_url='')
        eq_(webapp.origin, 'app://foo.com')

    def test_punicode_domain(self):
        webapp = Webapp(app_domain=u'http://www.allizôm.org')
        eq_(webapp.punycode_app_domain, 'http://www.xn--allizm-mxa.org')

    def test_cannot_be_purchased(self):
        eq_(Webapp(premium_type=True).can_be_purchased(), False)
        eq_(Webapp(premium_type=False).can_be_purchased(), False)

    def test_can_be_purchased(self):
        # Purchasable only when both public and premium.
        w = Webapp(status=mkt.STATUS_PUBLIC, premium_type=True)
        eq_(w.can_be_purchased(), True)
        w = Webapp(status=mkt.STATUS_PUBLIC, premium_type=False)
        eq_(w.can_be_purchased(), False)

    def test_get_previews(self):
        # Previews at position < 0 are excluded from get_previews().
        w = Webapp.objects.create()
        eq_(w.get_promo(), None)
        p = Preview.objects.create(addon=w, position=0)
        eq_(list(w.get_previews()), [p])
        p.update(position=-1)
        eq_(list(w.get_previews()), [])

    def test_get_promo(self):
        # The promo is the preview stored at position -1.
        w = Webapp.objects.create()
        eq_(w.get_promo(), None)
        p = Preview.objects.create(addon=w, position=0)
        eq_(w.get_promo(), None)
        p.update(position=-1)
        eq_(w.get_promo(), p)

    def test_mark_done_pending(self):
        w = Webapp.objects.create()
        eq_(w.status, mkt.STATUS_NULL)
        w.mark_done()
        eq_(w.status, mkt.WEBAPPS_UNREVIEWED_STATUS)

    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_no_icon_in_manifest(self, get_manifest_json):
        webapp = Webapp()
        get_manifest_json.return_value = {}
        eq_(webapp.has_icon_in_manifest(), False)

    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_has_icon_in_manifest(self, get_manifest_json):
        webapp = Webapp()
        get_manifest_json.return_value = {'icons': {}}
        eq_(webapp.has_icon_in_manifest(), True)

    def test_no_version(self):
        webapp = Webapp()
        eq_(webapp.get_manifest_json(), {})
        eq_(webapp.current_version, None)

    def test_has_premium(self):
        # has_premium() is True whenever a premium object exists, even
        # when its price is zero.
        webapp = Webapp(premium_type=mkt.ADDON_PREMIUM)
        webapp._premium = mock.Mock()
        webapp._premium.price = 1
        eq_(webapp.has_premium(), True)
        webapp._premium.price = 0
        eq_(webapp.has_premium(), True)

    def test_get_price_no_premium(self):
        webapp = Webapp(premium_type=mkt.ADDON_PREMIUM)
        webapp.save()
        # Needed because get_price accesses excluded, which triggers geodata
        # which triggers a save to the db.
        eq_(webapp.get_price(), None)
        eq_(webapp.get_price_locale(), None)

    def test_has_no_premium(self):
        webapp = Webapp(premium_type=mkt.ADDON_PREMIUM)
        webapp._premium = None
        eq_(webapp.has_premium(), False)

    def test_not_premium(self):
        eq_(Webapp().has_premium(), False)

    def test_get_region_ids_with_exclusions(self):
        w1 = Webapp.objects.create()
        w2 = Webapp.objects.create()
        AddonExcludedRegion.objects.create(addon=w1, region=mkt.regions.BRA.id)
        AddonExcludedRegion.objects.create(addon=w1, region=mkt.regions.USA.id)
        AddonExcludedRegion.objects.create(addon=w2, region=mkt.regions.GBR.id)
        # Each app's region list is the full set minus its own exclusions.
        w1_regions = list(mkt.regions.REGION_IDS)
        w1_regions.remove(mkt.regions.BRA.id)
        w1_regions.remove(mkt.regions.USA.id)
        w2_regions = list(mkt.regions.REGION_IDS)
        w2_regions.remove(mkt.regions.GBR.id)
        eq_(sorted(Webapp.objects.get(id=w1.id).get_region_ids()),
            sorted(w1_regions))
        eq_(sorted(Webapp.objects.get(id=w2.id).get_region_ids()),
            sorted(w2_regions))

    def test_get_regions_with_exclusions(self):
        w1 = Webapp.objects.create()
        w2 = Webapp.objects.create()
        AddonExcludedRegion.objects.create(addon=w1, region=mkt.regions.BRA.id)
        AddonExcludedRegion.objects.create(addon=w1, region=mkt.regions.USA.id)
        AddonExcludedRegion.objects.create(addon=w2, region=mkt.regions.GBR.id)
        # Same as above but with region objects rather than ids.
        all_regions = mkt.regions.REGIONS_CHOICES_ID_DICT.values()
        w1_regions = list(all_regions)
        w1_regions.remove(mkt.regions.BRA)
        w1_regions.remove(mkt.regions.USA)
        w2_regions = list(all_regions)
        w2_regions.remove(mkt.regions.GBR)
        eq_(sorted(Webapp.objects.get(id=w1.id).get_regions()),
            sorted(w1_regions))
        eq_(sorted(Webapp.objects.get(id=w2.id).get_regions()),
            sorted(w2_regions))

    def test_assign_uuid(self):
        app = Webapp()
        eq_(app.guid, None)
        app.save()
        assert app.guid is not None, (
            'Expected app to have a UUID assigned to guid')

    @mock.patch.object(uuid, 'uuid4')
    def test_assign_uuid_max_tries(self, mock_uuid4):
        guid = 'abcdef12-abcd-abcd-abcd-abcdef123456'
        # Pin uuid4 so every attempt generates the same (taken) guid.
        mock_uuid4.return_value = uuid.UUID(guid)
        # Create another webapp and set the guid.
        Webapp.objects.create(guid=guid)
        # Now `assign_uuid()` should fail.
        app = Webapp()
        with self.assertRaises(ValueError):
            app.save()

    def test_is_premium_type_upgrade_check(self):
        app = Webapp()
        ALL = set(mkt.ADDON_FREES + mkt.ADDON_PREMIUMS)
        free_upgrade = ALL - set([mkt.ADDON_FREE])
        free_inapp_upgrade = ALL - set([mkt.ADDON_FREE, mkt.ADDON_FREE_INAPP])
        # Checking ADDON_FREE changes.
        app.premium_type = mkt.ADDON_FREE
        for pt in ALL:
            eq_(app.is_premium_type_upgrade(pt), pt in free_upgrade)
        # Checking ADDON_FREE_INAPP changes.
        app.premium_type = mkt.ADDON_FREE_INAPP
        for pt in ALL:
            eq_(app.is_premium_type_upgrade(pt), pt in free_inapp_upgrade)
        # All else is false.
        for pt_old in ALL - set([mkt.ADDON_FREE, mkt.ADDON_FREE_INAPP]):
            app.premium_type = pt_old
            for pt_new in ALL:
                eq_(app.is_premium_type_upgrade(pt_new), False)

    @raises(ValueError)
    def test_parse_domain(self):
        # Packaged apps have no manifest URL to parse a domain from.
        Webapp(is_packaged=True).parsed_app_domain

    def test_app_type_hosted(self):
        eq_(Webapp().app_type, 'hosted')

    def test_app_type_packaged(self):
        eq_(Webapp(is_packaged=True).app_type, 'packaged')

    def test_nomination_new(self):
        # Moving into PENDING sets a nomination date if there is none.
        app = app_factory()
        app.update(status=mkt.STATUS_NULL)
        app.versions.latest().update(nomination=None)
        app.update(status=mkt.STATUS_PENDING)
        assert app.versions.latest().nomination

    def test_nomination_rejected(self):
        # Re-nominating a rejected app resets the nomination to now.
        app = app_factory()
        app.update(status=mkt.STATUS_REJECTED)
        app.versions.latest().update(nomination=self.days_ago(1))
        app.update(status=mkt.STATUS_PENDING)
        self.assertCloseToNow(app.versions.latest().nomination)

    def test_nomination_pkg_pending_new_version(self):
        # New versions while pending inherit version nomination.
        app = app_factory()
        app.update(status=mkt.STATUS_PENDING, is_packaged=True)
        old_ver = app.versions.latest()
        old_ver.update(nomination=self.days_ago(1))
        old_ver.all_files[0].update(status=mkt.STATUS_PENDING)
        v = Version.objects.create(addon=app, version='1.9')
        eq_(v.nomination, old_ver.nomination)

    def test_nomination_pkg_public_new_version(self):
        # New versions while public get a new version nomination.
        app = app_factory()
        app.update(is_packaged=True)
        old_ver = app.versions.latest()
        old_ver.update(nomination=self.days_ago(1))
        v = Version.objects.create(addon=app, version='1.9')
        self.assertCloseToNow(v.nomination)

    def test_nomination_approved(self):
        # New versions while public waiting get a new version nomination.
        app = app_factory()
        app.update(is_packaged=True, status=mkt.STATUS_APPROVED)
        old_ver = app.versions.latest()
        old_ver.update(nomination=self.days_ago(1))
        old_ver.all_files[0].update(status=mkt.STATUS_APPROVED)
        v = Version.objects.create(addon=app, version='1.9')
        self.assertCloseToNow(v.nomination)

    def test_excluded_in_iarc(self):
        app = app_factory()
        geodata = app._geodata
        geodata.update(region_br_iarc_exclude=True,
                       region_de_iarc_exclude=True)
        self.assertSetEqual(get_excluded_in(mkt.regions.BRA.id), [app.id])
        self.assertSetEqual(get_excluded_in(mkt.regions.DEU.id), [app.id])

    def test_excluded_in_iarc_de(self):
        # Only the DE flag is set, so only Germany excludes the app.
        app = app_factory()
        geodata = app._geodata
        geodata.update(region_br_iarc_exclude=False,
                       region_de_iarc_exclude=True)
        self.assertSetEqual(get_excluded_in(mkt.regions.BRA.id), [])
        self.assertSetEqual(get_excluded_in(mkt.regions.DEU.id), [app.id])

    def test_excluded_in_usk_exclude(self):
        app = app_factory()
        geodata = app._geodata
        geodata.update(region_de_usk_exclude=True)
        self.assertSetEqual(get_excluded_in(mkt.regions.BRA.id), [])
        self.assertSetEqual(get_excluded_in(mkt.regions.DEU.id), [app.id])

    @mock.patch('mkt.webapps.models.Webapp.completion_errors')
    def test_completion_errors(self, complete_mock):
        # completion_error_msgs() flattens the per-section error dict.
        app = app_factory()
        complete_mock.return_value = {
            'details': ['1', '2'],
            'payments': 'pc load letter'
        }
        eq_(app.completion_error_msgs(), ['1', '2', 'pc load letter'])
        assert not app.is_fully_complete()
        complete_mock.return_value = {}
        eq_(app.completion_error_msgs(), [])
        assert app.is_fully_complete()

    @mock.patch('mkt.webapps.models.Webapp.payments_complete')
    @mock.patch('mkt.webapps.models.Webapp.is_rated')
    @mock.patch('mkt.webapps.models.Webapp.details_complete')
    def test_next_step(self, detail_step, rating_step, pay_step):
        # next_step() walks details -> ratings -> payments in order.
        for step in (detail_step, rating_step, pay_step):
            step.return_value = False
        app = app_factory(status=mkt.STATUS_NULL)
        self.make_premium(app)
        eq_(app.next_step()['url'], app.get_dev_url())
        detail_step.return_value = True
        eq_(app.next_step()['url'], app.get_dev_url('ratings'))
        rating_step.return_value = True
        eq_(app.next_step()['url'], app.get_dev_url('payments'))
        pay_step.return_value = True
        assert not app.next_step()

    def test_meta_translated_fields(self):
        """Test that we don't load translations for all the translated fields
        that live on Addon but we don't need in Webapp."""
        useless_fields = ()
        useful_fields = ('homepage', 'privacy_policy', 'name', 'description',
                         'support_email', 'support_url')
        # NOTE(review): with useless_fields empty, the next two assertions
        # are identical; the first one only matters if fields get re-added.
        self.assertSetEqual(
            Webapp._meta.translated_fields,
            [Webapp._meta.get_field(f) for f in
             useless_fields + useful_fields])
        self.assertSetEqual(
            Webapp._meta.translated_fields,
            [Webapp._meta.get_field(f) for f in useful_fields])
        # Build fake data with all fields, and use it to create an app.
        data = dict(zip(useless_fields + useful_fields,
                        useless_fields + useful_fields))
        app = app_factory(**data)
        for field_name in useless_fields + useful_fields:
            field_id_name = app._meta.get_field(field_name).attname
            ok_(getattr(app, field_name, None))
            ok_(getattr(app, field_id_name, None))
        # Reload the app, the useless fields should all have ids but the value
        # shouldn't have been loaded.
        app = Webapp.objects.get(pk=app.pk)
        for field_name in useless_fields:
            field_id_name = app._meta.get_field(field_name).attname
            ok_(getattr(app, field_name, None) is None)
            ok_(getattr(app, field_id_name, None))
        # The useful fields should all be ok.
        for field_name in useful_fields:
            field_id_name = app._meta.get_field(field_name).attname
            ok_(getattr(app, field_name, None))
            ok_(getattr(app, field_id_name, None))

    def test_version_and_file_transformer_with_empty_query(self):
        # When we process a query, don't return a list just because
        # the query is empty
        empty_query = Webapp.objects.filter(app_slug='mahna__mahna')
        empty_result = Webapp.version_and_file_transformer(empty_query)
        self.assertEqual(empty_result.count(), 0)
class TestWebappContentRatings(TestCase):
    """IARC content ratings, descriptors and interactive elements."""

    def test_rated(self):
        assert app_factory(rated=True).is_rated()
        assert not app_factory().is_rated()

    @mock.patch('mkt.webapps.models.Webapp.details_complete')
    @mock.patch('mkt.webapps.models.Webapp.payments_complete')
    def test_set_content_ratings(self, pay_mock, detail_mock):
        detail_mock.return_value = True
        pay_mock.return_value = True
        rb = mkt.ratingsbodies
        app = app_factory(status=mkt.STATUS_NULL)
        # An empty ratings dict is a no-op.
        app.set_content_ratings({})
        assert not app.is_rated()
        eq_(app.status, mkt.STATUS_NULL)
        # Create.
        app.set_content_ratings({
            rb.CLASSIND: rb.CLASSIND_L,
            rb.PEGI: rb.PEGI_3,
        })
        eq_(ContentRating.objects.count(), 2)
        for expected in [(rb.CLASSIND.id, rb.CLASSIND_L.id),
                         (rb.PEGI.id, rb.PEGI_3.id)]:
            assert ContentRating.objects.filter(
                addon=app, ratings_body=expected[0],
                rating=expected[1]).exists()
        eq_(app.reload().status, mkt.STATUS_PENDING)
        # Update.
        app.set_content_ratings({
            rb.CLASSIND: rb.CLASSIND_10,
            rb.PEGI: rb.PEGI_3,
            rb.GENERIC: rb.GENERIC_18,
        })
        for expected in [(rb.CLASSIND.id, rb.CLASSIND_10.id),
                         (rb.PEGI.id, rb.PEGI_3.id),
                         (rb.GENERIC.id, rb.GENERIC_18.id)]:
            assert ContentRating.objects.filter(
                addon=app, ratings_body=expected[0],
                rating=expected[1]).exists()
        eq_(app.reload().status, mkt.STATUS_PENDING)

    def test_app_delete_clears_iarc_data(self):
        app = app_factory(rated=True)
        # Ensure we have some data to start with.
        ok_(IARCCert.objects.filter(app=app).exists())
        ok_(ContentRating.objects.filter(addon=app).exists())
        ok_(RatingDescriptors.objects.filter(addon=app).exists())
        ok_(RatingInteractives.objects.filter(addon=app).exists())
        # Delete.
        app.delete()
        msg = 'Related IARC data should be deleted.'
        ok_(not IARCCert.objects.filter(app=app).exists(), msg)
        ok_(not ContentRating.objects.filter(addon=app).exists(), msg)
        ok_(not RatingDescriptors.objects.filter(addon=app).exists(), msg)
        ok_(not RatingInteractives.objects.filter(addon=app).exists(), msg)

    def test_set_content_ratings_usk_refused(self):
        # A USK "rating refused" verdict excludes the app in Germany;
        # a real rating lifts the exclusion again.
        app = app_factory()
        app.set_content_ratings({
            mkt.ratingsbodies.USK: mkt.ratingsbodies.USK_REJECTED
        })
        ok_(Geodata.objects.get(addon=app).region_de_usk_exclude)
        app.set_content_ratings({
            mkt.ratingsbodies.USK: mkt.ratingsbodies.USK_12
        })
        ok_(not Geodata.objects.get(addon=app).region_de_usk_exclude)

    def test_set_content_ratings_iarc_games_unexclude(self):
        # Receiving ratings lifts the BR/DE IARC exclusions.
        app = app_factory()
        app._geodata.update(region_br_iarc_exclude=True,
                            region_de_iarc_exclude=True)
        app.set_content_ratings({
            mkt.ratingsbodies.USK: mkt.ratingsbodies.USK_12
        })
        geodata = Geodata.objects.get(addon=app)
        ok_(not geodata.region_br_iarc_exclude)
        ok_(not geodata.region_de_iarc_exclude)

    def test_set_content_ratings_purge_unexclude(self):
        # Rating a purged app clears the purge flag and republishes it.
        app = app_factory()
        app.update(status=mkt.STATUS_DISABLED, iarc_purged=True)
        app.set_content_ratings({
            mkt.ratingsbodies.USK: mkt.ratingsbodies.USK_12
        })
        ok_(not app.reload().iarc_purged)
        eq_(app.status, mkt.STATUS_PUBLIC)

    def test_set_descriptors(self):
        app = app_factory()
        eq_(RatingDescriptors.objects.count(), 0)
        # An empty list still creates the (all-False) descriptors row.
        app.set_descriptors([])
        descriptors = RatingDescriptors.objects.get(addon=app)
        assert not descriptors.has_classind_drugs
        assert not descriptors.has_esrb_blood  # Blood-deuh!
        # Create.
        app.set_descriptors([
            'has_classind_drugs', 'has_pegi_scary', 'has_generic_drugs'
        ])
        descriptors = RatingDescriptors.objects.get(addon=app)
        assert descriptors.has_classind_drugs
        assert descriptors.has_pegi_scary
        assert descriptors.has_generic_drugs
        assert not descriptors.has_esrb_blood
        # Update. Flags absent from the new list are reset to False.
        app.set_descriptors([
            'has_esrb_blood', 'has_classind_drugs'
        ])
        descriptors = RatingDescriptors.objects.get(addon=app)
        assert descriptors.has_esrb_blood
        assert descriptors.has_classind_drugs
        assert not descriptors.has_pegi_scary
        assert not descriptors.has_generic_drugs

    def test_set_interactives(self):
        app = app_factory()
        app.set_interactives([])
        eq_(RatingInteractives.objects.count(), 1)
        app_interactives = RatingInteractives.objects.get(addon=app)
        assert not app_interactives.has_shares_info
        assert not app_interactives.has_digital_purchases
        # Create. Unknown flags (e.g. 'has_UWOTM8') are ignored.
        app.set_interactives([
            'has_shares_info', 'has_digital_purchases', 'has_UWOTM8'
        ])
        eq_(RatingInteractives.objects.count(), 1)
        app_interactives = RatingInteractives.objects.get(addon=app)
        assert app_interactives.has_shares_info
        assert app_interactives.has_digital_purchases
        assert not app_interactives.has_users_interact
        # Update.
        app.set_interactives([
            'has_digital_purchases', 'has_shares_ur_mum'
        ])
        eq_(RatingInteractives.objects.count(), 1)
        app_interactives = RatingInteractives.objects.get(addon=app)
        assert not app_interactives.has_shares_info
        assert app_interactives.has_digital_purchases

    def test_delete(self):
        # Deletion is soft: status flips to STATUS_DELETED.
        app = app_factory()
        app.delete()
        eq_(app.status, mkt.STATUS_DELETED)

    @mock.patch('mkt.webapps.models.Webapp.details_complete')
    @mock.patch('mkt.webapps.models.Webapp.payments_complete')
    def test_completion_errors_ignore_ratings(self, mock1, mock2):
        # With details and payments complete, a missing rating does not
        # count against completeness.
        app = app_factory()
        for mock_ in (mock1, mock2):
            mock_.return_value = True
        assert not app.completion_errors()
        assert app.is_fully_complete()
class DeletedAppTests(TestCase):
    """Soft-deleting must not crash when cached version pointers are None."""

    def test_soft_deleted_no_current_version(self):
        app = app_factory()
        app._current_version = None
        app.save()
        app.delete()
        eq_(app.current_version, None)

    def test_soft_deleted_no_latest_version(self):
        app = app_factory()
        app._latest_version = None
        app.save()
        app.delete()
        eq_(app.latest_version, None)
class TestExclusions(TestCase):
    """Region exclusion logic for a premium app: explicit exclusions,
    price-availability exclusions, and IARC/USK-driven exclusions."""

    fixtures = fixture('prices')

    def setUp(self):
        self.app = Webapp.objects.create(premium_type=mkt.ADDON_PREMIUM)
        # Explicitly exclude the US from the start.
        self.app.addonexcludedregion.create(region=mkt.regions.USA.id)
        self.geodata = self.app._geodata

    def make_tier(self):
        # Attach a price tier plus a paid rest-of-world USD currency row.
        self.price = Price.objects.get(pk=1)
        AddonPremium.objects.create(addon=self.app, price=self.price)
        self.row = PriceCurrency.objects.create(
            currency='USD',
            dev=True,
            paid=True,
            price=Decimal('0.99'),
            provider=ALL_PROVIDERS[settings.DEFAULT_PAYMENT_PROVIDER].provider,
            region=RESTOFWORLD.id,
            tier=self.price
        )

    def test_not_premium(self):
        ok_(mkt.regions.USA.id in self.app.get_excluded_region_ids())

    def test_not_paid(self):
        PriceCurrency.objects.update(paid=False)
        # The US is excluded because there are no valid prices.
        ok_(mkt.regions.USA.id in self.app.get_excluded_region_ids())

    def test_premium(self):
        # The explicit setUp exclusion still applies once a tier exists.
        self.make_tier()
        ok_(mkt.regions.USA.id in self.app.get_excluded_region_ids())

    def test_premium_not_remove_tier(self):
        self.make_tier()
        (self.price.pricecurrency_set
         .filter(region=mkt.regions.POL.id).update(paid=True))
        # Poland will not be excluded because we haven't excluded the rest
        # of the world.
        ok_(mkt.regions.POL.id not in self.app.get_excluded_region_ids())

    def test_premium_remove_tier(self):
        self.make_tier()
        self.app.addonexcludedregion.create(region=mkt.regions.RESTOFWORLD.id)
        # If we exclude the rest of the world, then we'll exclude Nicaragua
        # which has no price currency.
        ok_(mkt.regions.NIC.id in self.app.get_excluded_region_ids())

    def test_not_paid_worldwide(self):
        self.make_tier()
        self.row.update(paid=False)
        # Rest of world has been set to not paid. Meaning that its not
        # available right now, so we should exclude Nicaragua.
        ok_(mkt.regions.NIC.id in self.app.get_excluded_region_ids())

    def test_usk_rating_refused(self):
        # A USK refusal excludes the app in Germany.
        self.geodata.update(region_de_usk_exclude=True)
        ok_(mkt.regions.DEU.id in self.app.get_excluded_region_ids())

    def test_game_iarc(self):
        # IARC game exclusions apply in both Brazil and Germany.
        self.geodata.update(region_de_iarc_exclude=True,
                            region_br_iarc_exclude=True)
        excluded = self.app.get_excluded_region_ids()
        ok_(mkt.regions.BRA.id in excluded)
        ok_(mkt.regions.DEU.id in excluded)
class TestPackagedAppManifestUpdates(mkt.site.tests.TestCase):
    """Syncing app name/locales from a packaged app's manifest."""
    # Note: More extensive tests for `.update_names` are above.

    def setUp(self):
        self.webapp = app_factory(is_packaged=True,
                                  default_locale='en-US')
        self.webapp.name = {'en-US': 'Packaged App'}
        self.webapp.save()

    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_package_manifest_default_name_change(self, get_manifest_json):
        # The app name follows the packaged manifest's `name` value.
        get_manifest_json.return_value = {'name': 'Yo'}
        self.trans_eq(self.webapp.name, 'en-US', 'Packaged App')
        self.webapp.update_name_from_package_manifest()
        self.webapp = Webapp.objects.get(pk=self.webapp.pk)
        self.trans_eq(self.webapp.name, 'en-US', 'Yo')

    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_package_manifest_default_locale_change(self, get_manifest_json):
        # A default_locale change moves the name to the new locale.
        get_manifest_json.return_value = {'name': 'Yo', 'default_locale': 'fr'}
        eq_(self.webapp.default_locale, 'en-US')
        self.webapp.update_name_from_package_manifest()
        eq_(self.webapp.default_locale, 'fr')
        self.trans_eq(self.webapp.name, 'en-US', None)
        self.trans_eq(self.webapp.name, 'fr', 'Yo')

    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_package_manifest_locales_change(self, get_manifest_json):
        # supported_locales is the sorted list of manifest locales.
        get_manifest_json.return_value = {'name': 'Yo',
                                          'locales': {'es': {'name': 'es'},
                                                      'de': {'name': 'de'}}}
        self.webapp.update_supported_locales()
        self.webapp.reload()
        eq_(self.webapp.current_version.supported_locales, 'de,es')

    @mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
    def test_package_manifest_locales_change_pending(self, get_manifest_json):
        """Ensure we still work for pending apps."""
        get_manifest_json.return_value = {'name': 'Yo',
                                          'locales': {'es': {'name': 'es'},
                                                      'de': {'name': 'de'}}}
        self.webapp.update(status=mkt.STATUS_PENDING)
        # latest=True targets the latest (not yet current) version.
        self.webapp.update_supported_locales(latest=True)
        self.webapp.reload()
        eq_(self.webapp.latest_version.supported_locales, 'de,es')

    def test_update_name_from_package_manifest_version(self):
        # The name must come from the *current* version's manifest, not
        # from a newer (disabled) latest version.
        evil_manifest = {
            'name': u'Evil App Name'
        }
        good_manifest = {
            'name': u'Good App Name',
        }
        latest_version = version_factory(
            addon=self.webapp, version='2.3',
            file_kw=dict(status=mkt.STATUS_DISABLED))
        current_version = self.webapp.current_version
        AppManifest.objects.create(version=current_version,
                                   manifest=json.dumps(good_manifest))
        AppManifest.objects.create(version=latest_version,
                                   manifest=json.dumps(evil_manifest))
        self.webapp.update_name_from_package_manifest()
        eq_(self.webapp.name, u'Good App Name')
class TestWebappVersion(mkt.site.tests.TestCase):
    """Coverage for Webapp.get_latest_file()."""

    def test_no_version(self):
        # Without any version there is no file to return.
        eq_(Webapp().get_latest_file(), None)

    def test_no_file(self):
        app = Webapp.objects.create(manifest_url='http://foo.com')
        app._current_version = Version.objects.create(addon=app)
        # A version with no files yields None.
        eq_(app.get_latest_file(), None)

    def test_right_file(self):
        app = Webapp.objects.create(manifest_url='http://foo.com')
        version = Version.objects.create(addon=app)
        stale_file = File.objects.create(version=version)
        stale_file.update(created=datetime.now() - timedelta(days=1))
        fresh_file = File.objects.create(version=version)
        app._current_version = version
        # The most recently created file wins.
        eq_(app.get_latest_file().pk, fresh_file.pk)
class TestWebappManager(TestCase):
    """Tests for the custom Webapp model manager."""

    def test_by_identifier(self):
        app = Webapp.objects.create(app_slug='foo')
        # Lookup works by numeric id, stringified id, or slug.
        for identifier in (app.id, str(app.id), app.app_slug):
            eq_(Webapp.objects.by_identifier(identifier), app)
        with self.assertRaises(Webapp.DoesNotExist):
            Webapp.objects.by_identifier('fake')

    def test_rated(self):
        rated_app = app_factory(rated=True)
        app_factory()
        eq_(Webapp.objects.count(), 2)
        # Only the rated app comes back from the rated() queryset.
        eq_(list(Webapp.objects.rated()), [rated_app])
class TestManifest(BaseWebAppTest):
    """Manifest retrieval for a hosted app submitted via the dev flow."""

    def test_get_manifest_json(self):
        webapp = self.post_addon()
        assert webapp.latest_version
        assert webapp.latest_version.has_files
        # The stored manifest must round-trip to the uploaded one.
        with open(self.manifest, 'r') as mf:
            manifest_json = json.load(mf)
        eq_(webapp.get_manifest_json(webapp.latest_version.all_files[0]),
            manifest_json)
class TestPackagedModel(mkt.site.tests.TestCase):
    """Package path generation and blocklisting of packaged apps."""

    @override_settings(SITE_URL='http://hy.fr')
    def test_get_package_path(self):
        app = app_factory(name=u'Mozillaball ょ', app_slug='test',
                          is_packaged=False, version_kw={'version': '1.0',
                                                         'created': None})
        app = app.reload()
        f = app.versions.latest().files.latest()
        # There should not be a `package_path` for a hosted app.
        eq_(app.get_package_path(), None)
        # There should be a `package_path` for a packaged app.
        app.update(is_packaged=True)
        eq_(app.get_package_path(),
            'http://hy.fr/downloads/file/%s/%s' % (f.id, f.filename))
        # Delete one of the files and ensure that `package_path` is gone.
        f.delete()
        eq_(app.reload().get_package_path(), None)

    @override_settings(SITE_URL='http://hy.fr')
    @mock.patch('lib.crypto.packaged.os.unlink', new=mock.Mock)
    def test_create_blocklisted_version(self):
        # Blocklisting appends a special "blocklisted" version/file and
        # serves a replacement manifest in its place.
        app = app_factory(name=u'Mozillaball ょ', app_slug='test',
                          is_packaged=True, version_kw={'version': '1.0',
                                                        'created': None})
        app.create_blocklisted_version()
        app = app.reload()
        v = app.versions.latest()
        f = v.files.latest()
        eq_(app.status, mkt.STATUS_BLOCKED)
        eq_(app.versions.count(), 2)
        eq_(v.version, 'blocklisted')
        eq_(app._current_version, v)
        assert 'blocklisted' in f.filename
        eq_(f.status, mkt.STATUS_BLOCKED)
        # Check manifest.
        url = app.get_manifest_url()
        res = self.client.get(url)
        eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
        assert 'etag' in res._headers
        data = json.loads(res.content)
        eq_(data['name'], 'Blocked by Mozilla')
        eq_(data['version'], 'blocklisted')
        eq_(data['package_path'], 'http://hy.fr/downloads/file/%s/%s' % (
            f.id, f.filename))
class TestPackagedManifest(BasePackagedAppTest):
    """Manifest handling for packaged apps (mini-manifest, caching, BOM)."""
    def _get_manifest_json(self):
        # Read manifest.webapp straight out of the test package zip.
        zf = zipfile.ZipFile(self.package)
        data = zf.open('manifest.webapp').read()
        zf.close()
        return json.loads(data)
    def test_get_manifest_json(self):
        webapp = self.post_addon()
        webapp.update(status=mkt.STATUS_PUBLIC)
        file_ = webapp.latest_version.all_files[0]
        file_.update(status=mkt.STATUS_PUBLIC)
        assert webapp.current_version
        assert webapp.current_version.has_files
        # Test without file argument.
        mf = self._get_manifest_json()
        eq_(webapp.get_manifest_json(), mf)
        # Test with file argument.
        mf = self._get_manifest_json()
        eq_(webapp.get_manifest_json(file_), mf)
    def test_get_manifest_json_multiple_versions(self):
        """Test `get_manifest_json` gets the right version."""
        webapp = self.post_addon()
        webapp.update(status=mkt.STATUS_PUBLIC)
        latest_version = webapp.latest_version
        latest_version.files.update(status=mkt.STATUS_PUBLIC)
        # An older, pending version must not win over the public one.
        version = version_factory(addon=webapp, version='0.5',
                                  created=self.days_ago(1),
                                  file_kw={'status': mkt.STATUS_PENDING})
        version.files.update(created=self.days_ago(1))
        webapp = Webapp.objects.get(pk=webapp.pk)
        eq_(webapp.current_version, latest_version)
        assert webapp.current_version.has_files
        mf = self._get_manifest_json()
        eq_(webapp.get_manifest_json(), mf)
    def test_get_manifest_json_multiple_version_disabled(self):
        # Post an app, then emulate a reviewer reject and add a new, pending
        # version.
        webapp = self.post_addon()
        webapp.latest_version.files.update(status=mkt.STATUS_DISABLED)
        webapp.latest_version.update(created=self.days_ago(1))
        webapp.update(status=mkt.STATUS_REJECTED, _current_version=None)
        version = version_factory(addon=webapp, version='2.0',
                                  file_kw={'status': mkt.STATUS_PENDING})
        mf = self._get_manifest_json()
        AppManifest.objects.create(version=version,
                                   manifest=json.dumps(mf))
        webapp.update_version()
        webapp = webapp.reload()
        eq_(webapp.latest_version, version)
        self.file = version.all_files[0]
        self.setup_files()
        eq_(webapp.get_manifest_json(self.file), mf)
    def test_cached_manifest_is_cached(self):
        webapp = self.post_addon()
        # First call does queries and caches results.
        webapp.get_cached_manifest()
        # Subsequent calls are cached.
        with self.assertNumQueries(0):
            webapp.get_cached_manifest()
    @mock.patch('mkt.webapps.utils.cache')
    def test_cached_manifest_no_version_not_cached(self, cache_mock):
        webapp = self.post_addon(
            data={'packaged': True, 'free_platforms': 'free-firefoxos'})
        webapp._current_version = None
        # Without a current version there is nothing to cache.
        eq_(webapp.get_cached_manifest(force=True), '{}')
        assert not cache_mock.called
    def test_cached_manifest_contents(self):
        webapp = self.post_addon(
            data={'packaged': True, 'free_platforms': 'free-firefoxos'})
        webapp.update(status=mkt.STATUS_PUBLIC)
        version = webapp.latest_version
        self.file = version.all_files[0]
        self.file.update(status=mkt.STATUS_PUBLIC)
        self.setup_files()
        manifest = self._get_manifest_json()
        # The generated mini-manifest must mirror the package's manifest.
        data = json.loads(webapp.get_cached_manifest(self.file)[0])
        eq_(data['name'], webapp.name)
        eq_(data['version'], webapp.current_version.version)
        eq_(data['size'], self.file.size)
        eq_(data['release_notes'], version.releasenotes)
        eq_(data['package_path'], absolutify(
            os.path.join(reverse('downloads.file', args=[self.file.id]),
                         self.file.filename)))
        eq_(data['developer'], manifest['developer'])
        eq_(data['icons'], manifest['icons'])
        eq_(data['locales'], manifest['locales'])
    def _createPackage(self):
        # Helper: submit a packaged app and make its file public.
        webapp = self.post_addon(
            data={'packaged': True, 'free_platforms': 'free-firefoxos'})
        webapp.update(status=mkt.STATUS_PUBLIC)
        version = webapp.latest_version
        file = version.all_files[0]
        file.update(status=mkt.STATUS_PUBLIC)
        return file
    @override_settings(
        DEFAULT_FILE_STORAGE='mkt.site.storage_utils.LocalFileStorage')
    def test_package_path_local(self):
        file = self._createPackage()
        res = self.client.get(file.get_url_path('manifest'))
        eq_(res.status_code, 200)
        eq_(res['content-type'], 'application/zip')
    @override_settings(
        DEFAULT_FILE_STORAGE='mkt.site.storage_utils.S3BotoPrivateStorage')
    def test_package_path_storage(self):
        file = self._createPackage()
        file.version.addon.get_cached_manifest(force=True)
        res = self.client.get(file.get_url_path('manifest'))
        # Remote storage serves the signed package via a redirect.
        self.assert3xx(res, public_storage.url(file.signed_file_path))
    def test_packaged_with_BOM(self):
        # Exercise separate code paths to loading the packaged app manifest.
        self.file.filename = 'mozBOM.zip'
        self.setup_files('mozBOM.zip')
        assert WebAppParser().parse(private_storage.open(self.file.file_path))
        self.assertTrue(self.app.has_icon_in_manifest())
class TestDomainFromURL(unittest.TestCase):
    """Webapp.domain_from_url() should keep only scheme, host and port."""
    def test_simple(self):
        eq_(Webapp.domain_from_url('http://mozilla.com/'),
            'http://mozilla.com')
    def test_long_path(self):
        eq_(Webapp.domain_from_url('http://mozilla.com/super/rad.webapp'),
            'http://mozilla.com')
    def test_no_normalize_www(self):
        # A leading "www." is preserved, not stripped.
        eq_(Webapp.domain_from_url('http://www.mozilla.com/super/rad.webapp'),
            'http://www.mozilla.com')
    def test_with_port(self):
        eq_(Webapp.domain_from_url('http://mozilla.com:9000/'),
            'http://mozilla.com:9000')
    def test_subdomains(self):
        eq_(Webapp.domain_from_url('http://apps.mozilla.com/'),
            'http://apps.mozilla.com')
    def test_https(self):
        eq_(Webapp.domain_from_url('https://mozilla.com/'),
            'https://mozilla.com')
    def test_normalize_case(self):
        # Scheme and host are case-insensitive and get lowercased.
        eq_(Webapp.domain_from_url('httP://mOzIllA.com/'),
            'http://mozilla.com')
    @raises(ValueError)
    def test_none(self):
        Webapp.domain_from_url(None)
    @raises(ValueError)
    def test_empty(self):
        Webapp.domain_from_url('')
    def test_empty_or_none(self):
        # allow_none=True suppresses the ValueError for missing input.
        eq_(Webapp.domain_from_url(None, allow_none=True), None)
class TestTransformer(mkt.site.tests.TestCase):
    """The Webapp queryset transformer must prefetch related data so that
    accessing it afterwards issues zero extra queries."""
    fixtures = fixture('webapp_337141')
    def setUp(self):
        self.device = DEVICE_TYPES.keys()[0]
    def test_versions(self):
        webapps = list(Webapp.objects.all())
        with self.assertNumQueries(0):
            for webapp in webapps:
                ok_(isinstance(webapp.latest_version, Version))
                ok_(isinstance(webapp.current_version, Version))
    def test_previews(self):
        p1 = Preview.objects.create(filetype='image/png', addon_id=337141,
                                    position=0)
        p2 = Preview.objects.create(filetype='image/png', addon_id=337141,
                                    position=1)
        webapps = list(Webapp.objects.all())
        with self.assertNumQueries(0):
            for webapp in webapps:
                # Previews come back ordered by position.
                eq_(webapp.all_previews, [p1, p2])
    def test_prices(self):
        self.make_premium(Webapp.objects.get(pk=337141))
        webapps = list(Webapp.objects.all())
        with self.assertNumQueries(0):
            for webapp in webapps:
                ok_(unicode(webapp.premium))
                eq_(str(webapp.get_tier().price), '1.00')
                ok_(webapp.get_tier_name())
    def test_prices_free(self):
        webapps = list(Webapp.objects.all())
        with self.assertNumQueries(0):
            for webapp in webapps:
                eq_(webapp.premium, None)
                eq_(webapp.get_tier(), None)
    def test_device_types(self):
        AddonDeviceType.objects.create(addon_id=337141,
                                       device_type=self.device)
        webapps = list(Webapp.objects.filter(id=337141))
        with self.assertNumQueries(0):
            for webapp in webapps:
                assert webapp._device_types
                eq_(webapp.device_types, [DEVICE_TYPES[self.device]])
    def test_device_type_cache(self):
        webapp = Webapp.objects.get(id=337141)
        # Pre-populating the cache attribute must short-circuit the query.
        webapp._device_types = []
        with self.assertNumQueries(0):
            eq_(webapp.device_types, [])
class TestDetailsComplete(mkt.site.tests.TestCase):
    """details_complete()/details_errors() must report each missing field."""
    def setUp(self):
        self.device = DEVICE_TYPES.keys()[0]
        self.webapp = Webapp.objects.create(status=mkt.STATUS_NULL)
    def fail(self, value):
        # Helper: assert the app is incomplete and `value` names the reason.
        assert not self.webapp.details_complete(), value
        reasons = self.webapp.details_errors()
        assert value in reasons[0], reasons
    def test_fail(self):
        # Fill in required details one at a time; each still-missing field
        # must be reported until everything is present.
        self.fail('email')
        self.webapp.support_email = 'a@a.com'
        self.webapp.save()
        self.fail('name')
        self.webapp.name = 'name'
        self.webapp.save()
        self.fail('device')
        self.webapp.addondevicetype_set.create(device_type=self.device)
        self.webapp.save()
        self.fail('category')
        self.webapp.update(categories=['books'])
        self.fail('screenshot')
        self.webapp.previews.create()
        eq_(self.webapp.details_complete(), True)
        self.webapp.support_email = ''
        self.webapp.save()
        eq_(self.webapp.details_complete(), False)
        self.fail('support email or URL')
        # A support URL is accepted in place of a support email.
        self.webapp.support_url = 'http://test.com/'
        self.webapp.save()
        eq_(self.webapp.details_complete(), True)
class TestAddonExcludedRegion(mkt.site.tests.WebappTestCase):
    """Per-region exclusion records on an app."""
    def setUp(self):
        super(TestAddonExcludedRegion, self).setUp()
        self.excluded = self.app.addonexcludedregion
        eq_(list(self.excluded.values_list('id', flat=True)), [])
        # Start every test with one exclusion (Great Britain).
        self.er = self.app.addonexcludedregion.create(
            region=mkt.regions.GBR.id)
        eq_(list(self.excluded.values_list('id', flat=True)), [self.er.id])
    def test_exclude_multiple(self):
        other = AddonExcludedRegion.objects.create(addon=self.app,
                                                   region=mkt.regions.BRA.id)
        self.assertSetEqual(self.excluded.values_list('id', flat=True),
                            [self.er.id, other.id])
    def test_remove_excluded(self):
        self.er.delete()
        eq_(list(self.excluded.values_list('id', flat=True)), [])
    def test_get_region(self):
        eq_(self.er.get_region(), mkt.regions.GBR)
    def test_unicode(self):
        eq_(unicode(self.er), '%s: %s' % (self.app, mkt.regions.GBR.slug))
class TestContentRating(mkt.site.tests.WebappTestCase):
    """ContentRating region/slug resolution against mocked ratings bodies."""
    def setUp(self):
        self.app = self.get_app()
    @mock.patch.object(mkt.regions.BRA, 'ratingsbody',
                       mkt.ratingsbodies.CLASSIND)
    @mock.patch.object(mkt.regions.USA, 'ratingsbody', mkt.ratingsbodies.ESRB)
    @mock.patch.object(mkt.regions.VEN, 'ratingsbody',
                       mkt.ratingsbodies.GENERIC)
    def test_get_regions_and_slugs(self):
        # A CLASSIND rating only applies to regions using CLASSIND.
        classind_rating = ContentRating.objects.create(
            addon=self.app, ratings_body=mkt.ratingsbodies.CLASSIND.id,
            rating=0)
        regions = classind_rating.get_regions()
        assert mkt.regions.BRA in regions
        assert mkt.regions.USA not in regions
        assert mkt.regions.VEN not in regions
        slugs = classind_rating.get_region_slugs()
        assert mkt.regions.BRA.slug in slugs
        assert mkt.regions.USA.slug not in slugs
        assert mkt.regions.VEN.slug not in slugs
    @mock.patch.object(mkt.regions.BRA, 'ratingsbody',
                       mkt.ratingsbodies.CLASSIND)
    @mock.patch.object(mkt.regions.DEU, 'ratingsbody', mkt.ratingsbodies.ESRB)
    @mock.patch.object(mkt.regions.VEN, 'ratingsbody',
                       mkt.ratingsbodies.GENERIC)
    def test_get_regions_and_slugs_generic_fallback(self):
        gen_rating = ContentRating.objects.create(
            addon=self.app, ratings_body=mkt.ratingsbodies.GENERIC.id,
            rating=0)
        regions = gen_rating.get_regions()
        assert mkt.regions.BRA not in regions
        assert mkt.regions.DEU not in regions
        assert mkt.regions.VEN in regions
        slugs = gen_rating.get_region_slugs()
        assert mkt.regions.BRA.slug not in slugs
        assert mkt.regions.DEU.slug not in slugs
        assert mkt.regions.VEN.slug not in slugs
        # We have a catch-all 'generic' region for all regions wo/ r.body.
        assert mkt.regions.GENERIC_RATING_REGION_SLUG in slugs
    @mock.patch.object(mkt.ratingsbodies.CLASSIND, 'name', 'CLASSIND')
    @mock.patch.object(mkt.ratingsbodies.CLASSIND_10, 'name', '10+')
    @mock.patch.object(mkt.ratingsbodies.ESRB_E, 'name', 'Everybody 10+')
    @mock.patch.object(mkt.ratingsbodies.ESRB_E, 'label', '10')
    def test_get_ratings(self):
        # Infer the label from the name.
        cr = ContentRating.objects.create(
            addon=self.app, ratings_body=mkt.ratingsbodies.CLASSIND.id,
            rating=mkt.ratingsbodies.CLASSIND_10.id)
        eq_(cr.get_rating().label, '10')
        eq_(cr.get_body().label, 'classind')
        # When already has label set.
        eq_(ContentRating.objects.create(
            addon=self.app, ratings_body=mkt.ratingsbodies.ESRB.id,
            rating=mkt.ratingsbodies.ESRB_E.id).get_rating().label,
            '10')
class TestContentRatingsIn(mkt.site.tests.WebappTestCase):
    """content_ratings_in()/listed_in() behaviour per region and category."""
    def test_not_in_region(self):
        for region in mkt.regions.ALL_REGIONS:
            eq_(self.app.content_ratings_in(region=region), [])
        # Excluding the app from a region must keep ratings empty there.
        for region in mkt.regions.ALL_REGIONS:
            AddonExcludedRegion.objects.create(addon=self.app,
                                               region=region.id)
            eq_(self.get_app().content_ratings_in(region=region), [])
    def test_in_region_and_category(self):
        cat = 'games'
        self.app.update(categories=[cat])
        for region in mkt.regions.ALL_REGIONS:
            eq_(self.app.listed_in(region=region, category=cat), True)
    def test_in_region_and_not_in_category(self):
        cat = 'games'
        for region in mkt.regions.ALL_REGIONS:
            eq_(self.app.content_ratings_in(region=region, category=cat), [])
    @mock.patch.object(mkt.regions.COL, 'ratingsbody', None)
    @mock.patch.object(mkt.regions.BRA, 'ratingsbody',
                       mkt.ratingsbodies.CLASSIND)
    def test_generic_fallback(self):
        # Test region with no rating body returns generic content rating.
        crs = ContentRating.objects.create(
            addon=self.app, ratings_body=mkt.ratingsbodies.GENERIC.id,
            rating=mkt.ratingsbodies.GENERIC_3.id)
        eq_(self.app.content_ratings_in(region=mkt.regions.COL), [crs])
        # Test region with rating body does not include generic content rating.
        assert crs not in self.app.content_ratings_in(region=mkt.regions.BRA)
class TestIARCCert(mkt.site.tests.WebappTestCase):
    """set_iarc_certificate() must accept unicode, UUID and hex forms."""
    def test_no_cert(self):
        with self.assertRaises(IARCCert.DoesNotExist):
            self.app.iarc_cert
    def test_set_iarc_certificate_string(self):
        cert_id = uuid.uuid4()
        self.app.set_iarc_certificate(unicode(cert_id))
        eq_(uuid.UUID(self.app.iarc_cert.cert_id), cert_id)
    def test_set_iarc_certificate_uuid(self):
        cert_id = uuid.uuid4()
        self.app.set_iarc_certificate(cert_id)
        eq_(uuid.UUID(self.app.iarc_cert.cert_id), cert_id)
    def test_set_iarc_certificate_hexstring(self):
        cert_id = uuid.uuid4()
        self.app.set_iarc_certificate(cert_id.hex)
        eq_(uuid.UUID(self.app.iarc_cert.cert_id), cert_id)
class TestQueue(mkt.site.tests.WebappTestCase):
    """Membership checks for the re-review and escalation queues."""
    def test_in_rereview_queue(self):
        assert not self.app.in_rereview_queue()
        RereviewQueue.objects.create(addon=self.app)
        assert self.app.in_rereview_queue()
    def test_in_escalation_queue(self):
        assert not self.app.in_escalation_queue()
        EscalationQueue.objects.create(addon=self.app)
        assert self.app.in_escalation_queue()
class TestPackagedSigning(mkt.site.tests.WebappTestCase):
    """sign_if_packaged() should only invoke the signer for packaged apps."""
    @mock.patch('lib.crypto.packaged.sign')
    def test_not_packaged(self, sign):
        self.app.update(is_packaged=False)
        assert not self.app.sign_if_packaged(self.app.current_version.pk)
        assert not sign.called
    @mock.patch('lib.crypto.packaged.sign')
    def test_packaged(self, sign):
        self.app.update(is_packaged=True)
        assert self.app.sign_if_packaged(self.app.current_version.pk)
        # The signer receives the version pk as the first positional arg.
        eq_(sign.call_args[0][0], self.app.current_version.pk)
    @mock.patch('lib.crypto.packaged.sign')
    def test_packaged_reviewer(self, sign):
        self.app.update(is_packaged=True)
        assert self.app.sign_if_packaged(self.app.current_version.pk,
                                         reviewer=True)
        eq_(sign.call_args[0][0], self.app.current_version.pk)
        # The reviewer flag is forwarded as a keyword argument.
        eq_(sign.call_args[1]['reviewer'], True)
class TestUpdateStatus(mkt.site.tests.TestCase):
    """update_status() transitions, driven manually with signals detached."""
    def setUp(self):
        # Disabling signals to simplify these tests. We call update_status()
        # manually in them.
        version_changed_signal.disconnect(version_changed,
                                          dispatch_uid='version_changed')
        post_save.disconnect(update_status, sender=Version,
                             dispatch_uid='version_update_status')
        post_delete.disconnect(update_status, sender=Version,
                               dispatch_uid='version_update_status')
    def tearDown(self):
        # Reconnect the signals detached in setUp().
        version_changed_signal.connect(version_changed,
                                       dispatch_uid='version_changed')
        post_save.connect(update_status, sender=Version,
                          dispatch_uid='version_update_status')
        post_delete.connect(update_status, sender=Version,
                            dispatch_uid='version_update_status')
    def test_no_versions(self):
        app = Webapp.objects.create(status=mkt.STATUS_PUBLIC)
        app.update_status()
        eq_(app.status, mkt.STATUS_NULL)
    def test_version_no_files(self):
        app = Webapp.objects.create(status=mkt.STATUS_PUBLIC)
        Version(addon=app).save()
        app.update_status()
        eq_(app.status, mkt.STATUS_NULL)
    def test_only_version_deleted(self):
        app = app_factory(status=mkt.STATUS_REJECTED)
        app.latest_version.delete()
        app.update_status()
        eq_(app.status, mkt.STATUS_NULL)
    def test_other_version_deleted(self):
        # Deleting one of several versions must not reset the status.
        app = app_factory(status=mkt.STATUS_REJECTED)
        version_factory(addon=app)
        app.latest_version.delete()
        app.update_status()
        eq_(app.status, mkt.STATUS_REJECTED)
    def test_one_version_pending(self):
        app = app_factory(status=mkt.STATUS_REJECTED,
                          file_kw=dict(status=mkt.STATUS_DISABLED))
        version_factory(addon=app,
                        file_kw=dict(status=mkt.STATUS_PENDING))
        with mock.patch('mkt.webapps.models.Webapp.is_fully_complete') as comp:
            comp.return_value = True
            app.update_status()
        eq_(app.status, mkt.STATUS_PENDING)
    def test_one_version_pending_not_fully_complete(self):
        app = app_factory(status=mkt.STATUS_REJECTED,
                          file_kw=dict(status=mkt.STATUS_DISABLED))
        version_factory(addon=app,
                        file_kw=dict(status=mkt.STATUS_PENDING))
        with mock.patch('mkt.webapps.models.Webapp.is_fully_complete') as comp:
            comp.return_value = False
            app.update_status()
        eq_(app.status, mkt.STATUS_REJECTED)  # Didn't change.
    def test_one_version_public(self):
        app = app_factory(status=mkt.STATUS_PUBLIC)
        version_factory(addon=app,
                        file_kw=dict(status=mkt.STATUS_DISABLED))
        app.update_status()
        eq_(app.status, mkt.STATUS_PUBLIC)
    def test_was_approved_then_new_version(self):
        app = app_factory(status=mkt.STATUS_APPROVED)
        File.objects.filter(version__addon=app).update(status=app.status)
        version_factory(addon=app,
                        file_kw=dict(status=mkt.STATUS_PENDING))
        app.update_status()
        eq_(app.status, mkt.STATUS_APPROVED)
    def test_blocklisted(self):
        # Blocked apps stay blocked even with no versions left.
        app = app_factory(status=mkt.STATUS_BLOCKED)
        app.latest_version.delete()
        app.update_status()
        eq_(app.status, mkt.STATUS_BLOCKED)
class TestInstalled(mkt.site.tests.TestCase):
    """safer_get_or_create keyed on (user, addon, install_type)."""
    def setUp(self):
        user = UserProfile.objects.create(email='f@f.com')
        app = Webapp.objects.create()
        self.m = functools.partial(Installed.objects.safer_get_or_create,
                                   user=user, addon=app)
    def test_install_type(self):
        # The second element of the returned tuple is the "created" flag:
        # a repeat call with the same install_type must not create again.
        assert self.m(install_type=apps.INSTALL_TYPE_USER)[1]
        assert not self.m(install_type=apps.INSTALL_TYPE_USER)[1]
        assert self.m(install_type=apps.INSTALL_TYPE_REVIEWER)[1]
class TestAppFeatures(DynamicBoolFieldsTestMixin, mkt.site.tests.TestCase):
    """AppFeatures dynamic boolean flags: list/name conversion, defaults."""
    def setUp(self):
        super(TestAppFeatures, self).setUp()
        # Fields used by DynamicBoolFieldsTestMixin methods.
        self.model = AppFeatures
        self.related_name = 'features'
        self.BOOL_DICT = mkt.constants.features.APP_FEATURES
        self.flags = ('APPS', 'GEOLOCATION', 'PAY', 'SMS')
        self.expected = [u'App Management API', u'Geolocation', u'Web Payment',
                         u'WebSMS']
        self.af = AppFeatures.objects.get()
    def _get_related_bool_obj(self):
        # The features object hangs off the app's current version.
        return getattr(self.app.current_version, self.related_name)
    def test_to_list(self):
        self._flag()
        obj = self._get_related_bool_obj()
        eq_(obj.to_list(), ['apps', 'geolocation', 'pay', 'sms'])
    def test_to_names(self):
        self._flag()
        obj = self._get_related_bool_obj()
        eq_(obj.to_names(), self.expected)
    def test_default_false(self):
        # Every has_<feature> flag defaults to False on a fresh row.
        obj = self.model(version=self.app.current_version)
        for field in self.BOOL_DICT:
            eq_(getattr(obj, 'has_%s' % field.lower()), False)
class TestRatingDescriptors(mkt.site.tests.TestCase):
    """Mapping and IARC serialization of rating-descriptor flags."""
    def setUp(self):
        super(TestRatingDescriptors, self).setUp()
    def test_desc_mapping(self):
        # Every native IARC descriptor must map to a model field.
        descs = RatingDescriptors.objects.create(addon=app_factory())
        for body, mapping in DESCS.items():
            for native, rating_desc_field in mapping.items():
                assert hasattr(descs, rating_desc_field), rating_desc_field
    def test_reverse_desc_mapping(self):
        # And every model field must map back to a native descriptor name.
        descs = RatingDescriptors.objects.create(addon=app_factory())
        for field in descs._fields():
            ok_(isinstance(REVERSE_DESCS.get(field), basestring))
    def test_iarc_deserialize(self):
        descs = RatingDescriptors.objects.create(
            addon=app_factory(), has_esrb_blood=True, has_pegi_scary=True,
            has_classind_drugs_legal=True)
        self.assertSetEqual(descs.iarc_deserialize().split(', '),
                            [u'ClassInd_DrogasLicitas', u'PEGI_Fear',
                             u'ESRB_Blood'])
        # Passing a body restricts the output to that body's descriptors.
        eq_(descs.iarc_deserialize(body=mkt.ratingsbodies.ESRB), u'ESRB_Blood')
        eq_(descs.iarc_deserialize(
            body=mkt.ratingsbodies.CLASSIND), u'ClassInd_DrogasLicitas')
class TestRatingInteractives(mkt.site.tests.TestCase):
    """Mapping and IARC serialization of interactive-element flags."""
    def setUp(self):
        super(TestRatingInteractives, self).setUp()
    def test_interactives_mapping(self):
        interactives = RatingInteractives.objects.create(addon=app_factory())
        for native, field in INTERACTIVES.items():
            assert hasattr(interactives, field)
    def test_reverse_interactives_mapping(self):
        interactives = RatingInteractives.objects.create(addon=app_factory())
        for field in interactives._fields():
            ok_(isinstance(REVERSE_INTERACTIVES.get(field), basestring), field)
    def test_iarc_deserialize(self):
        interactives = RatingInteractives.objects.create(
            addon=app_factory(), has_users_interact=True, has_shares_info=True)
        self.assertSetEqual(
            interactives.iarc_deserialize().split(', '),
            ['IE_SharesInfo', 'IE_UsersInteract'])
class TestManifestUpload(BaseUploadTest, mkt.site.tests.TestCase):
    """Webapp.from_upload()/manifest_updated() behaviour for hosted apps."""
    fixtures = fixture('webapp_337141')
    def setUp(self):
        super(TestManifestUpload, self).setUp()
        self.addCleanup(translation.deactivate)
    def manifest(self, name):
        # Helper: absolute path to a test manifest fixture.
        return os.path.join(settings.ROOT, 'mkt', 'developers', 'tests',
                            'addons', name)
    @mock.patch('mkt.webapps.models.parse_addon')
    def test_manifest_updated_developer_name(self, parse_addon):
        parse_addon.return_value = {
            'version': '4.0',
            'developer_name': u'Méâ'
        }
        # Note: we need a valid FileUpload instance, but in the end we are not
        # using its contents since we are mocking parse_addon().
        upload = self.get_upload(abspath=self.manifest('mozball.webapp'))
        app = Webapp.objects.get(pk=337141)
        app.manifest_updated('', upload)
        version = app.current_version.reload()
        eq_(version.version, '4.0')
        eq_(version.developer_name, u'Méâ')
    @mock.patch('mkt.webapps.models.parse_addon')
    def test_manifest_updated_long_developer_name(self, parse_addon):
        # Developer names longer than 255 characters get truncated.
        truncated_developer_name = u'é' * 255
        long_developer_name = truncated_developer_name + u'ßßßß'
        parse_addon.return_value = {
            'version': '4.1',
            'developer_name': long_developer_name,
        }
        # Note: we need a valid FileUpload instance, but in the end we are not
        # using its contents since we are mocking parse_addon().
        upload = self.get_upload(abspath=self.manifest('mozball.webapp'))
        app = Webapp.objects.get(pk=337141)
        app.manifest_updated('', upload)
        version = app.current_version.reload()
        eq_(version.version, '4.1')
        eq_(version.developer_name, truncated_developer_name)
    def test_manifest_url(self):
        upload = self.get_upload(abspath=self.manifest('mozball.webapp'))
        addon = Webapp.from_upload(upload)
        eq_(addon.manifest_url, upload.name)
    def test_homescreen(self):
        upload = self.get_upload(abspath=self.manifest('mozscreen.webapp'))
        addon = Webapp.from_upload(upload)
        ok_(addon.is_homescreen())
    def test_no_homescreen(self):
        upload = self.get_upload(abspath=self.manifest('mozball.webapp'))
        addon = Webapp.from_upload(upload)
        ok_(not addon.is_homescreen())
    def test_app_domain(self):
        upload = self.get_upload(abspath=self.manifest('mozball.webapp'))
        upload.name = 'http://mozilla.com/my/rad/app.webapp'  # manifest URL
        addon = Webapp.from_upload(upload)
        eq_(addon.app_domain, 'http://mozilla.com')
    def test_non_english_app(self):
        upload = self.get_upload(abspath=self.manifest('non-english.webapp'))
        upload.name = 'http://mozilla.com/my/rad/app.webapp'  # manifest URL
        addon = Webapp.from_upload(upload)
        eq_(addon.default_locale, 'it')
        eq_(unicode(addon.name), 'ItalianMozBall')
        eq_(addon.name.locale, 'it')
    def test_webapp_default_locale_override(self):
        # A default_locale in the manifest wins over the site default.
        with nested(tempfile.NamedTemporaryFile('w', suffix='.webapp'),
                    open(self.manifest('mozball.webapp'))) as (tmp, mf):
            mf = json.load(mf)
            mf['default_locale'] = 'es'
            tmp.write(json.dumps(mf))
            tmp.flush()
            upload = self.get_upload(abspath=tmp.name)
        addon = Webapp.from_upload(upload)
        eq_(addon.default_locale, 'es')
    def test_webapp_default_locale_unsupported(self):
        # Unknown locales in the manifest fall back to en-US.
        with nested(tempfile.NamedTemporaryFile('w', suffix='.webapp'),
                    open(self.manifest('mozball.webapp'))) as (tmp, mf):
            mf = json.load(mf)
            mf['default_locale'] = 'gb'
            tmp.write(json.dumps(mf))
            tmp.flush()
            upload = self.get_upload(abspath=tmp.name)
        addon = Webapp.from_upload(upload)
        eq_(addon.default_locale, 'en-US')
    def test_browsing_locale_does_not_override(self):
        with translation.override('fr'):
            # Upload app with en-US as default.
            upload = self.get_upload(abspath=self.manifest('mozball.webapp'))
            addon = Webapp.from_upload(upload)
            eq_(addon.default_locale, 'en-US')  # not fr
    @raises(forms.ValidationError)
    def test_malformed_locales(self):
        manifest = self.manifest('malformed-locales.webapp')
        upload = self.get_upload(abspath=manifest)
        Webapp.from_upload(upload)
class TestGeodata(mkt.site.tests.WebappTestCase):
    """Every app should expose an associated Geodata record."""
    def setUp(self):
        super(TestGeodata, self).setUp()
        self.geo = self.app.geodata
    def test_app_geodata(self):
        assert isinstance(Webapp(id=337141).geodata, Geodata)
@mock.patch.object(settings, 'PRE_GENERATE_APKS', True)
@mock.patch('mkt.webapps.tasks.pre_generate_apk')
class TestPreGenAPKs(mkt.site.tests.WebappTestCase):
    """The APK pre-generation task must fire only for approved Android apps.

    The class-level mock.patch injects `pre_gen_task` into every test method.
    """
    def setUp(self):
        super(TestPreGenAPKs, self).setUp()
        self.manifest_url = 'http://some-app.com/manifest.webapp'
        self.app.update(status=mkt.STATUS_PUBLIC,
                        manifest_url=self.manifest_url)
        # Set up the app to support Android.
        self.app.addondevicetype_set.create(device_type=mkt.DEVICE_MOBILE.id)
    def switch_device(self, device_id):
        # Helper: make device_id the app's only supported device type.
        self.app.addondevicetype_set.all().delete()
        self.app.addondevicetype_set.create(device_type=device_id)
    def test_approved_apps(self, pre_gen_task):
        assert not pre_gen_task.delay.called
        self.app.save()
        pre_gen_task.delay.assert_called_with(self.app.id)
    def test_unapproved_apps(self, pre_gen_task):
        self.app.update(status=mkt.STATUS_REJECTED)
        assert not pre_gen_task.delay.called, (
            'APKs for unapproved apps should not be pre-generated')
    def test_disabled(self, pre_gen_task):
        with self.settings(PRE_GENERATE_APKS=False):
            self.app.save()
        assert not pre_gen_task.delay.called, (
            'task should not be called if PRE_GENERATE_APKS is False')
    def test_ignore_firefox_os_apps(self, pre_gen_task):
        self.switch_device(mkt.DEVICE_GAIA.id)
        self.app.save()
        assert not pre_gen_task.delay.called, (
            'task should not be called for Firefox OS apps')
    def test_treat_tablet_as_android(self, pre_gen_task):
        self.switch_device(mkt.DEVICE_TABLET.id)
        self.app.save()
        assert pre_gen_task.delay.called, (
            'task should be called for tablet apps')
class TestSearchSignals(ESTestCase):
    """Saving a Webapp must keep the Elasticsearch index in sync."""
    def setUp(self):
        super(TestSearchSignals, self).setUp()
        self.addCleanup(self.cleanup)
    def cleanup(self):
        # Drop every test index; ignore ones that were never created.
        for index in settings.ES_INDEXES.values():
            try:
                self.es.indices.delete(index=index)
            except elasticsearch.NotFoundError:
                pass
    def test_create(self):
        eq_(WebappIndexer.search().count(), 0)
        app_factory()
        self.refresh('webapp')
        eq_(WebappIndexer.search().count(), 1)
    def test_update(self):
        app = app_factory()
        self.refresh('webapp')
        eq_(WebappIndexer.search().count(), 1)
        prev_name = unicode(app.name)
        app.name = 'yolo'
        app.save()
        self.refresh('webapp')
        # Renaming must update the indexed doc in place, not add a new one.
        eq_(WebappIndexer.search().count(), 1)
        eq_(WebappIndexer.search().query('term', name=prev_name).count(), 0)
        eq_(WebappIndexer.search().query('term', name='yolo').count(), 1)
| 38.123529 | 79 | 0.643375 |
ace495a0e5c7a37eb137e9f2e168a4ee9fb89c37 | 505 | py | Python | tests/vanilla/test_mutable_object.py | filfreire/questions-three | 1d1d621d5647407bf2d1b271e0b9c7c9f1afc5c8 | [
"MIT"
] | 5 | 2019-07-22T06:04:07.000Z | 2021-07-23T06:01:51.000Z | tests/vanilla/test_mutable_object.py | filfreire/questions-three | 1d1d621d5647407bf2d1b271e0b9c7c9f1afc5c8 | [
"MIT"
] | 15 | 2020-07-28T17:33:40.000Z | 2021-08-23T17:30:05.000Z | tests/vanilla/test_mutable_object.py | filfreire/questions-three | 1d1d621d5647407bf2d1b271e0b9c7c9f1afc5c8 | [
"MIT"
] | 4 | 2019-08-25T22:41:59.000Z | 2020-10-21T14:28:15.000Z | from unittest import TestCase, main
from expects import expect, equal, raise_error
from questions_three.vanilla import MutableObject
class TestMutableObject(TestCase):
    """MutableObject must accept arbitrary attribute assignment."""
    def test_object_is_mutable(self):
        thing = MutableObject()
        name = "ximinez"
        value = 77
        def attempt():
            setattr(thing, name, value)
        # Setting an unknown attribute must not raise, and the value sticks.
        expect(attempt).not_to(raise_error(AttributeError))
        expect(getattr(thing, name)).to(equal(value))
if "__main__" == __name__:
    main()
| 21.956522 | 59 | 0.673267 |
ace495ab3e18e46adece0fe68d90b1524ef11f77 | 18,623 | py | Python | app.py | iam100/SegFault | bd4f51c071df4e7e13e5c3c5b0d190470c9d22de | [
"MIT"
] | null | null | null | app.py | iam100/SegFault | bd4f51c071df4e7e13e5c3c5b0d190470c9d22de | [
"MIT"
] | null | null | null | app.py | iam100/SegFault | bd4f51c071df4e7e13e5c3c5b0d190470c9d22de | [
"MIT"
] | null | null | null | # Imports from Flask
from flask import Flask
from flask import render_template
from flask import flash
from flask import redirect
from flask import url_for
from flask import session
from flask import logging
from flask import request
from json import *
# Imports for MySQL
from flask_mysqldb import MySQL
# Imports for wtforms
from wtforms import Form
from wtforms import StringField
from wtforms import TextAreaField
from wtforms import PasswordField
from wtforms import validators
# Imports from passlib
from passlib.hash import sha256_crypt
# Imports from Functools
from functools import wraps
from math import floor
app = Flask(__name__)
# NOTE(review): debug mode exposes the Werkzeug interactive debugger
# (arbitrary code execution) -- must be disabled in production.
app.debug = True
# MySQL config
# NOTE(review): database credentials are hard-coded in source; move them
# to environment variables or an untracked config file.
app.config['MYSQL_HOST'] = 'localhost'
app.config['MYSQL_USER'] = 'root'
app.config['MYSQL_PASSWORD'] = 'Anush@1510'
app.config['MYSQL_DB'] = 'segfault'
# Return rows as dicts (column name -> value) instead of tuples.
app.config['MYSQL_CURSORCLASS'] = 'DictCursor'
# init MySQL
mysql = MySQL(app)
# Registration Form
class RegisterForm(Form):
    """WTForms definition for the sign-up page.

    Validates field lengths and that both password entries match.
    """
    name = StringField('Name', [validators.Length(min=1, max=50)])
    username = StringField('Username', [validators.Length(min=4, max=25)])
    email = StringField('E-Mail', [validators.Length(min=6, max=50)])
    password = PasswordField('Password', [
        validators.DataRequired(),
        validators.EqualTo('confirm', message='Passwords do not match')
    ])
    confirm = PasswordField('Confirm Password')
# Check if user logged in
def is_loggedin(f):
    """Decorator: allow the wrapped view only for authenticated sessions.

    Unauthenticated visitors get a flash message and are redirected to
    the login page instead of the view being executed.
    """
    @wraps(f)
    def wrap(*args, **kwargs):
        # Guard clause: bail out early when there is no active session.
        if 'logged_in' not in session:
            flash('Unauthorized, Please Login first', 'danger')
            return redirect(url_for('login'))
        return f(*args, **kwargs)
    return wrap
# Redirecting to Home page
@app.route('/', methods=['GET', 'POST'])
def index():
    """Home page.

    GET: refresh each question's answered flag, then list all question
    statements. POST: attempt a login with the submitted credentials.
    """
    # Mark questions that have at least one answer. NOTE(review): 'answd'
    # is set to the question's own id (presumably non-zero == answered)
    # -- confirm against the schema. Running this on every request is
    # expensive; consider updating the flag when an answer is posted.
    cur = mysql.connection.cursor()
    cur.execute("SELECT * FROM questions")
    cur2 = mysql.connection.cursor()
    for row in cur:
        result = cur2.execute("SELECT * FROM answers WHERE qid = %s",
                              [row['id']])
        if result > 0:
            # Bug fix: the second placeholder used to receive a one-element
            # list ([row['id']]) inside the parameter tuple instead of a
            # scalar, which MySQLdb cannot escape as a single value.
            cur2.execute("UPDATE questions SET answd = %s WHERE id = %s",
                         (row['id'], row['id']))
            mysql.connection.commit()
    cur2.close()
    cur.close()
    # Collect every question statement for display on the home page.
    friends = []
    cur = mysql.connection.cursor()
    cur.execute("SELECT statement FROM questions")
    for row in cur:
        friends.append(row['statement'])
    cur.close()
    if request.method == 'POST':
        # Get form fields
        username = request.form['username']
        password_candidate = request.form['password']
        cur = mysql.connection.cursor()
        # Get user by username
        result = cur.execute("SELECT * FROM users where user_username = %s",
                             [username])
        if result > 0:
            # Get stored hash
            data = cur.fetchone()
            password = data['password']
            # Close the cursor before returning (the old close was dead
            # code placed after the return statements).
            cur.close()
            # Compare Passwords
            if sha256_crypt.verify(password_candidate, password):
                # Correct password: establish the session.
                session['logged_in'] = True
                session['username'] = username
                flash('You are now logged in', 'success')
                return redirect(url_for('dashboard'))
            error = 'Wrong Password'
            return render_template('home.html', friends=friends, error=error)
        cur.close()
        error = 'Username Not Found'
        return render_template('home.html', friends=friends, error=error)
    return render_template('home.html', friends=friends)
# Redirecting to the about page
@app.route('/about')
def about():
    """Render the static About page."""
    return render_template('about.html')
# Redirecting to the dashboard
@app.route('/dashboard')
@is_loggedin
def dashboard():
    """Show the logged-in user's questions (or a placeholder message)."""
    username = session['username']
    cur = mysql.connection.cursor()
    # Questions posted by this user.
    results = cur.execute("SELECT * FROM questions where poster = %s",
                          [username])
    questions = cur.fetchall()
    # Display name; fall back to the session username so the template never
    # sees an undefined value (the original raised UnboundLocalError when
    # the user row was missing).
    name = username
    result = cur.execute("SELECT * FROM users where user_username = %s",
                         [username])
    if result > 0:
        data = cur.fetchone()
        name = data['user_name']
    cur.close()
    if results > 0:
        return render_template('dashboard.html', questions=questions,
                               name=name)
    msg = "No Questions Asked Yet"
    return render_template('dashboard.html', msg=msg, name=name)
# Accessing the user profile
@app.route('/profile')
@is_loggedin
def profile():
    """Show the logged-in user's account details and activity counts."""
    cur = mysql.connection.cursor()
    username = session['username']
    no_questions = 0
    no_answers = 0
    # Defaults so the template render below never hits unbound locals.
    # (The original raised NameError when the user row was missing.)
    name = email = id = date = None
    result = cur.execute("SELECT * FROM users where user_username = %s", [username])
    if result > 0:
        data = cur.fetchone()
        name = data['user_name']
        email = data['user_email']
        id = data['user_id']
        date = data['register_date']
    # rowcount of each SELECT doubles as the activity count.
    result = cur.execute("SELECT * FROM questions WHERE poster = %s", [username])
    if result > 0:
        no_questions = result
    result = cur.execute("SELECT * FROM answers WHERE author = %s", [username])
    if result > 0:
        no_answers = result
    cur.close()  # the original never closed this cursor
    return render_template('profile.html', name=name, email=email, id=id, date=date,
                           no_questions=no_questions, no_answers=no_answers)
@app.route('/search', methods=['GET', 'POST'])
def search():
    """Search questions and users with a LIKE match on the submitted term.

    Uses parameterized queries: the original concatenated the raw form
    input straight into the SQL string, allowing SQL injection.
    """
    strin = request.form['search']
    results = []
    results2 = []
    cur = mysql.connection.cursor()
    # Wildcard pattern is passed as a bound parameter, never interpolated.
    pattern = "%" + strin + "%"
    no = cur.execute("SELECT * FROM questions WHERE statement LIKE %s", [pattern])
    for row in cur:
        results.append(row)
    no2 = cur.execute("SELECT * FROM users WHERE user_username LIKE %s", [pattern])
    for row in cur:
        results2.append(row)
    cur.close()
    return render_template("search.html", results=results, results2=results2, no2=no2, no=no)
# Register page
@app.route('/signup', methods=['GET', 'POST'])
def signup():
    """Register a new user.

    On a valid POST, hashes the password with sha256_crypt, inserts the
    user row and redirects to the home page.
    """
    form = RegisterForm(request.form)
    if request.method == 'POST' and form.validate():
        name = form.name.data
        email = form.email.data
        username = form.username.data
        # NOTE(review): passlib's .encrypt is the legacy alias of .hash;
        # behaviour is identical.
        password = sha256_crypt.encrypt(str(form.password.data))
        # Create a Cursor
        cur = mysql.connection.cursor()
        cur.execute("INSERT INTO users(user_name, user_email, user_username, password) VALUES(%s, %s, %s, %s)", (name, email, username, password))
        # Commit to db
        mysql.connection.commit()
        # Close connection
        cur.close()
        flash('You are now registered and you can login', 'success')
        return redirect(url_for('index'))
    return render_template('signup.html', form=form)
# User login
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Log a user in.

    POST: look the username up, verify the submitted password against the
    stored sha256_crypt hash and, on success, mark the session logged in.
    GET (or failure): render the login form, with an error if any.
    """
    if request.method == 'POST':
        # Get form fields
        username = request.form['username']
        password_candidate = request.form['password']
        # cursor
        cur = mysql.connection.cursor()
        # Get user by username
        result = cur.execute("SELECT * FROM users where user_username = %s", [username])
        if result > 0:
            # Get stored hash
            data = cur.fetchone()
            password = data['password']
            # Compare Passwords
            if sha256_crypt.verify(password_candidate, password):
                # Correct password
                session['logged_in'] = True
                session['username'] = username
                # Flash will flash a message
                flash('You are now logged in', 'success')
                return redirect(url_for('dashboard'))
            else:
                error = 'Wrong Password'
                return render_template('login.html', error=error)
            # Close the connection to the database
            # NOTE(review): unreachable -- both branches above return first.
            cur.close()
        else:
            error = 'Username Not Found'
            return render_template('login.html', error=error)
    return render_template('login.html')
# Logout
@app.route('/logout')
@is_loggedin
def logout():
    """End the session and redirect to the login page.

    Order matters: the session is cleared first, then the flash message is
    stored (flash lives in the session, so flashing before clear would lose
    the message).
    """
    session.clear()
    flash('You are now logged out', 'success')
    return redirect(url_for('login'))
# Question Class
class QuestionForm(Form):
    """WTForms form for posting a question (statement 1-280, body <= 500)."""
    statement = StringField('Question', [validators.Length(min=1, max=280)])
    body = TextAreaField('Description', [validators.Length(max=500)])
# Add Question Page
@app.route('/addquestion', methods=['GET', 'POST'])
@is_loggedin
def addquestion():
    """Post a new question as the logged-in user."""
    form = QuestionForm(request.form)
    if request.method == 'POST' and form.validate():
        statement = form.statement.data
        body = form.body.data
        # Create cursor
        cur = mysql.connection.cursor()
        cur.execute("INSERT INTO questions(statement,body,poster) VALUES(%s, %s, %s)",(statement, body, session['username']))
        mysql.connection.commit()
        cur.close()
        flash('Question Posted', 'success')
        return redirect(url_for('dashboard'))
    return render_template('addquestion.html', form=form)
@app.route('/questions',defaults={'id':1})
@app.route('/questions/<int:id>/')
def question(id):
    """Paginated list of all questions, newest first, one per page.

    `id` is the 1-based page number; out-of-range pages redirect to the
    last page.
    """
    cur = mysql.connection.cursor()
    # rowcount of the unpaginated query = total questions = total pages.
    result = cur.execute("SELECT * FROM questions ORDER BY id DESC")
    skip = (id-1)
    last = floor(result)
    if last == 0:
        last = 1
    cur.execute("SELECT * FROM questions ORDER BY id DESC LIMIT %s,%s",(skip,1))
    qs = cur.fetchall()
    # Close before returning; the original's close sat after the returns
    # and was unreachable, leaking the cursor.
    cur.close()
    if id > last :
        return redirect(url_for('question',id = last))
    if result > 0:
        return render_template('questions.html',last=last, qs=qs,id=id)
    else:
        msg = "No questions found"
        return render_template('questions.html',msg=msg,id=id)
@app.route('/questions/answered',defaults={'id':1})
@app.route('/questions/answered/<int:id>/')
def answered_question(id):
    """Paginated list of answered questions (answd != 0), one per page."""
    cur = mysql.connection.cursor()
    # rowcount of the unpaginated query = total answered questions/pages.
    result = cur.execute("SELECT * FROM questions WHERE answd != 0 ORDER BY id DESC ")
    skip = (id-1)
    last = floor(result)
    if last == 0:
        last = 1
    cur.execute("SELECT * FROM questions WHERE answd != 0 ORDER BY id DESC LIMIT %s,%s",(skip,1))
    qs = cur.fetchall()
    # Close before returning; the original's close was unreachable.
    cur.close()
    if id > last :
        return redirect(url_for('answered_question',id = last))
    if result > 0:
        return render_template('answered_questions.html',last=last, qs=qs,id=id)
    else:
        msg = "No questions found"
        return render_template('answered_questions.html',msg=msg,id=id)
@app.route('/questions/unanswered',defaults={'id':1})
@app.route('/questions/unanswered/<int:id>/')
def unanswered_question(id):
    """Paginated list of unanswered questions (answd = 0), one per page."""
    cur = mysql.connection.cursor()
    # rowcount of the unpaginated query = total unanswered questions/pages.
    result = cur.execute("SELECT * FROM questions WHERE answd = 0 ORDER BY id DESC ")
    skip = (id-1)
    last = floor(result)
    if last == 0:
        last = 1
    cur.execute("SELECT * FROM questions WHERE answd = 0 ORDER BY id DESC LIMIT %s,%s",(skip,1))
    qs = cur.fetchall()
    # Close before returning; the original's close was unreachable.
    cur.close()
    if id > last :
        return redirect(url_for('unanswered_question',id = last))
    if result > 0:
        return render_template('unanswered_questions.html',last=last, qs=qs,id=id)
    else:
        msg = "No questions found"
        return render_template('unanswered_questions.html',msg=msg,id=id)
@app.route('/editquestion/<string:id>',methods = ['GET','POST'])
@is_loggedin
def editquestion(id):
    """Edit one of the logged-in user's own questions."""
    cur2 = mysql.connection.cursor()
    result = cur2.execute("SELECT * FROM questions WHERE id = %s AND poster = %s",
                          (id, session['username']))
    one_qs = cur2.fetchone()
    cur2.close()
    form = QuestionForm(request.form)
    # Bug fix: the original tested `form.validate` (a bound method, always
    # truthy) instead of calling it, so validation never ran. It also
    # overwrote the posted form data with the DB values before the check.
    if request.method == 'POST' and form.validate():
        body = request.form['body']
        statement = request.form['statement']
        cur = mysql.connection.cursor()
        cur.execute("UPDATE questions SET body=%s,statement=%s WHERE id = %s ",(body,statement,id))
        mysql.connection.commit()
        cur.close()
        flash ('Question Updated','success')
        return redirect(url_for('questions',id = id))
    # Pre-fill the form with the current values for the edit page.
    form.statement.data = one_qs['statement']
    form.body.data = one_qs['body']
    return render_template('editquestion.html',form=form,one_qs=one_qs)
# Delete questions
@app.route('/delete_question/<string:id>', methods=['POST'])
@is_loggedin
def delete_question(id):
    """Delete a question by primary key, then return to the dashboard."""
    conn = mysql.connection
    cursor = conn.cursor()
    cursor.execute("DELETE FROM questions where id=%s", [id])
    conn.commit()
    cursor.close()
    flash('Question Deleted', 'success')
    return redirect(url_for('dashboard'))
@app.route('/delete_answer/<string:aid>/<int:qid>',methods=['GET','POST'])
@is_loggedin
def delete_answer(aid, qid):
    """Delete answer `aid`, then return to its question page `qid`."""
    conn = mysql.connection
    cursor = conn.cursor()
    cursor.execute("DELETE FROM answers where id=%s", [aid])
    conn.commit()
    cursor.close()
    flash('Answer Deleted', 'success')
    return redirect(url_for('questions', id=qid))
# Comment form
class CommentForm(Form):
    """WTForms form for commenting on an answer (1-80 characters)."""
    body = TextAreaField('',[validators.Length(min=1,max=80)])
@app.route('/question/<string:id>/', methods=['GET', 'POST'])
def questions(id):
    """Question detail page: the question, its answers (by upvotes) and
    their comments. POST handles the per-answer comment form.
    """
    cur = mysql.connection.cursor()
    # Resolve the viewer (anonymous visitors get uid 0 / username None).
    if 'logged_in' not in session:
        username = None
        uid = 0
    else:
        username = session['username']
        cur.execute("SELECT * FROM users WHERE user_username = %s",[username])
        us = cur.fetchone()
        uid = us['user_id']
    cur.execute("SELECT * FROM questions WHERE id = %s",[id])
    one_qs = cur.fetchone()
    cur.close()
    # rowcount > 0 means the viewer already answered this question
    # (row-constructor comparison; params are wrapped in one-element lists).
    cur = mysql.connection.cursor()
    auths = cur.execute("SELECT * FROM answers WHERE (author,qid) = (%s,%s)",([username],[id]))
    cur.close()
    cur2 = mysql.connection.cursor()
    result = cur2.execute("SELECT * FROM answers WHERE qid = %s ORDER BY upvote DESC",[id])
    answers = cur2.fetchall()
    cur2.close()
    # comments[i] holds the comment rows for answers[i] (same ORDER BY).
    comments = []
    cur = mysql.connection.cursor()
    cur.execute("SELECT * FROM answers WHERE qid = %s ORDER BY upvote DESC",[id])
    for row in cur:
        cur2 = mysql.connection.cursor()
        cur2.execute("SELECT * FROM comments WHERE ansid = %s",[row['id']])
        comments.append(cur2.fetchall())
        cur2.close()
    cur.close()
    form1 = CommentForm(request.form)
    if request.method == 'POST':
        # 'idd' is the hidden field carrying the answer id being commented on.
        answerid = request.form['idd']
        if form1.validate() :
            body = form1.body.data
            cur3 = mysql.connection.cursor()
            cur3.execute("INSERT INTO comments(ansid,body,author) VALUES(%s, %s, %s)",([answerid],[body], session['username']))
            mysql.connection.commit()
            cur3.close()
            flash('Comment Posted', 'success')
            return redirect(url_for('questions',id=id))
    ups = 0  # NOTE(review): always 0 here; the template receives it as no_of_up
    if result > 0:
        return render_template('question.html',no_of_up=ups,uid=uid, form1=form1,one_qs=one_qs, answers=answers, username=username, auths=auths,comments=comments)
    else:
        msg = "Not Answered Yet"
        return render_template('question.html',no_of_up=ups,uid=uid, form1=form1,one_qs=one_qs, msg=msg, username=username, auths=auths,comments=comments)
@app.route('/upvote/<string:user_id>/<string:q_id>/<int:ans_id>/')
@is_loggedin
def upvote(user_id,q_id,ans_id):
    """Toggle the viewer's upvote on answer `ans_id`.

    If a vote row already exists it is removed, otherwise one is inserted;
    either way the answer's cached upvote count is refreshed from the
    rowcount of the votes SELECT, and the viewer is sent back to the
    question page `q_id`.
    """
    cur = mysql.connection.cursor()
    result = cur.execute("SELECT * FROM answers WHERE id = %s",[ans_id])
    if result == 0:
        abort(404)
    post = cur.fetchone()  # NOTE(review): fetched but never used
    result = cur.execute("SELECT * FROM votes WHERE userid = %s AND ansid = %s",([user_id],[ans_id]))
    cur.close()
    if result > 0:
        # Vote exists -> remove it (toggle off).
        cur = mysql.connection.cursor()
        cur.execute("DELETE FROM votes WHERE userid = %s AND ansid = %s",([user_id],[ans_id]))
        mysql.connection.commit()
        # rowcount of the SELECT = new vote total for this answer.
        number = cur.execute("SELECT * FROM votes WHERE ansid = %s",[ans_id])
        cur.execute("UPDATE answers SET upvote = %s WHERE id = %s",(number,ans_id))
        mysql.connection.commit()
        cur.close()
        flash("Upvote Removed","danger")
        return redirect(url_for('questions',id=q_id))
    else :
        # No vote yet -> record one (toggle on).
        cur = mysql.connection.cursor()
        cur.execute("INSERT INTO votes(ansid,userid) VALUES(%s,%s)",([ans_id],[user_id]))
        mysql.connection.commit()
        number = cur.execute("SELECT * FROM votes WHERE ansid = %s",[ans_id])
        cur.execute("UPDATE answers SET upvote = %s WHERE id = %s",(number,ans_id))
        mysql.connection.commit()
        cur.close()
        flash("Upvoted","success")
        return redirect(url_for('questions',id=q_id))
    # NOTE(review): unreachable -- both branches above return first.
    return redirect(url_for('questions',id=q_id))
class AnswerForm(Form):
    """WTForms form for answering a question (minimum 5 characters)."""
    body = TextAreaField('Your Answer:',[validators.Length(min=5)])
@app.route('/addanswer/<string:id>', methods=['GET', 'POST'])
@is_loggedin
def addanswer(id):
    """Post an answer to question `id` as the logged-in user."""
    form = AnswerForm(request.form)
    cur2 = mysql.connection.cursor()
    cur2.execute("SELECT * FROM questions WHERE id = %s",[id])
    one_qs = cur2.fetchone()
    cur2.close()
    if request.method == 'POST' and form.validate():
        body = form.body.data
        # Create cursor
        cur = mysql.connection.cursor()
        # NOTE(review): qid is passed wrapped in a one-element list; the
        # driver escapes it as a parenthesized value -- verify against the
        # DB driver in use.
        cur.execute("INSERT INTO answers(qid,body,author) VALUES(%s, %s, %s)",([id], body, session['username']))
        mysql.connection.commit()
        cur.close()
        flash('Question Answered', 'success')
        return redirect(url_for('dashboard'))
    return render_template('addanswer.html', form=form ,one_qs=one_qs)
@app.route('/editanswer/<string:id>',methods = ['GET','POST'])
@is_loggedin
def editanswer(id):
    """Edit the logged-in user's answer to question `id`.

    Bug fixes vs. the original:
      * form.validate was never called (the bound method is always truthy),
        so validation was dead;
      * the UPDATE lacked the qid condition and rewrote EVERY answer the
        user had ever posted;
      * the redirect targeted the paginated listing instead of the edited
        question's page (inconsistent with editquestion).
    """
    form = AnswerForm(request.form)
    cur2 = mysql.connection.cursor()
    cur2.execute("SELECT * FROM questions WHERE id = %s",[id])
    one_qs = cur2.fetchone()
    cur2.close()
    cur = mysql.connection.cursor()
    cur.execute("SELECT * FROM answers WHERE qid = %s AND author = %s ", (id, session['username']))
    one_ans = cur.fetchone()
    cur.close()
    if request.method == 'POST' and form.validate():
        body = request.form['body']
        cur = mysql.connection.cursor()
        cur.execute("UPDATE answers SET body=%s WHERE qid = %s AND author = %s",
                    (body, id, session['username']))
        mysql.connection.commit()
        cur.close()
        flash ('Answer Updated','success')
        return redirect(url_for('questions', id=id))
    # Pre-fill the form with the current answer text for the edit page.
    form.body.data = one_ans['body']
    return render_template('editanswer.html',form=form,one_qs=one_qs)
# Running the app if app.py is the main module
if __name__ == '__main__':
    # Encryption Key
    # SECURITY(review): the session secret key is hard-coded in source;
    # it should be loaded from the environment or a config file kept out
    # of version control.
    app.secret_key='bZ\x85\xb2\xfc1$\xe6\n\xa1\xc0\xce\xdd\x9f\x815\xc0\xe4\xac\xc6\xfc\x0e\xa9\xa0V'
    # Starting the app
    app.run()
| 29.007788 | 162 | 0.628739 |
ace495fe107cda73ff71322134438e266bfd666e | 18,883 | py | Python | Sublime Text 3/sublime_plugin.py | joaolucasp/Evil-Square | 41992308ad632df8e36b28edcef1bea916ea5339 | [
"MIT"
] | 1 | 2021-08-04T18:03:46.000Z | 2021-08-04T18:03:46.000Z | Simulador/Sublime Text 3/sublime_plugin.py | GKuabara/tomb-of-the-mask | edef54e11c127560da802176840ad110a7f5999f | [
"MIT"
] | null | null | null | Simulador/Sublime Text 3/sublime_plugin.py | GKuabara/tomb-of-the-mask | edef54e11c127560da802176840ad110a7f5999f | [
"MIT"
] | null | null | null | import sublime
import threading
import imp
import importlib
import os
import sys
import zipfile
import sublime_api
import traceback
api_ready = False
application_command_classes = []
window_command_classes = []
text_command_classes = []
all_command_classes = [application_command_classes, window_command_classes, text_command_classes]
all_callbacks = {'on_new': [], 'on_clone': [], 'on_load': [], 'on_pre_close': [], 'on_close': [],
'on_pre_save': [], 'on_post_save': [], 'on_modified': [],
'on_selection_modified': [],'on_activated': [], 'on_deactivated': [],
'on_query_context': [], 'on_query_completions': [],
'on_text_command': [], 'on_window_command': [],
'on_post_text_command': [], 'on_post_window_command': [],
'on_modified_async': [],
'on_selection_modified_async': [],
'on_pre_save_async': [],
'on_post_save_async': [],
'on_activated_async': [],
'on_deactivated_async': [],
'on_new_async': [],
'on_load_async': [],
'on_clone_async': []}
def unload_module(module):
    """Run a plugin module's unload hooks and deregister its classes.

    Calls plugin_unloaded() (and the legacy unload_handler()) if defined,
    then removes every command class and event-listener instance the module
    registered from the global command/callback tables.
    """
    if "plugin_unloaded" in module.__dict__:
        module.plugin_unloaded()
    # Check unload_handler too, for backwards compat
    if "unload_handler" in module.__dict__:
        module.unload_handler()

    # Unload the old plugins
    if "plugins" in module.__dict__:
        for p in module.plugins:
            for cmd_cls_list in all_command_classes:
                try:
                    cmd_cls_list.remove(p)
                except ValueError:
                    pass
            for c in all_callbacks.values():
                try:
                    c.remove(p)
                except ValueError:
                    pass
def unload_plugin(modulename):
    """Run unload hooks for the named plugin module, if it is loaded."""
    print("unloading plugin", modulename)

    module = sys.modules.get(modulename)
    if module is not None:
        unload_module(module)
def reload_plugin(modulename):
    """(Re)import a plugin module and register everything it defines.

    If the module is already loaded it is unloaded and imp.reload()ed,
    otherwise freshly imported. Every command subclass found is appended to
    the matching global class list, and every EventListener subclass is
    instantiated and hooked into all_callbacks. If the API is already up,
    plugin_loaded() is invoked and on_activated is synthesized for the
    currently active view.
    """
    print("reloading plugin", modulename)

    if modulename in sys.modules:
        m = sys.modules[modulename]
        unload_module(m)
        m = imp.reload(m)
    else:
        m = importlib.import_module(modulename)

    module_plugins = []
    on_activated_targets = []
    for type_name in dir(m):
        try:
            t = m.__dict__[type_name]
            # Only classes have __bases__; other attributes raise
            # AttributeError and are skipped below.
            if t.__bases__:
                is_plugin = False
                if issubclass(t, ApplicationCommand):
                    application_command_classes.append(t)
                    is_plugin = True
                if issubclass(t, WindowCommand):
                    window_command_classes.append(t)
                    is_plugin = True
                if issubclass(t, TextCommand):
                    text_command_classes.append(t)
                    is_plugin = True

                if is_plugin:
                    module_plugins.append(t)

                if issubclass(t, EventListener):
                    obj = t()
                    # Register the instance for every callback it implements.
                    for p in all_callbacks.items():
                        if p[0] in dir(obj):
                            p[1].append(obj)
                    if "on_activated" in dir(obj):
                        on_activated_targets.append(obj)
                    module_plugins.append(obj)

        except AttributeError:
            pass

    if len(module_plugins) > 0:
        # Remembered so unload_module can deregister them later.
        m.plugins = module_plugins

        if api_ready:
            if "plugin_loaded" in m.__dict__:
                try:
                    m.plugin_loaded()
                except:
                    traceback.print_exc()

            # Synthesize any required on_activated calls
            for el in on_activated_targets:
                w = sublime.active_window()
                if w:
                    v = w.active_view()
                    if v:
                        try:
                            el.on_activated(v)
                        except:
                            traceback.print_exc()
def create_application_commands():
    """Instantiate every registered ApplicationCommand class and hand the
    instances to the application core."""
    sublime_api.notify_application_commands(
        [cls() for cls in application_command_classes])

def create_window_commands(window_id):
    """Return fresh instances of all WindowCommands bound to the window."""
    window = sublime.Window(window_id)
    return [cls(window) for cls in window_command_classes]

def create_text_commands(view_id):
    """Return fresh instances of all TextCommands bound to the view."""
    view = sublime.View(view_id)
    return [cls(view) for cls in text_command_classes]
def on_api_ready():
    """Called by the core once the plugin API is usable.

    Flips the module-level api_ready flag, fires plugin_loaded() on every
    already-imported module that defines it, and synthesizes an
    on_activated event for the currently active view.
    """
    global api_ready
    api_ready = True

    for m in list(sys.modules.values()):
        if "plugin_loaded" in m.__dict__:
            try:
                m.plugin_loaded()
            except:
                traceback.print_exc()

    # Synthesize an on_activated call
    w = sublime.active_window()
    if w:
        view_id = sublime_api.window_active_view(w.window_id)
        if view_id != 0:
            try:
                on_activated(view_id)
            except:
                traceback.print_exc()
# ---------------------------------------------------------------------------
# Event dispatchers, invoked from the application core with a raw view id.
# Each wraps the id in a sublime.View and fans the event out to every
# registered EventListener implementing the matching callback. Exceptions
# from plugins are printed and swallowed so one faulty plugin cannot break
# event delivery to the others. The *_async variants are run on the worker
# thread by the core.
# ---------------------------------------------------------------------------

def on_new(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_new']:
        try:
            callback.on_new(v)
        except:
            traceback.print_exc()

def on_new_async(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_new_async']:
        try:
            callback.on_new_async(v)
        except:
            traceback.print_exc()

def on_clone(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_clone']:
        try:
            callback.on_clone(v)
        except:
            traceback.print_exc()

def on_clone_async(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_clone_async']:
        try:
            callback.on_clone_async(v)
        except:
            traceback.print_exc()

def on_load(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_load']:
        try:
            callback.on_load(v)
        except:
            traceback.print_exc()

def on_load_async(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_load_async']:
        try:
            callback.on_load_async(v)
        except:
            traceback.print_exc()

def on_pre_close(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_pre_close']:
        try:
            callback.on_pre_close(v)
        except:
            traceback.print_exc()

def on_close(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_close']:
        try:
            callback.on_close(v)
        except:
            traceback.print_exc()

def on_pre_save(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_pre_save']:
        try:
            callback.on_pre_save(v)
        except:
            traceback.print_exc()

def on_pre_save_async(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_pre_save_async']:
        try:
            callback.on_pre_save_async(v)
        except:
            traceback.print_exc()

def on_post_save(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_post_save']:
        try:
            callback.on_post_save(v)
        except:
            traceback.print_exc()

def on_post_save_async(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_post_save_async']:
        try:
            callback.on_post_save_async(v)
        except:
            traceback.print_exc()

def on_modified(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_modified']:
        try:
            callback.on_modified(v)
        except:
            traceback.print_exc()

def on_modified_async(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_modified_async']:
        try:
            callback.on_modified_async(v)
        except:
            traceback.print_exc()

def on_selection_modified(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_selection_modified']:
        try:
            callback.on_selection_modified(v)
        except:
            traceback.print_exc()

def on_selection_modified_async(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_selection_modified_async']:
        try:
            callback.on_selection_modified_async(v)
        except:
            traceback.print_exc()

def on_activated(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_activated']:
        try:
            callback.on_activated(v)
        except:
            traceback.print_exc()

def on_activated_async(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_activated_async']:
        try:
            callback.on_activated_async(v)
        except:
            traceback.print_exc()

def on_deactivated(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_deactivated']:
        try:
            callback.on_deactivated(v)
        except:
            traceback.print_exc()

def on_deactivated_async(view_id):
    v = sublime.View(view_id)
    for callback in all_callbacks['on_deactivated_async']:
        try:
            callback.on_deactivated_async(v)
        except:
            traceback.print_exc()

def on_query_context(view_id, key, operator, operand, match_all):
    # Returns True as soon as one listener claims the context key;
    # False when no listener does.
    v = sublime.View(view_id)
    for callback in all_callbacks['on_query_context']:
        try:
            val = callback.on_query_context(v, key, operator, operand, match_all)
            if val:
                return True
        except:
            traceback.print_exc()

    return False
def normalise_completion(c):
    """Expand a short completion into the canonical 3-tuple.

    (trigger,)          -> (trigger, "", "")
    (trigger, contents) -> (trigger, "", contents)
    anything else       -> returned unchanged
    """
    n = len(c)
    if n == 0 or n >= 3:
        return c
    return (c[0], "", c[1] if n == 2 else "")
def on_query_completions(view_id, prefix, locations):
    """Gather completions from every listener.

    Listeners may return either a list of completions or a
    (completions, flags) tuple; short completions are normalised to
    3-tuples and the flags are OR-combined.
    """
    v = sublime.View(view_id)

    completions = []
    flags = 0
    for callback in all_callbacks['on_query_completions']:
        try:
            res = callback.on_query_completions(v, prefix, locations)
            if isinstance(res, tuple):
                completions += [normalise_completion(c) for c in res[0]]
                flags |= res[1]
            elif isinstance(res, list):
                completions += [normalise_completion(c) for c in res]
        except:
            traceback.print_exc()

    return (completions,flags)

def on_text_command(view_id, name, args):
    """Let listeners rewrite a text command before it runs.

    The first listener returning a truthy value wins; its (name, args)
    replacement is returned, or ("", None) when nobody intercepts.
    """
    v = sublime.View(view_id)
    for callback in all_callbacks['on_text_command']:
        try:
            res = callback.on_text_command(v, name, args)
            if isinstance(res, tuple):
                return res
            elif res:
                return (res, None)
        except:
            traceback.print_exc()

    return ("", None)

def on_window_command(window_id, name, args):
    """Window-command counterpart of on_text_command (same protocol)."""
    window = sublime.Window(window_id)
    for callback in all_callbacks['on_window_command']:
        try:
            res = callback.on_window_command(window, name, args)
            if isinstance(res, tuple):
                return res
            elif res:
                return (res, None)
        except:
            traceback.print_exc()

    return ("", None)

def on_post_text_command(view_id, name, args):
    """Notify listeners after a text command has run."""
    v = sublime.View(view_id)
    for callback in all_callbacks['on_post_text_command']:
        try:
            callback.on_post_text_command(v, name, args)
        except:
            traceback.print_exc()

def on_post_window_command(window_id, name, args):
    """Notify listeners after a window command has run."""
    window = sublime.Window(window_id)
    for callback in all_callbacks['on_post_window_command']:
        try:
            callback.on_post_window_command(window, name, args)
        except:
            traceback.print_exc()
class Command(object):
    """Base class for all commands.

    Provides command-name derivation and the enablement/visibility/checked/
    description protocol used by menus and key bindings. The underscore
    variants (is_enabled_ etc.) are core-facing wrappers that adapt the
    argument dict to whatever signature the subclass declared.
    """

    def name(self):
        """Derive the command name from the class name: CamelCase becomes
        snake_case and a trailing 'Command' suffix is dropped
        (e.g. InsertSnippetCommand -> insert_snippet)."""
        clsname = self.__class__.__name__
        name = clsname[0].lower()
        last_upper = False
        for c in clsname[1:]:
            if c.isupper() and not last_upper:
                name += '_'
                name += c.lower()
            else:
                name += c
            last_upper = c.isupper()
        if name.endswith("_command"):
            name = name[0:-8]
        return name

    def is_enabled_(self, args):
        # Try the subclass signature with args (minus the synthetic 'event'
        # key); fall back to the no-arg form on TypeError.
        ret = None
        try:
            if args:
                if 'event' in args:
                    del args['event']
                ret = self.is_enabled(**args)
            else:
                ret = self.is_enabled()
        except TypeError:
            ret = self.is_enabled()

        if not isinstance(ret, bool):
            raise ValueError("is_enabled must return a bool", self)

        return ret

    def is_enabled(self):
        """Override to grey the command out; must return a bool."""
        return True

    def is_visible_(self, args):
        # Same adaptation pattern as is_enabled_.
        ret = None
        try:
            if args:
                ret = self.is_visible(**args)
            else:
                ret = self.is_visible()
        except TypeError:
            ret = self.is_visible()

        if not isinstance(ret, bool):
            raise ValueError("is_visible must return a bool", self)

        return ret

    def is_visible(self):
        """Override to hide the command from menus; must return a bool."""
        return True

    def is_checked_(self, args):
        # Same adaptation pattern as is_enabled_.
        ret = None
        try:
            if args:
                ret = self.is_checked(**args)
            else:
                ret = self.is_checked()
        except TypeError:
            ret = self.is_checked()

        if not isinstance(ret, bool):
            raise ValueError("is_checked must return a bool", self)

        return ret

    def is_checked(self):
        """Override to render a checkmark next to the menu item."""
        return False

    def description_(self, args):
        # Unlike the predicates above, a mismatched signature yields "".
        try:
            if args != None:
                return self.description(**args)
            else:
                return self.description()
        except TypeError as e:
            return ""

    def description(self):
        """Override to supply a human-readable command description."""
        return ""
class ApplicationCommand(Command):
    """Command with application-wide scope (no window or view context)."""

    def run_(self, edit_token, args):
        # Core-facing entry point; the synthetic 'event' key is stripped
        # before forwarding to the subclass's run().
        if args:
            if 'event' in args:
                del args['event']
            return self.run(**args)
        else:
            return self.run()

    def run(self):
        """Override with the command's behaviour."""
        pass
class WindowCommand(Command):
    """Command bound to a window, available via self.window."""

    def __init__(self, window):
        self.window = window

    def run_(self, edit_token, args):
        # Core-facing entry point; strips the synthetic 'event' key.
        if args:
            if 'event' in args:
                del args['event']
            return self.run(**args)
        else:
            return self.run()

    def run(self):
        """Override with the command's behaviour."""
        pass
class TextCommand(Command):
    """Command bound to a view (self.view), run inside an edit grouping."""

    def __init__(self, view):
        self.view = view

    def run_(self, edit_token, args):
        # Core-facing entry point: wraps run() in begin_edit/end_edit so
        # all buffer modifications are grouped for undo, even on error.
        if args:
            if 'event' in args:
                del args['event']

            edit = self.view.begin_edit(edit_token, self.name(), args)
            try:
                return self.run(edit, **args)
            finally:
                self.view.end_edit(edit)
        else:
            edit = self.view.begin_edit(edit_token, self.name())
            try:
                return self.run(edit)
            finally:
                self.view.end_edit(edit)

    def run(self, edit):
        """Override with the command's behaviour; pass `edit` to view
        modification calls."""
        pass
class EventListener(object):
    """Marker base class: subclasses are instantiated at plugin load and
    registered for whichever on_* callbacks they define."""
    pass
class MultizipImporter(object):
    """sys.meta_path finder resolving plugin imports from packaged zip
    files, with one ZipLoader per package archive."""

    def __init__(self):
        self.loaders = []       # ZipLoader instances, one per package zip
        self.file_loaders = []  # NOTE(review): appears unused in this file

    def find_module(self, fullname, path = None):
        """PEP 302 finder: top-level names match a loader by package name;
        submodules match by their parent package's zip path."""
        if not path:
            for l in self.loaders:
                if l.name == fullname:
                    return l

        for l in self.loaders:
            if path == [l.zippath]:
                if l.has(fullname):
                    return l

        return None
class ZipLoader(object):
    """PEP 302 loader serving .py sources from one package zip file, with
    loose files under override_path taking precedence at load time."""

    def __init__(self, zippath):
        # Read every .py member up front; self.contents maps the dotted
        # sub-module path (relative to the package) to its source text, and
        # self.packages records which keys are packages ("" is the root).
        self.zippath = zippath
        self.name = os.path.splitext(os.path.basename(zippath))[0]

        self.contents = {"":""}
        self.packages = {""}

        z = zipfile.ZipFile(zippath, 'r')
        files = [i.filename for i in z.infolist()]

        for f in files:
            base, ext = os.path.splitext(f)
            if ext != ".py":
                continue

            paths = base.split('/')
            if len(paths) > 0 and paths[len(paths) - 1] == "__init__":
                # foo/__init__.py defines package "foo".
                paths.pop()
                self.packages.add('.'.join(paths))

            try:
                self.contents['.'.join(paths)] = z.read(f).decode('utf-8')
            except UnicodeDecodeError:
                print(f, "in", zippath, "is not utf-8 encoded, unable to load plugin")
                continue

            # Ensure every ancestor exists as an (empty) package entry.
            while len(paths) > 1:
                paths.pop()
                parent = '.'.join(paths)
                if parent not in self.contents:
                    self.contents[parent] = ""
                    self.packages.add(parent)
        z.close()

    def has(self, fullname):
        """Return True if the dotted name exists in the zip or as a loose
        override file/package under override_path."""
        key = '.'.join(fullname.split('.')[1:])
        if key in self.contents:
            return True

        override_file = os.path.join(override_path, os.sep.join(fullname.split('.')) + '.py')
        if os.path.isfile(override_file):
            return True

        override_package = os.path.join(override_path, os.sep.join(fullname.split('.')))
        if os.path.isdir(override_package):
            return True

        return False

    def load_module(self, fullname):
        """PEP 302 loader: compile and exec the module source, preferring a
        loose override file (or package __init__) over the zip contents."""
        if fullname in sys.modules:
            mod = sys.modules[fullname]
        else:
            mod = sys.modules.setdefault(fullname, imp.new_module(fullname))

        mod.__file__ = self.zippath + "/" + fullname
        mod.__name__ = fullname
        mod.__path__ = [self.zippath]
        mod.__loader__ = self

        key = '.'.join(fullname.split('.')[1:])
        if key in self.contents:
            source = self.contents[key]
            source_path = key + " in " + self.zippath

        is_pkg = key in self.packages

        try:
            override_file = os.path.join(override_path, os.sep.join(fullname.split('.')) + '.py')
            override_package_init = os.path.join(os.path.join(override_path, os.sep.join(fullname.split('.'))), '__init__.py')
            if os.path.isfile(override_file):
                with open(override_file, 'r') as f:
                    source = f.read()
                    source_path = override_file
            elif os.path.isfile(override_package_init):
                with open(override_package_init, 'r') as f:
                    source = f.read()
                    source_path = override_package_init
                    is_pkg = True
        except:
            # Best effort: fall back to the zip contents on any IO error.
            pass

        if is_pkg:
            mod.__package__ = mod.__name__
        else:
            mod.__package__ = fullname.rpartition('.')[0]

        exec(compile(source, source_path, 'exec'), mod.__dict__)
        return mod
# Module state: the directory holding loose-file overrides of packed files,
# and the single importer instance installed at the front of sys.meta_path.
override_path = None
multi_importer = MultizipImporter()
sys.meta_path.insert(0, multi_importer)

def update_compressed_packages(pkgs):
    """Rebuild the zip loaders from the current list of package zip paths."""
    multi_importer.loaders = [ZipLoader(p) for p in pkgs]

def set_override_path(path):
    """Record the directory used for loose-file overrides (see ZipLoader)."""
    global override_path
    override_path = path
| 27.566423 | 126 | 0.562464 |
ace4982ea60989309cb52baa612afe05baad99a5 | 3,270 | py | Python | importer/NlAwNl.py | Vesihiisi/COH-tools | a874f076cb93b93722efb1be56a66a9380bcb7c4 | [
"MIT"
] | 4 | 2017-01-12T14:43:28.000Z | 2017-09-08T20:29:30.000Z | importer/NlAwNl.py | Vesihiisi/COH-tools | a874f076cb93b93722efb1be56a66a9380bcb7c4 | [
"MIT"
] | 103 | 2017-01-13T13:25:03.000Z | 2018-09-05T12:29:41.000Z | importer/NlAwNl.py | Vesihiisi/COH-tools | a874f076cb93b93722efb1be56a66a9380bcb7c4 | [
"MIT"
] | 2 | 2017-03-23T10:22:54.000Z | 2018-01-08T09:25:03.000Z | from Monument import Monument, Dataset
import importer_utils as utils
import importer as importer
class NlAwNl(Monument):
    """Mapper for Aruban (nl-aw) cultural-heritage monuments.

    Each set_*/update_* method converts one source-table column into a
    Wikidata statement/label; unconvertible values are logged via
    add_to_report. The constructor drives the full conversion pipeline.
    """

    def set_address(self):
        # Only treat 'adres' as a street address when it contains a digit;
        # otherwise report it for manual review.
        if self.has_non_empty_attribute("adres"):
            if utils.contains_digit(self.adres):
                town = utils.remove_markup(self.plaats)
                address = "{}, {}".format(self.adres, town)
                self.add_statement("located_street", address)
            else:
                self.add_to_report("adres", self.adres, "located_street")

    def set_location(self):
        # Prefer a single wikilink in 'plaats'; otherwise look the name up
        # in the settlements data file.
        loc_q = None
        loc_dic = self.data_files["settlements"]
        if self.has_non_empty_attribute("plaats"):
            if utils.count_wikilinks(self.plaats) == 1:
                loc_q = utils.q_from_first_wikilink("nl", self.plaats)
            else:
                loc_match = utils.get_item_from_dict_by_key(dict_name=loc_dic,
                                                            search_term=self.plaats,
                                                            search_in="itemLabel",
                                                            return_content_of="item")
                if len(loc_match) == 1:
                    loc_q = loc_match[0]
            if loc_q:
                self.add_statement("location", loc_q)
            else:
                self.add_to_report("plaats", self.plaats, "location")

    def set_adm_location(self):
        # Every monument in this table is administratively located in Aruba.
        aruba = "Q21203"
        self.add_statement("located_adm", aruba)

    def set_inception(self):
        if self.has_non_empty_attribute("bouwjaar"):
            if utils.legit_year(self.bouwjaar):
                inc_year = {"time_value": {"year": self.bouwjaar}}
                self.add_statement("inception", inc_year)
            else:
                self.add_to_report("bouwjaar", self.bouwjaar, "inception")

    def set_heritage_id(self):
        # WLM id format: "<TABLE_NAME_UPPER>-<objectnr>".
        wlm_name = self.mapping["table_name"].upper()
        wlm = "{}-{}".format(wlm_name, str(self.objectnr))
        self.add_statement("wlm_id", wlm)

    def update_labels(self):
        nl = utils.remove_markup(self.omschrijving)
        self.add_label("nl", nl)

    def update_descriptions(self):
        desc = "cultural heritage monument in Aruba"
        self.add_description("en", desc)

    def __init__(self, db_row_dict, mapping, data_files, existing, repository):
        """Run the full row-to-item conversion pipeline."""
        Monument.__init__(self, db_row_dict, mapping,
                          data_files, existing, repository)
        self.set_monuments_all_id("objectnr")
        self.set_changed()
        self.set_wlm_source()
        self.set_heritage_id()
        self.set_heritage()
        self.set_country()
        self.set_coords()
        self.set_location()
        self.set_adm_location()
        self.set_address()
        self.set_is()
        self.set_image()
        self.set_commonscat()
        self.set_inception()
        self.update_labels()
        self.update_descriptions()
        self.set_wd_item(self.find_matching_wikidata(mapping))
if __name__ == "__main__":
    """Command line entry point for importer."""
    # Build the nl-aw dataset, attach its settlements lookup file and run
    # the shared importer main loop.
    args = importer.handle_args()
    dataset = Dataset("nl-aw", "nl", NlAwNl)
    dataset.data_files = {"settlements": "aruba_settlements.json"}
    importer.main(args, dataset)
| 36.741573 | 85 | 0.58104 |
ace498a4f25bd831548d27207bc4b41ec9ad967e | 638 | py | Python | metadata/tests/unit/test_basics.py | defendercrypt/amundsen | 83c728b646020f60cf2270c12e766fe4af8c9948 | [
"Apache-2.0"
] | 2,072 | 2020-08-11T20:16:48.000Z | 2022-03-31T07:04:05.000Z | metadata/tests/unit/test_basics.py | defendercrypt/amundsen | 83c728b646020f60cf2270c12e766fe4af8c9948 | [
"Apache-2.0"
] | 795 | 2020-08-11T15:24:39.000Z | 2022-03-31T18:56:13.000Z | metadata/tests/unit/test_basics.py | defendercrypt/amundsen | 83c728b646020f60cf2270c12e766fe4af8c9948 | [
"Apache-2.0"
] | 671 | 2020-08-11T20:39:56.000Z | 2022-03-31T08:39:07.000Z | # Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
import unittest
from flask import current_app
from metadata_service import create_app
class BasicTestCase(unittest.TestCase):
    """
    Smoke test: verify the metadata service app factory can stand up an
    application with the local config.
    """

    def setUp(self) -> None:
        # Build the app and push an app context so flask.current_app
        # resolves inside the tests.
        self.app = create_app(
            config_module_class='metadata_service.config.LocalConfig')
        self.app_context = self.app.app_context()
        self.app_context.push()

    def tearDown(self) -> None:
        self.app_context.pop()

    def test_app_exists(self) -> None:
        self.assertFalse(current_app is None)
| 23.62963 | 70 | 0.689655 |
ace4994d54809a52a4962d55b1ad5d4c5356b6ab | 11,544 | py | Python | ideaman_sync/doc2vec/__init__.py | LibRec-Practical/ideaman-offline | f8341fc9ca77adcc1191c01037dda18c02d77b29 | [
"MIT"
] | 1 | 2021-06-21T06:41:12.000Z | 2021-06-21T06:41:12.000Z | ideaman_sync/doc2vec/__init__.py | LibRec-Practical/ideaman-offline | f8341fc9ca77adcc1191c01037dda18c02d77b29 | [
"MIT"
] | null | null | null | ideaman_sync/doc2vec/__init__.py | LibRec-Practical/ideaman-offline | f8341fc9ca77adcc1191c01037dda18c02d77b29 | [
"MIT"
] | null | null | null | import sys, os
sys.path.append("../../")
sys.path.extend([os.path.join(root, name) for root, dirs, _ in os.walk("../../") for name in dirs])
import time
from datetime import datetime
import logging
import re
import gensim.models.doc2vec
import gensim.utils
import smart_open
import jieba
from milvus import Milvus, MetricType
from stop_words import safe_get_stop_words
from ideaman_util.paper import Paper
from ideaman_util.config import *
from ideaman_util.db import conn, cur
stop_words = safe_get_stop_words('en')
stopwords = {}.fromkeys(stop_words)
logging.basicConfig(filename='./doc2vec.txt',
filemode='w',
level=logging.DEBUG,
format='[%(asctime)s] - [%(levelname)s] - [PID:%(process)d] - [%(filename)s:%(funcName)s:%(lineno)d] - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S' # 注意月份和天数不要搞乱了,这里的格式化符与time模块相同
)
def cut_stopwords(line):
    """
    Remove stop words from one line of text.

    The line is first normalized by ``clean_text`` and then segmented with
    jieba; segments found in the module-level ``stopwords`` dict are dropped.

    :param line: input str
    :return: str with stop-word segments removed
    """
    segs = jieba.cut(clean_text(line), cut_all=False)
    final = ""
    for seg in segs:
        if seg not in stopwords:
            # NOTE(review): segments are concatenated without separators —
            # presumably jieba yields whitespace runs as their own segments,
            # which preserves word boundaries for English text. TODO confirm.
            final += seg
    return final
def clean_text(text):
    """
    Normalize raw English text before tokenization.

    Expands contractions, normalizes units/currency and common spelling
    variants, pads punctuation with spaces, and collapses whitespace.

    :param text: the string of text
    :return: text string after cleaning
    """
    # unit
    # fix: the original called ``text.replace("$$", "")`` and discarded the
    # result — str.replace returns a new string, it does not mutate in place.
    text = text.replace("$$", "")
    text = re.sub(r"(\d+)kgs ", lambda m: m.group(1) + ' kg ', text)  # e.g. 4kgs => 4 kg
    text = re.sub(r"(\d+)kg ", lambda m: m.group(1) + ' kg ', text)  # e.g. 4kg => 4 kg
    text = re.sub(r"(\d+)k ", lambda m: m.group(1) + '000 ', text)  # e.g. 4k => 4000
    text = re.sub(r"\$(\d+)", lambda m: m.group(1) + ' dollar ', text)
    text = re.sub(r"(\d+)\$", lambda m: m.group(1) + ' dollar ', text)
    # acronym
    text = re.sub(r"can\'t", "can not", text)
    text = re.sub(r"cannot", "can not ", text)
    text = re.sub(r"what\'s", "what is", text)
    text = re.sub(r"What\'s", "what is", text)
    text = re.sub(r"\'ve ", " have ", text)
    text = re.sub(r"n\'t", " not ", text)
    text = re.sub(r"i\'m", "i am ", text)
    text = re.sub(r"I\'m", "i am ", text)
    text = re.sub(r"\'re", " are ", text)
    text = re.sub(r"\'d", " would ", text)
    text = re.sub(r"\'ll", " will ", text)
    text = re.sub(r"c\+\+", "cplusplus", text)
    text = re.sub(r"c \+\+", "cplusplus", text)
    text = re.sub(r"c \+ \+", "cplusplus", text)
    text = re.sub(r"c#", "csharp", text)
    text = re.sub(r"f#", "fsharp", text)
    text = re.sub(r"g#", "gsharp", text)
    text = re.sub(r" e mail ", " email ", text)
    text = re.sub(r" e \- mail ", " email ", text)
    text = re.sub(r" e\-mail ", " email ", text)
    text = re.sub(r",000", '000', text)
    text = re.sub(r"\'s", " ", text)
    # spelling correction
    text = re.sub(r"ph\.d", "phd", text)
    text = re.sub(r"PhD", "phd", text)
    text = re.sub(r"pokemons", "pokemon", text)
    text = re.sub(r"pokémon", "pokemon", text)
    text = re.sub(r"pokemon go ", "pokemon-go ", text)
    text = re.sub(r" e g ", " eg ", text)
    text = re.sub(r" b g ", " bg ", text)
    text = re.sub(r" 9 11 ", " 911 ", text)
    text = re.sub(r" j k ", " jk ", text)
    text = re.sub(r" fb ", " facebook ", text)
    text = re.sub(r"facebooks", " facebook ", text)
    text = re.sub(r"facebooking", " facebook ", text)
    text = re.sub(r"insidefacebook", "inside facebook", text)
    text = re.sub(r"donald trump", "trump", text)
    text = re.sub(r"the big bang", "big-bang", text)
    text = re.sub(r"the european union", "eu", text)
    text = re.sub(r" usa ", " america ", text)
    text = re.sub(r" us ", " america ", text)
    text = re.sub(r" u s ", " america ", text)
    text = re.sub(r" U\.S\. ", " america ", text)
    text = re.sub(r" US ", " america ", text)
    text = re.sub(r" American ", " america ", text)
    text = re.sub(r" America ", " america ", text)
    text = re.sub(r" quaro ", " quora ", text)
    text = re.sub(r" mbp ", " macbook-pro ", text)
    text = re.sub(r" mac ", " macbook ", text)
    text = re.sub(r"macbook pro", "macbook-pro", text)
    text = re.sub(r"macbook-pros", "macbook-pro", text)
    text = re.sub(r" 1 ", " one ", text)
    text = re.sub(r" 2 ", " two ", text)
    text = re.sub(r" 3 ", " three ", text)
    text = re.sub(r" 4 ", " four ", text)
    text = re.sub(r" 5 ", " five ", text)
    text = re.sub(r" 6 ", " six ", text)
    text = re.sub(r" 7 ", " seven ", text)
    text = re.sub(r" 8 ", " eight ", text)
    text = re.sub(r" 9 ", " nine ", text)
    text = re.sub(r"googling", " google ", text)
    text = re.sub(r"googled", " google ", text)
    text = re.sub(r"googleable", " google ", text)
    text = re.sub(r"googles", " google ", text)
    text = re.sub(r" rs(\d+)", lambda m: ' rs ' + m.group(1), text)
    text = re.sub(r"(\d+)rs", lambda m: ' rs ' + m.group(1), text)
    text = re.sub(r"the european union", " eu ", text)
    text = re.sub(r"dollars", " dollar ", text)
    # punctuation
    text = re.sub(r"\+", " + ", text)
    text = re.sub(r"'", " ", text)
    text = re.sub(r"-", " - ", text)
    text = re.sub(r"/", " / ", text)
    text = re.sub(r"\\", " \ ", text)
    text = re.sub(r"=", " = ", text)
    text = re.sub(r"\^", " ^ ", text)
    text = re.sub(r":", " : ", text)
    # fix: the pattern here was the empty string, which made re.sub insert
    # " . " at every position in the text; a literal dot was clearly intended
    # (this line sits in the punctuation-padding section between ':' and ',').
    text = re.sub(r"\.", " . ", text)
    text = re.sub(r",", " , ", text)
    text = re.sub(r"\?", " ? ", text)
    text = re.sub(r"!", " ! ", text)
    text = re.sub(r"\"", " \" ", text)
    text = re.sub(r"&", " & ", text)
    text = re.sub(r"\|", " | ", text)
    text = re.sub(r";", " ; ", text)
    text = re.sub(r"\(", " ( ", text)
    text = re.sub(r"\)", " ) ", text)
    # symbol replacement
    text = re.sub(r"&", " and ", text)
    text = re.sub(r"\|", " or ", text)
    text = re.sub(r"=", " equal ", text)
    text = re.sub(r"\+", " plus ", text)
    text = re.sub(r"₹", " rs ", text)  # rupee sign
    text = re.sub(r"\$", " ", text)
    text = re.sub(r"  ", " ", text)
    text = re.sub(r"  ", " ", text)
    text = re.sub(r"  ", " ", text)
    # remove extra space
    text = ' '.join(text.split())
    return text
def read_file(fname, tokens_only=False):
    """
    Yield training examples from a text file, one document per line.

    Each line is cleaned with ``cut_stopwords`` and tokenized with gensim's
    ``simple_preprocess``.

    :param fname: path of the corpus file (one document per line)
    :param tokens_only: if True, yield plain token lists (for inference);
        otherwise yield ``TaggedDocument`` objects tagged with the line index
    """
    # fix: removed leftover debug statements (two prints and an early
    # ``return 0``) that made this generator stop before ever yielding.
    with smart_open.open(fname, encoding="utf-8") as f:
        for i, line in enumerate(f):
            tokens = gensim.utils.simple_preprocess(cut_stopwords(line))
            if tokens_only:
                yield tokens
            else:
                # For training data, add tags
                yield gensim.models.doc2vec.TaggedDocument(tokens, [i])
def read_mysql(start_date_str, end_date_str, tokens_only=False):
    """
    Yield training examples from papers stored in MySQL.

    Papers whose timestamp falls inside [start, end) are fetched via
    ``Paper.query_by_time_interval``; for each one, title and description are
    joined, cleaned, and tokenized.

    :param start_date_str: inclusive start date, 'YYYY-MM-DD'
    :param end_date_str: exclusive end date, 'YYYY-MM-DD'
    :param tokens_only: if True, yield plain token lists; otherwise yield
        ``TaggedDocument`` objects tagged with the enumeration index
    """
    # Dates are stored as epoch milliseconds, hence the * 1000.
    # (fix: removed the unused local ONE_DAY constant)
    start_date = datetime.strptime(start_date_str, '%Y-%m-%d').timestamp() * 1000
    end_date = datetime.strptime(end_date_str, '%Y-%m-%d').timestamp() * 1000
    res = Paper.query_by_time_interval(start_date, end_date)
    for index, item in enumerate(res):
        line = item.title + " . " + item.description
        tokens = gensim.utils.simple_preprocess(cut_stopwords(line))
        if tokens_only:
            yield tokens
        else:
            # For training data, add tags
            yield gensim.models.doc2vec.TaggedDocument(tokens, [index])
def train(start_date_str, end_date_str):
    """Train a fresh Doc2Vec model on papers in the given window and save it."""
    # Materialize the corpus once; it is iterated several times below.
    logging.info("读取文件中。。。")
    corpus = list(read_mysql(start_date_str, end_date_str))
    print(len(corpus))
    # 128-dim vectors; rare tokens (< 64 occurrences) are ignored.
    logging.info("生成模型")
    d2v = gensim.models.doc2vec.Doc2Vec(vector_size=128, min_count=64, epochs=1024)
    d2v.build_vocab(corpus)
    logging.info("训练模型开始")
    d2v.train(corpus, total_examples=d2v.corpus_count, epochs=d2v.epochs)
    logging.info("保存模型")
    d2v.save("./doc2vec.model")
def incremental_train(start_date_str, end_date_str):
    """
    Continue training the saved doc2vec model on papers from the given window.

    :param start_date_str: inclusive start date, 'YYYY-MM-DD'
    :param end_date_str: exclusive end date, 'YYYY-MM-DD'
    """
    logging.info("读取文件中。。。")
    train_corpus = list(read_mysql(start_date_str, end_date_str))
    print(len(train_corpus))
    logging.info("加载模型")
    model = gensim.models.doc2vec.Doc2Vec.load("./doc2vec.model")
    logging.info("训练模型开始")
    # fix: a ``total_examples`` value was computed but never used, and the
    # stale ``model.corpus_count`` was passed instead. Per the gensim docs,
    # ``total_examples`` must describe the corpus actually given to train().
    # NOTE(review): gensim normally also requires build_vocab(update=True)
    # before incremental training — confirm the vocabulary covers new docs.
    model.train(train_corpus, total_examples=len(train_corpus), epochs=model.epochs)
    logging.info("保存模型")
    model.save("./doc2vec.model")
def get_vector(model, line):
    """
    Infer the document vector for one document.

    :param model: a trained gensim Doc2Vec model
    :param line: the document, as the token list accepted by ``infer_vector``
    :return: the inferred document vector
    """
    vec = model.infer_vector(line)
    return vec
def predict(start_date_str, end_date_str):
    """
    Compute and persist document vectors for papers in the given window.

    For each paper, the inferred vector is (a) collected for one bulk insert
    into the 'ideaman' Milvus collection and (b) serialized to a
    comma-separated string and written back to MySQL via Paper.update_SQL.
    """
    print("加载模型")
    model = gensim.models.doc2vec.Doc2Vec.load("./doc2vec.model")
    print("建立milvus链接")
    client = Milvus(host=milvus_ip, port='19530')
    print("读取数据ing")
    # Dates are stored as epoch milliseconds.
    start_date = datetime.strptime(start_date_str, '%Y-%m-%d').timestamp() * 1000
    end_date = datetime.strptime(end_date_str, '%Y-%m-%d').timestamp() * 1000
    res = Paper.query_by_time_interval(start_date, end_date)
    num = 0
    start = time.time()
    id_list = []
    user_id_list = []
    vecs = []
    for i in res:
        paper_id = i.id
        paper_user_id = i.user_id
        # NOTE(review): the vector is inferred from the raw (uncleaned)
        # title+description string wrapped in a one-element list, unlike the
        # tokenized training corpus — confirm this is intentional.
        paper_str = i.title + " . " + i.description
        vec = get_vector(model, [paper_str])
        # Collect id/vector pairs for the bulk Milvus insert below.
        id_list.append(paper_id)
        user_id_list.append(paper_user_id)
        vecs.append(list(vec))
        # Serialize the vector to a comma-separated string for MySQL storage.
        paper_vec = str(vec).replace('\n', '').replace('[', '').replace(']', '').replace("  ", " ").replace(" ",
                                                                                                            ",")[1:]
        # Patch empty fields produced by leftover double separators —
        # presumably from numpy's column-aligned repr. TODO confirm.
        paper_vec = paper_vec.replace(",,", ",0,")
        Paper.update_SQL('doc_vector', paper_vec, paper_user_id)
        num += 1
        # Progress log every 200 papers.
        if num % 200 == 0:
            print("完成了", num, '篇', '--用时:', time.time() - start)
            start = time.time()
    # hybrid_entities = [
    #     {"name": "id", "values": id_list, "type": DataType.INT32},
    #     {"name": "Vec", "values": vecs, "type": DataType.FLOAT_VECTOR}
    # ]
    client.insert('ideaman', records=vecs, ids=id_list)
    client.flush(collection_name_array=["ideaman"])
    user_id_list.clear()
    id_list.clear()
    vecs.clear()
def run_offline_paper():
    """
    Precompute, for every paper, its most similar papers and store them.

    For each row of ``paper``, the stored doc vector is parsed, the 51
    nearest vectors are fetched from the 'ideaman' Milvus collection, the
    first hit (the query paper itself) is dropped, and the remaining 50 ids
    are written to ``offline_paper``.
    """
    client = Milvus(host=milvus_ip, port='19530')
    cur.execute("SELECT ID ,doc_vector FROM paper")
    papers = cur.fetchall()
    for row in papers:
        try:
            paper_id = row[0]
            # fix: parse the comma-separated floats with float() instead of
            # eval() — eval on database content is unsafe and slower.
            vec = [float(j) for j in row[1].split(",")]
            res = client.search(collection_name='ideaman', query_records=[vec], top_k=51)
            if res[0].code == 0:
                # Drop the first hit: it is the query paper itself.
                rec_ids = [str(j) for j in res[-1]._id_array[0]]
                recs = ",".join(rec_ids[1:])
                # fix: parameterized query instead of string interpolation.
                cur.execute(
                    "INSERT INTO offline_paper(paper_id , recs) VALUES(%s , %s)",
                    (paper_id, recs))
                try:
                    conn.commit()
                except Exception:
                    conn.rollback()
        except Exception:
            # Still best-effort per paper, but no longer a silent swallow.
            logging.exception("failed to build offline recommendations for one paper")
def delete_milvus():
    """Drop the 'ideaman' Milvus collection and recreate it empty (128-dim, L2)."""
    milvus_client = Milvus(host=milvus_ip, port='19530')
    # Log the collection's state before destroying it.
    print(milvus_client.get_collection_stats(collection_name="ideaman"))
    print(milvus_client.get_collection_info("ideaman"))
    milvus_client.drop_collection("ideaman")
    milvus_client.create_collection({
        'collection_name': 'ideaman',
        'dimension': 128,
        'index_file_size': 1024,
        'metric_type': MetricType.L2,
    })
if __name__ == '__main__':
    # Full offline pipeline: reset the Milvus collection, train doc2vec from
    # scratch, then write every paper's vector to Milvus and MySQL.
    # NOTE(review): start_date_str / end_date_str are expected to come from
    # the star-import of ideaman_util.config — confirm they are defined there.
    delete_milvus()
    train(start_date_str, end_date_str)
    predict(start_date_str,end_date_str)
run_offline_paper() | 37 | 136 | 0.555787 |
ace49967111ab3d3581533750d8e8bb0cd41edff | 1,664 | py | Python | code/02-Data-Engineering/pyspark/01-General/1-CreateDatabaseObjects.py | FaisalHajazi/NYCTaxi | 9db6878321890a5d67ba96607402a0b2a368e6ea | [
"MIT"
] | 68 | 2019-05-13T13:51:44.000Z | 2022-03-21T10:02:12.000Z | code/02-Data-Engineering/pyspark/01-General/1-CreateDatabaseObjects.py | FaisalHajazi/NYCTaxi | 9db6878321890a5d67ba96607402a0b2a368e6ea | [
"MIT"
] | 4 | 2019-04-04T16:00:17.000Z | 2019-04-04T17:28:26.000Z | code/02-Data-Engineering/pyspark/01-General/1-CreateDatabaseObjects.py | FaisalHajazi/NYCTaxi | 9db6878321890a5d67ba96607402a0b2a368e6ea | [
"MIT"
] | 62 | 2019-05-21T10:24:33.000Z | 2022-03-25T13:00:13.000Z | # Databricks notebook source
# MAGIC %md
# MAGIC # What's in this exercise?
# MAGIC
# MAGIC 1) Database definition<BR>
# MAGIC 2) External remote JDBC table definition
# COMMAND ----------
# MAGIC %md
# MAGIC ### 1. Create the taxi_db database in Databricks
# COMMAND ----------
# MAGIC %md
# MAGIC ##### 1.1. Create database
# COMMAND ----------
# MAGIC %sql
# MAGIC CREATE DATABASE IF NOT EXISTS taxi_db;
# COMMAND ----------
# MAGIC %md
# MAGIC ##### 1.2. Validate
# COMMAND ----------
# MAGIC %sql
# MAGIC SHOW DATABASES;
# COMMAND ----------
display(spark.catalog.listDatabases())
# COMMAND ----------
# MAGIC %md
# MAGIC ### 2. Create tables in Azure SQL database table from the portal - data explorer
# COMMAND ----------
# MAGIC %md
# MAGIC Create the following 3 tables:<br>
# MAGIC BATCH_JOB_HISTORY => Persist ETL job metadata<br>
# MAGIC TRIPS_BY_YEAR => Report<br>
# MAGIC TRIPS_BY_HOUR => Report<br>
# COMMAND ----------
# MAGIC %md
# MAGIC ```
# MAGIC DROP TABLE IF EXISTS dbo.BATCH_JOB_HISTORY;
# MAGIC CREATE TABLE BATCH_JOB_HISTORY(
# MAGIC batch_id int,
# MAGIC batch_step_id int,
# MAGIC batch_step_description varchar(100),
# MAGIC batch_step_status varchar(30),
# MAGIC batch_step_time varchar(30)
# MAGIC );
# MAGIC
# MAGIC DROP TABLE IF EXISTS TRIPS_BY_YEAR;
# MAGIC CREATE TABLE TRIPS_BY_YEAR(
# MAGIC taxi_type varchar(30),
# MAGIC trip_year int,
# MAGIC trip_count bigint
# MAGIC );
# MAGIC
# MAGIC DROP TABLE IF EXISTS TRIPS_BY_HOUR;
# MAGIC CREATE TABLE TRIPS_BY_HOUR(
# MAGIC taxi_type varchar(30),
# MAGIC trip_year int,
# MAGIC pickup_hour int,
# MAGIC trip_count bigint
# MAGIC );
# MAGIC
# MAGIC ``` | 21.333333 | 88 | 0.675481 |
ace49a41356661fb937032c6447ac6c12d615cd2 | 2,834 | py | Python | stacker_blueprints/iam_roles.py | ShopStyle/stacker_blueprints | 5cfc0eae66adb06b0409520c8f69d750755de8b7 | [
"BSD-2-Clause"
] | 43 | 2015-12-30T13:47:57.000Z | 2020-12-05T00:36:57.000Z | stacker_blueprints/iam_roles.py | ShopStyle/stacker_blueprints | 5cfc0eae66adb06b0409520c8f69d750755de8b7 | [
"BSD-2-Clause"
] | 87 | 2015-12-22T23:00:43.000Z | 2019-07-25T15:27:11.000Z | stacker_blueprints/iam_roles.py | ShopStyle/stacker_blueprints | 5cfc0eae66adb06b0409520c8f69d750755de8b7 | [
"BSD-2-Clause"
] | 40 | 2016-01-25T12:27:38.000Z | 2020-12-28T14:48:22.000Z | from stacker.blueprints.base import Blueprint
from troposphere import (
GetAtt,
Output,
Ref,
Sub,
iam,
)
from awacs.aws import Policy
from awacs.helpers.trust import (
get_default_assumerole_policy,
get_lambda_assumerole_policy
)
class Roles(Blueprint):
    """
    Blueprint that creates IAM roles for EC2 and/or Lambda.

    The role names come from the ``Ec2Roles`` and ``LambdaRoles`` variables.
    Subclasses may override ``generate_policy_statements`` to attach a single
    inline policy to every role the blueprint creates.
    """

    VARIABLES = {
        "Ec2Roles": {
            "type": list,
            "description": "names of ec2 roles to create",
            "default": [],
        },
        "LambdaRoles": {
            "type": list,
            "description": "names of lambda roles to create",
            "default": [],
        },
    }

    def __init__(self, *args, **kwargs):
        super(Roles, self).__init__(*args, **kwargs)
        # Resources created so far; create_policy attaches to all of them.
        self.roles = []
        self.policies = []

    def create_role(self, name, assumerole_policy):
        """Add an IAM role with the given trust policy; export its name and ARN."""
        t = self.template
        role = t.add_resource(
            iam.Role(
                name,
                AssumeRolePolicyDocument=assumerole_policy,
            )
        )
        t.add_output(
            Output(name + "RoleName", Value=Ref(role))
        )
        t.add_output(
            Output(name + "RoleArn", Value=GetAtt(role.title, "Arn"))
        )
        self.roles.append(role)
        return role

    def create_ec2_role(self, name):
        """Create a role assumable by EC2 instances."""
        return self.create_role(name, get_default_assumerole_policy())

    def create_lambda_role(self, name):
        """Create a role assumable by AWS Lambda."""
        return self.create_role(name, get_lambda_assumerole_policy())

    def generate_policy_statements(self):
        """Should be overridden on a subclass to create policy statements.

        By subclassing this blueprint, and overriding this method to generate
        a list of :class:`awacs.aws.Statement` types, a
        :class:`troposphere.iam.PolicyType` will be created and attached to
        the roles specified here.

        If not specified, no Policy will be created.
        """
        return []

    def create_policy(self, name):
        """Attach the generated inline policy to every created role."""
        statements = self.generate_policy_statements()
        if not statements:
            # No statements -> nothing to attach.
            return
        t = self.template
        policy = t.add_resource(
            iam.PolicyType(
                "{}Policy".format(name),
                PolicyName=Sub("${AWS::StackName}-${Name}-policy", Name=name),
                PolicyDocument=Policy(
                    Statement=statements,
                ),
                Roles=[Ref(role) for role in self.roles],
            )
        )
        t.add_output(
            Output(name + "PolicyName", Value=Ref(policy))
        )
        self.policies.append(policy)

    def create_template(self):
        """Entry point: build all requested roles, then the shared policy."""
        variables = self.get_variables()
        for role in variables['Ec2Roles']:
            self.create_ec2_role(role)
        for role in variables['LambdaRoles']:
            self.create_lambda_role(role)
        # fix: create_policy() requires a name — the argument-less call raised
        # TypeError whenever the template was rendered. The blueprint's own
        # name keeps resource/output titles unique per stack.
        self.create_policy(self.name)
| 26 | 78 | 0.56916 |
ace49c14ba1f4858fcd3425e05e08c5087341ef2 | 1,324 | py | Python | contrib/HDF5Tools/Examples/example_4.py | xylar/cdat | 8a5080cb18febfde365efc96147e25f51494a2bf | [
"BSD-3-Clause"
] | 62 | 2018-03-30T15:46:56.000Z | 2021-12-08T23:30:24.000Z | contrib/HDF5Tools/Examples/example_4.py | xylar/cdat | 8a5080cb18febfde365efc96147e25f51494a2bf | [
"BSD-3-Clause"
] | 114 | 2018-03-21T01:12:43.000Z | 2021-07-05T12:29:54.000Z | contrib/HDF5Tools/Examples/example_4.py | CDAT/uvcdat | 5133560c0c049b5c93ee321ba0af494253b44f91 | [
"BSD-3-Clause"
] | 14 | 2018-06-06T02:42:47.000Z | 2021-11-26T03:27:00.000Z | import HDF5Tools, vcs
# Python 2 demo script: read two variables from an HDF5/EOS (OMI) file and
# plot them with VCS meshfills.
path = './'
fnm = 'OMI-Aura_L2-OMAERUV_2007m0205t1530-o13622_v888-2007m0205t185330.he5'
print ' Open an HDF5 file, but this time using the OMI class, this is a particular type of HDF5/EOS files'
HDF = HDF5Tools.HDF5_OMI(path+fnm)
print 'We can now list the actual variables and their shape:'
vars = HDF.listvariables()
for v in vars:
    print 'Variable:',v,HDF.variables[v].shape,HDF.variables[v]._group
print 'And the dimensions ones that have been separated with "dimension_kw" keyword'
print 'display a var'
# Read the UV aerosol index and set up a meshfill over South America.
uva = HDF('UVAerosolIndex')
x=vcs.init()
m = x.createmeshfill('omi')
m.datawc_x1=-65
m.datawc_x2=-40
m.datawc_y1=-20
m.datawc_y2=10
# Scale from -2 to 1 with open-ended extensions on both sides.
sc = vcs.mkscale(-2,1)
sc.insert(0,-1.E20) # Extension on the left
sc.append(1.E20) # Extension on the right
colors = vcs.getcolors(sc)
m.levels = sc
m.fillareacolors = colors
x.plot(uva,m,ratio='autot')
raw_input('press enter')
print 'Ok now will read another var, w/o reading lat/lon'
print 'We will simply pass the grid to the read call'
# Reuse the grid from the first variable so lat/lon are not re-read.
salb = HDF('SurfaceAlbedo',grid=uva.getGrid())
print salb.shape
salb = salb[...,0]
print salb.shape
salb=salb(latitude=(-25,15),longitude=(-70,-30))
print salb.shape
x.clear()
# New scale for the albedo plot (0 to 0.13).
sc = vcs.mkscale(0,.13)
colors = vcs.getcolors(sc)
m.levels = sc
m.fillareacolors = colors
x.plot(salb,m,ratio='autot')
raw_input('done')
| 29.422222 | 106 | 0.73565 |
ace49cb3eb41bfac18f3cb7109f92633afc26457 | 495 | py | Python | examples/indicator.py | dalejung/pandas-composition | e73e5295b2d2f44f09805dcf06db12108c555197 | [
"MIT"
] | 5 | 2015-04-08T20:58:25.000Z | 2018-04-22T00:10:44.000Z | examples/indicator.py | dalejung/pandas-composition | e73e5295b2d2f44f09805dcf06db12108c555197 | [
"MIT"
] | null | null | null | examples/indicator.py | dalejung/pandas-composition | e73e5295b2d2f44f09805dcf06db12108c555197 | [
"MIT"
] | null | null | null | from pandas_composition import UserSeries
import pandas.io.data as pdd
df = pdd.get_data_yahoo('AAPL')
class Indicator(UserSeries):
    # A series of indicator values that remembers the DataFrame it was
    # derived from via the required ``source`` keyword argument.
    # NOTE(review): __init__ never calls super().__init__ with *args —
    # presumably UserSeries handles construction elsewhere. TODO confirm.
    def __init__(self, *args, **kwargs):
        source = kwargs.pop('source')
        self.source = source
    def plot(self, source_col='close'):
        # Placeholder; plotting against the source is not implemented yet.
        pass
def get_gaps(df, offset=0):
    """
    Flag bars that gapped up or down relative to the previous bar.

    :param df: OHLC DataFrame with Open/High/Low columns
    :param offset: extra margin the open must clear beyond the prior
        high/low to count as a gap
    :return: boolean Indicator (True where the bar gapped), carrying ``df``
        as its source
    """
    gap_up = df.Open > (df.High.shift(1) + offset)
    gap_down = df.Open < (df.Low.shift(1) - offset)
    # fix: a bar cannot open both above the prior high AND below the prior
    # low, so ``gap_up & gap_down`` was always False. A gap is either one.
    gaps = gap_up | gap_down
    return Indicator(gaps, source=df)
| 26.052632 | 51 | 0.658586 |
ace49cce5865c1209ac7a4264ddb4340102fdb45 | 2,442 | py | Python | netbox/extras/utils.py | TheFlyingCorpse/netbox | a226f06b1beb575011d783b202d76cb74d3b1f79 | [
"Apache-2.0"
] | 4,994 | 2019-07-01T13:15:44.000Z | 2022-03-31T19:55:45.000Z | netbox/extras/utils.py | emersonfelipesp/netbox | fecca5ad83fb6b48a2f15982dfd3242653f105f9 | [
"Apache-2.0"
] | 4,045 | 2019-07-01T14:24:09.000Z | 2022-03-31T16:07:39.000Z | netbox/extras/utils.py | emersonfelipesp/netbox | fecca5ad83fb6b48a2f15982dfd3242653f105f9 | [
"Apache-2.0"
] | 1,225 | 2019-07-01T15:34:03.000Z | 2022-03-31T16:47:09.000Z | import collections
from django.db.models import Q
from django.utils.deconstruct import deconstructible
from taggit.managers import _TaggableManager
from extras.constants import EXTRAS_FEATURES
from extras.registry import registry
def is_taggable(obj):
    """
    Return True if the instance can have Tags assigned to it; False otherwise.
    """
    manager = getattr(obj, 'tags', None)
    if manager is None:
        return False
    return issubclass(manager.__class__, _TaggableManager)
def image_upload(instance, filename):
    """
    Return the storage path for an uploaded image attachment.
    """
    path = 'image-attachments/'
    # If the attachment has a name, rename the file to it — keeping the
    # original extension when it is a recognized image type.
    extension = filename.rsplit('.')[-1].lower()
    if instance.name:
        if extension in ('bmp', 'gif', 'jpeg', 'jpg', 'png'):
            filename = instance.name + '.' + extension
        else:
            filename = instance.name
    return '{}{}_{}_{}'.format(path, instance.content_type.name, instance.object_id, filename)
@deconstructible
class FeatureQuery:
    """
    Callable that defers building the content-type lookup Q object until the
    feature registry has been populated.
    """
    def __init__(self, feature):
        self.feature = feature

    def __call__(self):
        return self.get_query()

    def get_query(self):
        """
        Given an extras feature, return a Q object for content type lookup
        """
        q = Q()
        for app_label, models in registry['model_features'][self.feature].items():
            q = q | Q(app_label=app_label, model__in=models)
        return q
def extras_features(*features):
    """
    Decorator used to register extras provided features to a model
    """
    def wrapper(model_class):
        # Lazily create the per-feature stores on first registration.
        if 'model_features' not in registry:
            registry['model_features'] = {
                feature_name: collections.defaultdict(list)
                for feature_name in EXTRAS_FEATURES
            }
        for feature in features:
            if feature not in EXTRAS_FEATURES:
                raise ValueError('{} is not a valid extras feature!'.format(feature))
            app_label, model_name = model_class._meta.label_lower.split('.')
            registry['model_features'][feature][app_label].append(model_name)
        return model_class
    return wrapper
| 31.307692 | 94 | 0.65561 |
ace49d3da1fddec3c0e278637a3a0961edae957e | 3,489 | py | Python | craigslistbargain/multi_trainer.py | ijcai2022-5500/anego | 9a2e5f29f0ec0787ad8ce7822089345053442887 | [
"MIT"
] | null | null | null | craigslistbargain/multi_trainer.py | ijcai2022-5500/anego | 9a2e5f29f0ec0787ad8ce7822089345053442887 | [
"MIT"
] | null | null | null | craigslistbargain/multi_trainer.py | ijcai2022-5500/anego | 9a2e5f29f0ec0787ad8ce7822089345053442887 | [
"MIT"
] | 1 | 2022-01-02T02:39:23.000Z | 2022-01-02T02:39:23.000Z | import argparse
import random
import json
import numpy as np
from onmt.Utils import use_gpu
from cocoa.core.util import read_json
from cocoa.core.schema import Schema
from cocoa.core.scenario_db import ScenarioDB
from cocoa.neural.loss import ReinforceLossCompute
import cocoa.options
from core.scenario import Scenario
from core.controller import Controller
from systems import get_system
from neural.rl_trainer import RLTrainer
from neural import build_optim
import options
import torch
from multi_manager import MultiRunner
class MultiTrainer(MultiRunner):
    """Worker that runs simulation / A2C training commands for one trainer."""
    def __init__(self, args, addr):
        super(MultiTrainer, self).__init__(args, addr)
    def simulate(self, cmd):
        # cmd = (step index, batch size, real_batch flag); returns sampled data.
        i, batch_size, real_batch = cmd
        data = self.trainer.sample_data(i, batch_size, self.args, real_batch=real_batch)
        return data
    def train(self, cmd):
        # cmd = (epoch, batches, rewards, train_mode); runs one A2C update.
        epoch, batches, rewards, train_mode = cmd
        if train_mode == 'normal':
            # NOTE(review): policy is frozen (fix_policy=True) for several
            # updates before and after the joint update — presumably to warm
            # up / settle the critic around the new policy. TODO confirm.
            pretrain_number = 3
            for i in range(pretrain_number):
                info = self.trainer.update_a2c(self.args, batches, rewards, self.trainer.model, self.trainer.critic,
                                               discount=self.args.discount_factor, fix_policy=True)
            info = self.trainer.update_a2c(self.args, batches, rewards, self.trainer.model, self.trainer.critic,
                                           discount=self.args.discount_factor)
            for i in range(pretrain_number):
                info = self.trainer.update_a2c(self.args, batches, rewards, self.trainer.model, self.trainer.critic,
                                               discount=self.args.discount_factor, fix_policy=True)
        elif train_mode == 'fix_value':
            # Update the policy only; the value head is frozen.
            info = self.trainer.update_a2c(self.args, batches, rewards, self.trainer.model, self.trainer.critic,
                                           discount=self.args.discount_factor, fix_value=True)
        elif train_mode == 'fix_policy':
            # Update the critic only; the policy is frozen.
            info = self.trainer.update_a2c(self.args, batches, rewards, self.trainer.model, self.trainer.critic,
                                           discount=self.args.discount_factor, fix_policy=True)
        else:
            info = self.trainer.update_a2c(self.args, batches, rewards, self.trainer.model, self.trainer.critic,
                                           discount=self.args.discount_factor)
        return info
    def valid(self, cmd):
        # cmd is (start, length) or (start, length, split, exchange).
        if len(cmd) == 2:
            start, length = cmd
            infos = self.trainer.validate(self.args, length, start=start)
        else:
            start, length, split, exchange = cmd
            infos = self.trainer.validate(self.args, length, start=start, split=split, exchange=exchange)
        return infos
    def save_model(self, cmd):
        # Persist a checkpoint for epoch i (1-based in the checkpoint name).
        i, valid_stats = cmd
        self.trainer.drop_checkpoint(self.args, i + 1, valid_stats,
                                     model_opt=self.trainer.agents[self.trainer.training_agent].env.model_args)
        # if self.args.update_oppo:
        #     self.trainer.update_opponent(['policy', 'critic'])
    def update_model(self, cmd):
        # Load the given policy/critic state dicts into system model_idx.
        model_idx, model_p, critic_p = cmd
        env = self.systems[model_idx].env
        env.model.load_state_dict(model_p)
        env.critic.load_state_dict(critic_p)
    def fetch_model(self, cmd):
        # Return the (policy, critic) state dicts of system cmd[0].
        model_idx = cmd[0]
        env = self.systems[model_idx].env
        return env.model.state_dict(), env.critic.state_dict()
| 39.202247 | 116 | 0.636572 |
ace49db1073e26ce6d930adbf405367d959e1601 | 16,969 | py | Python | python/paddle/fluid/tests/unittests/dygraph_to_static/test_tensor_shape.py | CheQiXiao/Paddle | 1410d72284c8a803088d88c05cf85a6c4ba6fc29 | [
"Apache-2.0"
] | 1 | 2021-06-10T04:35:57.000Z | 2021-06-10T04:35:57.000Z | python/paddle/fluid/tests/unittests/dygraph_to_static/test_tensor_shape.py | Minovoo/Paddle | ab41a9ee8902dbf461b55ef9347071d7eb71fd76 | [
"Apache-2.0"
] | null | null | null | python/paddle/fluid/tests/unittests/dygraph_to_static/test_tensor_shape.py | Minovoo/Paddle | ab41a9ee8902dbf461b55ef9347071d7eb71fd76 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import numpy
import unittest
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.jit import declarative
def dyfunc_tensor_shape_1(x):
    """Dy2static fixture: reshape ``x`` using its own ``x.shape`` as the target."""
    x = fluid.dygraph.to_variable(x)
    res = fluid.layers.reshape(x, shape=x.shape)
    return res
def dyfunc_tensor_shape_2(x):
    """Dy2static fixture: reshape via a local alias of ``x.shape``."""
    x = paddle.to_tensor(x)
    shape = x.shape
    shape2 = shape
    res = paddle.reshape(x, shape2)
    return res
def dyfunc_tensor_shape_3(x):
    """Dy2static fixture: ``.shape`` of a numpy array must stay untransformed."""
    # Transform y.shape but run y.shape actually because y is not Tensor
    x = fluid.dygraph.to_variable(x)
    y = numpy.ones(5)
    res = fluid.layers.reshape(x, shape=y.shape)
    return res
def dyfunc_tensor_shape_4(x):
    """Dy2static fixture: reshape target mixing ``x.shape[0]`` and ``len(x.shape)``."""
    x = fluid.dygraph.to_variable(x)
    res = fluid.layers.reshape(x, shape=(-1, x.shape[0], len(x.shape)))
    return res
def dyfunc_tensor_shape_5(x):
    """Dy2static fixture: reshape with a scalar stored from ``x.shape[0]``."""
    # `res = fluid.layers.reshape(x, shape=(-1, s))` to
    # `res = fluid.layers.reshape(x, shape=(-1,
    #           paddle.jit.dy2static.convert_var_shape(x)[0]))`
    x = fluid.dygraph.to_variable(x)
    s = x.shape[0]
    res = fluid.layers.reshape(x, shape=(-1, s))
    return res
def dyfunc_tensor_shape_6(x):
    """Dy2static fixture: reshape with a slice of ``x.shape``."""
    # `res = fluid.layers.reshape(x, shape=(-1, s))` to
    # `res = fluid.layers.reshape(x, shape=(-1,
    #           paddle.jit.dy2static.convert_var_shape(x)[0:]))`
    x = fluid.dygraph.to_variable(x)
    s = x.shape[0:]
    res = fluid.layers.reshape(x, shape=s)
    return res
def dyfunc_tuple_shape_1(x):
    """Dy2static fixture: tuple-unpack ``x.shape`` and reshape to (b, a)."""
    x = paddle.to_tensor(x)
    a, b = x.shape
    res = paddle.reshape(x, shape=(b, a))
    return res
def dyfunc_tuple_shape_2(x):
    """Dy2static fixture: tuple-unpack a local alias of ``x.shape``."""
    x = paddle.to_tensor(x)
    shape = x.shape
    a, b = shape
    res = paddle.reshape(x, shape=(b, a))
    return res
def dyfunc_paddle_shape_api(x):
    """Dy2static fixture: explicit shape API calls (and an alias) are kept as-is."""
    x = paddle.to_tensor(x)
    # paddle.shape will not be converted.
    a = paddle.shape(x)[0]
    # alias api will also not be converted.
    alias_old_api = paddle.fluid.layers
    b = alias_old_api.shape(x)[1]
    res = paddle.reshape(x, shape=(b, a))
    return res
def dyfunc_with_if_1(x):
    """Dy2static fixture: ``.shape`` access inside nested if conditions."""
    x = fluid.dygraph.to_variable(x)
    res = fluid.layers.reshape(x, [-1, 1])
    x_shape_0 = x.shape[0]
    if x_shape_0 < 1:
        # `res.shape[0]` is transformed into
        # `paddle.jit.dy2static.convert_var_shape(res)[0]`
        if res.shape[0] > 1:
            res = fluid.layers.fill_constant(
                value=2, shape=x.shape, dtype="int32")
        else:
            res = fluid.layers.fill_constant(
                value=3, shape=x.shape, dtype="int32")
    return res
def dyfunc_with_if_2(x):
    """Dy2static fixture: ``len(x.shape)`` in an if condition stays untransformed."""
    x = fluid.dygraph.to_variable(x)
    # `len(x.shape)` will not be transformed because x.shape is not used by Paddle api.
    if len(x.shape) < 1:
        res = x
    else:
        res = fluid.layers.fill_constant(value=8, shape=x.shape, dtype="int32")
    return res
def dyfunc_with_for_1(x):
    """Dy2static fixture: ``x.shape[0]`` used directly as a for-range bound."""
    x = fluid.dygraph.to_variable(x)
    res = fluid.layers.fill_constant(value=0, shape=[1], dtype="int32")
    # `x.shape[0]` is transformed into `paddle.jit.dy2static.convert_var_shape(x)[0]`
    for i in range(x.shape[0]):
        res += 1
    return res
def dyfunc_with_for_2(x):
    """Dy2static fixture: a stored ``x.shape[0]`` used as a for-range bound."""
    x = fluid.dygraph.to_variable(x)
    x_shape_0 = x.shape[0]
    res = fluid.layers.fill_constant(value=0, shape=[1], dtype="int32")
    # `x_shape_0` is transformed into `paddle.jit.dy2static.convert_var_shape(x)[0]`
    for i in range(x_shape_0):
        res += 1
    return res
def dyfunc_with_for_3(x):
    """Dy2static fixture: ``len(x.shape)`` as a for-range bound stays untransformed."""
    x = fluid.dygraph.to_variable(x)
    res = fluid.layers.fill_constant(value=0, shape=[1], dtype="int32")
    # `len(x.shape)` is not transformed.
    for i in range(len(x.shape)):
        res += 1
    return res
def dyfunc_with_while_1(x):
    """Dy2static fixture: ``x.shape[0]`` used directly in a while condition."""
    x = fluid.dygraph.to_variable(x)
    res = fluid.layers.fill_constant(value=0, shape=[1], dtype="int32")
    # `x.shape[0]` is transformed into `paddle.jit.dy2static.convert_var_shape(x)[0]`
    i = 1
    while i < x.shape[0]:
        res += 1
        i = i + 2
    return res
def dyfunc_with_while_2(x):
    """Dy2static fixture: a stored ``x.shape[0]`` used in a while condition."""
    x = fluid.dygraph.to_variable(x)
    x_shape_0 = x.shape[0]
    res = fluid.layers.fill_constant(value=0, shape=[1], dtype="int32")
    i = 1
    # `x_shape_0` is transformed into `paddle.jit.dy2static.convert_var_shape(x)[0]`
    while i < x_shape_0:
        res += 1
        i = i + 2
    return res
def dyfunc_with_while_3(x):
    """Dy2static fixture: ``len`` of a stored shape in a while condition stays untransformed."""
    x = fluid.dygraph.to_variable(x)
    x_shape = x.shape
    res = fluid.layers.fill_constant(value=0, shape=[1], dtype="int32")
    i = 1
    # `len(x.shape)` is not transformed.
    while len(x_shape) > i:
        res += 1
        i += 1
    return res
def dyfunc_with_while_4(x):
    """Dy2static fixture: a numpy shape element in a while condition runs as plain Python."""
    x = paddle.to_tensor(x)
    y = numpy.ones(5)
    y_shape_0 = y.shape[0]
    i = 1
    # Transform y_shape_0 but run y.shape[0] actually because y is not Tensor
    while y_shape_0 > i:
        x += 1
        i += 1
    return x
def dyfunc_change_shape_after_assign(x):
    """Dy2static fixture: shape values unpacked before ``x`` is reshaped again."""
    x = paddle.to_tensor(x)
    a, b = x.shape
    x = paddle.reshape(x, shape=(-1, 1))
    res = paddle.reshape(x, shape=(b, a))
    return res
# 1. Basic tests without control flow
class TestTensorShapeBasic(unittest.TestCase):
    """
    Base case: run a dyfunc both eagerly and via @declarative and compare
    results, then check the op counts of the converted static program.
    """
    def setUp(self):
        self.input = numpy.ones(5).astype("int32")
        self.place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda(
        ) else fluid.CPUPlace()
        self._set_input_spec()
        self._set_expected_op_num()
        self.init_test_func()
    def init_test_func(self):
        # Subclasses override this to pick a different dyfunc fixture.
        self.dygraph_func = dyfunc_tensor_shape_1
    def _set_input_spec(self):
        self.input_spec = [paddle.static.InputSpec(shape=[5], dtype="int32")]
    def _run(self, to_static):
        # Run the fixture, optionally through the dygraph-to-static converter.
        with fluid.dygraph.guard():
            if to_static:
                res = declarative(self.dygraph_func)(self.input).numpy()
            else:
                res = self.dygraph_func(self.input).numpy()
            return res
    def get_dygraph_output(self):
        return self._run(to_static=False)
    def get_static_output(self):
        return self._run(to_static=True)
    def test_transformed_static_result(self):
        # The converted program must produce the same values as eager mode.
        static_res = self.get_static_output()
        dygraph_res = self.get_dygraph_output()
        self.assertTrue(
            numpy.allclose(dygraph_res, static_res),
            msg='dygraph res is {}\nstatic_res is {}'.format(dygraph_res,
                                                             static_res))
    def _set_expected_op_num(self):
        # Expected total / shape / slice op counts; overridden per fixture.
        self.expected_op_num = 2
        self.expected_shape_op_num = 0
        self.expected_slice_op_num = 0
    def _compute_op_num(self, program):
        # Count all ops plus the "shape" and "slice" ops across every block.
        self.op_num = sum([len(block.ops) for block in program.blocks])
        self.shape_op_num = 0
        self.slice_op_num = 0
        for block in program.blocks:
            self.shape_op_num += len(
                [op for op in block.ops if op.type == "shape"])
            self.slice_op_num += len(
                [op for op in block.ops if op.type == "slice"])
    def test_op_num(self):
        # Convert the fixture and verify the program has the expected ops.
        static_layer = paddle.jit.to_static(self.dygraph_func, self.input_spec)
        program = static_layer.main_program
        self._compute_op_num(program)
        self.assertEqual(self.op_num, self.expected_op_num)
        self.assertEqual(self.shape_op_num, self.expected_shape_op_num)
        self.assertEqual(self.slice_op_num, self.expected_slice_op_num)
class TestTensorShapeBasic2(TestTensorShapeBasic):
    # Same checks as the base case, driven by dyfunc_tensor_shape_2.
    def init_test_func(self):
        self.dygraph_func = dyfunc_tensor_shape_2
    def _set_expected_op_num(self):
        # Expected op counts in the converted static program.
        self.expected_op_num = 3
        self.expected_shape_op_num = 1
        self.expected_slice_op_num = 0
class TestTensorShapeBasic3(TestTensorShapeBasic):
    # Same checks as the base case, driven by dyfunc_tensor_shape_3.
    def init_test_func(self):
        self.dygraph_func = dyfunc_tensor_shape_3
class TestTensorShapeBasic4(TestTensorShapeBasic):
    # Same checks as the base case, driven by dyfunc_tensor_shape_4.
    def init_test_func(self):
        self.dygraph_func = dyfunc_tensor_shape_4
class TestTensorShapeBasic5(TestTensorShapeBasic):
    # Same checks as the base case, driven by dyfunc_tensor_shape_5.
    def init_test_func(self):
        self.dygraph_func = dyfunc_tensor_shape_5
    def _set_expected_op_num(self):
        # Expected op counts in the converted static program.
        self.expected_op_num = 4
        self.expected_shape_op_num = 1
        self.expected_slice_op_num = 1
class TestTensorShapeBasic6(TestTensorShapeBasic):
    # Same checks as the base case, driven by dyfunc_tensor_shape_6.
    def init_test_func(self):
        self.dygraph_func = dyfunc_tensor_shape_6
    def _set_expected_op_num(self):
        # Expected op counts in the converted static program.
        self.expected_op_num = 4
        self.expected_shape_op_num = 1
        self.expected_slice_op_num = 1
class TestTupleShape1(TestTensorShapeBasic):
def init_test_func(self):
self.input = numpy.ones((5, 7)).astype("int32")
self.input_spec = [paddle.static.InputSpec(shape=[5, 7], dtype="int32")]
self.dygraph_func = dyfunc_tuple_shape_1
def _set_expected_op_num(self):
self.expected_op_num = 6
self.expected_shape_op_num = 2
self.expected_slice_op_num = 2
class TestTupleShape2(TestTensorShapeBasic):
def init_test_func(self):
self.input = numpy.ones((5, 7)).astype("int32")
self.input_spec = [paddle.static.InputSpec(shape=[5, 7], dtype="int32")]
self.dygraph_func = dyfunc_tuple_shape_2
def _set_expected_op_num(self):
self.expected_op_num = 5
self.expected_shape_op_num = 1
self.expected_slice_op_num = 2
class TestPaddleShapeApi(TestTensorShapeBasic):
def init_test_func(self):
self.input = numpy.ones((5, 7)).astype("int32")
self.input_spec = [paddle.static.InputSpec(shape=[5, 7], dtype="int32")]
self.dygraph_func = dyfunc_paddle_shape_api
def _set_expected_op_num(self):
self.expected_op_num = 6
self.expected_shape_op_num = 2
self.expected_slice_op_num = 2
# 2. Tests with control flow if
class TestTensorShapeInIf1(TestTensorShapeBasic):
    """Shape access inside an `if` branch (`dyfunc_with_if_1`)."""
    def init_test_func(self):
        self.dygraph_func = dyfunc_with_if_1
    def _set_expected_op_num(self):
        self.expected_op_num = 4
        self.expected_shape_op_num = 1
        self.expected_slice_op_num = 1
class TestTensorShapeInIf2(TestTensorShapeBasic):
    """Shape access inside an `if` branch (`dyfunc_with_if_2`)."""
    def init_test_func(self):
        self.dygraph_func = dyfunc_with_if_2
    def _set_expected_op_num(self):
        self.expected_op_num = 14
        self.expected_shape_op_num = 2
        self.expected_slice_op_num = 1
# 3. Tests with control flow for loop
class TestTensorShapeInFor1(TestTensorShapeBasic):
    """Shape access inside a `for` loop (`dyfunc_with_for_1`)."""
    def init_test_func(self):
        self.dygraph_func = dyfunc_with_for_1
    def _set_expected_op_num(self):
        self.expected_op_num = 22
        self.expected_shape_op_num = 3
        self.expected_slice_op_num = 3
class TestTensorShapeInFor2(TestTensorShapeInFor1):
    """Shape access inside a `for` loop (`dyfunc_with_for_2`)."""
    def init_test_func(self):
        self.dygraph_func = dyfunc_with_for_2
    def _set_expected_op_num(self):
        self.expected_op_num = 9
        self.expected_shape_op_num = 1
        self.expected_slice_op_num = 1
class TestTensorShapeInFor3(TestTensorShapeInFor1):
    """Shape access inside a `for` loop (`dyfunc_with_for_3`)."""
    def init_test_func(self):
        self.dygraph_func = dyfunc_with_for_3
    def _set_expected_op_num(self):
        self.expected_op_num = 25
        self.expected_shape_op_num = 6
        self.expected_slice_op_num = 3
# 4. Tests with control flow while loop
class TestTensorShapeInWhile1(TestTensorShapeInFor1):
    """Shape access inside a `while` loop; op counts inherited from the for-loop case."""
    def init_test_func(self):
        self.dygraph_func = dyfunc_with_while_1
class TestTensorShapeInWhile2(TestTensorShapeInFor1):
    """Shape access inside a `while` loop (`dyfunc_with_while_2`)."""
    def init_test_func(self):
        self.dygraph_func = dyfunc_with_while_2
    def _set_expected_op_num(self):
        self.expected_op_num = 6
        self.expected_shape_op_num = 1
        self.expected_slice_op_num = 1
class TestTensorShapeInWhile3(TestTensorShapeBasic):
    """Shape access inside a `while` loop (`dyfunc_with_while_3`)."""
    def init_test_func(self):
        self.dygraph_func = dyfunc_with_while_3
    def _set_expected_op_num(self):
        self.expected_op_num = 3
        self.expected_shape_op_num = 1
        self.expected_slice_op_num = 0
class TestTensorShapeInWhile4(TestTensorShapeBasic):
    """Shape access inside a `while` loop (`dyfunc_with_while_4`)."""
    def init_test_func(self):
        self.dygraph_func = dyfunc_with_while_4
    def _set_expected_op_num(self):
        self.expected_op_num = 5
        self.expected_shape_op_num = 0
        self.expected_slice_op_num = 0
# 5. Test op num for negative dim
class TestOpNumBasicWithTensorShape(unittest.TestCase):
def setUp(self):
self._set_input_spec()
self._set_test_func()
self._set_expected_op_num()
def _set_input_spec(self):
self.input_spec = [
paddle.static.InputSpec(
shape=[-1, 5], dtype="int32")
]
def _set_test_func(self):
self.dygraph_func = dyfunc_tensor_shape_1
def _set_expected_op_num(self):
self.expected_op_num = 3
self.expected_shape_op_num = 1
self.expected_slice_op_num = 0
def _compute_op_num(self, program):
self.op_num = sum([len(block.ops) for block in program.blocks])
self.shape_op_num = 0
self.slice_op_num = 0
for block in program.blocks:
self.shape_op_num += len(
[op for op in block.ops if op.type == "shape"])
self.slice_op_num += len(
[op for op in block.ops if op.type == "slice"])
def test_op_num(self):
static_layer = paddle.jit.to_static(self.dygraph_func, self.input_spec)
program = static_layer.main_program
self._compute_op_num(program)
self.assertEqual(self.op_num, self.expected_op_num)
self.assertEqual(self.shape_op_num, self.expected_shape_op_num)
self.assertEqual(self.slice_op_num, self.expected_slice_op_num)
class TestOpNumBasicWithTensorShape4(TestOpNumBasicWithTensorShape):
    """Dynamic-dim op counts for `dyfunc_tensor_shape_4`."""
    def _set_test_func(self):
        self.dygraph_func = dyfunc_tensor_shape_4
    def _set_expected_op_num(self):
        self.expected_op_num = 6
        self.expected_shape_op_num = 1
        self.expected_slice_op_num = 1
class TestOpNumWithTensorShapeTuple1(TestOpNumBasicWithTensorShape):
    """Dynamic-dim op counts for `dyfunc_tuple_shape_1`."""
    def _set_test_func(self):
        self.dygraph_func = dyfunc_tuple_shape_1
    def _set_expected_op_num(self):
        self.expected_op_num = 7
        self.expected_shape_op_num = 2
        self.expected_slice_op_num = 2
class TestOpNumWithTensorShapeInIf1(TestOpNumBasicWithTensorShape):
    """Dynamic-dim op counts for `dyfunc_with_if_1`."""
    def _set_test_func(self):
        self.dygraph_func = dyfunc_with_if_1
    def _set_expected_op_num(self):
        self.expected_op_num = 28
        self.expected_shape_op_num = 4
        self.expected_slice_op_num = 2
class TestOpNumWithTensorShapeInFor1(TestOpNumBasicWithTensorShape):
    """Dynamic-dim op counts for `dyfunc_with_for_1`."""
    def _set_test_func(self):
        self.dygraph_func = dyfunc_with_for_1
    def _set_expected_op_num(self):
        self.expected_op_num = 22
        self.expected_shape_op_num = 3
        self.expected_slice_op_num = 3
class TestOpNumWithTensorShapeInWhile1(TestOpNumBasicWithTensorShape):
    """Dynamic-dim op counts for `dyfunc_with_while_1`."""
    def _set_test_func(self):
        self.dygraph_func = dyfunc_with_while_1
    def _set_expected_op_num(self):
        self.expected_op_num = 22
        self.expected_shape_op_num = 3
        self.expected_slice_op_num = 3
class TestChangeShapeAfterAssign(TestTensorShapeBasic):
    """Shape changed after an assignment (`dyfunc_change_shape_after_assign`)."""
    def init_test_func(self):
        self.input = numpy.ones((2, 3)).astype("int32")
        self.input_spec = [paddle.static.InputSpec(shape=[2, 3], dtype="int32")]
        self.dygraph_func = dyfunc_change_shape_after_assign
    def _set_expected_op_num(self):
        self.expected_op_num = 7
        self.expected_shape_op_num = 2
        self.expected_slice_op_num = 2
def dyfunc_with_static_convert_var_shape(x):
    # NOTE(review): keep these statements exactly as written — the
    # dygraph-to-static transformer rewrites `x.shape` accesses by pattern,
    # so restructuring this function would change what the test exercises.
    # Note: this will create `batch_size__static_convert_var_shape_suffix_0` firstly.
    batch_size = x.shape[0]
    if len(x.shape) < 1:
        res = x
    else:
        # Test for correctly to find `batch_size__static_convert_var_shape_suffix_0` in
        # deeply nested scope.
        res = fluid.layers.fill_constant(
            value=8, shape=[batch_size], dtype="int32")
    return res
class TestFindStatiConvertVarShapeSuffixVar(unittest.TestCase):
    """Program translation must resolve the suffixed shape variables."""
    def test(self):
        input_spec = paddle.static.InputSpec(shape=[None, 10])
        static_func = paddle.jit.to_static(dyfunc_with_if_2,
                                           input_spec=[input_spec])
        # Accessing the property triggers program translation; the test
        # passes as long as no exception is raised.
        _ = static_func.concrete_program
# Run all test cases when executed as a script.
if __name__ == '__main__':
    unittest.main()
| 29.875 | 87 | 0.673287 |
ace49e1a067d98be333c6fe4a1e6e0fd9759971c | 422 | py | Python | awesimsoss/cli.py | jotaylor/awesimsoss | e8047cad598d0af8c7b41ddaae1ea7d01d116eaf | [
"MIT"
] | 4 | 2019-12-17T19:04:25.000Z | 2020-09-22T15:53:09.000Z | awesimsoss/cli.py | jotaylor/awesimsoss | e8047cad598d0af8c7b41ddaae1ea7d01d116eaf | [
"MIT"
] | 94 | 2018-10-17T18:03:57.000Z | 2021-03-01T07:34:21.000Z | awesimsoss/cli.py | jotaylor/awesimsoss | e8047cad598d0af8c7b41ddaae1ea7d01d116eaf | [
"MIT"
] | 8 | 2018-10-17T20:45:49.000Z | 2021-04-14T11:41:41.000Z | # -*- coding: utf-8 -*-
"""Console script for awesimsoss."""
import sys
import click
@click.command()
def main(args=None):
    """Console script for awesimsoss (placeholder implementation)."""
    placeholder_lines = (
        "Replace this message by putting your code into "
        "awesimsoss.cli.main",
        "See click documentation at http://click.pocoo.org/",
    )
    for line in placeholder_lines:
        click.echo(line)
    return 0
# Script entry point: exit with the command's return code.
if __name__ == "__main__":
    sys.exit(main())  # pragma: no cover
| 22.210526 | 68 | 0.64455 |
ace49f5aeb756767be86cf5555763f23baca3f24 | 5,852 | py | Python | envSERVOKIT/lib/python3.7/site-packages/board.py | markvogt/Adafruit_CircuitPython_ServoKit | 8b3f5de38dae0f0f574f0ed15be23aad817fc80d | [
"MIT"
] | null | null | null | envSERVOKIT/lib/python3.7/site-packages/board.py | markvogt/Adafruit_CircuitPython_ServoKit | 8b3f5de38dae0f0f574f0ed15be23aad817fc80d | [
"MIT"
] | null | null | null | envSERVOKIT/lib/python3.7/site-packages/board.py | markvogt/Adafruit_CircuitPython_ServoKit | 8b3f5de38dae0f0f574f0ed15be23aad817fc80d | [
"MIT"
] | null | null | null | # The MIT License (MIT)
#
# Copyright (c) 2017 cefn for adafruit industries
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
`board` - Define ids for available pins
=================================================
See `CircuitPython:board` in CircuitPython for more details.
* Author(s): cefn
"""
import sys
from adafruit_blinka.agnostic import board_id, detector
import adafruit_platformdetect.constants.boards as ap_board
# pylint: disable=wildcard-import,unused-wildcard-import,ungrouped-imports
# Import the pin/bus definitions for the detected platform into this module's
# namespace; exactly one branch below runs per process.
if board_id == ap_board.FEATHER_HUZZAH:
    from adafruit_blinka.board.feather_huzzah import *
elif board_id == ap_board.NODEMCU:
    from adafruit_blinka.board.nodemcu import *
elif board_id == ap_board.PYBOARD:
    from adafruit_blinka.board.pyboard import *
elif detector.board.any_raspberry_pi_40_pin:
    from adafruit_blinka.board.raspberrypi.raspi_40pin import *
elif detector.board.any_raspberry_pi_cm:
    from adafruit_blinka.board.raspberrypi.raspi_cm import *
elif detector.board.RASPBERRY_PI_A or detector.board.RASPBERRY_PI_B_REV1:
    from adafruit_blinka.board.raspberrypi.raspi_1b_rev1 import *
elif detector.board.RASPBERRY_PI_B_REV2:
    from adafruit_blinka.board.raspberrypi.raspi_1b_rev2 import *
# All BeagleBone Black/Green variants share the beaglebone_black pin map.
elif board_id == ap_board.BEAGLEBONE_BLACK:
    from adafruit_blinka.board.beagleboard.beaglebone_black import *
elif board_id == ap_board.BEAGLEBONE_GREEN:
    from adafruit_blinka.board.beagleboard.beaglebone_black import *
elif board_id == ap_board.BEAGLEBONE_BLACK_INDUSTRIAL:
    from adafruit_blinka.board.beagleboard.beaglebone_black import *
elif board_id == ap_board.BEAGLEBONE_GREEN_WIRELESS:
    from adafruit_blinka.board.beagleboard.beaglebone_black import *
elif board_id == ap_board.BEAGLEBONE_BLACK_WIRELESS:
    from adafruit_blinka.board.beagleboard.beaglebone_black import *
elif board_id == ap_board.BEAGLEBONE_POCKETBEAGLE:
    from adafruit_blinka.board.beagleboard.beaglebone_pocketbeagle import *
# Several Orange Pi models share the orangepipc pin map.
elif board_id == ap_board.ORANGE_PI_PC:
    from adafruit_blinka.board.orangepi.orangepipc import *
elif board_id == ap_board.ORANGE_PI_R1:
    from adafruit_blinka.board.orangepi.orangepir1 import *
elif board_id == ap_board.ORANGE_PI_ZERO:
    from adafruit_blinka.board.orangepi.orangepizero import *
elif board_id == ap_board.ORANGE_PI_ONE:
    from adafruit_blinka.board.orangepi.orangepipc import *
elif board_id == ap_board.ORANGE_PI_PC_PLUS:
    from adafruit_blinka.board.orangepi.orangepipc import *
elif board_id == ap_board.ORANGE_PI_LITE:
    from adafruit_blinka.board.orangepi.orangepipc import *
elif board_id == ap_board.ORANGE_PI_PLUS_2E:
    from adafruit_blinka.board.orangepi.orangepipc import *
elif board_id == ap_board.GIANT_BOARD:
    from adafruit_blinka.board.giantboard import *
elif board_id == ap_board.JETSON_TX1:
    from adafruit_blinka.board.nvidia.jetson_tx1 import *
elif board_id == ap_board.JETSON_TX2:
    from adafruit_blinka.board.nvidia.jetson_tx2 import *
elif board_id == ap_board.JETSON_XAVIER:
    from adafruit_blinka.board.nvidia.jetson_xavier import *
elif board_id == ap_board.JETSON_NANO:
    from adafruit_blinka.board.nvidia.jetson_nano import *
elif board_id == ap_board.JETSON_NX:
    from adafruit_blinka.board.nvidia.jetson_nx import *
elif board_id == ap_board.CORAL_EDGE_TPU_DEV:
    from adafruit_blinka.board.coral_edge_tpu import *
elif board_id == ap_board.ODROID_C2:
    from adafruit_blinka.board.hardkernel.odroidc2 import *
elif board_id == ap_board.ODROID_C4:
    from adafruit_blinka.board.hardkernel.odroidc4 import *
elif board_id == ap_board.ODROID_N2:
    from adafruit_blinka.board.hardkernel.odroidn2 import *
elif board_id == ap_board.DRAGONBOARD_410C:
    from adafruit_blinka.board.dragonboard_410c import *
elif board_id == ap_board.FTDI_FT232H:
    from adafruit_blinka.board.ftdi_ft232h import *
elif board_id == ap_board.BINHO_NOVA:
    from adafruit_blinka.board.binho_nova import *
elif board_id == ap_board.MICROCHIP_MCP2221:
    from adafruit_blinka.board.microchip_mcp2221 import *
elif board_id == ap_board.SIFIVE_UNLEASHED:
    from adafruit_blinka.board.hifive_unleashed import *
elif board_id == ap_board.PINE64:
    from adafruit_blinka.board.pine64 import *
elif board_id == ap_board.CLOCKWORK_CPI3:
    from adafruit_blinka.board.clockworkcpi3 import *
elif board_id == ap_board.ONION_OMEGA2:
    from adafruit_blinka.board.onion.omega2 import *
elif board_id == ap_board.ROCK_PI_S:
    from adafruit_blinka.board.radxa.rockpis import *
elif "sphinx" in sys.modules:
    pass  # building documentation: no physical board required
else:
    # Unknown hardware: fail loudly rather than expose wrong pin mappings.
    raise NotImplementedError("Board not supported {}".format(board_id))
def I2C():
    """Return the board's default I2C bus on the SCL/SDA pins."""
    import busio
    i2c_bus = busio.I2C(SCL, SDA)
    return i2c_bus
def SPI():
    """Return the board's default SPI bus on the SCLK/MOSI/MISO pins."""
    import busio
    spi_bus = busio.SPI(SCLK, MOSI, MISO)
    return spi_bus
| 34.222222 | 79 | 0.7811 |
ace49f5bc638d91c8d7bf23ece451b42c156118a | 1,029 | py | Python | 47-Happy-Numbers/main.py | PawelZabinski/ocr-code-challenges-files | 24d30de694a00f2190790003778c6d65b8b2554b | [
"MIT"
] | null | null | null | 47-Happy-Numbers/main.py | PawelZabinski/ocr-code-challenges-files | 24d30de694a00f2190790003778c6d65b8b2554b | [
"MIT"
] | null | null | null | 47-Happy-Numbers/main.py | PawelZabinski/ocr-code-challenges-files | 24d30de694a00f2190790003778c6d65b8b2554b | [
"MIT"
] | null | null | null | import functools
import itertools
ITERATION_LIMIT = 10_000
# Happy Numbers
# A happy number is defined by the following process. Starting with any positive integer, replace the number by the sum of the squares of its digits, and repeat the process
# until the number equals 1 (where it will stay), or it loops endlessly in a cycle which does not include 1. Those numbers for which this process ends in 1 are happy numbers,
# while those that do not end in 1 are unhappy numbers. Have the programme find the first 8 happy numbers.
def evaluate_positive_integer(integer):
    """Return the sum of the squares of the decimal digits of *integer*."""
    return sum(int(digit) ** 2 for digit in str(integer))


def main():
    """Print the first 8 happy numbers (1, 7, 10, ...), one per line.

    A number is happy when repeatedly replacing it with the sum of the
    squares of its digits eventually reaches 1; unhappy numbers fall into
    a cycle instead. The cycle is detected exactly with a `seen` set,
    replacing the previous fixed iteration limit — and the loop now stops
    after 8 happy numbers instead of running forever.
    """
    happy_found = 0
    for candidate in itertools.count(1):  # happy numbers are positive integers
        seen = set()
        current = candidate
        # Iterate until we reach 1 (happy) or revisit a value (cycle -> unhappy).
        while current != 1 and current not in seen:
            seen.add(current)
            current = evaluate_positive_integer(current)
        if current == 1:
            print(f'{candidate} is a happy number!')
            happy_found += 1
            if happy_found == 8:
                return
# Script entry point.
if __name__ == '__main__':
    main()
ace49fa63a270204ce2105f96944d9073554e1af | 6,403 | py | Python | sdk/python/pulumi_aws/iam/instance_profile.py | dixler/pulumi-aws | 88838ed6d412c092717a916b0b5b154f68226c3a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/iam/instance_profile.py | dixler/pulumi-aws | 88838ed6d412c092717a916b0b5b154f68226c3a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/iam/instance_profile.py | dixler/pulumi-aws | 88838ed6d412c092717a916b0b5b154f68226c3a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
# NOTE: generated by the Pulumi Terraform bridge (see file header); keep
# structural edits out of this class so regeneration stays a clean diff.
class InstanceProfile(pulumi.CustomResource):
    arn: pulumi.Output[str]
    """
    The ARN assigned by AWS to the instance profile.
    """
    create_date: pulumi.Output[str]
    """
    The creation timestamp of the instance profile.
    """
    name: pulumi.Output[str]
    """
    The profile's name. If omitted, this provider will assign a random, unique name.
    """
    name_prefix: pulumi.Output[str]
    """
    Creates a unique name beginning with the specified prefix. Conflicts with `name`.
    """
    path: pulumi.Output[str]
    """
    Path in which to create the profile.
    """
    role: pulumi.Output[str]
    """
    The role name to include in the profile.
    """
    roles: pulumi.Output[list]
    """
    A list of role names to include in the profile. The current default is 1. If you see an error message similar to `Cannot exceed quota for InstanceSessionsPerInstanceProfile: 1`, then you must contact AWS support and ask for a limit increase.
    """
    unique_id: pulumi.Output[str]
    """
    The [unique ID][1] assigned by AWS.
    """
    def __init__(__self__, resource_name, opts=None, name=None, name_prefix=None, path=None, role=None, roles=None, __props__=None, __name__=None, __opts__=None):
        """
        Provides an IAM instance profile.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] name: The profile's name. If omitted, this provider will assign a random, unique name.
        :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`.
        :param pulumi.Input[str] path: Path in which to create the profile.
        :param pulumi.Input[str] role: The role name to include in the profile.
        :param pulumi.Input[list] roles:
            A list of role names to include in the profile. The current default is 1. If you see an error message similar to `Cannot exceed quota for InstanceSessionsPerInstanceProfile: 1`, then you must contact AWS support and ask for a limit increase.
        > This content is derived from https://github.com/terraform-providers/terraform-provider-aws/blob/master/website/docs/r/iam_instance_profile.html.markdown.
        """
        # Legacy positional aliases, kept only for backward compatibility.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = utilities.get_version()
        # No opts.id means we are creating a new resource (vs. looking one up).
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()
            __props__['name'] = name
            __props__['name_prefix'] = name_prefix
            __props__['path'] = path
            __props__['role'] = role
            __props__['roles'] = roles
            # Output-only properties start as None and are filled by the engine.
            __props__['arn'] = None
            __props__['create_date'] = None
            __props__['unique_id'] = None
        super(InstanceProfile, __self__).__init__(
            'aws:iam/instanceProfile:InstanceProfile',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name, id, opts=None, arn=None, create_date=None, name=None, name_prefix=None, path=None, role=None, roles=None, unique_id=None):
        """
        Get an existing InstanceProfile resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param str id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] arn: The ARN assigned by AWS to the instance profile.
        :param pulumi.Input[str] create_date: The creation timestamp of the instance profile.
        :param pulumi.Input[str] name: The profile's name. If omitted, this provider will assign a random, unique name.
        :param pulumi.Input[str] name_prefix: Creates a unique name beginning with the specified prefix. Conflicts with `name`.
        :param pulumi.Input[str] path: Path in which to create the profile.
        :param pulumi.Input[str] role: The role name to include in the profile.
        :param pulumi.Input[list] roles:
            A list of role names to include in the profile. The current default is 1. If you see an error message similar to `Cannot exceed quota for InstanceSessionsPerInstanceProfile: 1`, then you must contact AWS support and ask for a limit increase.
        :param pulumi.Input[str] unique_id: The [unique ID][1] assigned by AWS.
        > This content is derived from https://github.com/terraform-providers/terraform-provider-aws/blob/master/website/docs/r/iam_instance_profile.html.markdown.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = dict()
        __props__["arn"] = arn
        __props__["create_date"] = create_date
        __props__["name"] = name
        __props__["name_prefix"] = name_prefix
        __props__["path"] = path
        __props__["role"] = role
        __props__["roles"] = roles
        __props__["unique_id"] = unique_id
        return InstanceProfile(resource_name, opts=opts, __props__=__props__)
    def translate_output_property(self, prop):
        """Map a camelCase provider property name to its snake_case Python name."""
        return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
    def translate_input_property(self, prop):
        """Map a snake_case Python property name to its camelCase provider name."""
        return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 48.877863 | 258 | 0.67281 |
ace49fecc0b94265c6fae48ef41abedf729d1527 | 1,322 | py | Python | assignments/assignment_5/assignment5(i).py | eu-snehagupta/learningpython | 2a3404b165b97da9656a2d8d4f4d7d038127a693 | [
"MIT"
] | null | null | null | assignments/assignment_5/assignment5(i).py | eu-snehagupta/learningpython | 2a3404b165b97da9656a2d8d4f4d7d038127a693 | [
"MIT"
] | null | null | null | assignments/assignment_5/assignment5(i).py | eu-snehagupta/learningpython | 2a3404b165b97da9656a2d8d4f4d7d038127a693 | [
"MIT"
] | null | null | null | #Task 1:
#You have given four list of dictionaries Input data.
#process all the dictionaries in data, to craete one dictionary with only unique keys i.e.
#if A has dict {"milk": 2 } and C has { "milk": 1} , the find sict will have only one
#{ "milk": 3}.
#Then write the final dictionary to a csv file using DictWriter
import csv
# Legacy module-level accumulator kept for backward compatibility; the
# processing function below no longer mutates it.
newdata_ = dict()
A = [{"coke": 1 }, {"milk": 2 }, {"curd": 3 }, { "MILK": 1}, {"juice": 3 } ]
B = [{"orange": 1 }, {"papaya": 2 }, {"pineapple": 3 }, { "apple": 1}, {"papaya": 3 } ]
C = [{"jeans": 1 }, {"shirt": 2 }, {"jeans": 3 }, { "milk": 1}, {"SHIRT": 3 } ]
D = [{"HISTORY": 1 }, {"history": 2 }, {"maths": 3 }, { "civics": 1}, {"maths": 3 } ]
data = [A, B, C, D]
def process_dict_lists(data=data):
    """Merge lists of single-entry dicts into one dict with unique keys.

    Keys are compared case-insensitively (lower-cased) and the values of
    duplicate keys are summed.

    :param data: list of lists of dicts; defaults to the module-level data.
    :return: dict mapping lower-cased key -> summed value.
    """
    # Accumulate into a local dict so repeated calls do not double-count
    # (the original version mutated the module-global `newdata_`).
    merged = {}
    for group in data:
        for entry in group:
            for key, value in entry.items():
                normalized = key.lower()
                merged[normalized] = merged.get(normalized, 0) + value
    return merged
def write_dict_csv(newdata_):
    """Write *newdata_* to 'assignment5(i)_datafile.csv'.

    Produces a header row of the dict's keys followed by one row with the
    corresponding values.
    """
    # newline='' is required by the csv module to avoid spurious blank lines
    # on Windows; the `with` block guarantees the file is flushed and closed.
    with open("assignment5(i)_datafile.csv", "w", newline="") as filetowrite:
        writer = csv.DictWriter(filetowrite, newdata_.keys())
        writer.writeheader()
        writer.writerow(newdata_)
# Script entry point: merge the sample data and dump it to the csv file.
if __name__ == "__main__":
    newdata_ = process_dict_lists()
    write_dict_csv(newdata_)
| 36.722222 | 92 | 0.602874 |
ace4a00f10045c5ad66a5f94fb808762339a70f4 | 735 | py | Python | modules/make_keras_pickleable.py | jonmarty/Roman-Coin-Image-Search | c55976d951166d4759c03e9898fe37ab4fa0b0f9 | [
"MIT"
] | null | null | null | modules/make_keras_pickleable.py | jonmarty/Roman-Coin-Image-Search | c55976d951166d4759c03e9898fe37ab4fa0b0f9 | [
"MIT"
] | null | null | null | modules/make_keras_pickleable.py | jonmarty/Roman-Coin-Image-Search | c55976d951166d4759c03e9898fe37ab4fa0b0f9 | [
"MIT"
] | null | null | null | import types
import tempfile
import keras.models
def make_keras_pickleable():
    """Monkey-patch ``keras.models.Model`` so instances can be pickled.

    Pickling serializes the model through a temporary HDF5 file produced by
    ``keras.models.save_model``; unpickling writes the bytes back to a temp
    file and reloads the model with ``keras.models.load_model``.

    NOTE(review): reopening a ``NamedTemporaryFile`` by name while it is
    still open (as both hooks rely on) is POSIX-specific — presumably this
    fails on Windows; verify if portability matters.
    """
    def __getstate__(self):
        # Serialize the model to HDF5 bytes and return them in the state dict.
        model_str = ""
        with tempfile.NamedTemporaryFile(suffix='.hdf5', delete=True) as fd:
            keras.models.save_model(self, fd.name, overwrite=True)
            model_str = fd.read()
        d = { 'model_str': model_str }
        return d
    def __setstate__(self, state):
        # Rebuild a model from the HDF5 bytes and adopt its attributes.
        with tempfile.NamedTemporaryFile(suffix='.hdf5', delete=True) as fd:
            fd.write(state['model_str'])
            fd.flush()
            model = keras.models.load_model(fd.name)
            self.__dict__ = model.__dict__
    cls = keras.models.Model
    cls.__getstate__ = __getstate__
    cls.__setstate__ = __setstate__
ace4a0572237ab03ebf5f21659bbe561f27f7497 | 10,423 | py | Python | drivers/hal/st/scripts/prebuild.py | flyghost/OneOS-V2.1.0 | 6fedab0558c07fe679d63ba1eb8ee9992c044d86 | [
"Apache-2.0"
] | null | null | null | drivers/hal/st/scripts/prebuild.py | flyghost/OneOS-V2.1.0 | 6fedab0558c07fe679d63ba1eb8ee9992c044d86 | [
"Apache-2.0"
] | null | null | null | drivers/hal/st/scripts/prebuild.py | flyghost/OneOS-V2.1.0 | 6fedab0558c07fe679d63ba1eb8ee9992c044d86 | [
"Apache-2.0"
] | null | null | null | import sys
import glob
import os.path
import re
from build_tools import *
import importlib
import importlib.util
def gen_stm32_bsp_file(prj_path, bsp_path):
    """Derive <bsp_path>/bsp.c from the CubeMX-generated main.c.

    Renames ``main`` to ``hardware_init`` and patches a few CubeMX
    constructs (OneOS config include, optional clock config, SDRAM init
    sequence, disabled ETH/OpenAMP init) line by line.
    """
    source = prj_path + "/" + bsp_path + "/main.c"
    target = prj_path + "/" + bsp_path + "/bsp.c"
    defined_sdram = False
    # 'with' guarantees both files are closed even if a rewrite fails
    # (the original left them open on exceptions).
    with open(source, 'r', newline='') as src, \
            open(target, 'w', newline='') as dst:
        for ss in src:
            if ss.find("SDRAM_HandleTypeDef", 0) != -1:
                # An SDRAM handle is declared before MX_FMC_Init(); remember
                # to append the SDRAM init sequence after FMC initialization.
                defined_sdram = True
            ss = ss.replace("#include \"main.h\"",
                            "#include \"main.h\"\n"
                            + "#include <oneos_config.h>")
            ss = ss.replace("int main(void)", "int hardware_init(void)")
            ss = ss.replace("/* USER CODE END WHILE */", "/* USER CODE END WHILE */\n return 0;")
            ss = ss.replace("if (HAL_ETH_Init", "if (0 && HAL_ETH_Init")
            if defined_sdram:
                ss = ss.replace(" MX_FMC_Init();",
                                " MX_FMC_Init();\n"
                                + " void SDRAM_Initialization_Sequence(SDRAM_HandleTypeDef *hsdram);\n"
                                + " SDRAM_Initialization_Sequence(&hsdram1);")
            ss = ss.replace(" SystemClock_Config();",
                            "#ifndef DEFAULT_SYSTEM_CLOCK_CONFIG\n"
                            + " SystemClock_Config();\n"
                            + "#endif")
            ss = ss.replace("MX_OPENAMP_Init(RPMSG_REMOTE, NULL);", "//MX_OPENAMP_Init(RPMSG_REMOTE, NULL);")
            dst.write(ss)
def gen_stm32_it_file(prj_path, bsp_path):
    """Derive stm32*_it_bsp.c from the CubeMX-generated stm32*_it.c.

    Injects the OneOS UART idle-line hook ahead of every
    HAL_UART_IRQHandler call, fixes the LPUART1 handler to use hlpuart1,
    and renames the SD/SDIO IRQ handlers out of the way.
    """
    source = glob.glob(prj_path + "/" + bsp_path + "stm32*_it.c")[0]
    target = source.split('_it.c')[0] + "_it_bsp.c"
    file = open(source, 'r+', newline = '')
    target_ss = ''
    # State machine tracking position relative to the LPUART1 IRQ handler:
    # 0 = before it, 1 = inside (before the HAL call), 2 = HAL call fixed,
    # 3 = past the handler. Only state 1 triggers the huart1 -> hlpuart1 fix;
    # states 2 and 3 just stop it from firing again.
    lpuart1_fix_status = 0
    for ss in file.readlines():
        if "USER CODE END LPUART1_IRQn 0" in ss:
            lpuart1_fix_status = 1
        if "HAL_UART_IRQHandler(&huart1);" in ss and lpuart1_fix_status == 1:
            lpuart1_fix_status = 2
            # CubeMX sometimes emits huart1 inside the LPUART1 handler.
            ss = ss.replace("huart1", "hlpuart1")
        if "USER CODE BEGIN LPUART1_IRQn 1" in ss:
            lpuart1_fix_status = 3
        if IsDefined(['OS_USING_SERIAL', 'HAL_UART_MODULE_ENABLED']):
            # Declare the idle-line hook once, right after the EV user block.
            ss = ss.replace('/* USER CODE END EV */', \
                '/* USER CODE END EV */\n'\
                '#ifdef HAL_UART_MODULE_ENABLED\n'\
                'int HAL_USART_IDLE_PROCESS(UART_HandleTypeDef *huart);\n'\
                '#endif')
        if 'HAL_UART_IRQHandler(' in ss:
            # Extract the handle name between '&' and ')' and wrap the HAL
            # call so the idle-line hook can short-circuit the IRQ.
            huart = ss.split('&')[1].split(')')[0]
            ss = ' if (HAL_USART_IDLE_PROCESS(&%s))\n'\
                ' return;\n'\
                ' \n'\
                ' HAL_UART_IRQHandler(&%s);\n' % (huart, huart)
        ss = ss.replace("void SDMMC1_IRQHandler(void)", "void SDMMC1_IRQHandler_remove(void)")
        ss = ss.replace("void SDMMC2_IRQHandler(void)", "void SDMMC2_IRQHandler_remove(void)")
        ss = ss.replace("void SDIO_IRQHandler(void)", "void SDIO_IRQHandler_remove(void)")
        target_ss += ss
    file.close()
    file = open(target, 'w+', newline = '')
    file.write(target_ss)
    file.close()
def gen_stm32_devices_file(prj_path, bsp_path):
    """Generate board/peripherals.c from CubeMX's main.c and *_msp.c.

    Scans main.c for HAL handle declarations (e.g. `UART_HandleTypeDef
    huart1;`), emits an `extern` plus an OS_HAL_DEVICE_DEFINE entry per
    handle, and records the matching HAL_*_MODULE_ENABLED defines. For I2C
    the SCL/SDA pin numbers are parsed out of the MSP file's GPIO comment
    blocks; HCD/PCD (USB) get extra info structs.
    """
    # Pick up the CubeMX MSP file; `name` keeps the last match after the loop.
    for name in glob.glob(prj_path + "/" + bsp_path + '*msp.c'):
        name = os.path.basename(name)
        print(name)
    source = prj_path + "/" + bsp_path + "/main.c"
    target = prj_path + "/board/peripherals.c"
    msp = prj_path + "/" + bsp_path + "/" + name
    # NOTE(review): these handles are opened without `with`; f3 is also
    # re-opened inside the I2C branch and may stay open on early exits.
    f1 = open(source, 'r+', newline = '')
    f2 = open(target, 'w+', newline = '')
    f3 = open(msp, 'r+', newline = '')
    # Every HAL handle type we know how to export.
    device_type_list = [
        'ADC_HandleTypeDef',
        'CAN_HandleTypeDef',
        'CEC_HandleTypeDef',
        'CRC_HandleTypeDef',
        'CRYP_HandleTypeDef',
        'DAC_HandleTypeDef',
        'DCMI_HandleTypeDef',
        'DFSDM_Channel_HandleTypeDef',
        'DFSDM_Filter_HandleTypeDef',
        'DMA_HandleTypeDef',
        'DMA2D_HandleTypeDef',
        'DSI_HandleTypeDef',
        'ETH_HandleTypeDef',
        'EXTI_HandleTypeDef',
        'HASH_HandleTypeDef',
        'HCD_HandleTypeDef',
        'I2C_HandleTypeDef',
        'I2S_HandleTypeDef',
        'IRDA_HandleTypeDef',
        'IWDG_HandleTypeDef',
        'JPEG_HandleTypeDef',
        'LPTIM_HandleTypeDef',
        'LTDC_HandleTypeDef',
        'MDIOS_HandleTypeDef',
        'MMC_HandleTypeDef',
        'NAND_HandleTypeDef',
        'NOR_HandleTypeDef',
        'PCD_HandleTypeDef',
        'QSPI_HandleTypeDef',
        'RNG_HandleTypeDef',
        'RTC_HandleTypeDef',
        'SAI_HandleTypeDef',
        'SD_HandleTypeDef',
        'SDRAM_HandleTypeDef',
        'SMARTCARD_HandleTypeDef',
        'SMBUS_HandleTypeDef',
        'SPDIFRX_HandleTypeDef',
        'SPI_HandleTypeDef',
        'SRAM_HandleTypeDef',
        'TIM_HandleTypeDef',
        'UART_HandleTypeDef',
        'USART_HandleTypeDef',
        'WWDG_HandleTypeDef',
        'HRTIM_HandleTypeDef',
    ]
    # GPIO and FLASH are always available on STM32 targets.
    AddDefined('HAL_GPIO_MODULE_ENABLED')
    AddDefined('HAL_FLASH_MODULE_ENABLED')
    for ss in f1.readlines():
        for device_type in device_type_list:
            index = ss.find(device_type, 0)
            if index != 0:
                # Only handle declarations that start at column 0.
                continue
            index1 = ss.find(';', 0)
            # The instance name sits between "<Type> " and the semicolon.
            instance = ss[len(device_type)+2:index1]
            f2.write('extern ' + ss)
            instance_NAME = str(instance.upper())
            index_type_name = ss.find('_HandleTypeDef', 0)
            type_name = ss[index:index_type_name]
            type_NAME = str(type_name.upper())
            key = "HAL_" + type_NAME + "_MODULE_ENABLED"
            #print(key)
            AddDefined(key)
            if device_type == 'I2C_HandleTypeDef':
                # Parse the MSP file's "<I2Cx> GPIO Configuration" comment
                # block to recover the SCL/SDA pin numbers (0 = port A pin 0).
                index2 = -1
                index3 = -1
                index4 = -1
                gpio_pin = ['0x00','0x00']
                f3 = open(msp, 'r+', newline = '')
                for gpio in f3.readlines():
                    index = gpio.find(instance_NAME + ' GPIO Configuration', 0)
                    if index > 0:
                        index2 = index
                    index3 = gpio.find(' P', 0)
                    index_SCL = gpio.find('_SCL', 0)
                    index_SDA = gpio.find('_SDA', 0)
                    if index2 != -1 and index3 != -1:
                        # Decode "P<port><pin>": pin number = port_index*16 + pin.
                        gpio_type = gpio[index3+2:index3+3]
                        gpio_pin_byte0 = gpio[index3+3:index3+4]
                        gpio_pin_byte1 = gpio[index3+4:index3+5]
                        if gpio_pin_byte1 == ' ':
                            gpio_num = (ord(gpio_type) - ord('A'))*16 + ord(gpio_pin_byte0)-ord('0')
                        else:
                            gpio_num = (ord(gpio_type) - ord('A'))*16 + (ord(gpio_pin_byte0)-ord('0'))*10 + ord(gpio_pin_byte1)-ord('0')
                        if (index_SCL > 0):
                            gpio_pin[0] = hex(gpio_num)
                            index_SCL = -1
                        if (index_SDA > 0):
                            gpio_pin[1] = hex(gpio_num)
                            index_SDA = -1
                        continue
                    index4 = gpio.find('*/', 0)
                    flag_fined_pin = 0
                    if index4 != -1 and index2 != -1:
                        # End of the GPIO comment block: emit the info struct.
                        index2 = -1
                        flag_fined_pin = 1
                        instance_intercept = instance[0:3]
                        f2.write('struct stm32_' + instance_intercept + '_info ' + instance + '_info = {.instance = &h' + instance + ', ')
                        f2.write('.scl = ' + gpio_pin[0] + ', ')
                        f2.write('.sda = ' + gpio_pin[1] + '};\n')
                        f3.close()
                        break
                if flag_fined_pin == 1:
                    f2.write('OS_HAL_DEVICE_DEFINE("' + device_type + '", "hard_' + instance + '", ' + instance + "_info);\n\n")
            elif device_type == 'HCD_HandleTypeDef':
                # USB host: the last 6 chars of the instance name encode the host type.
                instance_intercept = instance[0:3]
                f2.write('struct stm32_' + instance_intercept + '_info ' + instance + '_info = {.instance = &h' + instance + ', .host_type = ' + instance[-6:] + '};\n')
                f2.write('OS_HAL_DEVICE_DEFINE("' + device_type + '", "hard_' + instance + '", ' + instance + "_info);\n\n")
            elif device_type == 'PCD_HandleTypeDef':
                # USB device: the last 14 chars of the instance name encode the interface.
                instance_intercept = instance[0:3]
                f2.write('struct stm32_' + instance_intercept + '_info ' + instance + '_info = {.instance = &h' + instance + ', .interface_type = "%s"};\n' % (instance[-14:].upper()))
                f2.write('OS_HAL_DEVICE_DEFINE("' + device_type + '", "hard_' + instance + '", ' + instance + "_info);\n\n")
            else:
                f2.write('OS_HAL_DEVICE_DEFINE("' + device_type + '", "' + instance + '", h' + instance + ');\n\n')
    f1.close()
    f2.close()
def gen_stm32_middlewares_file(prj_path, bsp_path, ioc_path):
    """Register middleware HAL defines based on the CubeMX .ioc file."""
    if ioc_path is None:
        return
    ioc_file = prj_path + "/" + ioc_path
    with open(ioc_file, 'r+', newline='') as ioc:
        for line in ioc.readlines():
            # OpenAMP enabled in CubeMX -> enable the matching HAL module.
            if 'VP_OPENAMP_VS_OPENAMP.Mode=OpenAmp_Activated' in line:
                AddDefined('HAL_OPENAMP_MODULE_ENABLED')
def prebuild(prj_path, bsp_path = '/board/CubeMX_Config/Src/', ioc_path = None):
    """Pre-build entry point: generate all STM32 adaptation files for a project.

    prj_path -- project root directory
    bsp_path -- BSP source directory, relative to the project root
    ioc_path -- CubeMX .ioc file path relative to prj_path, or None
    """
    print("project " + prj_path)
    # Generate the project's support files in sequence.
    gen_stm32_bsp_file(prj_path, bsp_path)
    gen_stm32_devices_file(prj_path, bsp_path)
    gen_stm32_middlewares_file(prj_path, bsp_path, ioc_path)
    gen_stm32_it_file(prj_path, bsp_path)
    # Dynamically load the shared cortex-m prebuild helper from the OS tree
    # (the "cotex-m" spelling matches the actual directory name in the repo)
    # and let it generate the link file for this project.
    loader = importlib.machinery.SourceFileLoader('prebuild.py', Env['OS_ROOT'] + '/drivers/boot/cotex-m/prebuild.py')
    spec = importlib.util.spec_from_loader(loader.name, loader)
    mod = importlib.util.module_from_spec(spec)
    loader.exec_module(mod)
    mod.gen_cotex_m_link_file(prj_path)
| 40.87451 | 183 | 0.50638 |
ace4a3628283d9da1b7a95d852c5dfa523bbc592 | 4,801 | py | Python | configs/top_down/lite_hrnet/Envisat/litehrnet_18_coco_256x256_Envisat+IC.py | kuldeepbrd1/Lite-HRNet-1 | f2d90dc131dd4761080cc58fe75302f5725eb684 | [
"Apache-2.0"
] | 1 | 2022-03-25T00:27:35.000Z | 2022-03-25T00:27:35.000Z | configs/top_down/lite_hrnet/Envisat/litehrnet_18_coco_256x256_Envisat+IC.py | femalegeekinsv/Lite-HRNet | f2d90dc131dd4761080cc58fe75302f5725eb684 | [
"Apache-2.0"
] | null | null | null | configs/top_down/lite_hrnet/Envisat/litehrnet_18_coco_256x256_Envisat+IC.py | femalegeekinsv/Lite-HRNet | f2d90dc131dd4761080cc58fe75302f5725eb684 | [
"Apache-2.0"
] | 1 | 2022-03-25T00:28:42.000Z | 2022-03-25T00:28:42.000Z | log_level = 'INFO'
# Run/checkpoint settings: train from scratch, checkpoint and evaluate
# (metric 'mAP') every 10 epochs.
load_from = None
resume_from = None
dist_params = dict(backend='nccl')
workflow = [('train', 1)]
checkpoint_config = dict(interval=10)
evaluation = dict(interval=10, metric='mAP')

# Optimizer: Adam at lr=2e-3, no gradient clipping.
optimizer = dict(
    type='Adam',
    lr=2e-3,
)
optimizer_config = dict(grad_clip=None)
# learning policy
# Step schedule with a 500-iteration linear warmup; the LR drops at
# epochs 170 and 200 of a 210-epoch run.
lr_config = dict(
    policy='step',
    # warmup=None,
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=0.001,
    step=[170, 200])
total_epochs = 210

# Logging: text + TensorBoard hooks every 50 iterations.
log_config = dict(
    interval=50,
    hooks=[dict(type='TextLoggerHook'),
           dict(type='TensorboardLoggerHook')])

# 12 keypoint channels; all 12 are used for both dataset loading and inference.
channel_cfg = dict(
    num_output_channels=12,
    dataset_joints=12,
    dataset_channel=list(range(12)),
    inference_channel=list(range(12))
)
# model settings
# Top-down pose model: LiteHRNet backbone feeding a simple keypoint head
# that predicts one heatmap per output channel (no deconv layers, 1x1
# final conv).  Test-time config enables flip testing and heatmap shifting.
model = dict(
    type='TopDown',
    pretrained=None,
    backbone=dict(
        type='LiteHRNet',
        in_channels=3,
        extra=dict(
            stem=dict(stem_channels=32, out_channels=32, expand_ratio=1),
            num_stages=3,
            # Per-stage module/branch/block counts and channel widths of
            # the multi-resolution branches.
            stages_spec=dict(
                num_modules=(2, 4, 2),
                num_branches=(2, 3, 4),
                num_blocks=(2, 2, 2),
                module_type=('LITE', 'LITE', 'LITE'),
                with_fuse=(True, True, True),
                reduce_ratios=(8, 8, 8),
                num_channels=(
                    (40, 80),
                    (40, 80, 160),
                    (40, 80, 160, 320),
                )),
            with_head=True,
        )),
    keypoint_head=dict(
        type='TopDownSimpleHead',
        in_channels=40,
        out_channels=channel_cfg['num_output_channels'],
        num_deconv_layers=0,
        extra=dict(final_conv_kernel=1, ),
    ),
    train_cfg=dict(),
    test_cfg=dict(
        flip_test=True,
        post_process=True,
        shift_heatmap=True,
        unbiased_decoding=False,
        modulate_kernel=11),
    loss_pose=dict(type='JointsMSELoss', use_target_weight=True))
# Dataset-level configuration shared by the pipelines: 256x256 input,
# 64x64 heatmaps, ground-truth boxes used directly (use_gt_bbox=True).
data_cfg = dict(
    image_size=[256, 256],
    heatmap_size=[64, 64],
    num_output_channels=channel_cfg['num_output_channels'],
    num_joints=channel_cfg['dataset_joints'],
    dataset_channel=channel_cfg['dataset_channel'],
    inference_channel=channel_cfg['inference_channel'],
    soft_nms=False,
    nms_thr=1.0,
    oks_thr=0.9,
    vis_thr=0.2,
    bbox_thr=1.0,
    use_gt_bbox=True,
    image_thr=0.0,
    bbox_file='data/Envisat/Envisat+IC/train.json',
)

# Identical to data_cfg except that bbox_file points at the validation
# annotations instead of the training annotations.
val_data_cfg = dict(
    image_size=[256, 256],
    heatmap_size=[64, 64],
    num_output_channels=channel_cfg['num_output_channels'],
    num_joints=channel_cfg['dataset_joints'],
    dataset_channel=channel_cfg['dataset_channel'],
    inference_channel=channel_cfg['inference_channel'],
    soft_nms=False,
    nms_thr=1.0,
    oks_thr=0.9,
    vis_thr=0.2,
    bbox_thr=1.0,
    use_gt_bbox=True,
    image_thr=0.0,
    bbox_file='data/Envisat/Envisat+IC/val.json',
)
# Training pipeline: random flip and scale/rotation augmentation, affine
# crop to the input size, ImageNet-mean normalization, Gaussian target
# heatmap generation (sigma=2).
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='TopDownRandomFlip', flip_prob=0.5),
    dict(
        type='TopDownGetRandomScaleRotation', rot_factor=30,
        scale_factor=0.25),
    dict(type='TopDownAffine'),
    dict(type='ToTensor'),
    dict(
        type='NormalizeTensor',
        mean=[0.485, 0.456, 0.406],
        std=[0.229, 0.224, 0.225]),
    dict(type='TopDownGenerateTarget', sigma=2),
    dict(
        type='Collect',
        keys=['img', 'target', 'target_weight'],
        meta_keys=[
            'image_file', 'joints_3d', 'joints_3d_visible', 'center', 'scale',
            'rotation', 'bbox_score', 'flip_pairs'
        ]),
]

# Validation pipeline: same affine/normalization steps without augmentation
# or target generation.
val_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='TopDownAffine'),
    dict(type='ToTensor'),
    dict(
        type='NormalizeTensor',
        mean=[0.485, 0.456, 0.406],
        std=[0.229, 0.224, 0.225]),
    dict(
        type='Collect',
        keys=[
            'img',
        ],
        meta_keys=[
            'image_file', 'center', 'scale', 'rotation', 'bbox_score',
            'flip_pairs'
        ]),
]

# Test uses the exact validation pipeline.
test_pipeline = val_pipeline
# Dataset splits.  NOTE(review): data_root is relative ('../data/Envisat')
# while the bbox_file entries above use 'data/Envisat/...' — presumably the
# two are resolved from different working directories; confirm when running.
data_root = '../data/Envisat'
data = dict(
    samples_per_gpu=64,
    workers_per_gpu=4,
    train=dict(
        type='TopDownEnvisatCocoDataset',
        ann_file=f'{data_root}/Envisat+IC/train.json',
        img_prefix=f'{data_root}/Envisat+IC/train/',
        data_cfg=data_cfg,
        pipeline=train_pipeline),
    val=dict(
        type='TopDownEnvisatCocoDataset',
        ann_file=f'{data_root}/Envisat+IC/val.json',
        img_prefix=f'{data_root}/Envisat+IC/val/',
        data_cfg=val_data_cfg,
        pipeline=val_pipeline),
    # The test split reuses the (train-oriented) data_cfg and the
    # validation pipeline.
    test=dict(
        type='TopDownEnvisatCocoDataset',
        ann_file=f'{data_root}/Envisat+IC/test.json',
        img_prefix=f'{data_root}/Envisat+IC/test/',
        data_cfg=data_cfg,
        pipeline=val_pipeline),
)
| 27.751445 | 78 | 0.602375 |
ace4a491fe541d32b5c105ac5e4010d360de9448 | 1,122 | py | Python | tests/integration/route53/domains/__init__.py | Yurzs/boto | d739d6c52877699206e69b9901bbe92ea437ba5d | [
"MIT"
] | 5,079 | 2015-01-01T03:39:46.000Z | 2022-03-31T07:38:22.000Z | tests/integration/route53/domains/__init__.py | Yurzs/boto | d739d6c52877699206e69b9901bbe92ea437ba5d | [
"MIT"
] | 4,640 | 2015-07-08T16:19:08.000Z | 2019-12-02T15:01:27.000Z | tests/integration/route53/domains/__init__.py | Yurzs/boto | d739d6c52877699206e69b9901bbe92ea437ba5d | [
"MIT"
] | 2,033 | 2015-01-04T07:18:02.000Z | 2022-03-28T19:55:47.000Z | # Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
| 51 | 76 | 0.770945 |
ace4a4e4023ac0ce7588d7f8c31b7bc1dd3eb2eb | 43,827 | py | Python | kgtk/reshape/kgtkimplode.py | vishalbelsare/kgtk | 7dbcc901a5d52cc9d1af97715e12697e5b460dc7 | [
"MIT"
] | 222 | 2020-03-31T17:45:04.000Z | 2022-03-30T22:48:08.000Z | kgtk/reshape/kgtkimplode.py | vishalbelsare/kgtk | 7dbcc901a5d52cc9d1af97715e12697e5b460dc7 | [
"MIT"
] | 510 | 2020-04-02T00:32:44.000Z | 2022-03-29T01:20:22.000Z | kgtk/reshape/kgtkimplode.py | vishalbelsare/kgtk | 7dbcc901a5d52cc9d1af97715e12697e5b460dc7 | [
"MIT"
] | 41 | 2020-03-31T17:45:07.000Z | 2022-03-22T02:49:44.000Z | """Copy records from the first KGTK file to the output file,
imploding data type-specific columns into a single column.
"""
from argparse import ArgumentParser, Namespace
import ast
import attr
from pathlib import Path
import sys
import typing
from kgtk.kgtkformat import KgtkFormat
from kgtk.io.kgtkreader import KgtkReader, KgtkReaderOptions
from kgtk.io.kgtkwriter import KgtkWriter
from kgtk.reshape.kgtkidbuilder import KgtkIdBuilder, KgtkIdBuilderOptions
from kgtk.utils.argparsehelpers import optional_bool
from kgtk.value.kgtkvalue import KgtkValue, KgtkValueFields
from kgtk.value.kgtkvalueoptions import KgtkValueOptions, DEFAULT_KGTK_VALUE_OPTIONS
@attr.s(slots=True, frozen=True)
class KgtkImplode(KgtkFormat):
    """Copy records from an input KGTK file to an output KGTK file,
    imploding exploded, data-type-specific columns (e.g. "node2;kgtk:number",
    "node2;kgtk:text") back into a single KGTK value column (e.g. "node2").

    Each implode_* method returns a (value, valid) tuple: the imploded KGTK
    value (or "") and whether the implosion succeeded.  Rows that fail to
    implode may be diverted to an optional reject file.
    """

    # Input/output paths.  The output and reject paths may be None,
    # in which case the corresponding file is not written.
    input_file_path: Path = attr.ib(validator=attr.validators.instance_of(Path))
    output_file_path: typing.Optional[Path] = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(Path)))
    reject_file_path: typing.Optional[Path] = attr.ib(validator=attr.validators.optional(attr.validators.instance_of(Path)))

    # The KGTK data type names selected for implosion (e.g. "number", "string").
    type_names: typing.List[str] = \
        attr.ib(validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(str),
                                                        iterable_validator=attr.validators.instance_of(list)))

    # Field names whose exploded columns are allowed to be absent.
    without_fields: typing.List[str] = \
        attr.ib(validator=attr.validators.deep_iterable(member_validator=attr.validators.instance_of(str),
                                                        iterable_validator=attr.validators.instance_of(list)))

    # attr.converters.default_if_none(...) does not seem to work.
    reader_options: KgtkReaderOptions = attr.ib(validator=attr.validators.instance_of(KgtkReaderOptions))
    value_options: KgtkValueOptions = attr.ib(validator=attr.validators.instance_of(KgtkValueOptions))

    # The column that receives the imploded value, and whether an existing
    # column of that name may be overwritten.
    column_name: str = attr.ib(validator=attr.validators.instance_of(str), default=KgtkFormat.NODE2)
    overwrite_column: bool = attr.ib(validator=attr.validators.instance_of(bool), default=True)

    # The prefix shared by all exploded column names, e.g. "node2;kgtk:".
    prefix: str = attr.ib(validator=attr.validators.instance_of(str), default= KgtkFormat.NODE2 + ";" + KgtkFormat.KGTK_NAMESPACE)

    # When True, each imploded value is re-parsed with KgtkValue as a check.
    validate: bool = attr.ib(validator=attr.validators.instance_of(bool), default=True)

    # When True, pipe characters in imploded strings/symbols are escaped
    # per the KGTK file format.
    escape_pipes: bool = attr.ib(validator=attr.validators.instance_of(bool), default=True)

    # When True, a plain number is accepted where a quantity is expected.
    quantities_include_numbers: bool = attr.ib(validator=attr.validators.instance_of(bool), default=True)

    # When True, a "string" row carrying a language code is imploded as a
    # language-qualified string instead of being rejected.
    general_strings: bool = attr.ib(validator=attr.validators.instance_of(bool), default=True)

    # When True, the prefixed (exploded) columns are dropped from the output.
    remove_prefixed_columns: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)

    # Handling of rows whose data type is not in type_names:
    # ignore -> output an empty value; retain -> keep the existing value.
    ignore_unselected_types: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
    retain_unselected_types: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)

    # Some messages are noisy unless asked to be quiet. Verbose overrides this.
    quiet: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)

    # attr.converters.default_if_none(...) does not seem to work.
    # value_options: KgtkValueOptions = attr.ib(default=None,
    #                                           converter=attr.converters.default_if_none(DEFAULT_KGTK_VALUE_OPTIONS),
    #                                           validator=attr.validators.instance_of(KgtkValueOptions))

    # Optional ID column generation.
    build_id: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
    idbuilder_options: typing.Optional[KgtkIdBuilderOptions] = attr.ib(default=None)

    error_file: typing.TextIO = attr.ib(default=sys.stderr)
    verbose: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)
    very_verbose: bool = attr.ib(validator=attr.validators.instance_of(bool), default=False)

    def unwrap(self, val: str)->str:
        """
        Remove optional outer string wrappers from a number or symbol
        extracted from an exploded column.

        We do *not* attempt to remove escape characters (\) from the body
        of the value: they should not appear in numbers, and are
        discouraged in symbols.

        We do *not* attempt to undouble internal quotes (("") or ('')) from the
        body of the value: they should not appear in numbers, and are
        discouraged in symbols.

        We accept the following wrappers:
        triple double quotes
        triple single quotes
        double quotes
        single quotes
        """
        if len(val) >= 6:
            if val.startswith('"""') and val.endswith('"""'):
                return val[3:-3]
            elif val.startswith("'''") and val.endswith("'''"):
                return val[3:-3]
        if len(val) >= 2:
            if val.startswith('"') and val.endswith('"'):
                return val[1:-1]
            elif val.startswith("'") and val.endswith("'"):
                return val[1:-1]
        return val

    def implode_empty(self,
                      input_line_count: int,
                      row: typing.List[str],
                      implosion: typing.Mapping[str, int],
                      type_name: str,
    )->typing.Tuple[str, bool]:
        """An empty KGTK value implodes to the empty string; always valid."""
        return "", True

    def implode_list(self,
                     input_line_count: int,
                     row: typing.List[str],
                     implosion: typing.Mapping[str, int],
                     type_name: str,
    )->typing.Tuple[str, bool]:
        """Lists cannot be imploded; report and fail."""
        if self.verbose or not self.quiet:
            print("Input line %d: data type '%s' is not supported for implode." % (input_line_count, type_name),
                  file=self.error_file, flush=True)
        return "", False

    def implode_number(self,
                       input_line_count: int,
                       row: typing.List[str],
                       implosion: typing.Mapping[str, int],
                       type_name: str,
    )->typing.Tuple[str, bool]:
        """Implode a number from its single 'number' field."""
        valid: bool = True

        num_idx: int = implosion[KgtkValueFields.NUMBER_FIELD_NAME]
        num_val: str = self.unwrap(row[num_idx])
        if len(num_val) == 0:
            valid = False
            if self.verbose or not self.quiet:
                print("Input line %d: data type '%s': %s field is empty" % (input_line_count, type_name, KgtkValueFields.NUMBER_FIELD_NAME),
                      file=self.error_file, flush=True)

        value: str = num_val
        if valid and self.validate:
            kv: KgtkValue = KgtkValue(value, options=self.value_options)
            valid = kv.is_number(validate=True)
            if not valid:
                if self.verbose or not self.quiet:
                    print("Input line %d: data type '%s': imploded value '%s' is not a valid number." % (input_line_count, type_name, value),
                          file=self.error_file, flush=True)
        return value, valid

    def implode_quantity(self,
                         input_line_count: int,
                         row: typing.List[str],
                         implosion: typing.Mapping[str, int],
                         type_name: str,
    )->typing.Tuple[str, bool]:
        """Implode a quantity: number, optional tolerances, optional units."""
        valid: bool = True

        num_idx: int = implosion[KgtkValueFields.NUMBER_FIELD_NAME]
        num_val: str = self.unwrap(row[num_idx])
        if len(num_val) == 0:
            valid = False
            if self.verbose or not self.quiet:
                print("Input line %d: data type '%s': %s field is empty" % (input_line_count, type_name, KgtkValueFields.NUMBER_FIELD_NAME),
                      file=self.error_file, flush=True)

        lt_idx: int = implosion[KgtkValueFields.LOW_TOLERANCE_FIELD_NAME]
        lt: str = self.unwrap(row[lt_idx]) if lt_idx >= 0 else ""

        ht_idx: int = implosion[KgtkValueFields.HIGH_TOLERANCE_FIELD_NAME]
        ht: str = self.unwrap(row[ht_idx]) if ht_idx >= 0 else ""

        # BUG FIX: the tolerances must both be present or both be absent.
        # The parentheses are required: without them,
        # "len(lt) > 0 ^ len(ht) > 0" parses as the chained comparison
        # "len(lt) > (0 ^ len(ht)) > 0" because bitwise ^ binds more
        # tightly than >, so the intended XOR check never fired.
        if (len(lt) > 0) ^ (len(ht) > 0):
            valid = False
            if self.verbose or not self.quiet:
                print("Input line %d: data type '%s': low and high tolerance must both be present or absent." % (input_line_count, type_name),
                      file=self.error_file, flush=True)

        si_idx: int = implosion[KgtkValueFields.SI_UNITS_FIELD_NAME]
        si: str = self.unwrap(row[si_idx]) if si_idx >= 0 else ""

        un_idx: int = implosion[KgtkValueFields.UNITS_NODE_FIELD_NAME]
        un: str = self.unwrap(row[un_idx]) if un_idx >= 0 else ""

        # Assemble "number[low,high]units".
        value: str = num_val
        if len(lt) > 0 or len(ht) > 0:
            value += "[" + lt + "," + ht + "]"
        value += si + un

        if valid and self.validate:
            kv: KgtkValue = KgtkValue(value, options=self.value_options)
            if self.quantities_include_numbers:
                valid = kv.is_number_or_quantity(validate=True)
                if not valid:
                    if self.verbose or not self.quiet:
                        print("Input line %d: data type '%s': imploded value '%s' is not a valid quantity or number." % (input_line_count, type_name, value),
                              file=self.error_file, flush=True)
            else:
                valid = kv.is_quantity(validate=True)
                if not valid:
                    if self.verbose or not self.quiet:
                        print("Input line %d: data type '%s': imploded value '%s' is not a valid quantity." % (input_line_count, type_name, value),
                              file=self.error_file, flush=True)
        return value, valid

    def implode_string(self,
                       input_line_count: int,
                       row: typing.List[str],
                       implosion: typing.Mapping[str, int],
                       type_name: str,
    )->typing.Tuple[str, bool]:
        """Implode a double-quoted KGTK string from the 'text' field.

        If a language code is present and general_strings is enabled, the
        row is imploded as a language-qualified string instead.
        """
        valid: bool = True

        if KgtkValueFields.LANGUAGE_FIELD_NAME in implosion:
            language_idx: int = implosion[KgtkValueFields.LANGUAGE_FIELD_NAME]
            if language_idx >= 0:
                language_val: str = self.unwrap(row[language_idx])
                if len(language_val) > 0:
                    if self.general_strings:
                        return self.implode_language_qualified_string(input_line_count, row, implosion, type_name)
                    else:
                        valid = False
                        if self.verbose or not self.quiet:
                            print("Input line %d: data type '%s': %s field is not empty" % (input_line_count, type_name, KgtkValueFields.LANGUAGE_FIELD_NAME),
                                  file=self.error_file, flush=True)

        text_idx: int = implosion[KgtkValueFields.TEXT_FIELD_NAME]
        text_val: str = row[text_idx]
        if len(text_val) == 0:
            valid = False
            if self.verbose or not self.quiet:
                print("Input line %d: data type '%s': %s field is empty" % (input_line_count, type_name, KgtkValueFields.TEXT_FIELD_NAME),
                      file=self.error_file, flush=True)
        elif len(text_val) == 1:
            valid = False
            if self.verbose or not self.quiet:
                print("Input line %d: data type '%s': %s field is too short" % (input_line_count, type_name, KgtkValueFields.TEXT_FIELD_NAME),
                      file=self.error_file, flush=True)
        else:
            if not text_val.startswith('"'):
                valid = False
                if self.verbose or not self.quiet:
                    print("Input line %d: data type '%s': %s field does not start with a double quote" % (input_line_count, type_name, KgtkValueFields.TEXT_FIELD_NAME),
                          file=self.error_file, flush=True)
            if not text_val.endswith('"'):
                valid = False
                if self.verbose or not self.quiet:
                    print("Input line %d: data type '%s': %s field does not end with a double quote" % (input_line_count, type_name, KgtkValueFields.TEXT_FIELD_NAME),
                          file=self.error_file, flush=True)

        value: str = ""
        if valid:
            # This subterfuge uses Python's literal parser to parse the string.
            if not self.escape_pipes:
                # ast.literal_eval(...) doesn't treat backslash pipe (\|) as an escaped pipe (|).
                # (this is documented behavior) so we will remove escaped pipes manually.
                text_val = text_val.replace('\\|', '|')
            value = KgtkFormat.stringify(ast.literal_eval(text_val))

        if valid and self.validate:
            kv: KgtkValue = KgtkValue(value, options=self.value_options)
            valid = kv.is_string(validate=True)
            if not valid:
                if self.verbose or not self.quiet:
                    print("Input line %d: data type '%s': imploded value '%s' is not a valid string." % (input_line_count, type_name, value),
                          file=self.error_file, flush=True)
        return value, valid

    def implode_language_qualified_string(self,
                                          input_line_count: int,
                                          row: typing.List[str],
                                          implosion: typing.Mapping[str, int],
                                          type_name: str,
    )->typing.Tuple[str, bool]:
        """Implode a language-qualified string: 'text'@language(-suffix)."""
        valid: bool = True

        text_idx: int = implosion[KgtkValueFields.TEXT_FIELD_NAME]
        text_val: str = row[text_idx]
        if len(text_val) == 0:
            valid = False
            if self.verbose or not self.quiet:
                print("Input line %d: data type '%s': %s field is empty" % (input_line_count, type_name, KgtkValueFields.TEXT_FIELD_NAME),
                      file=self.error_file, flush=True)
        elif len(text_val) == 1:
            valid = False
            if self.verbose or not self.quiet:
                print("Input line %d: data type '%s': %s field is too short" % (input_line_count, type_name, KgtkValueFields.TEXT_FIELD_NAME),
                      file=self.error_file, flush=True)
        else:
            if not text_val.startswith('"'):
                valid = False
                if self.verbose or not self.quiet:
                    print("Input line %d: data type '%s': %s field does not start with a double quote" % (input_line_count, type_name, KgtkValueFields.TEXT_FIELD_NAME),
                          file=self.error_file, flush=True)
            if not text_val.endswith('"'):
                valid = False
                if self.verbose or not self.quiet:
                    print("Input line %d: data type '%s': %s field does not end with a double quote" % (input_line_count, type_name, KgtkValueFields.TEXT_FIELD_NAME),
                          file=self.error_file, flush=True)

        language_idx: int = implosion[KgtkValueFields.LANGUAGE_FIELD_NAME]
        language_val: str = self.unwrap(row[language_idx])
        if len(language_val) == 0:
            valid = False
            if self.verbose or not self.quiet:
                print("Input line %d: data type '%s': %s field is empty" % (input_line_count, type_name, KgtkValueFields.LANGUAGE_FIELD_NAME),
                      file=self.error_file, flush=True)

        suf_idx: int = implosion[KgtkValueFields.LANGUAGE_SUFFIX_FIELD_NAME]
        suf: str = self.unwrap(row[suf_idx]) if suf_idx >= 0 else ""
        if len(suf) > 0 and not suf.startswith("-"):
            # As a special favor, we'll accept language suffixes that do not
            # start with a dash. We'll prepend the dash.
            suf = "-" + suf

        value: str = ""
        if valid:
            # This subterfuge uses Python's literal parser to parse the string.
            if not self.escape_pipes:
                # ast.literal_eval(...) doesn't treat backslash pipe (\|) as an escaped pipe (|).
                # (this is documented behavior) so we will remove escaped pipes manually.
                text_val = text_val.replace('\\|', '|')
            value = KgtkFormat.stringify(ast.literal_eval(text_val), language=language_val, language_suffix=suf)

        if valid and self.validate:
            kv: KgtkValue = KgtkValue(value, options=self.value_options)
            valid = kv.is_language_qualified_string(validate=True)
            if not valid:
                if self.verbose or not self.quiet:
                    print("Input line %d: data type '%s': imploded value '%s' is not a valid language qualified string." % (input_line_count, type_name, value),
                          file=self.error_file, flush=True)
        return value, valid

    def implode_location_coordinates(self,
                                     input_line_count: int,
                                     row: typing.List[str],
                                     implosion: typing.Mapping[str, int],
                                     type_name: str,
    )->typing.Tuple[str, bool]:
        """Implode location coordinates: @latitude/longitude."""
        valid: bool = True

        latitude_idx: int = implosion[KgtkValueFields.LATITUDE_FIELD_NAME]
        latitude_val: str = self.unwrap(row[latitude_idx])
        if len(latitude_val) == 0:
            valid = False
            if self.verbose or not self.quiet:
                print("Input line %d: data type '%s': %s field is empty" % (input_line_count, type_name, KgtkValueFields.LATITUDE_FIELD_NAME),
                      file=self.error_file, flush=True)

        longitude_idx: int = implosion[KgtkValueFields.LONGITUDE_FIELD_NAME]
        longitude_val: str = self.unwrap(row[longitude_idx])
        if len(longitude_val) == 0:
            valid = False
            if self.verbose or not self.quiet:
                print("Input line %d: data type '%s': %s field is empty" % (input_line_count, type_name, KgtkValueFields.LONGITUDE_FIELD_NAME),
                      file=self.error_file, flush=True)

        value: str = "@" + latitude_val + "/" + longitude_val

        if valid and self.validate:
            kv: KgtkValue = KgtkValue(value, options=self.value_options)
            valid = kv.is_location_coordinates(validate=True)
            if not valid:
                if self.verbose or not self.quiet:
                    print("Input line %d: data type '%s': imploded value '%s' is not a valid location coordinates." % (input_line_count, type_name, value),
                          file=self.error_file, flush=True)
        return value, valid

    def implode_date_and_times(self,
                               input_line_count: int,
                               row: typing.List[str],
                               implosion: typing.Mapping[str, int],
                               type_name: str,
    )->typing.Tuple[str, bool]:
        """Implode a date/time: ^date_and_times(/precision)."""
        valid: bool = True

        date_and_times_idx: int = implosion[KgtkValueFields.DATE_AND_TIMES_FIELD_NAME]
        date_and_times_val: str = self.unwrap(row[date_and_times_idx])
        if len(date_and_times_val) == 0:
            valid = False
            if self.verbose or not self.quiet:
                print("Input line %d: data type '%s': %s field is empty" % (input_line_count, type_name, KgtkValueFields.DATE_AND_TIMES_FIELD_NAME),
                      file=self.error_file, flush=True)

        precision_idx: int = implosion[KgtkValueFields.PRECISION_FIELD_NAME]
        precision_val: str = self.unwrap(row[precision_idx]) if precision_idx >= 0 else ""

        value: str = "^" + date_and_times_val
        if len(precision_val) > 0:
            value += "/" + precision_val

        if valid and self.validate:
            kv: KgtkValue = KgtkValue(value, options=self.value_options)
            valid = kv.is_date_and_times(validate=True)
            if not valid:
                if self.verbose or not self.quiet:
                    print("Input line %d: data type '%s': imploded value '%s' is not a valid date and time." % (input_line_count, type_name, value),
                          file=self.error_file, flush=True)
        return value, valid

    def implode_extension(self,
                          input_line_count: int,
                          row: typing.List[str],
                          implosion: typing.Mapping[str, int],
                          type_name: str,
    )->typing.Tuple[str, bool]:
        """Extensions cannot be imploded; report and fail."""
        if self.verbose or not self.quiet:
            # Consistency fix: route the diagnostic to self.error_file like
            # every other imploder (it previously went to stdout).
            print("Input line %d: data type '%s': extensions are not supported." % (input_line_count, type_name),
                  file=self.error_file, flush=True)
        return "", False

    def implode_boolean(self,
                        input_line_count: int,
                        row: typing.List[str],
                        implosion: typing.Mapping[str, int],
                        type_name: str,
    )->typing.Tuple[str, bool]:
        """Implode a boolean from its 'truth' field."""
        valid: bool = True

        truth_idx: int = implosion[KgtkValueFields.TRUTH_FIELD_NAME]
        truth_val: str = self.unwrap(row[truth_idx])
        if len(truth_val) == 0:
            valid = False
            if self.verbose or not self.quiet:
                print("Input line %d: data type '%s': %s field is empty" % (input_line_count, type_name, KgtkValueFields.TRUTH_FIELD_NAME),
                      file=self.error_file, flush=True)

        value: str = truth_val

        if valid and self.validate:
            kv: KgtkValue = KgtkValue(value, options=self.value_options)
            valid = kv.is_boolean(validate=True)
            if not valid:
                if self.verbose or not self.quiet:
                    print("Input line %d: data type '%s': imploded value '%s' is not a valid boolean." % (input_line_count, type_name, value),
                          file=self.error_file, flush=True)
        return value, valid

    def implode_symbol(self,
                       input_line_count: int,
                       row: typing.List[str],
                       implosion: typing.Mapping[str, int],
                       type_name: str,
    )->typing.Tuple[str, bool]:
        """Implode a symbol, optionally escaping KGTK list separators (|)."""
        valid: bool = True

        symbol_idx: int = implosion[KgtkValueFields.SYMBOL_FIELD_NAME]
        symbol_val: str = self.unwrap(row[symbol_idx])
        if len(symbol_val) == 0:
            valid = False
            if self.verbose or not self.quiet:
                print("Input line %d: data type '%s': %s field is empty" % (input_line_count, type_name, KgtkValueFields.SYMBOL_FIELD_NAME),
                      file=self.error_file, flush=True)
        if self.escape_pipes:
            symbol_val = symbol_val.replace(KgtkFormat.LIST_SEPARATOR, "\\" + KgtkFormat.LIST_SEPARATOR)

        value: str = symbol_val

        if valid and self.validate:
            kv: KgtkValue = KgtkValue(value, options=self.value_options)
            valid = kv.is_symbol(validate=True)
            if not valid:
                if self.verbose or not self.quiet:
                    print("Input line %d: data type '%s': imploded value '%s' is not a valid symbol." % (input_line_count, type_name, value),
                          file=self.error_file, flush=True)
        return value, valid

    # The imploder dispatch table:
    imploders: typing.Mapping[KgtkFormat.DataType,
                              typing.Callable[['KgtkImplode',
                                               int,
                                               typing.List[str],
                                               typing.Mapping[str, int],
                                               str],
                                              typing.Tuple[str, bool]]] = {
        KgtkFormat.DataType.EMPTY: implode_empty,
        KgtkFormat.DataType.LIST: implode_list,
        KgtkFormat.DataType.NUMBER: implode_number,
        KgtkFormat.DataType.QUANTITY: implode_quantity,
        KgtkFormat.DataType.STRING: implode_string,
        KgtkFormat.DataType.LANGUAGE_QUALIFIED_STRING: implode_language_qualified_string,
        KgtkFormat.DataType.LOCATION_COORDINATES: implode_location_coordinates,
        KgtkFormat.DataType.DATE_AND_TIMES: implode_date_and_times,
        KgtkFormat.DataType.EXTENSION: implode_extension,
        KgtkFormat.DataType.BOOLEAN: implode_boolean,
        KgtkFormat.DataType.SYMBOL: implode_symbol,
    }

    def implode(self,
                input_line_count: int,
                row: typing.List[str],
                implosion: typing.Mapping[str, int],
                data_type_idx: int,
                existing_column_idx: int,
    )->typing.Tuple[str, bool]:
        """Dispatch one row to the imploder for its declared data type.

        Unrecognized data types fail.  Recognized but unselected types
        are retained, ignored, or rejected per the instance flags.
        """
        type_name: str = row[data_type_idx]
        if type_name.upper() not in KgtkFormat.DataType.__members__:
            if self.verbose or not self.quiet:
                print("Input line %d: unrecognized data type '%s'." % (input_line_count, type_name), file=self.error_file, flush=True)
            return "", False

        if type_name.lower() not in self.type_names:
            if self.retain_unselected_types and existing_column_idx >= 0:
                return row[existing_column_idx], True
            elif self.ignore_unselected_types:
                return "", True
            else:
                if self.verbose or not self.quiet:
                    print("Input line %d: unselected data type '%s'." % (input_line_count, type_name), file=self.error_file, flush=True)
                return "", False

        dt: KgtkFormat.DataType = KgtkFormat.DataType[type_name.upper()]
        return self.imploders[dt](self, input_line_count, row, implosion, type_name)

    def process(self):
        """Read the input file, implode each row, and write output/reject files.

        Raises ValueError for configuration problems (empty column name,
        unknown data types, missing exploded columns, disallowed overwrite).
        """
        if len(self.column_name) == 0:
            raise ValueError("The name of the column to implode is empty.")

        selected_field_names: typing.List[str] = [ ]
        field_name: str

        if self.type_names is not None:
            if self.verbose:
                print("Validate the names of the data types to extract.", file=self.error_file, flush=True)
            type_name: str
            for type_name in self.type_names:
                if type_name not in KgtkValueFields.DEFAULT_DATA_TYPE_FIELDS:
                    raise ValueError("Unknown data type name '%s'." % type_name)
                # Merge this KGTK data type's fields into the list of selected fields:
                for field_name in KgtkValueFields.DEFAULT_DATA_TYPE_FIELDS[type_name]:
                    if field_name == KgtkValueFields.VALID_FIELD_NAME:
                        continue # We don't need the valid field.
                    if field_name == KgtkValueFields.LIST_LEN_FIELD_NAME:
                        continue # We don't need the list length field.
                    if field_name not in selected_field_names:
                        selected_field_names.append(field_name)

        if len(selected_field_names) == 0:
            raise ValueError("The list of fields to implode is empty.")

        if KgtkValueFields.DATA_TYPE_FIELD_NAME not in selected_field_names:
            raise ValueError("The data type field '%s' has not been selected." % KgtkValueFields.DATA_TYPE_FIELD_NAME)

        # Open the input file.
        if self.verbose:
            print("Opening the input file: %s" % self.input_file_path, file=self.error_file, flush=True)

        kr: KgtkReader =  KgtkReader.open(self.input_file_path,
                                          error_file=self.error_file,
                                          options=self.reader_options,
                                          value_options = self.value_options,
                                          verbose=self.verbose,
                                          very_verbose=self.very_verbose,
        )

        output_column_names = kr.column_names.copy()

        new_column: bool # True ==> adding the imploded column, False ==> using an existing column
        column_idx: int # The index of the imploded column (new or old).
        if self.column_name in kr.column_name_map:
            column_idx = kr.column_name_map[self.column_name]
            new_column = False
            if not self.overwrite_column:
                raise ValueError("Imploded column '%s' (idx %d) already exists and overwrite not allowed." % (self.column_name, column_idx))
            if self.verbose:
                print("Overwriting existing imploded column '%s' (idx %d)." % (self.column_name, column_idx), file=self.error_file, flush=True)
        else:
            column_idx = len(output_column_names)
            new_column = True
            output_column_names.append(self.column_name)
            if self.verbose:
                print("Imploded column '%s' will be created (idx %d)." % (self.column_name, column_idx), file=self.error_file, flush=True)

        if self.verbose:
            print("Build the map of field names to exploded columns", file=self.error_file, flush=True)
        # Maps each selected field name to the index of its exploded input
        # column, or -1 when the field is in without_fields.
        implosion: typing.MutableMapping[str, int] = { }
        missing_columns: typing.List[str] = [ ]
        for field_name in selected_field_names:
            if field_name in self.without_fields:
                if self.verbose:
                    print("We can do without field '%s'." % field_name, file=self.error_file, flush=True)
                implosion[field_name] = -1
                continue
            exploded_name: str = self.prefix + field_name
            if self.verbose:
                print("Field '%s' becomes '%s'" % (field_name, exploded_name), file=self.error_file, flush=True)
            if exploded_name in implosion:
                raise ValueError("Field name '%s' is duplicated in the field list.")
            if exploded_name in kr.column_names:
                exploded_idx = kr.column_name_map[exploded_name]
                implosion[field_name] = exploded_idx
                if self.verbose:
                    print("Field '%s' is in column '%s' (idx=%d)" % (field_name, exploded_name, exploded_idx),
                          file=self.error_file, flush=True)
            else:
                if self.verbose:
                    print("Field '%s' exploded column '%s' not found." % (field_name, exploded_name), file=self.error_file, flush=True)
                missing_columns.append(exploded_name)
        if len(missing_columns) > 0:
            raise ValueError("Missing columns: %s" % " ".join(missing_columns))
        data_type_idx = implosion[KgtkValueFields.DATA_TYPE_FIELD_NAME]

        # If requested, create the ID column builder.
        # Assemble the list of output column names.
        idb: typing.Optional[KgtkIdBuilder] = None
        if self.build_id:
            if self.idbuilder_options is None:
                raise ValueError("ID build requested but ID builder options are missing")
            idb = KgtkIdBuilder.from_column_names(output_column_names, self.idbuilder_options)
            id_output_column_names = idb.column_names.copy()
        else:
            id_output_column_names = output_column_names.copy()

        trimmed_output_column_names: typing.List[str]
        if self.remove_prefixed_columns and len(self.prefix) > 0:
            trimmed_output_column_names = [ ]
            if self.verbose:
                print("Removing columns with names that start with '%s'." % self.prefix, file=self.error_file, flush=True)
            column_name: str
            for column_name in id_output_column_names:
                if column_name.startswith(self.prefix):
                    if self.verbose:
                        print("Removing column '%s." % column_name, file=self.error_file, flush=True)
                else:
                    trimmed_output_column_names.append(column_name)
        else:
            trimmed_output_column_names = id_output_column_names

        shuffle_list: typing.List[int] = [ ] # Easier to init than deal with typing.Optional.
        ew: typing.Optional[KgtkWriter] = None
        if self.output_file_path is not None:
            if self.verbose:
                print("Opening output file %s" % str(self.output_file_path), file=self.error_file, flush=True)
            # Open the output file.  (No type re-annotation here: ew was
            # already declared typing.Optional[KgtkWriter] above.)
            ew = KgtkWriter.open(trimmed_output_column_names,
                                 self.output_file_path,
                                 mode=kr.mode,
                                 require_all_columns=False,
                                 prohibit_extra_columns=True,
                                 fill_missing_columns=False,
                                 gzip_in_parallel=False,
                                 verbose=self.verbose,
                                 very_verbose=self.very_verbose)
            shuffle_list = ew.build_shuffle_list(id_output_column_names)

        rw: typing.Optional[KgtkWriter] = None
        if self.reject_file_path is not None:
            if self.verbose:
                print("Opening reject file %s" % str(self.reject_file_path), file=self.error_file, flush=True)
            # Open the reject file.  Rejected rows keep the original layout.
            rw = KgtkWriter.open(kr.column_names,
                                 self.reject_file_path,
                                 mode=kr.mode,
                                 require_all_columns=False,
                                 prohibit_extra_columns=True,
                                 fill_missing_columns=False,
                                 gzip_in_parallel=False,
                                 verbose=self.verbose,
                                 very_verbose=self.very_verbose)

        if self.verbose:
            print("Imploding records from %s" % self.input_file_path, file=self.error_file, flush=True)
        input_line_count: int = 0
        imploded_value_count: int = 0
        invalid_value_count: int = 0

        existing_column_idx: int = -1 if new_column else column_idx

        row: typing.List[str]
        for row in kr:
            input_line_count += 1
            value: str
            valid: bool
            value, valid = self.implode(input_line_count, row, implosion, data_type_idx, existing_column_idx)
            if valid:
                imploded_value_count += 1
            else:
                invalid_value_count += 1

            if rw is not None and not valid:
                # Reject the row before implosion.
                rw.write(row)
            elif ew is not None:
                output_row: typing.List[str] = row.copy()
                if new_column:
                    output_row.append(value)
                else:
                    output_row[column_idx] = value
                if idb is not None:
                    output_row = idb.build(output_row, input_line_count)
                ew.write(output_row, shuffle_list=shuffle_list)

        if self.verbose:
            print("Processed %d records, imploded %d values, %d invalid values." % (input_line_count, imploded_value_count, invalid_value_count),
                  file=self.error_file, flush=True)

        # NOTE(review): kr is never explicitly closed here; presumably
        # KgtkReader releases its file on exhaustion/GC -- confirm.
        if ew is not None:
            ew.close()
        if rw is not None:
            rw.close()
def main():
    """Command-line test driver for the KGTK implode processor.

    Builds an ArgumentParser mirroring the KgtkImplode constructor options,
    parses the command line, optionally echoes the resolved options for
    debugging, then runs the implode pass.
    """
    parser: ArgumentParser = ArgumentParser()
    parser.add_argument(dest="input_file_path", help="The KGTK file with the input data. (default=%(default)s)", type=Path, nargs="?", default="-")
    # Bug fix: the help text used to say "explode"; this is the implode tool.
    parser.add_argument("--column", dest="column_name", help="The name of the column to implode. (default=%(default)s).", default="node2")
    parser.add_argument("--types", dest="type_names", nargs='*',
                        help="The KGTK data types for which fields should be imploded. (default=%(default)s).",
                        choices=KgtkFormat.DataType.choices(),
                        default=KgtkFormat.DataType.choices())
    parser.add_argument("--without", dest="without_fields", nargs='*',
                        help="The KGTK fields to do without. (default=%(default)s).",
                        choices=KgtkValueFields.OPTIONAL_DEFAULT_FIELD_NAMES,
                        default=None)
    parser.add_argument("-o", "--output-file", dest="output_file_path", help="The KGTK file to write (default=%(default)s).", type=Path, default="-")
    parser.add_argument("--prefix", dest="prefix", help="The prefix for exploded column names. (default=%(default)s).", default="node2;kgtk:")
    parser.add_argument("--overwrite", dest="overwrite_column",
                        help="Indicate that it is OK to overwrite an existing imploded column. (default=%(default)s).",
                        type=optional_bool, nargs='?', const=True, default=True)
    parser.add_argument("--validate", dest="validate",
                        help="Validate imploded values. (default=%(default)s).",
                        type=optional_bool, nargs='?', const=True, default=True)
    parser.add_argument("--escape-pipes", dest="escape_pipes",
                        help="When true, pipe characters (|) need to be escaped (\\|) per KGTK file format. (default=%(default)s).",
                        type=optional_bool, nargs='?', const=True, default=False)
    parser.add_argument("--quantities-include-numbers", dest="quantities_include_numbers",
                        help="When true, numbers are acceptable quantities. (default=%(default)s).",
                        type=optional_bool, nargs='?', const=True, default=True)
    parser.add_argument("--general-strings", dest="general_strings",
                        help="When true, strings may include language qualified strings. (default=%(default)s).",
                        type=optional_bool, nargs='?', const=True, default=True)
    parser.add_argument("--remove-prefixed-columns", dest="remove_prefixed_columns",
                        help="When true, remove all columns beginning with the prefix from the output file. (default=%(default)s).",
                        type=optional_bool, nargs='?', const=True, default=False)
    parser.add_argument("--ignore-unselected-types", dest="ignore_unselected_types",
                        help="When true, input records with valid but unselected data types will be passed through to output. (default=%(default)s).",
                        type=optional_bool, nargs='?', const=True, default=True)
    # Bug fix: "will be retain existing data" -> "will retain existing data".
    parser.add_argument("--retain-unselected-types", dest="retain_unselected_types",
                        help="When true, input records with valid but unselected data types will retain existing data on output. (default=%(default)s).",
                        type=optional_bool, nargs='?', const=True, default=True)
    parser.add_argument("--build-id", dest="build_id",
                        help="Build id values in an id column. (default=%(default)s).",
                        type=optional_bool, nargs='?', const=True, default=False)
    parser.add_argument("--reject-file", dest="reject_file_path", help="The KGTK file into which to write rejected records (default=%(default)s).",
                        type=Path, default=None)
    parser.add_argument("--quiet", dest="quiet",
                        help="When true, suppress certain complaints unless verbose. (default=%(default)s).",
                        type=optional_bool, nargs='?', const=True, default=False)
    # Delegate shared option groups to their owning classes.
    KgtkIdBuilderOptions.add_arguments(parser)
    KgtkReader.add_debug_arguments(parser)
    KgtkReaderOptions.add_arguments(parser, mode_options=True)
    KgtkValueOptions.add_arguments(parser)
    args: Namespace = parser.parse_args()
    error_file: typing.TextIO = sys.stdout if args.errors_to_stdout else sys.stderr
    # Build the option structures.
    idbuilder_options: KgtkIdBuilderOptions = KgtkIdBuilderOptions.from_args(args)
    reader_options: KgtkReaderOptions = KgtkReaderOptions.from_args(args)
    value_options: KgtkValueOptions = KgtkValueOptions.from_args(args)
    # Show the final option structures for debugging and documentation.
    if args.show_options:
        # TODO: show ifempty-specific options.
        print("input: %s" % str(args.input_file_path), file=error_file, flush=True)
        print("--column %s" % args.column_name, file=error_file, flush=True)
        print("--prefix %s" % args.prefix, file=error_file, flush=True)
        print("--overwrite %s" % str(args.overwrite_column), file=error_file, flush=True)
        print("--validate %s" % str(args.validate), file=error_file, flush=True)
        print("--escape-pipes %s" % str(args.escape_pipes), file=error_file, flush=True)
        print("--quantities-include-numbers %s" % str(args.quantities_include_numbers), file=error_file, flush=True)
        print("--general-strings %s" % str(args.general_strings), file=error_file, flush=True)
        print("--remove-prefixed-columns %s" % str(args.remove_prefixed_columns), file=error_file, flush=True)
        print("--ignore-unselected-types %s" % str(args.ignore_unselected_types), file=error_file, flush=True)
        print("--retain-unselected-types %s" % str(args.retain_unselected_types), file=error_file, flush=True)
        # Bug fix: this label used to read "--quiets", which is not the
        # name of the option it reports.
        print("--quiet %s" % str(args.quiet), file=error_file, flush=True)
        if args.type_names is not None:
            print("--types %s" % " ".join(args.type_names), file=error_file, flush=True)
        if args.without_fields is not None:
            print("--without %s" % " ".join(args.without_fields), file=error_file, flush=True)
        print("--output-file=%s" % str(args.output_file_path), file=error_file, flush=True)
        if args.reject_file_path is not None:
            print("--reject-file=%s" % str(args.reject_file_path), file=error_file, flush=True)
        print("--build-id=%s" % str(args.build_id), file=error_file, flush=True)
        idbuilder_options.show(out=error_file)
        reader_options.show(out=error_file)
        value_options.show(out=error_file)
    without_fields: typing.List[str] = args.without_fields if args.without_fields is not None else list()
    ex: KgtkImplode = KgtkImplode(
        input_file_path=args.input_file_path,
        column_name=args.column_name,
        prefix=args.prefix,
        type_names=args.type_names,
        without_fields=without_fields,
        overwrite_column=args.overwrite_column,
        validate=args.validate,
        escape_pipes=args.escape_pipes,
        quantities_include_numbers=args.quantities_include_numbers,
        general_strings=args.general_strings,
        remove_prefixed_columns=args.remove_prefixed_columns,
        ignore_unselected_types=args.ignore_unselected_types,
        retain_unselected_types=args.retain_unselected_types,
        output_file_path=args.output_file_path,
        reject_file_path=args.reject_file_path,
        quiet=args.quiet,
        build_id=args.build_id,
        idbuilder_options=idbuilder_options,
        reader_options=reader_options,
        value_options=value_options,
        error_file=error_file,
        verbose=args.verbose,
        very_verbose=args.very_verbose)
    ex.process()
# Module entry point: run the implode test driver only when executed directly.
if __name__ == "__main__":
    main()
| 52.112961 | 168 | 0.583704 |
ace4a51d1be21805b4af05c59c094c1a71c1dc78 | 4,138 | py | Python | pyscale/zmq/socket.py | timgates42/pyscale | 22a03af18d314247c8fe7b5bf309fb641afcfc98 | [
"MIT"
] | 2 | 2015-11-05T20:38:35.000Z | 2017-03-09T04:29:58.000Z | pyscale/zmq/socket.py | timgates42/pyscale | 22a03af18d314247c8fe7b5bf309fb641afcfc98 | [
"MIT"
] | null | null | null | pyscale/zmq/socket.py | timgates42/pyscale | 22a03af18d314247c8fe7b5bf309fb641afcfc98 | [
"MIT"
] | 1 | 2021-12-24T21:04:26.000Z | 2021-12-24T21:04:26.000Z | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import re
import glob
import os.path as osp
from contextlib import contextmanager
from gevent_zeromq import zmq
from .common import patterns, format_method
from ..lib import ReqError
class ProxySocket(object):
    """Records a chain of attribute/call operations and replays them remotely.

    Every attribute get/set/delete or call on an instance is translated into a
    (method, args, kwargs) blob, appended to ``_parsed``, and shipped over the
    owning Socket as a single JSON-RPC request.  A ``proxy`` reply keeps the
    chain alive so further operations extend it.
    """

    # Attribute names handled locally on the instance; anything else is
    # intercepted and forwarded to the remote side.
    reserved = ['_obj', '_parsed', '_key', '_value', '_attr', '_str']

    def __init__(self, obj, parsed=None):
        # Bug fix: the default used to be a shared mutable list literal, and
        # the argument was then ignored (self._parsed was always reset to []).
        # A caller-supplied chain is now copied defensively and honored.
        self._obj = obj
        self._parsed = list(parsed) if parsed is not None else []
        self._str = None

    def __getattr__(self, key):
        # Only called for names not found normally, i.e. non-reserved names.
        self._key = key
        self._attr = 'get'
        return self._rpc()

    def __setattr__(self, key, value):
        if key in self.reserved:
            return super(ProxySocket, self).__setattr__(key, value)
        self._key = key
        self._value = value
        self._attr = 'set'
        return self._rpc()

    def __delattr__(self, key):
        if key in self.reserved:
            return super(ProxySocket, self).__delattr__(key)
        self._key = key
        self._attr = 'del'
        return self._rpc()

    def __call__(self, *args, **kwargs):
        self._attr = 'call'
        return self._rpc(*args, **kwargs)

    def _rpc(self, *args, **kwargs):
        # Prepare the request blob for the pending operation.  Bug fix: these
        # comparisons used "is" against string literals, which relies on
        # interning; "==" is the correct comparison.
        if self._attr == 'call':
            blob = ('__call__', args, kwargs)
        elif self._attr == 'get':
            blob = ('__getattribute__', [self._key], {})
        elif self._attr == 'set':
            blob = ('__set', [self._key, self._value], {})
        elif self._attr == 'del':
            blob = ('__del', [self._key], {})
        elif self._attr == 'dir':
            blob = ('__dir', [], {})
        elif self._attr == 'len':
            blob = ('__len', [], {})
        else:
            # Bug fix: this used to read self.attr, a non-reserved name,
            # which would recurse through __getattr__ instead of raising.
            raise ValueError('Unknown value for attr: %s' % self._attr)
        self._parsed.append(blob)
        # Make the request, opening the socket on demand when it is closed.
        if self._obj._sock is not None:
            reply = self._obj._send(self._parsed)
        else:
            with self._obj:
                reply = self._obj._send(self._parsed)
        # Parse the response.  Every branch returns or raises; the original
        # ended with an unreachable "return result" on an undefined name,
        # which has been removed.
        if 'error' in reply:
            return ReqError(reply['error'])
        elif 'proxy' in reply:
            self._str = '(proxy: %s)' % reply['proxy']
            return self
        elif 'result' in reply:
            return reply['result']
        else:
            raise ValueError('reply must be result, proxy or error')

    def __str__(self):
        # Remote proxies report the server-provided description.
        if self._str is None:
            return super(ProxySocket, self).__str__()
        return str(self._str)

    def __repr__(self):
        if self._str is None:
            return super(ProxySocket, self).__repr__()
        return str(self._str)

    def __dir__(self):
        self._attr = 'dir'
        return self._rpc()

    def __len__(self):
        self._attr = 'len'
        return self._rpc()
class Socket(object):
    """ ZMQ client for all messaging patterns """

    # Attribute names stored locally on the instance; any other attribute
    # access is forwarded to the remote module through a ProxySocket.
    reserved = ['_name', '_type', '_pattern', '_subscription', '_context', '_sock_file', '_sock']

    def __init__(self, name, _type='REQ', subscription='', context=None):
        self._name = name
        self._type = _type.upper()
        self._pattern = patterns[self._type]
        self._subscription = subscription
        self._context = context if context else zmq.Context.instance()
        self._sock_file = "ipc://tmp/sockets/%s/%s.sock" % (self._pattern, self._name)
        self._sock = None

    def _open(self):
        # NOTE(review): when the socket is already open this returns None,
        # not self — preserved as-is since __enter__ passes it through.
        if self._sock is not None:
            return
        self._sock = self._context.socket(getattr(zmq, self._type))
        self._sock.connect(self._sock_file)
        if self._pattern == 'pub':
            self._sock.setsockopt(zmq.SUBSCRIBE, self._subscription)
        return self

    def _close(self):
        if self._sock is None:
            return self
        self._sock.close()
        self._sock = None
        return self

    def __enter__(self):
        return self._open()

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self._close()

    def _send(self, blob):
        # Ship the accumulated operation chain and wait for the JSON reply.
        self._sock.send_json(blob)
        trace = ''.join([format_method(*req) for req in blob])
        logging.debug("[zmq] ~> %s%s" % (self._name, trace))
        return self._sock.recv_json()

    # Everything below delegates attribute access, calls, and introspection
    # to a fresh ProxySocket bound to this socket.
    def __getattr__(self, key):
        return getattr(ProxySocket(self), key)

    def __setattr__(self, key, value):
        if key not in self.reserved:
            return setattr(ProxySocket(self), key, value)
        return super(Socket, self).__setattr__(key, value)

    def __delattr__(self, key):
        if key not in self.reserved:
            return delattr(ProxySocket(self), key)
        return super(Socket, self).__delattr__(key)

    def __call__(self, *args, **kwargs):
        return ProxySocket(self)(*args, **kwargs)

    def __dir__(self):
        return dir(ProxySocket(self))
| 23.645714 | 95 | 0.667472 |
ace4a5384a5aed47036dbc1e682958029404d85e | 3,697 | bzl | Python | third_party/mlir/tblgen.bzl | crystina-z/tensorflow | 7ebc2afb9f55e752ed5d47c91e959f61e67ce3cf | [
"Apache-2.0"
] | null | null | null | third_party/mlir/tblgen.bzl | crystina-z/tensorflow | 7ebc2afb9f55e752ed5d47c91e959f61e67ce3cf | [
"Apache-2.0"
] | null | null | null | third_party/mlir/tblgen.bzl | crystina-z/tensorflow | 7ebc2afb9f55e752ed5d47c91e959f61e67ce3cf | [
"Apache-2.0"
] | 1 | 2021-05-13T02:54:49.000Z | 2021-05-13T02:54:49.000Z | """BUILD extensions for MLIR table generation."""
def gentbl(name, tblgen, td_file, tbl_outs, td_srcs = [], td_includes = [], td_relative_includes = [], strip_include_prefix = None, test = False):
"""gentbl() generates tabular code from a table definition file.
Args:
name: The name of the build rule for use in dependencies.
tblgen: The binary used to produce the output.
td_file: The primary table definitions file.
tbl_outs: A list of tuples (opts, out), where each opts is a string of
options passed to tblgen, and the out is the corresponding output file
produced.
td_srcs: A list of table definition files included transitively.
td_includes: A list of include paths for relative includes, provided as build targets.
td_relative_includes: A list of include paths for relative includes, provided as relative path.
strip_include_prefix: attribute to pass through to cc_library.
test: whether to create a test to invoke the tool too.
"""
srcs = []
srcs += td_srcs
if td_file not in td_srcs:
srcs += [td_file]
td_includes_cmd = [
"-I external/llvm-project/mlir/include -I external/org_tensorflow",
"-I $(GENDIR)/external/llvm-project/mlir/include",
]
for td_include in td_includes:
td_includes_cmd += [
"-I%s" % td_include,
"-I$(GENDIR)/%s" % td_include,
]
for td_include in td_relative_includes:
td_includes_cmd += [
"-I%s/%s" % (native.package_name(), td_include),
"-I$(GENDIR)/%s/%s" % (native.package_name(), td_include),
]
local_inc = "-I $$(dirname $(location %s))" % td_file
if test:
# Rule to generate shell script to invoke tblgen. This generates a very
# bare shell file which the sh_test uses.
native.genrule(
name = "%s_genrule_sh" % name,
srcs = srcs,
outs = ["%s.gen.sh" % name],
cmd = ("echo \"\\$$1\" %s \\$${@:2} -o /dev/null > $@" % local_inc),
executable = 1,
)
for (opts, out) in tbl_outs:
# All arguments to generate the output except output destination.
base_args = [
"$(location %s)" % tblgen,
"%s" % opts,
"$(location %s)" % td_file,
"-I$(GENDIR)",
] + td_includes_cmd
rule_suffix = "_".join(opts.replace("-", "_").replace("=", "_").split(" "))
# Rule to generate code using generated shell script.
native.genrule(
name = "%s_%s_genrule" % (name, rule_suffix),
srcs = srcs,
outs = [out],
tools = [tblgen],
message = "Generating code from table: %s" % td_file,
cmd = (" ".join(base_args) + " %s -o $@" % local_inc),
)
# Optionally generate rule to test tblgen invocation.
# Disable these on windows, because $(location ...) does not seem to
# work as expected on windows.
if test:
native.sh_test(
name = "%s_%s_genrule_test" % (name, rule_suffix),
srcs = ["%s.gen.sh" % name],
args = base_args,
data = srcs + [tblgen],
tags = ["no_windows"],
)
# List of opts that do not generate cc files.
skip_opts = ["-gen-op-doc"]
hdrs = [f for (opts, f) in tbl_outs if opts not in skip_opts]
native.cc_library(
name = name,
# include_prefix does not apply to textual_hdrs.
hdrs = hdrs if strip_include_prefix else [],
strip_include_prefix = strip_include_prefix,
textual_hdrs = hdrs,
)
| 39.329787 | 146 | 0.575872 |
ace4a6e8792f8979352a57229dce23127eefaea3 | 6,304 | py | Python | nova/tests/functional/regressions/test_bug_1781710.py | confi-surya/nova | adda77352cbe037f47c86bbd809c94fee269eaae | [
"Apache-2.0"
] | 1 | 2018-08-19T02:13:16.000Z | 2018-08-19T02:13:16.000Z | nova/tests/functional/regressions/test_bug_1781710.py | confi-surya/nova | adda77352cbe037f47c86bbd809c94fee269eaae | [
"Apache-2.0"
] | 2 | 2021-03-31T19:25:14.000Z | 2021-12-13T20:15:06.000Z | nova/tests/functional/regressions/test_bug_1781710.py | confi-surya/nova | adda77352cbe037f47c86bbd809c94fee269eaae | [
"Apache-2.0"
] | 1 | 2020-07-22T22:15:29.000Z | 2020-07-22T22:15:29.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.scheduler import filter_scheduler
from nova.scheduler import weights
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional import integrated_helpers
from nova.tests.unit.image import fake as image_fake
from nova.tests.unit import policy_fixture
from nova.virt import fake
class HostNameWeigher(weights.BaseHostWeigher):
def _weigh_object(self, host_state, weight_properties):
"""Prefer host1 over host2."""
weights = {'host1': 100, 'host2': 1}
return weights.get(host_state.host, 0)
class AntiAffinityMultiCreateRequest(test.TestCase,
integrated_helpers.InstanceHelperMixin):
"""Regression test for bug 1781710 introduced in Rocky.
The ServerGroupAntiAffinityFilter changed in Rocky to support the
"max_server_per_host" rule in the group's anti-affinity policy which
allows having more than one server from the same anti-affinity group
on the same host. As a result, the scheduler filter logic changed and
a regression was introduced because of how the FilterScheduler is tracking
which hosts are selected for each instance in a multi-create request.
This test uses a custom weigher to ensure that when creating two servers
in a single request that are in the same anti-affinity group with
the default "max_server_per_host" setting (1), the servers are split
across the two hosts even though normally one host would be weighed higher
than the other.
"""
def setUp(self):
super(AntiAffinityMultiCreateRequest, self).setUp()
self.useFixture(policy_fixture.RealPolicyFixture())
self.useFixture(nova_fixtures.NeutronFixture(self))
self.useFixture(nova_fixtures.PlacementFixture())
api_fixture = self.useFixture(nova_fixtures.OSAPIFixture(
api_version='v2.1'))
# The admin API is used to get the server details to verify the
# host on which the server was built.
self.admin_api = api_fixture.admin_api
self.api = api_fixture.api
image_fake.stub_out_image_service(self)
self.addCleanup(image_fake.FakeImageService_reset)
self.start_service('conductor')
# Use the latest microversion available to make sure something does
# not regress in new microversions; cap as necessary.
self.admin_api.microversion = 'latest'
self.api.microversion = 'latest'
# Add our custom weigher.
self.flags(weight_classes=[__name__ + '.HostNameWeigher'],
group='filter_scheduler')
# disable late check on compute node to mimic devstack.
self.flags(disable_group_policy_check_upcall=True,
group='workarounds')
self.start_service('scheduler')
fake.set_nodes(['host1'])
self.addCleanup(fake.restore_nodes)
self.start_service('compute', host='host1')
fake.set_nodes(['host2'])
self.addCleanup(fake.restore_nodes)
self.start_service('compute', host='host2')
def test_anti_affinity_multi_create(self):
# Create the anti-affinity server group in which we'll create our
# two servers.
group = self.api.post_server_groups(
{'name': 'test group', 'policy': 'anti-affinity'})
# Stub out FilterScheduler._get_alternate_hosts so we can assert what
# is coming back for alternate hosts is what we'd expect after the
# initial hosts are selected for each instance.
original_get_alternate_hosts = (
filter_scheduler.FilterScheduler._get_alternate_hosts)
def stub_get_alternate_hosts(*a, **kw):
# Intercept the result so we can assert there are no alternates.
selections_to_return = original_get_alternate_hosts(*a, **kw)
# Since we only have two hosts and each host is selected for each
# server, and alternates should not include selected hosts, we
# should get back a list with two entries (one per server) and each
# entry should be a list of length 1 for the selected host per
# server with no alternates.
self.assertEqual(2, len(selections_to_return),
'There should be one host per server in the '
'anti-affinity group.')
hosts = set([])
for selection_list in selections_to_return:
self.assertEqual(1, len(selection_list), selection_list)
hosts.add(selection_list[0].service_host)
self.assertEqual(2, len(hosts), hosts)
return selections_to_return
self.stub_out('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_alternate_hosts', stub_get_alternate_hosts)
# Now create two servers in that group.
server_req = self._build_minimal_create_server_request(
self.api, 'test_anti_affinity_multi_create',
image_uuid=image_fake.AUTO_DISK_CONFIG_ENABLED_IMAGE_UUID,
networks='none')
server_req['min_count'] = 2
self.api.api_post(
'/servers', {'server': server_req,
'os:scheduler_hints': {'group': group['id']}})
selected_hosts = set([])
# Now wait for both servers to be ACTIVE and get the host on which
# each server was built.
for server in self.api.get_servers(detail=False):
server = self._wait_for_state_change(
self.admin_api, server, 'ACTIVE')
selected_hosts.add(server['OS-EXT-SRV-ATTR:host'])
# Assert that each server is on a separate host.
self.assertEqual(2, len(selected_hosts))
| 45.681159 | 79 | 0.67941 |
ace4a6efa3fc8167c973ad8c6530d1cdaba19599 | 11,234 | py | Python | Tests/test_set.py | btddg28/ironpython | 8006238c19d08db5db9bada39d765143e631059e | [
"Apache-2.0"
] | null | null | null | Tests/test_set.py | btddg28/ironpython | 8006238c19d08db5db9bada39d765143e631059e | [
"Apache-2.0"
] | null | null | null | Tests/test_set.py | btddg28/ironpython | 8006238c19d08db5db9bada39d765143e631059e | [
"Apache-2.0"
] | null | null | null | #####################################################################################
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# This source code is subject to terms and conditions of the Apache License, Version 2.0. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the Apache License, Version 2.0, please send an email to
# ironpy@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Apache License, Version 2.0.
#
# You must not remove this notice, or any other, from this software.
#
#
#####################################################################################
##
## Test built-in types: set/frozenset
##
from iptest.assert_util import *
from iptest.type_util import myset, myfrozenset
#--GLOBALS---------------------------------------------------------------------
# Shared sample element lists used by several test cases below.
# Note: s3 is a strict superset of s1; s1 and s2 overlap only at 4.
s1 = [2, 4, 5]
s2 = [4, 7, 9, 10]
s3 = [2, 4, 5, 6]
#--TEST CASES------------------------------------------------------------------
def test_equality():
    """Equality holds across every pairing of set/frozenset and subclasses."""
    set_types = (set, frozenset, myset, myfrozenset)
    unequal = [1]
    for values in [s1, s2, s3, []]:
        for left in set_types:
            for right in set_types:
                # Same contents compare equal regardless of concrete type,
                # directly and when wrapped in list/tuple containers.
                AreEqual(left(values), right(values))
                AreEqual(list(left(values)), list(right(values)))
                AreEqual([left(values)], [right(values)])
                AreEqual(tuple(left(values)), tuple(right(values)))
                AreEqual((left(values)), (right(values)))
                # ...and differ from a set built from different contents.
                Assert(left(values) != left(unequal))
                Assert(list(left(values)) != list(left(unequal)))
                Assert([left(values)] != [left(unequal)])
                Assert(tuple(left(values)) != tuple(left(unequal)))
                Assert((left(values)) != (left(unequal)))
def test_sanity():
    """Constructor, membership, relational and algebraic behavior for
    set/frozenset and their subclasses."""
    for x in (set, frozenset, myset, myfrozenset):
        # creating as default
        y = x()
        AreEqual(len(y), 0)
        # creating with 2 args
        AssertError(TypeError, x, list(range(3)), 3)
        #!!!AssertError(TypeError, x.__new__, str)
        #!!!AssertError(TypeError, x.__new__, str, 'abc')
        xs1, xs2, xs3 = x(s1), x(s2), x(s3)
        # membership
        AreEqual(4 in xs1, True)
        AreEqual(6 in xs1, False)
        # relation with another of the same type
        AreEqual(xs1.issubset(xs2), False)
        AreEqual(xs1.issubset(xs3), True)
        AreEqual(xs3.issuperset(xs1), True)
        AreEqual(xs3.issuperset(xs2), False)
        # equivalent op (<= / >= mirror issubset / issuperset)
        AreEqual(xs1 <= xs2, False)
        AreEqual(xs1 <= xs3, True)
        AreEqual(xs3 >= xs1, True)
        AreEqual(xs3 >= xs2, False)
        # named algebraic methods
        AreEqual(xs1.union(xs2), x([2, 4, 5, 7, 9, 10]))
        AreEqual(xs1.intersection(xs2), x([4]))
        AreEqual(xs1.difference(xs2), x([2, 5]))
        AreEqual(xs2.difference(xs1), x([7, 9, 10]))
        AreEqual(xs2.symmetric_difference(xs1), x([2, 5, 7, 9, 10]))
        AreEqual(xs3.symmetric_difference(xs1), x([6]))
        # equivalent op (| & - ^ mirror the named methods above)
        AreEqual(xs1 | xs2, x([2, 4, 5, 7, 9, 10]))
        AreEqual(xs1 & xs2, x([4]))
        AreEqual(xs1 - xs2, x([2, 5]))
        AreEqual(xs2 - xs1, x([7, 9, 10]))
        AreEqual(xs2 ^ xs1, x([2, 5, 7, 9, 10]))
        AreEqual(xs3 ^ xs1, x([6]))
        # repeat with list arguments: the named methods accept any iterable
        AreEqual(xs1.issubset(s2), False)
        AreEqual(xs1.issubset(s3), True)
        AreEqual(xs3.issuperset(s1), True)
        AreEqual(xs3.issuperset(s2), False)
        AreEqual(xs1.union(s2), x([2, 4, 5, 7, 9, 10]))
        AreEqual(xs1.intersection(s2), x([4]))
        AreEqual(xs1.difference(s2), x([2, 5]))
        AreEqual(xs2.difference(s1), x([7, 9, 10]))
        AreEqual(xs2.symmetric_difference(s1), x([2, 5, 7, 9, 10]))
        AreEqual(xs3.symmetric_difference(s1), x([6]))
def test_ops():
    """Augmented (&= |= -= ^=) and binary operators across every combination
    of set/frozenset and their subclasses."""
    # Note: these locals shadow the module-level s1/s2/s3 lists on purpose.
    s1, s2, s3 = 'abcd', 'be', 'bdefgh'
    for t1 in (set, frozenset, myset, myfrozenset):
        for t2 in (set, frozenset, myset, myfrozenset):
            # set/frozenset creation
            AreEqual(t1(t2(s1)), t1(s1))
            # The operator token is spliced into source text and exec'd so
            # each op is exercised in both augmented and plain binary form.
            # exp1 is the expected result against s2, exp2 against s3.
            for (op, exp1, exp2) in [('&', 'b', 'bd'), ('|', 'abcde', 'abcdefgh'), ('-', 'acd', 'ac'), ('^', 'acde', 'acefgh')]:
                x1 = t1(s1)
                exec("x1 %s= t2(s2)" % op)
                AreEqual(x1, t1(exp1))
                x1 = t1(s1)
                exec("x1 %s= t2(s3)" % op)
                AreEqual(x1, t1(exp2))
                x1 = t1(s1)
                exec("y = x1 %s t2(s2)" % op)
                AreEqual(y, t1(exp1))
                x1 = t1(s1)
                exec("y = x1 %s t2(s3)" % op)
                AreEqual(y, t1(exp2))
def test_none():
    """None behaves like any other element in set algebra and in repr."""
    with_d = set([None, 'd'])
    with_abc = set(['a', 'b', 'c', None])
    union = set([None, 'a', 'c', 'b', 'd'])
    AreEqual(with_d | with_abc, union)
    AreEqual(with_abc | with_d, union)
    AreEqual(with_d & with_abc, set([None]))
    AreEqual(with_abc & with_d, set([None]))
    AreEqual(with_d - with_abc, set('d'))
    AreEqual(with_abc - with_d, set('abc'))
    # repr of a set holding only None (Python 2 style repr).
    only_none = set()
    only_none.add(None)
    AreEqual(repr(only_none), 'set([None])')
def test_cmp():
    """Verify we can compare sets that aren't the same type"""
    # Built-in set vs frozenset: equal contents compare as 0; a strict
    # subset orders below its superset (Python 2 cmp semantics).
    a = frozenset([1,2])
    b = set([1,2])
    abig = frozenset([1,2,3])
    bbig = set([1,2,3])
    AreEqual(cmp(a,b), 0)
    AreEqual(cmp(a,bbig), -1)
    AreEqual(cmp(abig,b), 1)
    # Same checks across user-defined subclasses of set/frozenset.
    class sset(set): pass
    class fset(frozenset): pass
    a = fset([1,2])
    b = sset([1,2])
    abig = fset([1,2,3])
    bbig = sset([1,2,3])
    AreEqual(cmp(a,b), 0)
    AreEqual(cmp(a,bbig), -1)
    AreEqual(cmp(abig,b), 1)
def test_deque():
    """deque.remove and membership, including a wrapped internal buffer and
    elements whose __eq__ raises."""
    # IronPython/Silverlight expose deque from _collections.
    if is_cli or is_silverlight:
        from _collections import deque
    else:
        from collections import deque
    x = deque([2,3,4,5,6])
    x.remove(2)
    AreEqual(x, deque([3,4,5,6]))
    x.remove(6)
    AreEqual(x, deque([3,4,5]))
    x.remove(4)
    AreEqual(x, deque([3,5]))
    # get a deque w/ head/tail backwards...
    x = deque([1,2,3,4,5,6,7,8])
    x.popleft()
    x.popleft()
    x.popleft()
    x.popleft()
    x.append(1)
    x.append(2)
    x.append(3)
    x.append(4)
    AreEqual(x, deque([5,6,7,8, 1, 2, 3, 4]))
    # removal must work from every position of the wrapped buffer
    x.remove(5)
    AreEqual(x, deque([6,7,8, 1, 2, 3, 4]))
    x.remove(4)
    AreEqual(x, deque([6,7,8, 1, 2, 3]))
    x.remove(8)
    AreEqual(x, deque([6,7,1, 2, 3]))
    x.remove(2)
    AreEqual(x, deque([6,7,1, 3]))
    # remove() must propagate exceptions raised by element comparison
    class BadCmp:
        def __eq__(self, other):
            raise RuntimeError
    d = deque([1,2, BadCmp()])
    AssertError(RuntimeError, d.remove, 3)
    # membership honors a custom __eq__ on the stored element
    x = deque()
    class y(object):
        def __eq__(self, other):
            return True
    x.append(y())
    AreEqual(y() in x, True)
    # deque(iterable, None) is accepted; a third argument is not
    x = deque({}, None)
    AreEqual(x, deque([]))
    AssertErrorWithPartialMessage(TypeError, "takes at most 2 arguments (3 given)", deque, 'abc', 2, 2)
def test_singleton():
    """An empty frozenset is interned: every construction yields one object,
    and frozenset(frozenset) returns its argument unchanged."""
    AreEqual(frozenset([]) is frozenset([]), True)
    populated = frozenset([1, 2, 3])
    AreEqual(populated is frozenset(populated), True)
@skip("silverlight") # no random
def test_iteration_no_mutation_bad_hash():
    """create a set w/ objects with a bad hash and enumerate through it. No exceptions should be thrown"""
    import random

    class RandomHash(object):
        # Deliberately unstable hash: a fresh random value on every call.
        def __hash__(self):
            return int(random.random()*200)

    items = [RandomHash() for i in range(1000)]
    bucket = set(items)
    # Enumeration itself is the assertion: it must not raise.
    for element in bucket:
        pass
def test_null_elements():
    """None is a first-class element for every set API, across set, frozenset,
    and subclasses of each."""
    class SetSubclass(set):
        pass
    class FrozenSetSubclass(frozenset):
        pass
    for thetype in [set, frozenset, SetSubclass, FrozenSetSubclass]:
        s = thetype([None])
        AreEqual(s, set([None]))
        AreEqual(s.copy(), set([None]))
        # Disjointness, subset and superset checks with None present.
        AreEqual(s.isdisjoint(set()), True)
        AreEqual(s.isdisjoint(set([None])), False)
        AreEqual(s.isdisjoint(set([42])), True)
        AreEqual(s.isdisjoint(set([None, 42])), False)
        AreEqual(s.issubset(set()), False)
        AreEqual(s.issubset(set([42])), False)
        AreEqual(s.issubset(set([None])), True)
        AreEqual(s.issubset(set([None, 42])), True)
        AreEqual(s.issuperset(set()), True)
        AreEqual(s.issuperset(set([42])), False)
        AreEqual(s.issuperset(set([None])), True)
        AreEqual(s.issuperset(set([None, 42])), False)
        # Algebraic methods, including the zero-argument forms.
        AreEqual(s.union(), set([None]))
        AreEqual(s.union(set([None])), set([None]))
        AreEqual(s.union(set()), set([None]))
        AreEqual(s.intersection(), set([None]))
        AreEqual(s.intersection(set([None])), set([None]))
        AreEqual(s.intersection(set()), set())
        AreEqual(s.difference(), set([None]))
        AreEqual(s.difference(set([None])), set())
        AreEqual(s.difference(set()), set([None]))
        AreEqual(s.symmetric_difference(set([None])), set())
        AreEqual(s.symmetric_difference(set()), set([None]))
        # Test mutating operations (only on the mutable types).
        if 'add' in dir(s):
            s.remove(None)
            AreEqual(s, set())
            s.add(None)
            AreEqual(s, set([None]))
            s.discard(None)
            AreEqual(s, set())
            s.discard(None) # make sure we don't raise exception
            AssertError(KeyError, s.remove, None)
            s.add(None)
            s.clear()
            AreEqual(s, set())
            s.add(None)
            AreEqual(s.pop(), None)
            AreEqual(s, set())
            s.update(set([None]))
            AreEqual(s, set([None]))
            s.intersection_update(set([42]))
            AreEqual(s, set())
            s.update(set([None, 42]))
            s.difference_update(set([None]))
            AreEqual(s, set([42]))
            s.symmetric_difference_update(set([None, 42]))
            AreEqual(s, set([None]))
def test_frozenness():
    """A frozenset built from a set is a snapshot: later mutation of the
    source set must not leak into the frozen copy."""
    mutable = set([1, 2, 3])
    frozen = frozenset(mutable)
    mutable.add(4)
    AreEqual(4 in frozen, False)
def test_set_comp():
    """Set-comprehension scoping: the loop variable must not leak into the
    enclosing function, class body, or exec'd namespace."""
    # locals() inside the comprehension sees the comprehension's own x.
    AreEqual({locals()['x'] for x in (2,3,4)}, set([2, 3, 4]))
    # The loop variable does not clobber an outer binding of the same name.
    x = 100
    {x for x in (2,3,4)}
    AreEqual(x, 100)
    # Nor does it become a class attribute.
    class C:
        {x for x in (2,3,4)}
    AreEqual(hasattr(C, 'x'), False)
    class C:
        abc = {locals()['x'] for x in (2,3,4)}
    AreEqual(C.abc, set([2,3,4]))
    # Same behavior when the comprehension is compiled and exec'd.
    d = {}
    exec(compile("abc = {locals()['x'] for x in (2,3,4)}", 'exec', 'exec'), d, d)
    AreEqual(d['abc'], set([2,3,4]))
    # Free variables resolve against the exec namespace.
    d = {'y':42}
    exec(compile("abc = {y for x in (2,3,4)}", 'exec', 'exec'), d, d)
    AreEqual(d['abc'], set([42]))
    d = {'y':42, 't':(2,3,42)}
    exec(compile("abc = {y for x in t if x == y}", 'exec', 'exec'), d, d)
    AreEqual(d['abc'], set([42]))
    # Closure over enclosing-function variables from inside comprehensions.
    t = (2,3,4)
    v = 2
    abc = {v for x in t}
    AreEqual(abc, set([2]))
    abc = {x for x in t if x == v}
    AreEqual(abc, set([2]))
    def f():
        abc = {x for x in t if x == v}
        AreEqual(abc, set([2]))
    f()
    # NOTE(review): this second f is defined but never invoked — likely a
    # missing f() call in the original test.
    def f():
        abc = {v for x in t}
        AreEqual(abc, set([2]))
    # Class bodies: the comprehension still sees module-level t and v, and
    # the module-level abc from above remains unchanged.
    class C:
        abc = {v for x in t}
    AreEqual(abc, set([2]))
    class C:
        abc = {x for x in t if x == v}
    AreEqual(abc, set([2]))
#--MAIN------------------------------------------------------------------------
# Discover and execute the test_* functions above via the iptest harness.
run_test(__name__)
| 30.362162 | 128 | 0.49733 |
ace4a6f6a3cb0dba8b652bc101c05a5dc2ff9cda | 4,042 | py | Python | google-cloud-sdk/lib/googlecloudsdk/api_lib/datapol/annotation.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | null | null | null | google-cloud-sdk/lib/googlecloudsdk/api_lib/datapol/annotation.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | null | null | null | google-cloud-sdk/lib/googlecloudsdk/api_lib/datapol/annotation.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | 3 | 2017-07-27T18:44:13.000Z | 2020-07-25T17:48:53.000Z | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers to interact with the Annotation serivce via the Cloud Datapol API."""
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.datapol import utils
def _GetService():
  """Returns the data policy annotation service client."""
  client = utils.GetClientInstance()
  return client.taxonomyStores_dataTaxonomies_annotations
def Create(taxonomy_id,
           annotation_name,
           description,
           parent_annotation=None,
           child_annotations=None):
  """Makes an API call to create an annotation in the given taxonomy.

  Args:
    taxonomy_id: Id of a taxonomy.
    annotation_name: Display name of the new annotation.
    description: A short description of the annotation.
    parent_annotation: Id of the parent annotation to this annotation.
    child_annotations: Ids of child annotations of this annotation.

  Returns:
    An Annotation message.
  """
  messages = utils.GetMessagesModule()
  # Build the Annotation payload first, then wrap it in the create request.
  annotation = messages.Annotation(
      displayName=annotation_name,
      description=description,
      parentAnnotation=parent_annotation,
      childAnnotations=child_annotations if child_annotations else [])
  request = messages.DatapolTaxonomyStoresDataTaxonomiesAnnotationsCreateRequest(
      parent=utils.GetTaxonomyRelativeName(taxonomy_id),
      annotation=annotation)
  return _GetService().Create(request)
def Delete(taxonomy_id, annotation_id):
  """Makes an API call to delete an annotation.

  Args:
    taxonomy_id: Id of a taxonomy.
    annotation_id: Id of the annotation.

  Returns:
    An Operation message which can be used to check on the progress of the
    deletion.
  """
  annotation_name = utils.GetAnnotationRelativeName(taxonomy_id, annotation_id)
  request = (utils.GetMessagesModule()
             .DatapolTaxonomyStoresDataTaxonomiesAnnotationsDeleteRequest(
                 name=annotation_name))
  return _GetService().Delete(request)
def Get(taxonomy_id, annotation_id):
  """Makes an API call to get the definition of an annotation.

  Args:
    taxonomy_id: Id of a taxonomy.
    annotation_id: Id of the annotation.

  Returns:
    An Annotation message.
  """
  # The API addresses the annotation by its full relative resource name,
  # built from the taxonomy and annotation ids.
  return _GetService().Get(
      utils.GetMessagesModule()
      .DatapolTaxonomyStoresDataTaxonomiesAnnotationsGetRequest(
          name=utils.GetAnnotationRelativeName(taxonomy_id, annotation_id)))
def List(taxonomy_id, limit=None):
  """Makes API calls to list annotations under the given taxonomy.

  Args:
    taxonomy_id: Id of a taxonomy.
    limit: The maximum number of annotations to yield; None for no limit.

  Returns:
    Generator that yields annotations.
  """
  messages = utils.GetMessagesModule()
  request = messages.DatapolTaxonomyStoresDataTaxonomiesAnnotationsListRequest(
      parent=utils.GetTaxonomyRelativeName(taxonomy_id))
  # list_pager transparently follows nextPageToken across pages.
  return list_pager.YieldFromList(
      _GetService(),
      request,
      limit=limit,
      field='annotations',
      batch_size_attribute='pageSize')
def Update(taxonomy_id, annotation_id, description):
  """Makes an API call to update an annotation.

  Only the description can be changed through this call; the Patch request
  body carries nothing else.

  Args:
    taxonomy_id: Id of a taxonomy.
    annotation_id: Id of the annotation.
    description: New description to be updated.

  Returns:
    An Annotation message.
  """
  messages = utils.GetMessagesModule()
  return _GetService().Patch(
      messages.DatapolTaxonomyStoresDataTaxonomiesAnnotationsPatchRequest(
          name=utils.GetAnnotationRelativeName(taxonomy_id, annotation_id),
          updateAnnotationRequest=messages.UpdateAnnotationRequest(
              description=description)))
ace4a8fe564bec9b67d8b701cf61c1855f718f0d | 7,285 | py | Python | sac/tests/sac_test.py | sandipan1/robo_rl | 3bcb7caabeba71dd747fadf2355ac42408b7f340 | [
"MIT"
] | 5 | 2018-10-16T03:48:02.000Z | 2021-10-01T08:58:05.000Z | sac/tests/sac_test.py | sandipan1/robo_rl | 3bcb7caabeba71dd747fadf2355ac42408b7f340 | [
"MIT"
] | 1 | 2018-10-17T16:19:14.000Z | 2018-10-31T06:19:30.000Z | sac/tests/sac_test.py | sandipan1/robo_rl | 3bcb7caabeba71dd747fadf2355ac42408b7f340 | [
"MIT"
] | null | null | null | import os
import gym
import numpy as np
import torch
import torch.nn as nn
from robo_rl.common.utils import print_heading
from robo_rl.common.utils import soft_update
from robo_rl.sac import SAC
from robo_rl.sac import TanhSquasher
from tensorboardX import SummaryWriter
from torch.optim import Adam, SGD
# Smoke-test script: walks one SAC update cycle by hand and prints every
# intermediate quantity so the loss terms can be inspected manually.
env = gym.make("FetchReach-v1")
torch.set_default_tensor_type(torch.DoubleTensor)

# Set seeds everywhere so the printed values are reproducible.
seed = 0
env.seed(seed)
torch.manual_seed(seed)
np.random.seed(seed)

# Dimensions come from the gym env; FetchReach observations are dicts, and
# only the "observation" entry is used here.
action_dim = env.action_space.shape[0]
state_dim = env.observation_space.spaces["observation"].shape[0]
hidden_dim = [256, 256]

squasher = TanhSquasher()

logdir = "./tensorboard_log/"
os.makedirs(logdir, exist_ok=True)
writer = SummaryWriter(log_dir=logdir)

sac = SAC(state_dim=state_dim, action_dim=action_dim, writer=writer, hidden_dim=hidden_dim, squasher=squasher,
          optimizer=SGD)

print_heading("Architecture of value network")
print(sac.value)
print_heading("Architecture of Q-value networks (critics)")
print(sac.critics)
print_heading("Architecture of policy")
print(sac.policy)

print_heading("Check initialisation of networks using random observation and action")
state = torch.Tensor(env.reset()["observation"])
action = sac.policy.get_action(state, squasher=squasher, evaluate=False)
state_action = torch.cat([state, action], 0)
print("Value ".ljust(20), sac.value(state))
print("Target Value ".ljust(20), sac.value_target(state))
print("Critic 1 : Q Value".ljust(20), sac.critics[0](state_action))
print("Critic 2 : Q Value".ljust(20), sac.critics[1](state_action))
print("Policy ".ljust(20), action)

# Accumulators for a tiny hand-rolled batch of transitions.
state_batch = [state]
action_batch = [action]
reward_batch = []
next_state_batch = []
done_batch = []
# Roll the policy out for a couple of steps to build a small batch of
# (s, a, r, s', done) transitions for the hand-checked updates below.
num_steps = 2
for i in range(num_steps):
    next_state, reward, done, info = env.step(action.detach().numpy())
    next_state_batch.append(torch.Tensor(next_state["observation"]))
    reward_batch.append(torch.Tensor([reward]))
    # done will be False since just reset environment
    done_batch.append(torch.Tensor([done]))
    if i < num_steps - 1:
        # Chain the transition: s' becomes the next state, and the policy
        # samples the next action from it.
        state_batch.append(next_state_batch[i])
        action = sac.policy.get_action(next_state_batch[i], squasher=squasher, evaluate=False)
        action_batch.append(action)

# Detach so the batches behave like replay-buffer samples (no grad history).
state_batch = torch.stack(state_batch).detach()
action_batch = torch.stack(action_batch).detach()
reward_batch = torch.stack(reward_batch).detach()
next_state_batch = torch.stack(next_state_batch).detach()
done_batch = torch.stack(done_batch).detach()
print_heading("Calculations for JQ")

# Soft Q target: q_hat = scaled reward + discounted target-V of s'.
# The (1 - done) mask zeroes the bootstrap term at episode ends; the
# "done" variant is computed only so both can be printed and compared.
q_hat_not_done = sac.scale_reward * reward_batch + \
                 sac.discount_factor * (1 - done_batch) * sac.value_target(next_state_batch)
q_hat_done = sac.scale_reward * reward_batch + \
             sac.discount_factor * done_batch * sac.value_target(next_state_batch)
q_1 = sac.critics[0](torch.cat([state_batch, action_batch], 1))
q_2 = sac.critics[1](torch.cat([state_batch, action_batch], 1))
mse_loss = nn.MSELoss()
# JQ = 0.5 * E[(Q(s,a) - q_hat)^2]; targets are detached so gradients flow
# only through the critics.
q1_loss = 0.5 * mse_loss(q_1, q_hat_not_done.detach())
q2_loss = 0.5 * mse_loss(q_2, q_hat_not_done.detach())

print("Reward".ljust(25), reward_batch[0], reward_batch[1])
print("Scale Factor".ljust(25), sac.scale_reward)
print("q_hat - not done".ljust(25), q_hat_not_done[0], q_hat_not_done[1])
print("q_hat - done".ljust(25), q_hat_done[0], q_hat_done[1])
print("q1 ".ljust(25), q_1[0], q_1[1])
print("q2 ".ljust(25), q_2[0], q_2[1])
print("q1 loss".ljust(25), q1_loss)
print("q2 loss".ljust(25), q2_loss)

print_heading("Update Q1 and Q2")
# Step each critic optimizer separately and re-evaluate both critics to
# confirm the update touched only the intended network.
sac.critic1_optimizer.zero_grad()
q1_loss.backward()
sac.critic1_optimizer.step()
q_1 = sac.critics[0](torch.cat([state_batch, action_batch], 1))
q_2 = sac.critics[1](torch.cat([state_batch, action_batch], 1))
print("Q1 optimised, hence only Q1 should change")
print("q1 ".ljust(25), q_1[0], q_1[1])
print("q2 ".ljust(25), q_2[0], q_2[1])
sac.critic2_optimizer.zero_grad()
q2_loss.backward()
sac.critic2_optimizer.step()
q_1 = sac.critics[0](torch.cat([state_batch, action_batch], 1))
q_2 = sac.critics[1](torch.cat([state_batch, action_batch], 1))
print("Q2 optimised, hence only Q2 should change")
print("q1 ".ljust(25), q_1[0], q_1[1])
print("q2 ".ljust(25), q_2[0], q_2[1])

print_heading("Calculation of JV")
# V target is the soft state value: min over critics minus log-prob of a
# freshly sampled (reparameterized) action from the current policy.
policy_action, log_prob = sac.policy.get_action(state_batch, squasher=sac.squasher, reparam=sac.reparam, evaluate=True)
q1_current_policy = sac.critics[0](torch.cat([state_batch, policy_action], 1))
q2_current_policy = sac.critics[1](torch.cat([state_batch, policy_action], 1))
min_q_value = torch.min(q1_current_policy, q2_current_policy)
v_target = min_q_value - log_prob
value = sac.value(state_batch)
value_loss = 0.5 * mse_loss(value, v_target.detach())
print("log prob".ljust(25), log_prob[0], log_prob[1])
print("q_1 current".ljust(25), q1_current_policy[0], q1_current_policy[1])
print("q_2 current".ljust(25), q2_current_policy[0], q2_current_policy[1])
print("min_q ".ljust(25), min_q_value[0], min_q_value[1])
print("v_target ".ljust(25), v_target[0], v_target[1])
print("value ".ljust(25), value[0], value[1])
print("value_loss".ljust(25), value_loss)

print_heading("Update V. Q1 and Q2 shouldn't change")
sac.value_optimizer.zero_grad()
value_loss.backward()
sac.value_optimizer.step()
value = sac.value(state_batch)
q1_current_policy = sac.critics[0](torch.cat([state_batch, policy_action], 1))
q2_current_policy = sac.critics[1](torch.cat([state_batch, policy_action], 1))
print("q_1 current".ljust(25), q1_current_policy[0], q1_current_policy[1])
print("q_2 current".ljust(25), q2_current_policy[0], q2_current_policy[1])
print("value ".ljust(25), value[0], value[1])

print_heading("Calculation of Jpi")
# Policy objective: minimize E[log pi(a|s) - min Q(s, a)].
policy_loss = (log_prob - min_q_value).mean()
print("log_prob".ljust(25), log_prob[0], log_prob[1])
print("min_q ".ljust(25), min_q_value[0], min_q_value[1])
print("policy loss", policy_loss)

print_heading("Update policy. log prob should change. Q1 Q2 with buffer actions should not")
sac.policy_optimizer.zero_grad()
policy_loss.backward()
sac.policy_optimizer.step()
# Re-evaluate everything after the policy step for comparison.
policy_action, log_prob = sac.policy.get_action(state_batch, squasher=sac.squasher, reparam=sac.reparam, evaluate=True)
q_1 = sac.critics[0](torch.cat([state_batch, action_batch], 1))
q_2 = sac.critics[1](torch.cat([state_batch, action_batch], 1))
q1_current_policy = sac.critics[0](torch.cat([state_batch, policy_action], 1))
q2_current_policy = sac.critics[1](torch.cat([state_batch, policy_action], 1))
min_q_value = torch.min(q1_current_policy, q2_current_policy)
policy_loss = (log_prob - min_q_value).mean()
print("q1 buffer".ljust(25), q_1[0], q_1[1])
print("q2 buffer".ljust(25), q_2[0], q_2[1])
print("q_1 current".ljust(25), q1_current_policy[0], q1_current_policy[1])
print("q_2 current".ljust(25), q2_current_policy[0], q2_current_policy[1])
print("min_q ".ljust(25), min_q_value[0], min_q_value[1])
print("log prob".ljust(25), log_prob[0], log_prob[1])
print("policy loss", policy_loss)

print_heading("Target value soft update")
# Polyak averaging: target <- (1 - tau) * target + tau * value.
target_value = sac.value_target(state_batch)
print("Target value before".ljust(25), target_value[0], target_value[1])
soft_update(original=sac.value, target=sac.value_target, t=sac.soft_update_tau)
target_value = sac.value_target(state_batch)
print("Target value after".ljust(25), target_value[0], target_value[1])
| 39.166667 | 119 | 0.750172 |
ace4a92155fca2afdffa8038167bc2302d01563b | 284 | py | Python | tests/utils/test_routes.py | nebulousdog/lazy-money-maker | 1c0a8d124b07a9b9ee3283d86c37bee8c765f47a | [
"MIT"
] | null | null | null | tests/utils/test_routes.py | nebulousdog/lazy-money-maker | 1c0a8d124b07a9b9ee3283d86c37bee8c765f47a | [
"MIT"
] | null | null | null | tests/utils/test_routes.py | nebulousdog/lazy-money-maker | 1c0a8d124b07a9b9ee3283d86c37bee8c765f47a | [
"MIT"
] | null | null | null | from marian.utils.routes import route_info
def test_route_info(app):
    """Verify route_info exposes the expected headers and the index route."""
    info = route_info(app)
    headers = list(info['headers'])
    first_route = info['routes'][0]
    assert headers == ['endpoint', 'methods', 'rule']
    assert str(first_route['endpoint']) == 'index'
    assert first_route['methods'] == 'GET,HEAD,OPTIONS'
ace4ab503e1203cfcc5556416652a3d07f8b5e9d | 11,249 | py | Python | tutorials/plot_06-FOOOFGroup.py | anchandm/fooof | dcc93b14c4a6987ce7e394696af3221dd2a7bbd6 | [
"Apache-2.0"
] | 1 | 2019-03-26T16:30:43.000Z | 2019-03-26T16:30:43.000Z | tutorials/plot_06-FOOOFGroup.py | anchandm/fooof | dcc93b14c4a6987ce7e394696af3221dd2a7bbd6 | [
"Apache-2.0"
] | null | null | null | tutorials/plot_06-FOOOFGroup.py | anchandm/fooof | dcc93b14c4a6987ce7e394696af3221dd2a7bbd6 | [
"Apache-2.0"
] | null | null | null | """
06: FOOOFGroup
==============
Using FOOOFGroup to run FOOOF across multiple power spectra.
"""
###################################################################################################
# FOOOF imports: import FOOOFGroup object
from fooof import FOOOFGroup
# Import some utilities for synthesizing some test data
from fooof.synth.params import param_sampler
from fooof.synth.gen import gen_group_power_spectra
###################################################################################################
# Synthesizing Power Spectra
# --------------------------
#
# FOOOF includes some support for creating synthetic power-spectra, that mimic real data.
#
# Here we will use that functionality to create a matrix of power spectra to test with.
#
# Here we will use a helper function called :func:`param_sampler` that takes a
# list of possible parameters, and creates an object that randomly samples from
# them to generate power spectra.
#
# If you would like to generate single power spectra, you can use :func:`gen_power_spectrum`,
# also in `fooof.synth.gen`.
#
###################################################################################################
# Settings for synthesizing power spectra
n_spectra = 10
f_range = [3, 40]
# Set some options for background parameters
# Generated spectra will have an offset of either [20, 50, 35], and exponent of [2., 2.5, 1.5]
ap_opts = param_sampler([[20, 2], [50, 2.5], [35, 1.5]])
# Set some options for peak parameters
# Generated power spectra will have either no peaks, a 10 Hz peak, or a 10 Hz & 20 Hz peak
gauss_opts = param_sampler([[], [10, 0.5, 2], [10, 0.5, 2, 20, 0.3, 4]])
###################################################################################################
#
# We can now feed these settings into :func:`gen_group_power_spectra`,
# that will generate a group of power spectra for us.
#
# Note that this function also returns a list of the parameters
# used to generate each power spectrum.
#
###################################################################################################
# Generate the group of synthetic spectra
# Note that this function also returns a list of the parameters for each func
freqs, spectra, syn_params = gen_group_power_spectra(n_spectra, f_range, ap_opts, gauss_opts)
###################################################################################################
# FOOOFGroup
# ----------
#
# The FOOOFGroup object is very similar to the FOOOF object (programmatically, it inherits
# from the FOOOF object), and can be used in the same way.
#
# The main difference is that instead of running across a single power spectrum, it
# operates across 2D matrices containing multiple power spectra.
#
# Note that by 'group' we mean merely to refer to a group of power-spectra,
# not necessarily to a group in terms of multiple subjects or similar.
# Most likely, a FOOOFGroup will be run across a collection of spectra from across
# channels, and/or across trials, within or across subjects.
#
# The main difference with the FOOOFGroup object, is that it also contains a
# `power_spectra` attribute, which stores the matrix of power-spectra to be fit,
# and collects fit results into a `group_results` attribute.
#
# Otherwise, FOOOFGroup supports all the same functionality,
# accessed in the same way as the FOOOF object.
#
# Internally, it runs the exact same fitting procedure, per spectrum, as the FOOOF object.
#
###################################################################################################
# Initialize a FOOOFGroup object - it accepts all the same settings as FOOOF
fg = FOOOFGroup(peak_width_limits=[1, 8], min_peak_height=0.05, max_n_peaks=6)
###################################################################################################
# Fit a group of power spectra with the .fit() method
# The key difference (compared to FOOOF) is that it takes a 2D array of spectra
# This matrix should have the shape of [n_spectra, n_freqs]
fg.fit(freqs, spectra)
###################################################################################################
# Print out results
fg.print_results()
###################################################################################################
# Plot a summary of the results across the group
# Note: given the simulations, we expect exponents at {1.5, 2.0. 2.5} and peaks around {10, 20}
fg.plot()
###################################################################################################
#
# Just as with the FOOOF object, you can call the convenience method `report` to run
# the fitting, and print results & plots, printing out the same as above.
#
###################################################################################################
# You can also save out PDFs reports for FOOOFGroup fits, same as with FOOOF
fg.save_report()
###################################################################################################
# FOOOFGroup Data
# ---------------
#
# FOOOFGroup collects fits across power spectra into a list of FOOOFResults objects.
#
###################################################################################################
# As it runs, FOOOFGroup collects each fit results in 'group_results'
# `group_results` is a list of FOOOFResult objects
print(fg.group_results[0:2])
###################################################################################################
# get_all_data
# ------------
#
# To collect data across all model fits, and to select specific data results from this data
# you can use the :func:`get_all_data` method. This method lets you extract specific results
# by specifying a field, as a string, and (optionally) a specific column of that data, also
# as a string (or, optionally, as an integer index).
#
###################################################################################################
# Extract aperiodic data
aps = fg.get_all_data('aperiodic_params')
exps = fg.get_all_data('aperiodic_params', 'exponent')
# Extract peak data
peaks = fg.get_all_data('peak_params')
cfs = fg.get_all_data('peak_params', 'CF')
# Extract metadata about the model fit
errors = fg.get_all_data('error')
r2s = fg.get_all_data('r_squared')
###################################################################################################
# The full list of data you can specify is available in the documentation of :func:`get_all_data`
print(fg.get_all_data.__doc__)
###################################################################################################
#
# More information about the data you can extract is also documented in the FOOOFResults object
#
###################################################################################################
# Grab a particular FOOOFResults item
# Note that as a shortcut, you can index the FOOOFGroup object directly to access 'group_results'
f_res = fg[0]
# Check the documentation for the FOOOFResults - with full descriptions of the resulting data.
print(f_res.__doc__)
###################################################################################################
# Check out the extracted exponent values
# Note that this extraction will return an array of length equal to the number of model fits
# The model fit from which each data element originated is the index of this vector
print(exps)
###################################################################################################
# Check the fit center-frequencies
# Note when you extract peak data, an extra column is returned,
# specifying which model fit it came from
print(cfs)
###################################################################################################
# Saving & Loading with FOOOFGroup
# --------------------------------
#
# FOOOFGroup also support saving and loading, with same options as saving from FOOOF.
#
# The only difference in saving FOOOFGroup, is that it saves out a 'jsonlines' file,
# in which each line is a JSON object, saving the specified data and results for
# a single power spectrum.
#
# Note that saving settings together with results will save out duplicated settings
# to each line in the output file, corresponding to each individual spectrum in the group,
# and so is somewhat inefficient. It is more parsimonious to save out a single settings file,
# and a separate file that includes the results.
#
###################################################################################################
# Save out FOOOFGroup settings & results (separately)
fg.save('FG_settings', save_settings=True)
fg.save('FG_results', save_results=True)
###################################################################################################
# You can then reload this group data
nfg = FOOOFGroup()
nfg.load('FG_results')
###################################################################################################
# Print results to check that the loaded group
nfg.print_results()
###################################################################################################
# Parallel Support
# ----------------
#
# FOOOFGroup also has support for running in parallel, which can speed things up as
# each power spectrum is fit independently.
#
# The fit method includes an optional parameter 'n_jobs', which if set at 1 (as default),
# will run FOOOF linearly. If you set this parameter to some other integer, fitting will
# launch 'n_jobs' number of jobs, in parallel. Setting n_jobs to -1 will launch in
# parallel across all available cores.
#
# Note, however, that running FOOOF in parallel does not guarantee a quicker runtime overall.
# The computation time per FOOOF-fit scales with the frequency range fit over, and the
# 'complexity' of the power spectra, in terms of number of peaks. For relatively small
# numbers of power spectra (less than ~100), across relatively small frequency ranges
# (say ~3-40Hz), running in parallel may offer no appreciable speed up.
#
###################################################################################################
# Run FOOOF fit across a group of power spectra in parallel, using all cores
fg.fit(freqs, spectra, n_jobs=-1)
###################################################################################################
# Extracting Individual Fits
# --------------------------
#
# When running FOOOF across a group of power spectra, results are stored as the FOOOFResults,
# which stores (only) the results of the model fit, not the full model fits themselves.
#
# To examine individual model fits, FOOOFGroup can regenerate FOOOF objects for individual
# power spectra, with the full model available for visualization.
#
###################################################################################################
# Extract a particular spectrum, specified by index to a FOOOF object
# Here we also specify to regenerate the full model fit, from the results
fm = fg.get_fooof(ind=2, regenerate=True)
###################################################################################################
# Print results and plot extracted FOOOF model fit
fm.print_results()
fm.plot()
| 41.662963 | 99 | 0.54716 |
ace4abd2415b1cda48315457132de674b028c3ee | 1,350 | py | Python | setup.py | HausNet/heartbeat-client | 98ecb3c1a19e6779517cf8d632dd04fd73384728 | [
"MIT"
] | null | null | null | setup.py | HausNet/heartbeat-client | 98ecb3c1a19e6779517cf8d632dd04fd73384728 | [
"MIT"
] | null | null | null | setup.py | HausNet/heartbeat-client | 98ecb3c1a19e6779517cf8d632dd04fd73384728 | [
"MIT"
] | null | null | null | """ Create a package. Steps:
1. Update the version number in this file.
2. Create source distribution:
python setup.py sdist
3. Upload to test pypi (replace VERSION with the latest version number):
twine upload --repository-url https://test.pypi.org/legacy/ dist/hausnet-heartbeat-client-[VERSION].tar.gz
4. Test installing package:
pip install --index-url https://test.pypi.org/simple/ hausnet-heartbeat-client --user
5. Upload to pypi (replace VERSION with the latest version number):
twine upload dist/hausnet-heartbeat-client-[VERSION].tar.gz
"""
import setuptools
# Read the long description from the README. A context manager guarantees
# the file handle is closed (the original left it open), and an explicit
# UTF-8 encoding keeps the build independent of the platform default.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name="hausnet-heartbeat-client",
    version="0.1.1",
    author="HausNet Developers",
    author_email="dev@hausnet.io",
    description="A client for the Heartbeat monitoring service",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/HausNet/heartbeat-client",
    packages=setuptools.find_packages(exclude=["tests"]),
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires='>=3.9',
    install_requires=['bravado']
)
| 37.5 | 115 | 0.682222 |
ace4ad1638684f27490c6d4639c305d7e3166daf | 491 | py | Python | lintuasema-backend/application/api/classes/type/models.py | luomus/lintuasemasovellus | 966546795f5e6f0efd5b5d03c08577c788dba241 | [
"MIT"
] | null | null | null | lintuasema-backend/application/api/classes/type/models.py | luomus/lintuasemasovellus | 966546795f5e6f0efd5b5d03c08577c788dba241 | [
"MIT"
] | 32 | 2021-02-03T12:17:33.000Z | 2021-05-02T16:38:13.000Z | lintuasema-backend/application/api/classes/type/models.py | luomus/lintuasemasovellus | 966546795f5e6f0efd5b5d03c08577c788dba241 | [
"MIT"
] | 1 | 2021-04-18T17:26:03.000Z | 2021-04-18T17:26:03.000Z | from application.db import db
from application.api.models import Base
class Type(Base):
    """Declarative model for a named 'type' record owned by an observatory."""

    __base_tablename__ = 'type'

    # Display name of the type.
    name = db.Column(db.String(144), nullable=False)
    # Owning observatory; the FK target is prefixed with the shared table
    # prefix defined on Base.
    observatory_id = db.Column(db.Integer, db.ForeignKey(Base.the_prefix + 'observatory.id'), nullable=False)

    # One-to-many: observation periods recorded under this type
    # (back-reference `type` is added to Observationperiod).
    Observationperiod = db.relationship("Observationperiod", backref="type", lazy=True)

    def __init__ (self, name, observatory_id):
        self.name=name
        self.observatory_id=observatory_id
ace4af14bdbfd2e4e9aa605250a6a30e1f9763a4 | 2,957 | py | Python | sparrow_cloud/message_service/sender_controller.py | jinlygenius/sparrow_cloud | 9cc8619aff48f7f439a63dddeb0ec15ca7fc2538 | [
"MIT"
] | null | null | null | sparrow_cloud/message_service/sender_controller.py | jinlygenius/sparrow_cloud | 9cc8619aff48f7f439a63dddeb0ec15ca7fc2538 | [
"MIT"
] | null | null | null | sparrow_cloud/message_service/sender_controller.py | jinlygenius/sparrow_cloud | 9cc8619aff48f7f439a63dddeb0ec15ca7fc2538 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import json
import requests
import os
from sparrow_cloud.restclient import requests_client
class TaskSender(object):
    """Sends task messages to the sparrow message backend over HTTP.

    Parameters
    ----------
    message_backend_conf : dict
        Backend configuration; 'SERVICE_CONF' and 'API_PATH' entries are
        looked up when a task is sent.
    """

    def __init__(self, message_backend_conf):
        if not message_backend_conf:
            raise Exception("message_backend_conf is not properly configured")
        self._message_backend_conf = message_backend_conf

    def base_send_task(self, exchange, routing_key, message_code, args=None, kwargs=None,
                       delay=False, delay_time=0):
        """Build the task payload and POST it to the message backend.

        Returns the backend-assigned task id on success; raises Exception
        carrying the raw response text on any backend error.
        """
        # Example of the payload accepted by the backend:
        # {
        #     "code": "new_task", "args": [1, 2, 3], "kwargs": {"key": "value"},
        #     "exchange": "default", "routing_key": "default",
        #     "delivery_mode": "persistent", "delay": False, "delay_time": 1
        # }
        data = {
            "code": message_code,
            "exchange": exchange,
            # None defaults instead of mutable [] / {} defaults; normalize here.
            "args": args if args is not None else [],
            "kwargs": kwargs if kwargs is not None else {},
            "routing_key": routing_key,
            "delivery_mode": "persistent",
            "delay": delay,
            "delay_time": delay_time
        }
        parent_options = os.environ.get("SPARROW_TASK_PARENT_OPTIONS")
        if parent_options:
            try:
                data['parent_options'] = json.loads(parent_options)
            except ValueError:
                # Malformed JSON in the environment is deliberately ignored
                # (best effort) rather than failing the send.
                pass
        backend_service_conf = self._message_backend_conf.get('SERVICE_CONF', None)
        api_path = self._message_backend_conf.get('API_PATH', None)
        result = requests_client.post(backend_service_conf, api_path=api_path, json=data)
        if result.status_code == 200:
            try:
                res = result.json()
            except ValueError:
                # 200 with a non-JSON body: surface the raw text.
                raise Exception(result.text)
            return res.get('task_id')
        raise Exception(result.text)

    def send_task(self, exchange, routing_key, message_code, delay=False, delay_time=0, *args, **kwargs):
        """Send a task; set `delay`/`delay_time` for a delayed task."""
        return self.base_send_task(
            exchange=exchange,
            routing_key=routing_key,
            message_code=message_code,
            args=args,
            kwargs=kwargs,
            delay=delay,
            delay_time=delay_time
        )
ace4af443423b94fb89a8b51f16a7b00c40c13fa | 32,467 | py | Python | dolfyn/plot/superaxes.py | aidanbharath/dolfyn | 7c8c62a780ae310b1ffdf04592fa77f400b04334 | [
"Apache-2.0"
] | 28 | 2016-03-07T16:31:34.000Z | 2022-03-29T03:28:36.000Z | dolfyn/plot/superaxes.py | aidanbharath/dolfyn | 7c8c62a780ae310b1ffdf04592fa77f400b04334 | [
"Apache-2.0"
] | 85 | 2015-09-04T15:51:26.000Z | 2022-03-29T20:45:08.000Z | dolfyn/plot/superaxes.py | aidanbharath/dolfyn | 7c8c62a780ae310b1ffdf04592fa77f400b04334 | [
"Apache-2.0"
] | 27 | 2016-04-02T04:02:10.000Z | 2022-03-26T02:45:06.000Z | import matplotlib as mpl
import numpy as np
import new
import matplotlib.pylab as pylab
transforms = mpl.transforms
Axes = mpl.axes.Axes
rcParams = mpl.rcParams
from . import basefuncs as bf
def axes(*args, **kwargs):
    """
    Add an axes at position rect specified by:

    - ``axes()`` by itself creates a default full ``subplot(111)`` window axis.

    - ``axes(rect, axisbg='w')`` where *rect* = [left, bottom, width,
      height] in normalized (0, 1) units. *axisbg* is the background
      color for the axis, default white.

    - ``axes(h)`` where *h* is an axes instance makes *h* the current
      axis. An :class:`~matplotlib.axes.Axes` instance is returned.

    =======   ============   ================================================
    kwarg     Accepts        Description
    =======   ============   ================================================
    axisbg    color          the axes background color
    frameon   [True|False]   display the frame?
    sharex    otherax        current axes shares xaxis attribute with otherax
    sharey    otherax        current axes shares yaxis attribute with otherax
    polar     [True|False]   use a polar axes?
    =======   ============   ================================================

    Examples
    --------
    * :file:`examples/pylab_examples/axes_demo.py` places custom axes.
    * :file:`examples/pylab_examples/shared_axis_demo.py` uses
      *sharex* and *sharey*.

    Notes
    -----
    This was copied from the pyplot axes function. Several methods
    have been added to the axes.
    """
    nargs = len(args)
    if nargs == 0:
        # No rect given: default to a near-full-figure axes.
        args = [[.1, .1, .8, .8]]
    if nargs > 1:
        raise TypeError('Only one non keyword arg to axes allowed')
    arg = args[0]

    # `axd` collects kwargs forwarded to fig.add_axes; `newd` collects the
    # extra styling options handled locally by this function.
    axd = {}
    newd = {}
    newd['lw'] = rcParams['axes.linewidth']
    try:
        axd['axisbg'] = kwargs.pop('axisbg')
    except:
        # 'axisbg' not supplied; keep matplotlib's default background.
        pass
    for nm in ['axisbg', 'frameon', 'sharex', 'sharey', 'polar', ]:
        if nm in kwargs:
            axd[nm] = kwargs.pop(nm)
    if 'ticksize' in kwargs:
        # 'ticksize' is shorthand that sets both x and y tick sizes.
        newd['xticksize'] = kwargs.get('ticksize')
        newd['yticksize'] = kwargs.pop('ticksize')
    # Pull the locally-handled styling kwargs out of `kwargs`; tuple entries
    # map a short alias (e.g. 'lw', 'fs') onto its long name.
    for nm in [('lw', 'linewidth'), 'linewidth', 'xticksize',
               'yticksize', ('fs', 'fontsize'), 'fontsize',
               'xlocation', 'ylocation']:
        if nm.__class__ is tuple:
            ky = nm[0]
            nm = nm[1]
        else:
            ky = nm
            nm = nm
        if ky in kwargs:
            newd[nm] = kwargs.pop(ky)

    # Resolve the target figure: explicit 'fig'/'figure' kwarg, else the
    # current figure.
    if ('fig' not in kwargs) and ('figure' not in kwargs):
        fig = pylab.gcf()
    elif 'figure' in kwargs:
        fig = kwargs.pop('figure')
    else:
        fig = kwargs.pop('fig')

    if isinstance(arg, mpl.axes.Axes):
        # An existing axes was passed in: just make it current.
        a = fig.sca(arg)
    else:
        rect = arg
        a = fig.add_axes(rect, **axd)
    # Any remaining kwargs are applied as ordinary Axes properties.
    a.set(**kwargs)

    # Move ticks to the requested side and hide the opposite spine.
    if 'xlocation' in newd:
        a.xaxis.set_ticks_position(newd['xlocation'])
        if newd['xlocation'] == 'top':
            a.spines['bottom'].set_visible(False)
        elif newd['xlocation'] == 'bottom':
            a.spines['top'].set_visible(False)
    if 'ylocation' in newd:
        a.yaxis.set_ticks_position(newd['ylocation'])
        if newd['ylocation'] == 'right':
            a.spines['left'].set_visible(False)
        elif newd['ylocation'] == 'left':
            a.spines['right'].set_visible(False)
    if 'lw' in newd:
        # Apply the line width to the spines and to the tick marks (mew).
        for sp in a.spines:
            a.spines[sp].set_linewidth(newd['lw'])
        for tck in a.xaxis.get_ticklines():
            tck.set_mew(newd['lw'])
        for tck in a.yaxis.get_ticklines():
            tck.set_mew(newd['lw'])
    if 'xticksize' in newd:
        for tck in a.xaxis.get_ticklines():
            tck.set_ms(newd['xticksize'])
    if 'yticksize' in newd:
        for tck in a.yaxis.get_ticklines():
            tck.set_ms(newd['yticksize'])
    if 'fontsize' in newd:
        for tklbl in a.xaxis.get_ticklabels():
            tklbl.set_fontsize(newd['fontsize'])
        for tklbl in a.yaxis.get_ticklabels():
            tklbl.set_fontsize(newd['fontsize'])

    # Convenience blended transforms: axes-fraction on one coordinate,
    # data units on the other.
    a.transAxesXDataY = transforms.blended_transform_factory(
        a.transAxes, a.transData)
    a.transDataXAxesY = transforms.blended_transform_factory(
        a.transData, a.transAxes)

    # Bind the extra helper methods from `basefuncs` onto this axes instance.
    # NOTE(review): `new.instancemethod` exists only in Python 2; this module
    # will not run unmodified on Python 3 -- confirm intended interpreter.
    a.setaxesframe = new.instancemethod(bf._setaxesframe, a, Axes)
    a.annoteCorner = new.instancemethod(bf.annoteCorner, a, Axes)
    a.offset_text = new.instancemethod(bf.offset_text, a, Axes)
    a.cpcolor = new.instancemethod(bf.cpcolor, a, Axes)
    a.cbar = new.instancemethod(bf.cbar, a, Axes)
    a.labelax = new.instancemethod(bf.labelax, a, Axes)
    a.skip_ticklabels = new.instancemethod(bf.skip_ticklabels, a, Axes)
    a.errorshadex = new.instancemethod(bf.errorshadex, a, Axes)
    # a.plot_specobj=new.instancemethod(plot_specobj,a,Axes)
    pylab.draw_if_interactive()
    return a
class disperse(dict):
    """
    Marker dict subclass used by :class:`axgroup`.

    Keyword values wrapped in a ``disperse`` are spread across the
    individual per-axes calls instead of being passed through verbatim:
    each axes receives its own entry from the mapping.
    """
    pass
class dispersable(object):
    """
    A descriptor class for defining dispersable attributes.

    Reading the attribute from an *instance* returns a :class:`disperse`
    dict mapping each axes in the group to that axes' value of the named
    attribute; reading it from the *class* returns the descriptor type.
    The attribute is read-only.
    """

    def __init__(self, name):
        # Name of the per-axes attribute this descriptor collects.
        self.name = name

    def __get__(self, instance, owner):
        if instance is None:
            # Class-level access: return the descriptor type itself.
            return dispersable
        return disperse([(ax, getattr(ax, self.name)) for ax in instance])

    def __set__(self, instance, value):
        # BUG FIX: the descriptor protocol invokes __set__(instance, value);
        # the original signature omitted `value`, so assignment raised a
        # TypeError instead of the intended AttributeError.
        raise AttributeError("Can't set attribute.")
class axgroup(object):
    """
    The axgroup class provides a group interface to axes-level methods.

    Many axes-level methods are defined here. These methods simply
    perform the same operation on each axes in the group. These
    methods are poorly documented here; refer to the documentation at
    the axes level for details (unless otherwise specified, methods
    here simply pass arguments through to each call at the axes
    level).

    Parameters
    ----------
    axes : iterable
        A list, tuple or np.ndarray of axes objects that will be
        included in the group.
    """

    # Dispersable transforms: accessing these on an instance returns a
    # `disperse` dict mapping each axes in the group to its transform.
    transAxesXDataY = dispersable("transAxesXDataY")
    transDataXAxesY = dispersable("transDataXAxesY")
    transAxes = dispersable("transAxes")
    transData = dispersable("transData")
    transLimits = dispersable("transLimits")

    def _disperse_kwargs(self, **kwargs):
        """Yield (axes, kwargs) pairs, replacing any `disperse` values
        with the entry belonging to each axes."""
        out = dict(**kwargs)
        for ax in self:
            for ky, val in list(kwargs.items()):
                if val.__class__ is disperse:
                    if len(val) != len(self):
                        # NOTE: message normalized; the original string
                        # embedded the line-continuation indentation.
                        raise Exception("The length of dispersable "
                                        "kwargs must match the length "
                                        "of the axgroup")
                    out[ky] = val[ax]
            yield ax, out

    def flatten(self,):
        return axgroup(self.axes.flatten())

    @property
    def flat(self,):
        return self.flatten()

    def to_list(self,):
        return list(self.flat)

    def to_set(self,):
        return set(self.flat)

    def __iter__(self,):
        for ax in self.axes.flatten():
            yield ax

    def __init__(self, axes):
        # Wrap plain sequences in an ndarray; set-like inputs are kept
        # as-is (they support the iteration the group needs).
        if set not in axes.__class__.__mro__:
            axes = np.array(axes)
        self.axes = axes

    alphNumAxes = bf.alphNumAxes

    @property
    def size(self,):
        """
        The size of the axes array.
        """
        return self.axes.size

    @property
    def shape(self,):
        """
        The shape of the axes array.
        """
        return self.axes.shape

    @property
    def ax(self,):
        """
        A shortcut to 'self.axes'
        """
        return self.axes

    def __repr__(self,):
        return '<axgroup: %s>' % self.axes.__repr__()

    def __len__(self,):
        return len(self.axes)

    def __getitem__(self, val):
        # Slicing (in any dimension) returns a sub-group; scalar
        # indexing returns the bare axes object.
        if hasattr(val, '__len__'):
            for v in val:
                if v.__class__ is slice:
                    return axgroup(self.axes[val])
        elif val.__class__ is slice:
            return axgroup(self.axes[val])
        return self.axes[val]

    def text(self, *args, **kwargs):
        """
        Place text on all axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            # BUG FIX: pass the dispersed kws (the original passed the
            # raw kwargs, defeating `disperse` values).
            ax.text(*args, **kws)

    def annotate(self, *args, **kwargs):
        """
        Annotate all axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            # BUG FIX: use the dispersed kws.
            ax.annotate(*args, **kws)

    def xgrid(self, b=None, **kwargs):
        """
        Set the xgrid for all axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            ax.xaxis.grid(b, **kws)

    def ygrid(self, b=None, **kwargs):
        """
        Set the ygrid for all axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            ax.yaxis.grid(b, **kws)

    def axhspan(self, *args, **kwargs):
        """
        Add a horizontal span (rectangle) across the axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            ax.axhspan(*args, **kws)

    def axvspan(self, *args, **kwargs):
        """
        Add a vertical span (rectangle) across the axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            ax.axvspan(*args, **kws)

    def axhline(self, y=0, *args, **kwargs):
        """
        Add a horizontal line across the axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            ax.axhline(y, *args, **kws)

    def axvline(self, x=0, *args, **kwargs):
        """
        Add a vertical line across the axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            # BUG FIX: `ax.vln` does not exist; the matplotlib method is
            # `axvline` (mirrors axhline above).
            ax.axvline(x, *args, **kws)

    def fill_between(self, *args, **kwargs):
        """
        Make filled polygons between two curves for all axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            ax.fill_between(*args, **kws)

    def fill_betweenx(self, *args, **kwargs):
        """
        Make filled polygons between two horizontal curves for all
        axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            ax.fill_betweenx(*args, **kws)

    def set_xscale(self, val):
        """
        Set the xscale {'linear', 'log', 'symlog'} for each axes in the group.
        """
        for ax in self:
            ax.set_xscale(val)

    def set_yscale(self, val):
        """
        Set the yscale {'linear', 'log', 'symlog'} for each axes in the group.
        """
        for ax in self:
            ax.set_yscale(val)

    def set_xlim(self, *args, **kwargs):
        """
        Set the xlimits for each axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            ax.set_xlim(*args, **kws)

    def set_ylim(self, *args, **kwargs):
        """
        Set the ylimits for each axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            ax.set_ylim(*args, **kws)

    def set_xticks(self, *args, **kwargs):
        """
        Set the xticks for each axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            ax.set_xticks(*args, **kws)

    def set_yticks(self, *args, **kwargs):
        """
        Set the yticks for each axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            ax.set_yticks(*args, **kws)

    def set_title(self, lbls, *args, **kwargs):
        """
        Set the title for each axes in the group.

        `lbls` can be a list of labels the same length as the axgroup,
        or if it is a string (or length 1 list) it specifies a single
        label that will be placed on each axis.
        """
        # (Docstring fixed: it previously said "ylabel".)
        if lbls.__class__ is str:
            lbls = [lbls]
        elif lbls.__class__ is not list:
            lbls = list(lbls)
        if len(lbls) == 1:
            lbls = lbls * len(self)
        for ax, lbl in zip(self, lbls):
            ax.set_title(lbl, *args, **kwargs)

    def set_ylabel(self, lbls, *args, **kwargs):
        """
        Set the ylabel for each axes in the group.

        `lbls` can be a list of labels the same length as the axgroup,
        or if it is a string (or length 1 list) it specifies a single
        label that will be placed on each axis.
        """
        if lbls.__class__ is str:
            lbls = [lbls]
        elif lbls.__class__ is not list:
            lbls = list(lbls)
        if len(lbls) == 1:
            lbls = lbls * len(self)
        for ax, lbl in zip(self, lbls):
            ax.set_ylabel(lbl, *args, **kwargs)

    def set_xlabel(self, lbls, *args, **kwargs):
        """
        Set the xlabel for each axes in the group.

        `lbls` can be a list of labels the same length as the axgroup,
        or if it is a string (or length 1 list) it specifies a single
        label that will be placed on each axis.
        """
        if lbls.__class__ is str:
            lbls = [lbls]
        elif lbls.__class__ is not list:
            lbls = list(lbls)
        if len(lbls) == 1:
            lbls = lbls * len(self)
        for ax, lbl in zip(self, lbls):
            ax.set_xlabel(lbl, *args, **kwargs)

    def plot(self, *args, **kwargs):
        """
        Plot data on all axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            # BUG FIX: use the dispersed kws.
            ax.plot(*args, **kws)

    def loglog(self, *args, **kwargs):
        """
        Loglog plot on all axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            # BUG FIX: use the dispersed kws.
            ax.loglog(*args, **kws)

    def semilogx(self, *args, **kwargs):
        """
        Semilogx plot on all axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            # BUG FIX: use the dispersed kws.
            ax.semilogx(*args, **kws)

    def semilogy(self, *args, **kwargs):
        """
        Semilogy plot on all axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            # BUG FIX: use the dispersed kws.
            ax.semilogy(*args, **kws)

    def offset_text(self, x, y, s, offset=(0, 0), *args, **kwargs):
        """
        Place offset_text in all axes in the group.
        """
        for ax, kws in self._disperse_kwargs(**kwargs):
            # BUG FIX: use the dispersed kws.
            ax.offset_text(x, y, s, offset=offset, *args, **kws)

    def hide_xticklabels(self, exclude=None, hide=True):
        """
        Hide the xticklabels of the axes in this group.

        Parameters
        ----------
        exclude : list of axes or an axes
            These are excluded from hiding.
        hide : bool
            set hide=False to show these ticklabels.
        """
        axs = self
        if exclude is not None:
            axs = list(axs.to_set() - set(exclude))
        for ax in axs:
            pylab.setp(ax.get_xticklabels(), visible=(not hide))

    def hide_yticklabels(self, exclude=None, hide=True):
        """
        Hide the yticklabels of the axes in this group.

        Parameters
        ----------
        exclude : list of axes or an axes
            These are excluded from hiding.
        hide : bool
            set hide=False to show these ticklabels.
        """
        axs = self
        if exclude is not None:
            axs = list(axs.to_set() - set(exclude))
        for ax in axs:
            pylab.setp(ax.get_yticklabels(), visible=(not hide))

    def hide(self, objs='xticklabels', ax=None):
        """
        Hide `objs` on all axes of this group *except* for those
        specified in `ax`.

        Parameters
        ----------
        objs : str {'xticklabels', 'yticklabels', 'minorxticks',
                    'minoryticks'} or a list of these.
        ax : axes, optional (default: hide all)
            The axes (or list of axes) on which these items should
            not be hidden.

        Examples
        --------
        Hide the xticklabels on all axes except ax0::

            hide('xticklabels', self.ax0)

        To hide all xticklabels, simply do::

            hide('xticklabels')

        See also
        --------
        antiset
        """
        if objs.__class__ is str:
            objs = [objs]
        types = {'x': ['xticklabels', 'minorxticks'],
                 'y': ['yticklabels', 'minoryticks']}
        for obj in objs:
            if ax.__class__ is str and ax == 'all':
                axs = self.flat
            else:
                if ax is None:
                    # Default: keep labels on the designated label axes.
                    if obj in types['x'] and hasattr(self, '_xlabel_ax'):
                        ax = self._xlabel_ax
                    elif obj in types['y'] and hasattr(self, '_ylabel_ax'):
                        ax = self._ylabel_ax
                    else:  # This gives default behavior?
                        ax = []
                if not hasattr(ax, '__len__'):
                    ax = [ax]
                axs = list(self.to_set() - set(ax))
            for axn in axs:
                if obj == 'xticklabels':
                    pylab.setp(axn.get_xticklabels(), visible=False)
                elif obj == 'yticklabels':
                    pylab.setp(axn.get_yticklabels(), visible=False)
                elif obj == 'minorxticks':
                    pylab.setp(axn.xaxis.get_minorticklines(), visible=False)
                elif obj == 'minoryticks':
                    pylab.setp(axn.yaxis.get_minorticklines(), visible=False)
                else:
                    # BUG FIX: the original evaluated the bare name
                    # `error` (a NameError); raise a meaningful error.
                    raise ValueError("Unrecognized object type: %r" % (obj,))

    def set(self, **kwargs):
        """
        Set an attribute for each axes in the group.
        """
        pylab.setp(self.ax.flatten(), **kwargs)

    def antiset(self, ax, **kwargs):
        """Apply `kwargs` to every axes in the group *except* `ax`."""
        # Some backwards compatability stuff:
        if 'xticklabels' in kwargs and kwargs['xticklabels'] == '':
            kwargs.pop('xticklabels')
            self.hide('xticklabels', ax)
        if 'yticklabels' in kwargs and kwargs['yticklabels'] == '':
            kwargs.pop('yticklabels')
            self.hide('yticklabels', ax)
        if 'minorxticks' in kwargs and not kwargs['minorxticks']:
            kwargs.pop('minorxticks')
            self.hide('minorxticks', ax)
        if 'minoryticks' in kwargs and not kwargs['minoryticks']:
            # CONSISTENCY FIX: the original passed `ax` as the pop
            # default (a no-op since the key is present, but misleading).
            kwargs.pop('minoryticks')
            self.hide('minoryticks', ax)
        if len(kwargs) == 0:
            return
        # The meat:
        if not hasattr(ax, '__len__'):
            ax = [ax]
        pylab.setp(list(set(self.ax.flatten()) - set(ax)), **kwargs)
class axSharer(object):
    """
    Bookkeeping for axis sharing within an axes grid.

    ``map`` holds one integer share-group id per grid cell; cells with
    the same non-zero id share an axis. Zero means "do not share".
    """

    def __init__(self, saxes, share_map=False):
        self.saxes = saxes
        self.map = np.zeros(saxes.n, dtype=np.uint16)
        self.map[:] = share_map
        # Cache: share-group id -> the 'prime' axes of that group.
        self._share_ax = {}

    def map_vals(self,):
        """Return the set of share-group ids present in the map."""
        return set(self.map.flatten())

    def __getitem__(self, ind):
        return self.map[ind]

    def __setitem__(self, ind, val):
        self.map[ind] = val

    def __call__(self, iv, ih):
        """
        Return the 'prime' axes to be shared for the axes at
        grid-point (iv, ih).

        Returns :class:`None` when the cell does not share an axes, or
        when no axes matching its share group has been created yet.
        """
        group = self.map[iv, ih]
        if not group:
            # Group id 0: this cell shares nothing.
            return None
        cached = self._share_ax.get(group)
        if cached is not None:
            return cached
        # Look for an already-created axes belonging to this group.
        candidates = self.saxes.axes[self.map == group]
        existing = candidates[np.nonzero(candidates)]
        if not existing.size:
            return None
        self._share_ax[group] = existing[0]
        return existing[0]
class axSpacer(object):
    """
    Defines the position and size of axes in either the horizontal or
    vertical direction.

    Parameters
    ----------
    axsize : array_like(n, float)
        An array specifying the size of each axes in inches.
    gap : array_like(n+1, float)
        An array specifying the spacing in inches between axes. The
        first element is the distance from the left/bottom of the
        figure to the first axes, the last element is the distance
        from the right/top of the figure to the last axes.
    vertical : bool (default: False)
        A flag specifying that this is a 'vertical' axSpacer (flips
        ordering of axes positions so that the first axes is at the
        top of the figure).
    """

    def __init__(self, axsize=[1, 1], gap=[.7, .2, .2], vertical=False):
        self.axsize = axsize
        self.gap = gap
        # self.units=units # Add support for units other than inches.
        self.vertical = vertical

    @property
    def axsize_(self,):
        """
        The figure-units axes sizes, array_like.
        """
        return self.axsize / self.totsize

    @axsize_.setter
    def axsize_(self, val):
        self.axsize = val * self.totsize

    @property
    def gap_(self,):
        """
        The figure-units gap between axes, array_like.
        """
        return self.gap / self.totsize

    @gap_.setter
    def gap_(self, val):
        self.gap = val * self.totsize

    @property
    def pos_(self,):
        """
        The figure-units position of the axes, array_like.
        """
        return self.pos / self.totsize

    @property
    def n(self):
        """
        The number of axes described by this axSpacer.
        """
        return len(self.axsize)

    def __len__(self,):
        return self.n

    @property
    def axsize(self,):
        """
        The axes size, in inches.
        """
        return self.__axsize

    @axsize.setter
    def axsize(self, val):
        self.__axsize = np.array(val)

    @property
    def gap(self):
        """
        The gap between axes, in inches.
        """
        return self.__gap

    @gap.setter
    def gap(self, val):
        self.__gap = np.array(val)

    def __iter__(self,):
        # Yields (position, width) pairs in figure units.
        for pos, wid in zip(self.pos_, self.axsize_):
            yield pos, wid

    @property
    def pos(self):
        # Positions in inches; reversed for vertical spacers so the
        # first axes sits at the top of the figure.
        if self.vertical:
            return (np.cumsum(self.axsize + self.gap[:-1]) - self.axsize)[::-1]
        else:
            return np.cumsum(self.axsize + self.gap[:-1]) - self.axsize

    @property
    def totsize(self,):
        """Total extent in inches: all axes plus all gaps."""
        return self.axsize.sum() + self.gap.sum()

    @totsize.setter
    def totsize(self, val):
        # BUG FIX: compute the scale factor once. The original scaled
        # `axsize` in place and then recomputed `totsize` when scaling
        # `gap`, so the resulting total size did not equal `val`.
        factor = val / self.totsize
        self.__axsize = self.__axsize * factor
        self.__gap = self.__gap * factor

    @property
    def frame(self,):
        """
        The bounding 'frame' around the axes, in inches.
        """
        return self.gap[[0, -1]]
def axvec2axSpacer(n, vec, vertflag, rel=False):
    """
    Return an :class:`axSpacer` for `n` axes based on the axes
    vector `vec`.

    Parameters
    ----------
    n : int
        The number of axes.
    vec : iterable(3)
        The (left/bottom, right/top, gap) surrounding and between the
        axes.
    vertflag : bool
        Specifies whether this is vertical (True) or horizontal
        spacing.
    rel : iterable(`n`), optional
        The relative width of each of the axes. By default all axes
        are the same width.

    Returns
    -------
    axSpacer : :class:`axSpacer`
        The axes spacer object corresponding to the specified inputs.

    Notes
    -----
    The units of the returned axSpacer match that of the input `vec`.
    """
    if rel.__class__ is False.__class__ and not rel:
        # Default value: equal-width axes.
        rel = np.ones(n)
    else:
        # GENERALIZATION: accept any sequence for `rel`; the original
        # required an ndarray because of the `.mean()` call below.
        rel = np.asarray(rel, dtype=float)
    wd = (((vec[1] - vec[0]) + vec[2]) / n - vec[2]) * rel / rel.mean()
    gap = np.empty((len(wd) + 1), dtype=wd.dtype)
    gap[0] = vec[0]
    gap[1:-1] = vec[2]
    gap[-1] = vec[1]
    return axSpacer(wd, gap, vertflag)
class axPlacer(object):
    """
    Holds the geometry describing where axes objects land in a figure.

    Parameters
    ----------
    vSpacer : :class:`axSpacer`
        The vertical axes spacer object (must have ``vertical=True``).
    hSpacer : :class:`axSpacer`
        The horizontal axes spacer object.
    """

    def __init__(self, vSpacer, hSpacer):
        if not vSpacer.vertical:
            raise Exception("The vSpacer must have property `vertical`=True")
        self.vSpacer = vSpacer
        self.hSpacer = hSpacer

    @property
    def n(self,):
        # (rows, columns) of the axes grid.
        return self.vSpacer.n, self.hSpacer.n

    def __call__(self, iv, ih):
        # Rectangle (left, bottom, width, height) in figure units for
        # the cell at grid point (iv, ih).
        return (self.hSpacer.pos_[ih],
                self.vSpacer.pos_[iv],
                self.hSpacer.axsize_[ih],
                self.vSpacer.axsize_[iv])

    @property
    def figSize(self,):
        """
        Width x Height in inches.
        """
        return (self.hSpacer.totsize, self.vSpacer.totsize)

    def __iter__(self,):
        rows, cols = self.n
        return (self(iv, ih) for iv in range(rows) for ih in range(cols))

    @property
    def axes_positions(self,):
        """
        Returns a list of location tuples (left, bottom, width,
        height) for axes objects, in row-major order.
        """
        return list(self)
def simpleAxSpacer(n, axsize, gap, frm=np.array([.5, .5]), vertical=False):
    """
    Build an :class:`axSpacer` for *n* equally sized subplots.

    Each subplot is *axsize* inches, separated by *gap* inches, with
    *frm[0]* / *frm[1]* inches of margin before the first and after the
    last subplot.

    See also: saxes, axes, calcAxesSize
    """
    spacing = np.ones(n + 1) * gap
    spacing[0] = frm[0]
    spacing[-1] = frm[1]
    sizes = np.ones(n) * axsize
    return axSpacer(sizes, spacing, vertical=vertical)
class saxes(axgroup):
    """
    Create an axes group object using S(uper)AXES.

    Parameters
    ----------
    Use keyword argument fig=<figure object> to specify the figure in
    which to create the axes.

    Notes
    -----
    n=(3,4) to set up a 3x4 array of axes.
    n=(3,[1,1,1,.5]) to set up a 3x4 array of axes with the last
    column half the width of the others.
    n=([1,1,1.5],[1,1,1,.5]) to set up a 3x4 array of axes with the
    last row 1.5 times as tall and the last column half as wide.
    h=(.1,.9,.05) to create the horizontal frame box at .1 and .9, with
    gaps of .05 between each axes.
    v=(.1,.9,.05) similarly for the vertical frame/gap.
    drawax=L, where L is a logical array of the axes you actually want to
    draw (default is all of them).
    sharex=True, chooses whether the axes share an xaxis.
    sharey=True, chooses whether the axes share a yaxis.
    """

    def __init__(self, axPlacer, **kwargs):
        # Grid of axes objects; populated lazily by drawall().
        self.axes = np.empty(axPlacer.n, dtype='object')
        self.linewidth = kwargs.pop('linewidth', rcParams['axes.linewidth'])
        self.axPlacer = axPlacer
        # Share-group bookkeeping for the x- and y-axes.
        self.sharex = axSharer(self, kwargs.pop('sharex', False))
        self.sharey = axSharer(self, kwargs.pop('sharey', False))
        # Boolean mask of which grid cells get an axes drawn.
        self.drawax = np.ones(axPlacer.n, dtype='bool')
        # Any remaining keywords become attributes (e.g. drawax=...).
        for key in kwargs:
            setattr(self, key, kwargs[key])

    @property
    def n(self,):
        # (rows, columns) of the grid, delegated to the placer.
        return self.axPlacer.n

    def set_ylabel_pos(self, pos, axs=None,):
        # Align the y-label x-coordinate across the given axes
        # (all axes by default).
        if axs is None:
            axs = self.ax.flatten()
        for ax in axs:
            ax.yaxis.set_label_coords(pos, 0.5)

    def xlabel(self, *args, **kwargs):
        """
        This is different than 'set_xlabel' because it sets the xlabel
        only for the 'self._xlabel_ax'.
        """
        self._xlabel_ax.set_xlabel(*args, **kwargs)

    def ylabel(self, *args, **kwargs):
        """
        This is different than 'set_ylabel' because it sets the ylabel
        only for the 'self._ylabel_ax'.
        """
        self._ylabel_ax.set_ylabel(*args, **kwargs)

    def _iter_axinds(self,):
        # Yield (row, column) grid indices in row-major order.
        for iv in range(self.n[0]):
            for ih in range(self.n[1]):
                yield iv, ih

    def drawall(self, **kwargs):
        """Create every axes selected by `self.drawax` and return the
        grid of axes objects."""
        if not self.n == self.drawax.shape:
            self.drawax = np.ones(self.n, dtype='bool')
        if 'lw' in kwargs:
            # Accept 'lw' as an alias for 'linewidth'.
            kwargs['linewidth'] = kwargs.pop('lw', self.linewidth)
        if 'linewidth' not in kwargs:
            kwargs['linewidth'] = self.linewidth
        else:
            self.linewidth = kwargs['linewidth']
        inter = pylab.isinteractive()
        pylab.interactive(False)
        # wait to draw the axes, until they've all been
        # created.
        for iv, ih in self._iter_axinds():
            if self.drawax[iv, ih]:
                self.ax[iv, ih] = axes(self.axPlacer(iv, ih),
                                       sharex=self.sharex(iv, ih),
                                       sharey=self.sharey(iv, ih),
                                       **kwargs)
                # NOTE(review): Axes.hold() was removed in matplotlib
                # 3.0 -- confirm the pinned matplotlib version.
                self.ax[iv, ih].hold(True)
        # Bottom-left axes carries the shared x/y labels by default.
        self._xlabel_ax = self.ax[-1, 0]
        self._ylabel_ax = self._xlabel_ax
        pylab.interactive(inter)
        pylab.draw_if_interactive()
        return self.ax
class figobj(axgroup):
    """
    A base class for axes-grid figures.

    Parameters
    ----------
    fignum : int
        Figure number
    nax : tuple(2 ints)
        Shape of the axes grid.
    axsize : tuple(2 floats)
        specifies the size of the axes [vertical, horizontal] in inches.
    frame : iterable(4)
        specifies the frame around the axes [bottom, top, left, right],
        in inches (default: [.6, .3, 1, .3]).
    gap : tuple(2 floats) or float
        specifies the gap between axes [vertical, horizontal], in
        inches (default: [.4, .4]).
    sharex, sharey : bool
        whether the axes share their x-/y-axis.
    """

    # Default grid shape; subclasses may override.
    nax = (1, 1)

    def savefig(self, *args, **kwargs):
        # Thin pass-through to Figure.savefig.
        self.fig.savefig(*args, **kwargs)
        #self.meta.write(args[0])

    def initFig(self, fignum, **kwargs):
        """Create (or reuse) the pylab figure and size it to match the
        axes placer."""
        figkws = {}
        figkws['figsize'] = kwargs.pop('figsize', self.saxes.axPlacer.figSize)
        self.fig = pylab.figure(fignum, **figkws)
        ff = np.array([0, .425])  # A fudge factor.
        # Resize an existing figure whose size does not match the
        # requested one.
        if figkws['figsize'] is not None and \
                np.all(self.fig.get_size_inches() != figkws['figsize']):
            self.fig.set_size_inches(figkws['figsize'] + ff, forward=True)
        self.clf = self.fig.clf
        self.clf()
        if 'title' in kwargs:
            # NOTE(review): canvas.set_window_title is deprecated in
            # newer matplotlib -- confirm the pinned version.
            self.fig.canvas.set_window_title(
                'Fg%d: ' % (self.fig.number) + kwargs['title'])

    def __init__(self, fignum=None, nax=[1, 1], axsize=[3, 3],
                 frame=[.6, .3, 1, .3], gap=[.4, .4],
                 sharex=False, sharey=False,
                 **kwargs):
        # Normalize scalar gap/axsize into (vertical, horizontal) pairs.
        gap = bf.pair(gap)
        axsize = bf.pair(axsize)
        vSpacer = simpleAxSpacer(nax[0],
                                 axsize[0],
                                 gap[0],
                                 frm=frame[:2],
                                 vertical=True)
        hSpacer = simpleAxSpacer(nax[1],
                                 axsize[1],
                                 gap[1],
                                 frm=frame[2:],
                                 vertical=False)
        placer = axPlacer(vSpacer, hSpacer)
        self.saxes = saxes(placer, sharex=sharex, sharey=sharey,)
        self.initFig(fignum, **kwargs)
        self.saxes.drawall()
        # Expose the drawn axes through the axgroup interface.
        axgroup.__init__(self, self.saxes.axes)

    def __enter__(self,):
        return self

    def __exit__(self, type, value, trace):
        # No cleanup required; the context manager form is provided for
        # convenience only.
        pass
| 29.923502 | 79 | 0.542982 |
ace4af654ec8fd6eb1d0b79ddc38c512fd7945e1 | 6,949 | py | Python | python/pynq/iop/tests/test_pmod_cable.py | AEW2015/PYNQ_PR_Overlay | 2c685d2d76d04e579beecdbdfd8d0919b3dfa71c | [
"BSD-3-Clause"
] | 16 | 2017-03-14T20:28:40.000Z | 2021-11-02T12:45:15.000Z | python/pynq/iop/tests/test_pmod_cable.py | xupsh/PYNQ_PR_Overlay | 2c685d2d76d04e579beecdbdfd8d0919b3dfa71c | [
"BSD-3-Clause"
] | 2 | 2017-12-04T05:46:35.000Z | 2018-11-30T21:40:45.000Z | python/pynq/iop/tests/test_pmod_cable.py | xupsh/PYNQ_PR_Overlay | 2c685d2d76d04e579beecdbdfd8d0919b3dfa71c | [
"BSD-3-Clause"
] | 8 | 2017-03-30T22:00:43.000Z | 2020-09-08T12:49:39.000Z | # Copyright (c) 2016, Xilinx, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION). HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__author__ = "Yun Rock Qu"
__copyright__ = "Copyright 2016, Xilinx"
__email__ = "pynq_support@xilinx.com"
from random import randint
from time import sleep
import pytest
from pynq import Overlay
from pynq.iop import Pmod_Cable
from pynq.tests.util import user_answer_yes
# Interactive hardware setup: the tests below are skipped unless the
# operator confirms a Pmod-to-Pmod cable is connected and identifies
# the sender/receiver IOP ports.
flag = user_answer_yes("\nTwo Pmod interfaces connected by a cable?")
if flag:
    # NOTE(review): `global` at module level is a no-op; TX_PORT and
    # RX_PORT are module globals regardless.
    global TX_PORT,RX_PORT
    TX_PORT = int(input("Type in the IOP ID of the sender (1 ~ 2): "))
    RX_PORT = int(input("Type in the IOP ID of the receiver (1 ~ 2): "))
@pytest.mark.run(order=16)
@pytest.mark.skipif(not flag, reason="need Pmod cable connected to run")
def test_cable_type():
    """Tests for the Pmod cable type.

    Note
    ----
    The cable type can only be 'straight' or 'loopback'.
    Default cable type is straight.
    The Pmod IO layout is:
    Upper row: {vdd,gnd,3,2,1,0}.
    Lower row: {vdd,gnd,7,6,5,4}.
    """
    print('\nTesting Pmod IO cable...')
    assert not TX_PORT == RX_PORT, \
        "The sender port cannot be the receiver port."
    # tx/rx are shared with the subsequent ordered tests.
    global tx,rx
    tx = [Pmod_Cable(TX_PORT,k,'out','loopback') for k in range(8)]
    rx = [Pmod_Cable(RX_PORT,k,'in','loopback') for k in range(8)]
    # Drive a known pattern on pins 0,3 (low) and 4,7 (high), then
    # infer the cable type from how it arrives on the receiver.
    tx[0].write(0)
    tx[3].write(0)
    tx[4].write(1)
    tx[7].write(1)
    if [rx[0].read(),rx[3].read(),rx[4].read(),rx[7].read()]==[0,0,1,1]:
        # Using a loop-back cable
        for i in range(8):
            rx[i].set_cable('loopback')
    elif [rx[0].read(),rx[3].read(),rx[4].read(),rx[7].read()]==[1,1,0,0]:
        # Using a straight cable (rows swapped end-to-end).
        for i in range(8):
            rx[i].set_cable('straight')
    else:
        raise AssertionError("Cable unrecognizable.")
@pytest.mark.run(order=17)
@pytest.mark.skipif(not flag, reason="need Pmod cable connected to run")
def test_rshift1():
    """Test for right shifting the bit "1".

    The sender will send patterns with the bit "1" right shifted each
    time.
    """
    print('\nGenerating tests for right shifting a \"1\"...')
    global tx, rx
    for pin in range(8):
        # Rotating [1,0,...,0] right `pin` times leaves the single 1
        # at index `pin`.
        sent = [1 if idx == pin else 0 for idx in range(8)]
        received = [0, 0, 0, 0, 0, 0, 0, 0]
        tx[pin].write(sent[pin])
        sleep(0.001)
        received[pin] = rx[pin].read()
        assert sent == received, \
            'Sent {} != received {} at Pin {}.'.format(sent, received, pin)
@pytest.mark.run(order=18)
@pytest.mark.skipif(not flag, reason="need Pmod cable connected to run")
def test_rshift0():
    """Test for right shifting the bit "0".

    The sender will send patterns with the bit "0" right shifted each
    time.
    """
    print('\nGenerating tests for right shifting a \"0\"...')
    global tx, rx
    for pin in range(8):
        # Rotating [0,1,...,1] right `pin` times leaves the single 0
        # at index `pin`.
        sent = [0 if idx == pin else 1 for idx in range(8)]
        received = [1, 1, 1, 1, 1, 1, 1, 1]
        tx[pin].write(sent[pin])
        sleep(0.001)
        received[pin] = rx[pin].read()
        assert sent == received, \
            'Sent {} != received {} at Pin {}.'.format(sent, received, pin)
@pytest.mark.run(order=19)
@pytest.mark.skipif(not flag, reason="need Pmod cable connected to run")
def test_lshift1():
    """Test for left shifting the bit "1".

    The sender will send patterns with the bit "1" left shifted each
    time.
    """
    print('\nGenerating tests for left shifting a \"1\"...')
    global tx, rx
    for step in range(8):
        # Rotating [0,...,0,1] left `step` times leaves the single 1
        # at index 7 - step; pins are exercised from 7 down to 0.
        pin = 7 - step
        sent = [1 if idx == pin else 0 for idx in range(8)]
        received = [0, 0, 0, 0, 0, 0, 0, 0]
        tx[pin].write(sent[pin])
        sleep(0.001)
        received[pin] = rx[pin].read()
        assert sent == received, \
            'Sent {} != received {} at Pin {}.'.format(sent, received, pin)
@pytest.mark.run(order=20)
@pytest.mark.skipif(not flag, reason="need Pmod cable connected to run")
def test_lshift0():
    """Test for left shifting the bit "0".

    The sender will send patterns with the bit "0" left shifted each
    time.
    """
    print('\nGenerating tests for left shifting a \"0\"...')
    global tx, rx
    for step in range(8):
        # Rotating [1,...,1,0] left `step` times leaves the single 0
        # at index 7 - step; pins are exercised from 7 down to 0.
        pin = 7 - step
        sent = [0 if idx == pin else 1 for idx in range(8)]
        received = [1, 1, 1, 1, 1, 1, 1, 1]
        tx[pin].write(sent[pin])
        sleep(0.001)
        received[pin] = rx[pin].read()
        assert sent == received, \
            'Sent {} != received {} at Pin {}.'.format(sent, received, pin)
@pytest.mark.run(order=21)
@pytest.mark.skipif(not flag, reason="need Pmod cable connected to run")
def test_random():
    """Test for random patterns.

    Testing software-generated pseudo-random numbers. Random 0/1's are
    generated at each bit location. 8 bits (1 bit per pin) are sent out
    in every iteration. This test may take a few seconds to finish.
    """
    print('\nGenerating 100 random tests...')
    global tx, rx
    for _ in range(100):
        sent = [0] * 8
        received = [1] * 8
        for pin in range(8):
            bit = randint(0, 1)
            sent[pin] = bit
            tx[pin].write(bit)
            sleep(0.001)
            received[pin] = rx[pin].read()
        assert sent == received, \
            'Sent {} != received {} at Pin {}.'.format(sent, received, pin)
    del tx, rx
| 34.40099 | 79 | 0.607857 |
ace4b01144b41d9ac404d086838e759cf279ac28 | 3,027 | py | Python | python/paddle/fluid/tests/unittests/ctr_dataset_reader.py | liym27/Paddle | 50582071dce846a973a054c40fe194069657960a | [
"Apache-2.0"
] | 3 | 2019-07-17T09:30:31.000Z | 2021-12-27T03:16:55.000Z | python/paddle/fluid/tests/unittests/ctr_dataset_reader.py | liym27/Paddle | 50582071dce846a973a054c40fe194069657960a | [
"Apache-2.0"
] | 1 | 2019-05-26T14:23:24.000Z | 2019-05-26T14:23:51.000Z | python/paddle/fluid/tests/unittests/ctr_dataset_reader.py | liym27/Paddle | 50582071dce846a973a054c40fe194069657960a | [
"Apache-2.0"
] | 4 | 2019-09-30T02:15:34.000Z | 2019-09-30T02:41:30.000Z | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import logging
import tarfile
import os
import paddle
import paddle.fluid.incubate.data_generator as data_generator
# Module-level logger used by prepare_data().
logging.basicConfig()
logger = logging.getLogger("paddle")
logger.setLevel(logging.INFO)
# Remote location and MD5 checksum of the avazu CTR dataset archive.
DATA_URL = "http://paddle-ctr-data.bj.bcebos.com/avazu_ctr_data.tgz"
DATA_MD5 = "c11df99fbd14e53cd4bfa6567344b26e"
# Files contained in the extracted archive:
"""
avazu_ctr_data/train.txt
avazu_ctr_data/infer.txt
avazu_ctr_data/test.txt
avazu_ctr_data/data.meta.txt
"""
def download_file():
    """Download the avazu CTR archive (if needed) and extract it.

    Returns
    -------
    str
        Path of the directory containing the extracted text files.
    """
    file_name = "avazu_ctr_data"
    path = paddle.dataset.common.download(DATA_URL, file_name, DATA_MD5)
    dir_name = os.path.dirname(path)
    text_file_dir_name = os.path.join(dir_name, file_name)
    if not os.path.exists(text_file_dir_name):
        # BUG FIX: use a context manager so the archive handle is always
        # closed (the original leaked the open TarFile object).
        with tarfile.open(path, "r:gz") as tar:
            tar.extractall(dir_name)
    return text_file_dir_name
def load_dnn_input_record(sent):
    """Parse a whitespace-separated string of integers into a list."""
    return [int(token) for token in sent.split()]
def load_lr_input_record(sent):
    """Parse whitespace-separated 'feature:value' pairs, keeping only
    the integer feature ids."""
    return [int(pair.split(':')[0]) for pair in sent.split()]
class DatasetCtrReader(data_generator.MultiSlotDataGenerator):
    """Data generator that converts one tab-separated CTR training line
    into the (dnn_data, lr_data, click) slot tuple."""

    def generate_sample(self, line):
        # IDIOM FIX: the inner generator was named `iter`, shadowing the
        # builtin; renamed (the returned callable's behavior is
        # unchanged).
        def record_iter():
            # line format: "<dnn feats>\t<lr feats>\t<label>"
            fs = line.strip().split('\t')
            dnn_input = load_dnn_input_record(fs[0])
            lr_input = load_lr_input_record(fs[1])
            click = [int(fs[2])]
            yield ("dnn_data", dnn_input), \
                  ("lr_data", lr_input), \
                  ("click", click)
        return record_iter
def prepare_data():
    """
    load data meta info from path, return (dnn_input_dim, lr_input_dim)
    """
    file_dir_name = download_file()
    meta_file_path = os.path.join(file_dir_name, 'data.meta.txt')
    train_file_path = os.path.join(file_dir_name, 'train.txt')
    with open(meta_file_path, "r") as meta_file:
        lines = meta_file.readlines()
    err_info = "wrong meta format"
    assert len(lines) == 2, err_info
    assert 'dnn_input_dim:' in lines[0] and 'lr_input_dim:' in lines[
        1], err_info
    # Each meta line looks like "dnn_input_dim:<int>".
    dnn_input_dim, lr_input_dim = [int(line.split(':')[1])
                                   for line in lines]
    logger.info('dnn input dim: %d' % dnn_input_dim)
    logger.info('lr input dim: %d' % lr_input_dim)
    return dnn_input_dim, lr_input_dim, train_file_path
if __name__ == "__main__":
    # Run as a dataset-generator worker: reads raw training lines from
    # stdin and writes slot-formatted records to stdout.
    pairwise_reader = DatasetCtrReader()
    pairwise_reader.run_from_stdin()
| 29.970297 | 74 | 0.690453 |
ace4b05ff7255bdc3be99e69a40e3e14d93ec6a4 | 690 | py | Python | adoptions/models.py | ptyadana/django-WEB-wisdompets | 5e2f8505b44d30d00957c28c2cb23bdeb67a9263 | [
"MIT"
] | null | null | null | adoptions/models.py | ptyadana/django-WEB-wisdompets | 5e2f8505b44d30d00957c28c2cb23bdeb67a9263 | [
"MIT"
] | null | null | null | adoptions/models.py | ptyadana/django-WEB-wisdompets | 5e2f8505b44d30d00957c28c2cb23bdeb67a9263 | [
"MIT"
] | null | null | null | from django.db import models
class Pet(models.Model):
    """A pet available for adoption."""

    # Allowed values for `sex`; stored as a single character.
    SEX_CHOICES = [('M','Male'), ('F', 'Female')]
    name = models.CharField(max_length=100)
    # Name of the person who submitted the pet for adoption.
    submitter = models.CharField(max_length=100)
    species = models.CharField(max_length=30)
    # blank=True: breed may be unknown/unspecified.
    breed = models.CharField(max_length=100, blank=True)
    description = models.TextField()
    sex = models.CharField(choices=SEX_CHOICES, max_length=1, blank=True)
    submission_date = models.DateTimeField()
    # null=True: age may be unknown.
    age = models.IntegerField(null=True)
    vaccinations = models.ManyToManyField('Vaccine', blank=True)
class Vaccine(models.Model):
    """A vaccine that a pet may have received."""

    name = models.CharField(max_length=50)

    def __str__(self):
        # Display vaccines by name in the admin and shell.
        return self.name
| 32.857143 | 73 | 0.705797 |
ace4b1261c83636d1b03e6e0bad27562079b04e8 | 11,718 | py | Python | xmas.py | jkinville-test-org/Modifed-Osprey22-pi-light-sequencer | 33894eee3ce67a1971255519d6cce7f511ea6445 | [
"MIT"
] | 1 | 2018-02-01T17:17:09.000Z | 2018-02-01T17:17:09.000Z | xmas.py | jkinville-test-org/Modifed-Osprey22-pi-light-sequencer | 33894eee3ce67a1971255519d6cce7f511ea6445 | [
"MIT"
] | 1 | 2019-07-22T18:17:17.000Z | 2019-07-22T18:17:17.000Z | xmas.py | jkinville-test-org/Modifed-Osprey22-pi-light-sequencer | 33894eee3ce67a1971255519d6cce7f511ea6445 | [
"MIT"
] | 1 | 2019-07-22T18:13:19.000Z | 2019-07-22T18:13:19.000Z | #!/usr/bin/env python
#
# Command Line usage:
# xmas.py <input sequence> <audio file>
import RPi.GPIO as GPIO, time
import sys
import time
import pygame
import random
#This is the array that stores the SPI sequence
# NOTE(review): "set" shadows the builtin; kept for compatibility with the
# rest of this script. Layout is 25 LEDs x 3 bytes (R, G, B).
set = bytearray(25 * 3)
#blinks is used to handle the Star Blinking Effect
blinks = bytearray(25 * 3)
# Number of currently blinking LEDs; -1 means the blink effect is off.
blink_active = int(-1)
blink_max = int(0)
# Blink colors: (R1,G1,B1) is the per-step increment for blinking LEDs,
# (R2,G2,B2) is the background color for non-blinking LEDs.
blink_R1 = int(0)
blink_G1 = int(0)
blink_B1 = int(0)
blink_R2 = int(0)
blink_G2 = int(0)
blink_B2 = int(0)
# Defines the mapping of logical mapping to physical mapping
# 1 - 5 are lights from top to bottom on tree
# 6 = RED
# 7 = GREEN
# 8 = BLUE
logical_map = [0 for i in range(9)]
# Defines the mapping of the GPIO1-8 to the pin on the Pi
pin_map = [0,11,12,8,15,16,18,22,7]
# Defines an arbitrary X,Y position for each LED in the star
# which is used for some star effects
star = [-190, 262,
        -90, 500,
        45, 724,
        123, 464,
        217, 272,
        442, 230,
        676, 210,
        509, 59,
        340,-122,
        355,-332,
        409,-562,
        209,-432,
        6,-337,
        -204,-459,
        -378,-539,
        -360,-349,
        -336,-116,
        -496, 70,
        -701, 227,
        -454, 241,
        -184, 60,
        -119,-143,
        107,-160,
        201, 60,
        5, 194]
#####################################################################
def starinit(n):
    """Blank every LED on the star (the ``n`` argument is unused)."""
    # gamma[0] is 0, i.e. black/off for each R/G/B byte.
    for x in range(25):
        set[x*3 ] = gamma[0]
        set[x*3+1] = gamma[0]
        set[x*3+2] = gamma[0]
    # NOTE(review): indentation was flattened in this copy; a single final
    # write/flush/sleep (matching the other star_* helpers) is assumed.
    spidev.write(set)
    spidev.flush()
    time.sleep(0.05)
#####################################################################
def star_vert(per, R1, G1, B1, R2, G2, B2):
    """Split the star vertically: LEDs whose normalized x coordinate lies
    beyond ``per`` percent get color (R1,G1,B1), the rest get (R2,G2,B2)."""
    threshold = float(per) / 100.0
    for idx in range(25):
        # Normalize the LED's x coordinate from [-701, 676] into [0, 1].
        frac = (float(star[idx * 2]) + 701.0) / 1377.0
        if frac > threshold:
            r, g, b = int(R1), int(G1), int(B1)
        else:
            r, g, b = int(R2), int(G2), int(B2)
        set[idx * 3] = gamma[r]
        set[idx * 3 + 1] = gamma[g]
        set[idx * 3 + 2] = gamma[b]
    spidev.write(set)
    spidev.flush()
#####################################################################
def star_solid(R, G, B):
    """Fill every LED on the star with one gamma-corrected color."""
    # Look the gamma-corrected channel values up once, outside the loop.
    r, g, b = gamma[int(R)], gamma[int(G)], gamma[int(B)]
    for idx in range(25):
        set[idx * 3] = r
        set[idx * 3 + 1] = g
        set[idx * 3 + 2] = b
    spidev.write(set)
    spidev.flush()
#####################################################################
def star_tips(Rt, Gt, Bt, R, G, B):
    """Fill the whole star with (R,G,B), then recolor the five tip LEDs
    (indices 2, 6, 10, 14, 18) with (Rt,Gt,Bt)."""
    r, g, b = gamma[int(R)], gamma[int(G)], gamma[int(B)]
    for idx in range(25):
        set[idx * 3] = r
        set[idx * 3 + 1] = g
        set[idx * 3 + 2] = b
    rt, gt, bt = gamma[int(Rt)], gamma[int(Gt)], gamma[int(Bt)]
    for tip in (2, 6, 10, 14, 18):
        set[tip * 3] = rt
        set[tip * 3 + 1] = gt
        set[tip * 3 + 2] = bt
    spidev.write(set)
    spidev.flush()
#####################################################################
def star_point1(R, G, B):
    """Color the LEDs of star point 1 (indices 0-4 plus inner LED 24)."""
    r, g, b = gamma[int(R)], gamma[int(G)], gamma[int(B)]
    for idx in (0, 1, 2, 3, 4, 24):
        set[idx * 3] = r
        set[idx * 3 + 1] = g
        set[idx * 3 + 2] = b
    spidev.write(set)
    spidev.flush()
#####################################################################
def star_point2(R, G, B):
    """Color the LEDs of star point 2 (indices 4-8 plus inner LED 23)."""
    r, g, b = gamma[int(R)], gamma[int(G)], gamma[int(B)]
    for idx in (4, 5, 6, 7, 8, 23):
        set[idx * 3] = r
        set[idx * 3 + 1] = g
        set[idx * 3 + 2] = b
    spidev.write(set)
    spidev.flush()
#####################################################################
def star_point3(R, G, B):
    """Color the LEDs of star point 3 (indices 8-12 plus inner LED 22)."""
    r, g, b = gamma[int(R)], gamma[int(G)], gamma[int(B)]
    for idx in (8, 9, 10, 11, 12, 22):
        set[idx * 3] = r
        set[idx * 3 + 1] = g
        set[idx * 3 + 2] = b
    spidev.write(set)
    spidev.flush()
#####################################################################
def star_point4(R, G, B):
    """Color the LEDs of star point 4 (indices 12-16 plus inner LED 21)."""
    r, g, b = gamma[int(R)], gamma[int(G)], gamma[int(B)]
    for idx in (12, 13, 14, 15, 16, 21):
        set[idx * 3] = r
        set[idx * 3 + 1] = g
        set[idx * 3 + 2] = b
    spidev.write(set)
    spidev.flush()
#####################################################################
def star_point5(R, G, B):
    """Color the LEDs of star point 5 (indices 0, 16-19 plus inner LED 20)."""
    r, g, b = gamma[int(R)], gamma[int(G)], gamma[int(B)]
    for idx in (0, 19, 18, 17, 16, 20):
        set[idx * 3] = r
        set[idx * 3 + 1] = g
        set[idx * 3 + 2] = b
    spidev.write(set)
    spidev.flush()
#####################################################################
def star_inside_solid(R, G, B):
    """Color only the five inner LEDs of the star (indices 20-24)."""
    r, g, b = gamma[int(R)], gamma[int(G)], gamma[int(B)]
    for idx in range(20, 25):
        set[idx * 3] = r
        set[idx * 3 + 1] = g
        set[idx * 3 + 2] = b
    spidev.write(set)
    spidev.flush()
#####################################################################
#####################################################################
# Setup the board
# NOTE(review): indentation was flattened in this copy of the file; the
# nesting below is reconstructed from the code's logic — verify against
# the original before running.
GPIO.setmode(GPIO.BOARD)
for i in range(1,9):
    GPIO.setup(pin_map[i], GPIO.OUT)
time.sleep(2.0);
dev = "/dev/spidev0.0"
spidev = file(dev,"wb")
# Calculate gamma correction
gamma = bytearray(256)
for i in range(256):
    gamma[i] = int(pow(float(i) / 255.0, 2.5) * 255.0 + 0.5)
starinit(1)
# Open the setup config file and parse it to determine
# how GPIO1-8 are mapped to logical 1-8
with open("setup.txt",'r') as f:
    data = f.readlines()
    for i in range(8):
        logical_map[i+1] = int(data[i])
# Open the input sequnce file and read/parse it
with open(sys.argv[1],'r') as f:
    seq_data = f.readlines()
    for i in range(len(seq_data)):
        seq_data[i] = seq_data[i].rstrip()
# Current light states
lights = [False for i in range(8)]
# Load and play the music
pygame.mixer.init()
pygame.mixer.music.load(sys.argv[2])
pygame.mixer.music.play()
# Start sequencing
start_time = int(round(time.time()*1000))
step = 1 #ignore the header line
while True :
    # Each sequence row is "time_ms,command,args...".
    next_step = seq_data[step].split(",");
    next_step[1] = next_step[1].rstrip()
    cur_time = int(round(time.time()*1000)) - start_time
    # time to run the command
    if int(next_step[0]) <= cur_time:
        print next_step
        # if the command is Relay 1-8
        if next_step[1] >= "1" and next_step[1] <= "8":
            # change the pin state
            if next_step[2] == "1":
                GPIO.output(pin_map[logical_map[int(next_step[1])]],True)
            else:
                GPIO.output(pin_map[logical_map[int(next_step[1])]],False)
        # Check for star commands
        if next_step[1].rstrip() == "BLINK":
            # Arm the blink effect: args are max blinks, increment color
            # (R1,G1,B1) and background color (R2,G2,B2).
            blink_active = 0
            blink_max = int(next_step[2])
            blink_R1 = int(next_step[3])
            blink_G1 = int(next_step[4])
            blink_B1 = int(next_step[5])
            blink_R2 = int(next_step[6])
            blink_G2 = int(next_step[7])
            blink_B2 = int(next_step[8])
            for i in range(25):
                blinks[i*3] = 0
                blinks[i*3+1] = 0
                blinks[i*3+2] = 0
            blink_next_time = int(round(time.time()*1000)) - start_time
        if next_step[1].rstrip() == "BLINK_END":
            blink_active = -1
        if next_step[1].rstrip() == "STAR_VERT":
            star_vert(next_step[2],next_step[3],next_step[4], next_step[5], next_step[6], next_step[7], next_step[8])
        if next_step[1].rstrip() == "STAR_TIPS":
            star_tips(next_step[2],next_step[3],next_step[4], next_step[5], next_step[6], next_step[7])
        if next_step[1].rstrip() == "STAR_SOLID":
            star_solid(next_step[2],next_step[3],next_step[4])
        if next_step[1].rstrip() == "STAR_INSIDE_SOLID":
            star_inside_solid(next_step[2],next_step[3],next_step[4])
        if next_step[1].rstrip() == "STAR_POINT1":
            star_point1(next_step[2],next_step[3],next_step[4])
        if next_step[1].rstrip() == "STAR_POINT2":
            star_point2(next_step[2],next_step[3],next_step[4])
        if next_step[1].rstrip() == "STAR_POINT3":
            star_point3(next_step[2],next_step[3],next_step[4])
        if next_step[1].rstrip() == "STAR_POINT4":
            star_point4(next_step[2],next_step[3],next_step[4])
        if next_step[1].rstrip() == "STAR_POINT5":
            star_point5(next_step[2],next_step[3],next_step[4])
        # if the END command
        if next_step[1].rstrip() == "END":
            for i in range(1,9):
                GPIO.output(pin_map[logical_map[i]],False)
            break
        step += 1
    # ----------BLINKS---------------------------------
    # The following is to handle the star blink command....
    # if blinks are active and it's time
    if blink_active > -1 and cur_time > blink_next_time:
        # Advance the effect roughly every 100 ms.
        blink_next_time = cur_time + 100
        #increment active blinks
        for i in range (25):
            if blinks[i*3]>0 or blinks[i*3+1]>0 or blinks[i*3+2]>0:
                blinks[i*3] += blink_R1
                blinks[i*3+1] += blink_G1
                blinks[i*3+2] += blink_B1
                # A channel reaching 255 ends that LED's blink cycle.
                if blinks[i*3]==255 or blinks[i*3+1]==255 or blinks[i*3+2]==255:
                    blinks[i*3] = 0
                    blinks[i*3+1] = 0
                    blinks[i*3+2] = 0
                    blink_active -= 1
        #try and get a new blink randomly
        if blink_active < blink_max and random.randrange(1,5) == 1:
            pick = random.randrange(0,24)
            if blinks[pick*3] == 0 and blinks[pick*3+1]==0 and blinks[pick*3+2]==0:
                blink_active += 1
                blinks[pick*3] = blink_R1
                blinks[pick*3+1] = blink_G1
                blinks[pick*3+2] = blink_B1
        #push out the serial
        for i in range (25):
            if blinks[i*3]==0 and blinks[i*3+1]==0 and blinks[i*3+2]==0:
                set[i*3] = blink_R2
                set[i*3+1] = blink_G2
                set[i*3+2] = blink_B2
            else:
                set[i*3] = blinks[i*3]
                set[i*3+1] = blinks[i*3+1]
                set[i*3+2] = blinks[i*3+2]
        spidev.write(set)
        spidev.flush()
    # ------END-BLINKS---------------------------------
| 26.753425 | 111 | 0.514166 |
ace4b1452582b1f147bf05384525e9600ef65eef | 545 | py | Python | profiles_api/permissions.py | IstrateMihai0209/profiles-rest-api | 807d3c40374047af8d84c0590d1a94d21fc1004b | [
"MIT"
] | 1 | 2022-03-11T10:06:14.000Z | 2022-03-11T10:06:14.000Z | profiles_api/permissions.py | IstrateMihai0209/profiles-rest-api | 807d3c40374047af8d84c0590d1a94d21fc1004b | [
"MIT"
] | null | null | null | profiles_api/permissions.py | IstrateMihai0209/profiles-rest-api | 807d3c40374047af8d84c0590d1a94d21fc1004b | [
"MIT"
] | null | null | null | from rest_framework import permissions
class UpdateOwnProfile(permissions.BasePermission):
    """Object-level permission: anyone may read, but a user may only
    modify their own profile."""

    def has_object_permission(self, request, view, obj):
        """Allow safe (read-only) methods unconditionally; for write
        methods, require that the target object is the requester's own."""
        if request.method not in permissions.SAFE_METHODS:
            return obj.id == request.user.id
        return True
| 38.928571 | 81 | 0.706422 |
ace4b16829ce99752c5113125bd79ac205368a56 | 12,263 | py | Python | samples/samples/backup_sample.py | thiagotnunes/python-spanner | 1343656ad43dbc41c119b652d8fe9360fa2b0e78 | [
"Apache-2.0"
] | null | null | null | samples/samples/backup_sample.py | thiagotnunes/python-spanner | 1343656ad43dbc41c119b652d8fe9360fa2b0e78 | [
"Apache-2.0"
] | null | null | null | samples/samples/backup_sample.py | thiagotnunes/python-spanner | 1343656ad43dbc41c119b652d8fe9360fa2b0e78 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This application demonstrates how to create and restore from backups
using Cloud Spanner.
For more information, see the README.rst under /spanner.
"""
import argparse
from datetime import datetime, timedelta
import time
from google.cloud import spanner
# [START spanner_create_backup]
def create_backup(instance_id, database_id, backup_id):
    """Creates a backup for a database.

    The backup expires in 14 days and captures the database at its
    earliest available version time.
    """
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)
    # Create a backup
    expire_time = datetime.utcnow() + timedelta(days=14)
    version_time = database.earliest_version_time
    backup = instance.backup(backup_id, database=database, expire_time=expire_time, version_time=version_time)
    operation = backup.create()
    # Wait for backup operation to complete.
    operation.result(1200)
    # Verify that the backup is ready.
    backup.reload()
    assert backup.is_ready() is True
    # Get the name, create time and backup size.
    backup.reload()
    print(
        "Backup {} of size {} bytes was created at {} for version of database at {}".format(
            backup.name, backup.size_bytes, backup.create_time, backup.version_time
        )
    )
# [END spanner_create_backup]
# [START spanner_restore_backup]
def restore_database(instance_id, new_database_id, backup_id):
    """Restores a database from a backup into a new database."""
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    # Create a backup on database_id.
    # Start restoring an existing backup to a new database.
    backup = instance.backup(backup_id)
    new_database = instance.database(new_database_id)
    operation = new_database.restore(backup)
    # Wait for restore operation to complete.
    operation.result(1600)
    # Newly created database has restore information.
    new_database.reload()
    restore_info = new_database.restore_info
    print(
        "Database {} restored to {} from backup {} with version time {}.".format(
            restore_info.backup_info.source_database,
            new_database_id,
            restore_info.backup_info.backup,
            restore_info.backup_info.version_time
        )
    )
# [END spanner_restore_backup]
# [START spanner_cancel_backup_create]
def cancel_backup(instance_id, database_id, backup_id):
    """Starts creating a backup, then cancels the create operation.

    Cancellation is best effort, so the backup may still be created; in
    that case it is deleted afterwards.
    """
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    database = instance.database(database_id)
    expire_time = datetime.utcnow() + timedelta(days=30)
    # Create a backup.
    backup = instance.backup(backup_id, database=database, expire_time=expire_time)
    operation = backup.create()
    # Cancel backup creation.
    operation.cancel()
    # Cancel operations are best effort so either it will complete or
    # be cancelled.
    while not operation.done():
        time.sleep(300)  # 5 mins
    # Deal with resource if the operation succeeded.
    if backup.exists():
        print("Backup was created before the cancel completed.")
        backup.delete()
        print("Backup deleted.")
    else:
        print("Backup creation was successfully cancelled.")
# [END spanner_cancel_backup_create]
# [START spanner_list_backup_operations]
def list_backup_operations(instance_id, database_id):
    """Lists in-progress/completed CreateBackup operations for a database."""
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    # List the CreateBackup operations.
    filter_ = (
        "(metadata.database:{}) AND "
        "(metadata.@type:type.googleapis.com/"
        "google.spanner.admin.database.v1.CreateBackupMetadata)"
    ).format(database_id)
    operations = instance.list_backup_operations(filter_=filter_)
    for op in operations:
        metadata = op.metadata
        print(
            "Backup {} on database {}: {}% complete.".format(
                metadata.name, metadata.database, metadata.progress.progress_percent
            )
        )
# [END spanner_list_backup_operations]
# [START spanner_list_database_operations]
def list_database_operations(instance_id):
    """Lists optimize operations for databases restored from backups."""
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    # List the progress of restore.
    filter_ = (
        "(metadata.@type:type.googleapis.com/"
        "google.spanner.admin.database.v1.OptimizeRestoredDatabaseMetadata)"
    )
    operations = instance.list_database_operations(filter_=filter_)
    for op in operations:
        print(
            "Database {} restored from backup is {}% optimized.".format(
                op.metadata.name, op.metadata.progress.progress_percent
            )
        )
# [END spanner_list_database_operations]
# [START spanner_list_backups]
def list_backups(instance_id, database_id, backup_id):
    """Demonstrates the various filters supported by list_backups."""
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    # List all backups.
    print("All backups:")
    for backup in instance.list_backups():
        print(backup.name)
    # List all backups that contain a name.
    print('All backups with backup name containing "{}":'.format(backup_id))
    for backup in instance.list_backups(filter_="name:{}".format(backup_id)):
        print(backup.name)
    # List all backups for a database that contains a name.
    print('All backups with database name containing "{}":'.format(database_id))
    for backup in instance.list_backups(filter_="database:{}".format(database_id)):
        print(backup.name)
    # List all backups that expire before a timestamp.
    expire_time = datetime.utcnow().replace(microsecond=0) + timedelta(days=30)
    print(
        'All backups with expire_time before "{}-{}-{}T{}:{}:{}Z":'.format(
            *expire_time.timetuple()
        )
    )
    for backup in instance.list_backups(
        filter_='expire_time < "{}-{}-{}T{}:{}:{}Z"'.format(*expire_time.timetuple())
    ):
        print(backup.name)
    # List all backups with a size greater than some bytes.
    print("All backups with backup size more than 100 bytes:")
    for backup in instance.list_backups(filter_="size_bytes > 100"):
        print(backup.name)
    # List backups that were created after a timestamp that are also ready.
    create_time = datetime.utcnow().replace(microsecond=0) - timedelta(days=1)
    print(
        'All backups created after "{}-{}-{}T{}:{}:{}Z" and are READY:'.format(
            *create_time.timetuple()
        )
    )
    for backup in instance.list_backups(
        filter_='create_time >= "{}-{}-{}T{}:{}:{}Z" AND state:READY'.format(
            *create_time.timetuple()
        )
    ):
        print(backup.name)
    print("All backups with pagination")
    # If there are multiple pages, additional ``ListBackup``
    # requests will be made as needed while iterating.
    for backup in instance.list_backups(page_size=2):
        print(backup.name)
# [END spanner_list_backups]
# [START spanner_delete_backup]
def delete_backup(instance_id, backup_id):
    """Deletes a backup once no restored database still references it."""
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    backup = instance.backup(backup_id)
    backup.reload()
    # Wait for databases that reference this backup to finish optimizing.
    while backup.referencing_databases:
        time.sleep(30)
        backup.reload()
    # Delete the backup.
    backup.delete()
    # Verify that the backup is deleted.
    assert backup.exists() is False
    print("Backup {} has been deleted.".format(backup.name))
# [END spanner_delete_backup]
# [START spanner_update_backup]
def update_backup(instance_id, backup_id):
    """Extends a backup's expire time by 30 days."""
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    backup = instance.backup(backup_id)
    backup.reload()
    # Expire time must be within 366 days of the create time of the backup.
    old_expire_time = backup.expire_time
    new_expire_time = old_expire_time + timedelta(days=30)
    backup.update_expire_time(new_expire_time)
    print(
        "Backup {} expire time was updated from {} to {}.".format(
            backup.name, old_expire_time, new_expire_time
        )
    )
# [END spanner_update_backup]
# [START spanner_create_database_with_version_retention_period]
def create_database_with_version_retention_period(instance_id, database_id, retention_period):
    """Creates a database with a version retention period.

    The sample database is dropped again before returning.
    """
    spanner_client = spanner.Client()
    instance = spanner_client.instance(instance_id)
    ddl_statements = [
        "CREATE TABLE Singers ("
        + "  SingerId INT64 NOT NULL,"
        + "  FirstName STRING(1024),"
        + "  LastName STRING(1024),"
        + "  SingerInfo BYTES(MAX)"
        + ") PRIMARY KEY (SingerId)",
        "CREATE TABLE Albums ("
        + "  SingerId INT64 NOT NULL,"
        + "  AlbumId INT64 NOT NULL,"
        + "  AlbumTitle STRING(MAX)"
        + ") PRIMARY KEY (SingerId, AlbumId),"
        + "  INTERLEAVE IN PARENT Singers ON DELETE CASCADE",
        "ALTER DATABASE `{}`"
        " SET OPTIONS (version_retention_period = '{}')".format(
            database_id, retention_period
        )
    ]
    db = instance.database(database_id, ddl_statements)
    operation = db.create()
    operation.result(30)
    db.reload()
    print("Database {} created with version retention period {} and earliest version time {}".format(
        db.database_id, db.version_retention_period, db.earliest_version_time
    ))
    db.drop()
# [END spanner_create_database_with_version_retention_period]
# Command-line driver: dispatch the chosen subcommand to the sample above.
if __name__ == "__main__":  # noqa: C901
    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument("instance_id", help="Your Cloud Spanner instance ID.")
    parser.add_argument(
        "--database-id", help="Your Cloud Spanner database ID.", default="example_db"
    )
    parser.add_argument(
        "--backup-id", help="Your Cloud Spanner backup ID.", default="example_backup"
    )
    subparsers = parser.add_subparsers(dest="command")
    subparsers.add_parser("create_backup", help=create_backup.__doc__)
    subparsers.add_parser("cancel_backup", help=cancel_backup.__doc__)
    subparsers.add_parser("update_backup", help=update_backup.__doc__)
    subparsers.add_parser("restore_database", help=restore_database.__doc__)
    subparsers.add_parser("list_backups", help=list_backups.__doc__)
    subparsers.add_parser("list_backup_operations", help=list_backup_operations.__doc__)
    subparsers.add_parser(
        "list_database_operations", help=list_database_operations.__doc__
    )
    subparsers.add_parser("delete_backup", help=delete_backup.__doc__)
    args = parser.parse_args()
    if args.command == "create_backup":
        create_backup(args.instance_id, args.database_id, args.backup_id)
    elif args.command == "cancel_backup":
        cancel_backup(args.instance_id, args.database_id, args.backup_id)
    elif args.command == "update_backup":
        update_backup(args.instance_id, args.backup_id)
    elif args.command == "restore_database":
        restore_database(args.instance_id, args.database_id, args.backup_id)
    elif args.command == "list_backups":
        list_backups(args.instance_id, args.database_id, args.backup_id)
    elif args.command == "list_backup_operations":
        list_backup_operations(args.instance_id, args.database_id)
    elif args.command == "list_database_operations":
        list_database_operations(args.instance_id)
    elif args.command == "delete_backup":
        delete_backup(args.instance_id, args.backup_id)
    else:
        print("Command {} did not match expected commands.".format(args.command))
ace4b2a72e0db0f0fab7a898395dad2ce9834f9e | 2,182 | py | Python | alsa_audio_piper.py | liquidx/alsa-audio-pipe | e2374198246b8a285b1315c3ad74305f573086b7 | [
"Apache-2.0"
] | 9 | 2017-07-21T08:17:59.000Z | 2022-02-16T16:44:23.000Z | alsa_audio_piper.py | liquidx/alsa-audio-pipe | e2374198246b8a285b1315c3ad74305f573086b7 | [
"Apache-2.0"
] | null | null | null | alsa_audio_piper.py | liquidx/alsa-audio-pipe | e2374198246b8a285b1315c3ad74305f573086b7 | [
"Apache-2.0"
] | 3 | 2020-03-23T21:43:52.000Z | 2021-04-13T13:06:22.000Z | #!/usr/bin/python
#
# Equivalent to:
#
# arecord -f S16_LE -r48000 -c2 -F0 --period-size=1024 -B0 --buffer-size=4096 \
# -D ${SOURCE_DEVICE} | aplay -D ${DESTINATION_DEVICE}
#
# But instead, this will run as a single executable that is not the same as
# aplay.
import alsaaudio
import argparse
import struct
def pipe(in_card, out_card, channels=2, rate=48000, periodsize=128, floor_noise=0):
    """Copy S16_LE PCM audio from ``in_card`` to ``out_card`` until Ctrl-C.

    When ``floor_noise`` > 0, buffers whose samples all fall within that
    threshold are treated as silence and are not forwarded.
    """
    format = alsaaudio.PCM_FORMAT_S16_LE
    in_device = alsaaudio.PCM(alsaaudio.PCM_CAPTURE, alsaaudio.PCM_NORMAL, in_card)
    in_device.setchannels(channels)
    in_device.setrate(rate)
    in_device.setformat(format)
    in_device.setperiodsize(periodsize)
    out_device = alsaaudio.PCM(alsaaudio.PCM_PLAYBACK, alsaaudio.PCM_NORMAL, out_card)
    out_device.setchannels(channels)
    out_device.setrate(rate)
    out_device.setformat(format)
    out_device.setperiodsize(periodsize)
    try:
        while True:
            length, buf = in_device.read()
            # floor_noise == 0 short-circuits, skipping the silence scan.
            buffer_silent = floor_noise and is_silent(length, buf, floor_noise)
            try:
                if length > 0 and not buffer_silent:
                    out_device.write(buf)
            except alsaaudio.ALSAAudioError:
                print 'Possible failed to provide proper frame size: %d' % length
    except KeyboardInterrupt:
        pass
def is_silent(length, buf, floor_noise):
    """Return True if every sample in ``buf`` is within ``floor_noise``.

    ``buf`` holds interleaved signed 16-bit little-endian PCM samples.
    The ``length`` argument is kept for signature compatibility with the
    caller; the sample count is derived from the buffer itself.
    """
    # Floor division keeps the count an int; the original ``/`` relied on
    # Python 2 integer division.
    samples = len(buf) // 2  # each sample is a short (16-bit)
    values = struct.unpack('<%dh' % samples, buf)
    # Silent iff no sample's magnitude exceeds the noise floor.
    return all(abs(v) <= floor_noise for v in values)
# CLI entry point: optionally dump ALSA cards/PCMs, then start piping.
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--input', '-i', help='Input card name')
    parser.add_argument('--output', '-o', help='Output card name')
    parser.add_argument('--verbose', '-v', action='store_true', help='Verbose')
    parser.add_argument('--floor-noise', type=int, default=0,
                        help='Mute when samples are nearly silent')
    args = parser.parse_args()
    if args.verbose:
        print 'Cards: '
        for card in alsaaudio.cards():
            print '  ', card
        print 'PCMs: '
        for pcm in alsaaudio.pcms():
            print '  ', pcm
    pipe(args.input, args.output, floor_noise=args.floor_noise)
| 31.623188 | 84 | 0.694775 |
ace4b34ce59e0ef46ae5d94198ecf5109be4f2bd | 23,813 | py | Python | venv/lib/python3.8/site-packages/django/contrib/admin/views/main.py | Joshua-Barawa/My-Photos | adcaea48149c6b31e9559b045709d538d0b749bc | [
"PostgreSQL",
"Unlicense"
] | 16 | 2019-08-10T12:24:06.000Z | 2020-05-21T09:11:14.000Z | venv/lib/python3.8/site-packages/django/contrib/admin/views/main.py | Joshua-Barawa/My-Photos | adcaea48149c6b31e9559b045709d538d0b749bc | [
"PostgreSQL",
"Unlicense"
] | 12 | 2019-08-10T11:55:29.000Z | 2020-05-21T04:46:30.000Z | venv/lib/python3.8/site-packages/django/contrib/admin/views/main.py | Joshua-Barawa/My-Photos | adcaea48149c6b31e9559b045709d538d0b749bc | [
"PostgreSQL",
"Unlicense"
] | 4 | 2022-03-12T10:17:00.000Z | 2022-03-26T08:40:43.000Z | from datetime import datetime, timedelta
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.admin import FieldListFilter
from django.contrib.admin.exceptions import (
DisallowedModelAdminLookup,
DisallowedModelAdminToField,
)
from django.contrib.admin.options import (
IS_POPUP_VAR,
TO_FIELD_VAR,
IncorrectLookupParameters,
)
from django.contrib.admin.utils import (
get_fields_from_path,
lookup_spawns_duplicates,
prepare_lookup_value,
quote,
)
from django.core.exceptions import (
FieldDoesNotExist,
ImproperlyConfigured,
SuspiciousOperation,
)
from django.core.paginator import InvalidPage
from django.db.models import Exists, F, Field, ManyToOneRel, OrderBy, OuterRef
from django.db.models.expressions import Combinable
from django.urls import reverse
from django.utils.http import urlencode
from django.utils.timezone import make_aware
from django.utils.translation import gettext
# Changelist settings
# Names of the GET parameters the changelist recognizes in its URL.
ALL_VAR = "all"
ORDER_VAR = "o"
PAGE_VAR = "p"
SEARCH_VAR = "q"
ERROR_FLAG = "e"
# Parameters stripped out before query params are treated as filters.
IGNORED_PARAMS = (ALL_VAR, ORDER_VAR, SEARCH_VAR, IS_POPUP_VAR, TO_FIELD_VAR)
class ChangeListSearchForm(forms.Form):
    """Form used to validate the changelist's search query parameter."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Populate "fields" dynamically because SEARCH_VAR is a variable:
        self.fields = {
            SEARCH_VAR: forms.CharField(required=False, strip=False),
        }
class ChangeList:
search_form_class = ChangeListSearchForm
    def __init__(
        self,
        request,
        model,
        list_display,
        list_display_links,
        list_filter,
        date_hierarchy,
        search_fields,
        list_select_related,
        list_per_page,
        list_max_show_all,
        list_editable,
        model_admin,
        sortable_by,
        search_help_text,
    ):
        """Capture the admin's display options, parse the request's GET
        parameters, and compute the queryset/results for this changelist."""
        self.model = model
        self.opts = model._meta
        self.lookup_opts = self.opts
        self.root_queryset = model_admin.get_queryset(request)
        self.list_display = list_display
        self.list_display_links = list_display_links
        self.list_filter = list_filter
        # Filled in later (by get_filters/related machinery); None until then.
        self.has_filters = None
        self.has_active_filters = None
        self.clear_all_filters_qs = None
        self.date_hierarchy = date_hierarchy
        self.search_fields = search_fields
        self.list_select_related = list_select_related
        self.list_per_page = list_per_page
        self.list_max_show_all = list_max_show_all
        self.model_admin = model_admin
        self.preserved_filters = model_admin.get_preserved_filters(request)
        self.sortable_by = sortable_by
        self.search_help_text = search_help_text

        # Get search parameters from the query string.
        _search_form = self.search_form_class(request.GET)
        if not _search_form.is_valid():
            for error in _search_form.errors.values():
                messages.error(request, ", ".join(error))
        self.query = _search_form.cleaned_data.get(SEARCH_VAR) or ""
        try:
            self.page_num = int(request.GET.get(PAGE_VAR, 1))
        except ValueError:
            # Non-numeric page parameter: fall back to the first page.
            self.page_num = 1
        self.show_all = ALL_VAR in request.GET
        self.is_popup = IS_POPUP_VAR in request.GET
        to_field = request.GET.get(TO_FIELD_VAR)
        if to_field and not model_admin.to_field_allowed(request, to_field):
            raise DisallowedModelAdminToField(
                "The field %s cannot be referenced." % to_field
            )
        self.to_field = to_field
        self.params = dict(request.GET.items())
        # Pagination and error-flag params are bookkeeping, not filters.
        if PAGE_VAR in self.params:
            del self.params[PAGE_VAR]
        if ERROR_FLAG in self.params:
            del self.params[ERROR_FLAG]

        # Inline editing is disabled inside popups.
        if self.is_popup:
            self.list_editable = ()
        else:
            self.list_editable = list_editable
        self.queryset = self.get_queryset(request)
        self.get_results(request)
        if self.is_popup:
            title = gettext("Select %s")
        elif self.model_admin.has_change_permission(request):
            title = gettext("Select %s to change")
        else:
            title = gettext("Select %s to view")
        self.title = title % self.opts.verbose_name
        self.pk_attname = self.lookup_opts.pk.attname
    def __repr__(self):
        # Identify both the model and the ModelAdmin driving this changelist.
        return "<%s: model=%s model_admin=%s>" % (
            self.__class__.__qualname__,
            self.model.__qualname__,
            self.model_admin.__class__.__qualname__,
        )
def get_filters_params(self, params=None):
"""
Return all params except IGNORED_PARAMS.
"""
params = params or self.params
lookup_params = params.copy() # a dictionary of the query string
# Remove all the parameters that are globally and systematically
# ignored.
for ignored in IGNORED_PARAMS:
if ignored in lookup_params:
del lookup_params[ignored]
return lookup_params
    def get_filters(self, request):
        """
        Resolve the configured list filters against the current request.

        Returns a 5-tuple:
        (filter_specs, has_filters, remaining_lookup_params,
         may_have_duplicates, has_active_filters).
        ``remaining_lookup_params`` are the query-string lookups not consumed
        by any filter spec; ``may_have_duplicates`` flags lookups that can
        produce duplicate result rows (multi-valued relations).

        Raises DisallowedModelAdminLookup for lookups the admin does not
        allow, and IncorrectLookupParameters for invalid values.
        """
        lookup_params = self.get_filters_params()
        may_have_duplicates = False
        has_active_filters = False
        # Reject any lookup the ModelAdmin has not explicitly allowed.
        for key, value in lookup_params.items():
            if not self.model_admin.lookup_allowed(key, value):
                raise DisallowedModelAdminLookup("Filtering by %s not allowed" % key)
        filter_specs = []
        for list_filter in self.list_filter:
            # Snapshot the param count so we can detect which params this
            # filter consumed.
            lookup_params_count = len(lookup_params)
            if callable(list_filter):
                # This is simply a custom list filter class.
                spec = list_filter(request, lookup_params, self.model, self.model_admin)
            else:
                field_path = None
                if isinstance(list_filter, (tuple, list)):
                    # This is a custom FieldListFilter class for a given field.
                    field, field_list_filter_class = list_filter
                else:
                    # This is simply a field name, so use the default
                    # FieldListFilter class that has been registered for the
                    # type of the given field.
                    field, field_list_filter_class = list_filter, FieldListFilter.create
                if not isinstance(field, Field):
                    field_path = field
                    field = get_fields_from_path(self.model, field_path)[-1]
                spec = field_list_filter_class(
                    field,
                    request,
                    lookup_params,
                    self.model,
                    self.model_admin,
                    field_path=field_path,
                )
                # field_list_filter_class removes any lookup_params it
                # processes. If that happened, check if duplicates should be
                # removed.
                if lookup_params_count > len(lookup_params):
                    may_have_duplicates |= lookup_spawns_duplicates(
                        self.lookup_opts,
                        field_path,
                    )
            if spec and spec.has_output():
                filter_specs.append(spec)
                if lookup_params_count > len(lookup_params):
                    has_active_filters = True
        if self.date_hierarchy:
            # Create bounded lookup parameters so that the query is more
            # efficient.
            year = lookup_params.pop("%s__year" % self.date_hierarchy, None)
            if year is not None:
                month = lookup_params.pop("%s__month" % self.date_hierarchy, None)
                day = lookup_params.pop("%s__day" % self.date_hierarchy, None)
                try:
                    from_date = datetime(
                        int(year),
                        int(month if month is not None else 1),
                        int(day if day is not None else 1),
                    )
                except ValueError as e:
                    raise IncorrectLookupParameters(e) from e
                if day:
                    to_date = from_date + timedelta(days=1)
                elif month:
                    # In this branch, from_date will always be the first of a
                    # month, so advancing 32 days gives the next month.
                    to_date = (from_date + timedelta(days=32)).replace(day=1)
                else:
                    to_date = from_date.replace(year=from_date.year + 1)
                if settings.USE_TZ:
                    from_date = make_aware(from_date)
                    to_date = make_aware(to_date)
                # Replace the year/month/day params with a half-open range.
                lookup_params.update(
                    {
                        "%s__gte" % self.date_hierarchy: from_date,
                        "%s__lt" % self.date_hierarchy: to_date,
                    }
                )
        # At this point, all the parameters used by the various ListFilters
        # have been removed from lookup_params, which now only contains other
        # parameters passed via the query string. We now loop through the
        # remaining parameters both to ensure that all the parameters are valid
        # fields and to determine if at least one of them spawns duplicates. If
        # the lookup parameters aren't real fields, then bail out.
        try:
            for key, value in lookup_params.items():
                lookup_params[key] = prepare_lookup_value(key, value)
                may_have_duplicates |= lookup_spawns_duplicates(self.lookup_opts, key)
            return (
                filter_specs,
                bool(filter_specs),
                lookup_params,
                may_have_duplicates,
                has_active_filters,
            )
        except FieldDoesNotExist as e:
            raise IncorrectLookupParameters(e) from e
def get_query_string(self, new_params=None, remove=None):
if new_params is None:
new_params = {}
if remove is None:
remove = []
p = self.params.copy()
for r in remove:
for k in list(p):
if k.startswith(r):
del p[k]
for k, v in new_params.items():
if v is None:
if k in p:
del p[k]
else:
p[k] = v
return "?%s" % urlencode(sorted(p.items()))
    def get_results(self, request):
        """
        Paginate the queryset and store this page's results and the counts
        the changelist template needs (result_count, full_result_count,
        paginator, etc.) as attributes on self.

        Raises IncorrectLookupParameters when the requested page is invalid.
        """
        paginator = self.model_admin.get_paginator(
            request, self.queryset, self.list_per_page
        )
        # Get the number of objects, with admin filters applied.
        result_count = paginator.count
        # Get the total number of objects, with no admin filters applied.
        if self.model_admin.show_full_result_count:
            full_result_count = self.root_queryset.count()
        else:
            # Counting the whole table can be expensive; skip when disabled.
            full_result_count = None
        can_show_all = result_count <= self.list_max_show_all
        multi_page = result_count > self.list_per_page
        # Get the list of objects to display on this page.
        if (self.show_all and can_show_all) or not multi_page:
            result_list = self.queryset._clone()
        else:
            try:
                result_list = paginator.page(self.page_num).object_list
            except InvalidPage:
                raise IncorrectLookupParameters
        self.result_count = result_count
        self.show_full_result_count = self.model_admin.show_full_result_count
        # Admin actions are shown if there is at least one entry
        # or if entries are not counted because show_full_result_count is disabled
        self.show_admin_actions = not self.show_full_result_count or bool(
            full_result_count
        )
        self.full_result_count = full_result_count
        self.result_list = result_list
        self.can_show_all = can_show_all
        self.multi_page = multi_page
        self.paginator = paginator
def _get_default_ordering(self):
ordering = []
if self.model_admin.ordering:
ordering = self.model_admin.ordering
elif self.lookup_opts.ordering:
ordering = self.lookup_opts.ordering
return ordering
def get_ordering_field(self, field_name):
"""
Return the proper model field name corresponding to the given
field_name to use for ordering. field_name may either be the name of a
proper model field or the name of a method (on the admin or model) or a
callable with the 'admin_order_field' attribute. Return None if no
proper model field name can be matched.
"""
try:
field = self.lookup_opts.get_field(field_name)
return field.name
except FieldDoesNotExist:
# See whether field_name is a name of a non-field
# that allows sorting.
if callable(field_name):
attr = field_name
elif hasattr(self.model_admin, field_name):
attr = getattr(self.model_admin, field_name)
else:
attr = getattr(self.model, field_name)
if isinstance(attr, property) and hasattr(attr, "fget"):
attr = attr.fget
return getattr(attr, "admin_order_field", None)
    def get_ordering(self, request, queryset):
        """
        Return the list of ordering fields for the change list.
        First check the get_ordering() method in model admin, then check
        the object's default ordering. Then, any manually-specified ordering
        from the query string overrides anything. Finally, a deterministic
        order is guaranteed by calling _get_deterministic_ordering() with the
        constructed ordering.
        """
        params = self.params
        ordering = list(
            self.model_admin.get_ordering(request) or self._get_default_ordering()
        )
        if ORDER_VAR in params:
            # Clear ordering and used params
            ordering = []
            order_params = params[ORDER_VAR].split(".")
            for p in order_params:
                try:
                    # Each token looks like "[-]<index>": rpartition peels
                    # off an optional "-" sign, idx indexes into list_display.
                    none, pfx, idx = p.rpartition("-")
                    field_name = self.list_display[int(idx)]
                    order_field = self.get_ordering_field(field_name)
                    if not order_field:
                        continue  # No 'admin_order_field', skip it
                    if isinstance(order_field, OrderBy):
                        if pfx == "-":
                            # Copy before reversing so the shared OrderBy
                            # instance is not mutated.
                            order_field = order_field.copy()
                            order_field.reverse_ordering()
                        ordering.append(order_field)
                    elif hasattr(order_field, "resolve_expression"):
                        # order_field is an expression.
                        ordering.append(
                            order_field.desc() if pfx == "-" else order_field.asc()
                        )
                    # reverse order if order_field has already "-" as prefix
                    elif order_field.startswith("-") and pfx == "-":
                        ordering.append(order_field[1:])
                    else:
                        ordering.append(pfx + order_field)
                except (IndexError, ValueError):
                    continue  # Invalid ordering specified, skip it.
        # Add the given query's ordering fields, if any.
        ordering.extend(queryset.query.order_by)
        return self._get_deterministic_ordering(ordering)
    def _get_deterministic_ordering(self, ordering):
        """
        Ensure a deterministic order across all database backends. Search for a
        single field or unique together set of fields providing a total
        ordering. If these are missing, augment the ordering with a descendant
        primary key.
        """
        ordering = list(ordering)
        ordering_fields = set()
        # Any one of these fields alone guarantees a total ordering: the pk
        # plus every unique, non-nullable concrete field.
        total_ordering_fields = {"pk"} | {
            field.attname
            for field in self.lookup_opts.fields
            if field.unique and not field.null
        }
        for part in ordering:
            # Search for single field providing a total ordering.
            field_name = None
            if isinstance(part, str):
                field_name = part.lstrip("-")
            elif isinstance(part, F):
                field_name = part.name
            elif isinstance(part, OrderBy) and isinstance(part.expression, F):
                field_name = part.expression.name
            if field_name:
                # Normalize attname references by using get_field().
                try:
                    field = self.lookup_opts.get_field(field_name)
                except FieldDoesNotExist:
                    # Could be "?" for random ordering or a related field
                    # lookup. Skip this part of introspection for now.
                    continue
                # Ordering by a related field name orders by the referenced
                # model's ordering. Skip this part of introspection for now.
                if field.remote_field and field_name == field.name:
                    continue
                if field.attname in total_ordering_fields:
                    # Total ordering already guaranteed: leave the loop via
                    # break so the for/else fallback below is skipped.
                    break
                ordering_fields.add(field.attname)
        else:
            # No single total ordering field, try unique_together and total
            # unique constraints.
            constraint_field_names = (
                *self.lookup_opts.unique_together,
                *(
                    constraint.fields
                    for constraint in self.lookup_opts.total_unique_constraints
                ),
            )
            for field_names in constraint_field_names:
                # Normalize attname references by using get_field().
                fields = [
                    self.lookup_opts.get_field(field_name) for field_name in field_names
                ]
                # Composite unique constraints containing a nullable column
                # cannot ensure total ordering.
                if any(field.null for field in fields):
                    continue
                if ordering_fields.issuperset(field.attname for field in fields):
                    break
            else:
                # If no set of unique fields is present in the ordering, rely
                # on the primary key to provide total ordering.
                ordering.append("-pk")
        return ordering
    def get_ordering_field_columns(self):
        """
        Return a dictionary of ordering field column numbers and asc/desc.
        """
        # We must cope with more than one column having the same underlying sort
        # field, so we base things on column numbers.
        ordering = self._get_default_ordering()
        ordering_fields = {}
        if ORDER_VAR not in self.params:
            # for ordering specified on ModelAdmin or model Meta, we don't know
            # the right column numbers absolutely, because there might be more
            # than one column associated with that ordering, so we guess.
            for field in ordering:
                if isinstance(field, (Combinable, OrderBy)):
                    if not isinstance(field, OrderBy):
                        field = field.asc()
                    if isinstance(field.expression, F):
                        order_type = "desc" if field.descending else "asc"
                        field = field.expression.name
                    else:
                        # Non-F expressions can't be mapped to a column; skip.
                        continue
                elif field.startswith("-"):
                    field = field[1:]
                    order_type = "desc"
                else:
                    order_type = "asc"
                for index, attr in enumerate(self.list_display):
                    if self.get_ordering_field(attr) == field:
                        ordering_fields[index] = order_type
                        break
        else:
            # Ordering came from the query string: tokens are "[-]<index>",
            # where index refers directly to a list_display column.
            for p in self.params[ORDER_VAR].split("."):
                none, pfx, idx = p.rpartition("-")
                try:
                    idx = int(idx)
                except ValueError:
                    continue  # skip it
                ordering_fields[idx] = "desc" if pfx == "-" else "asc"
        return ordering_fields
    def get_queryset(self, request):
        """
        Build the filtered, searched, de-duplicated and ordered queryset
        backing this changelist page.

        Raises IncorrectLookupParameters for invalid query-string lookups.
        """
        # First, we collect all the declared list filters.
        (
            self.filter_specs,
            self.has_filters,
            remaining_lookup_params,
            filters_may_have_duplicates,
            self.has_active_filters,
        ) = self.get_filters(request)
        # Then, we let every list filter modify the queryset to its liking.
        qs = self.root_queryset
        for filter_spec in self.filter_specs:
            new_qs = filter_spec.queryset(request, qs)
            if new_qs is not None:
                qs = new_qs
        try:
            # Finally, we apply the remaining lookup parameters from the query
            # string (i.e. those that haven't already been processed by the
            # filters).
            qs = qs.filter(**remaining_lookup_params)
        except (SuspiciousOperation, ImproperlyConfigured):
            # Allow certain types of errors to be re-raised as-is so that the
            # caller can treat them in a special way.
            raise
        except Exception as e:
            # Every other error is caught with a naked except, because we don't
            # have any other way of validating lookup parameters. They might be
            # invalid if the keyword arguments are incorrect, or if the values
            # are not in the correct type, so we might get FieldError,
            # ValueError, ValidationError, or ?.
            raise IncorrectLookupParameters(e)
        # Apply search results
        qs, search_may_have_duplicates = self.model_admin.get_search_results(
            request,
            qs,
            self.query,
        )
        # Set query string for clearing all filters.
        self.clear_all_filters_qs = self.get_query_string(
            new_params=remaining_lookup_params,
            remove=self.get_filters_params(),
        )
        # Remove duplicates from results, if necessary
        if filters_may_have_duplicates | search_may_have_duplicates:
            # De-duplicate via an Exists() subquery against the root queryset
            # rather than distinct().
            qs = qs.filter(pk=OuterRef("pk"))
            qs = self.root_queryset.filter(Exists(qs))
        # Set ordering.
        ordering = self.get_ordering(request, qs)
        qs = qs.order_by(*ordering)
        if not qs.query.select_related:
            qs = self.apply_select_related(qs)
        return qs
def apply_select_related(self, qs):
if self.list_select_related is True:
return qs.select_related()
if self.list_select_related is False:
if self.has_related_field_in_list_display():
return qs.select_related()
if self.list_select_related:
return qs.select_related(*self.list_select_related)
return qs
def has_related_field_in_list_display(self):
for field_name in self.list_display:
try:
field = self.lookup_opts.get_field(field_name)
except FieldDoesNotExist:
pass
else:
if isinstance(field.remote_field, ManyToOneRel):
# <FK>_id field names don't require a join.
if field_name != field.get_attname():
return True
return False
    def url_for_result(self, result):
        """Return the admin change-form URL for the given result row."""
        pk = getattr(result, self.pk_attname)
        return reverse(
            "admin:%s_%s_change" % (self.opts.app_label, self.opts.model_name),
            # quote() escapes pk values containing URL-special characters.
            args=(quote(pk),),
            current_app=self.model_admin.admin_site.name,
        )
| 40.636519 | 88 | 0.583127 |
ace4b4c2119d8e437de19ce8f68875b1d7da976b | 1,136 | py | Python | rl_agents/agents/common/exploration/boltzmann.py | songanz/highway-env | ac21d1da25e224dbdbf8ba39509f4013bd029f52 | [
"MIT"
] | 1 | 2019-11-06T15:28:27.000Z | 2019-11-06T15:28:27.000Z | rl_agents/agents/common/exploration/boltzmann.py | songanz/highway-env | ac21d1da25e224dbdbf8ba39509f4013bd029f52 | [
"MIT"
] | null | null | null | rl_agents/agents/common/exploration/boltzmann.py | songanz/highway-env | ac21d1da25e224dbdbf8ba39509f4013bd029f52 | [
"MIT"
] | 1 | 2019-07-22T03:37:09.000Z | 2019-07-22T03:37:09.000Z | import numpy as np
from gym import spaces
from rl_agents.agents.common.exploration.abstract import DiscreteDistribution
class Boltzmann(DiscreteDistribution):
    """
    Boltzmann (softmax) exploration: each action is sampled with probability
    proportional to exp(value / temperature). A temperature of 0 degenerates
    to the greedy (argmax) distribution.

    Note: the previous docstring described epsilon-greedy exploration, which
    does not match this implementation.
    """

    def __init__(self, action_space, config=None):
        """
        :param action_space: a discrete gym action space
        :param config: optional dict overriding default_config()
        :raises TypeError: if the action space is not discrete
        """
        super(Boltzmann, self).__init__(config)
        self.action_space = action_space
        if not isinstance(self.action_space, spaces.Discrete):
            raise TypeError("The action space should be discrete")
        # Latest action values; must be set via update() before sampling.
        self.values = None
        self.seed()

    @classmethod
    def default_config(cls):
        return dict(temperature=0.5)

    def get_distribution(self):
        """Return {action: probability}, a softmax over the current values."""
        actions = range(self.action_space.n)
        if self.config['temperature'] > 0:
            weights = np.exp(self.values / self.config['temperature'])
        else:
            # Zero temperature: all mass on the best action.
            weights = np.zeros((len(actions),))
            weights[np.argmax(self.values)] = 1
        return {action: weights[action] / np.sum(weights) for action in actions}

    def update(self, values, time=False):
        """Store the latest action values used to build the distribution."""
        self.values = values
| 32.457143 | 100 | 0.661972 |
ace4b54ccfd5e1a224660f66e68db4771017e398 | 3,046 | py | Python | lambdo/resolve.py | wangchengrong/lambdo | 7de0e4bd61ffa6d03f23290c198f08a22c3fcf28 | [
"MIT"
] | 1 | 2021-02-24T09:06:32.000Z | 2021-02-24T09:06:32.000Z | lambdo/resolve.py | wangchengrong/lambdo | 7de0e4bd61ffa6d03f23290c198f08a22c3fcf28 | [
"MIT"
] | null | null | null | lambdo/resolve.py | wangchengrong/lambdo | 7de0e4bd61ffa6d03f23290c198f08a22c3fcf28 | [
"MIT"
] | null | null | null | __author__="Alexandr Savinov"
import os
import sys
import types
import inspect
import importlib
import importlib.util
import logging
log = logging.getLogger('RESOLVE')
def resolve_full_name(full_name: str):
# Example: 'mod1.mod2.mod3:class1.class2.func1.func2'
if not full_name: return None
mod_and_func = full_name.split(':', 1)
mod_name = mod_and_func[0] if len(mod_and_func) > 1 else None
func_name = mod_and_func[-1]
if mod_name:
mod = resolve_module(mod_name)
if mod is None: return None
func = resolve_name_in_mod(func_name, mod)
return func
# TODO: Module is not specified. Search in all modules
return None
def all_modules():
modules = []
return modules
def resolve_module(mod_name: str):
mod = sys.modules.get(mod_name)
if mod:
return mod
try:
mod = importlib.import_module(mod_name)
except Exception as e:
pass
return mod
def resolve_name_in_mod(func_name: str, mod):
# Split full name into segments (classes and functions)
name_path = func_name.split('.')
# Sequentially resolve each next segment in the result of the previous segment starting from the specified module
last_segment = mod
for i in range(len(name_path)):
name_segment = name_path[i]
ref_segment = None
try:
ref_segment = getattr(last_segment, name_segment)
"""
for key, val in mod.__dict__.items():
if not inspect.isclass(val): continue
members = inspect.getmembers(val, predicate=inspect.ismethod) # A list of all members of the class
for n, m in members:
if n == func_name: return m
"""
except AttributeError as e:
pass
if ref_segment is None:
return None
else:
last_segment = ref_segment
return last_segment
def import_modules(imports):
    """
    Import each named module, falling back to loading '<name>.py' relative to
    the working directory (dots become path separators). Names that cannot be
    resolved either way are logged and skipped.

    :return: the list of successfully imported module objects
    """
    loaded = []
    for name in imports:
        module = None
        try:
            module = importlib.import_module(name)
        except ImportError:
            pass
        if module:
            loaded.append(module)
            continue  # Module found and imported
        # Fall back to importing directly from a source file.
        # See: https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
        try:
            source_path = name.replace('.', '/') + '.py'
            spec = importlib.util.spec_from_file_location(name, source_path)
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
        except ImportError:
            # NOTE: module may already be bound here; matching the original
            # behavior, a partially-executed module is still registered below.
            pass
        if module:
            loaded.append(module)
            sys.modules[name] = module
            continue
        log.warning("Cannot import module '{0}'. Ignored. This can cause errors later if its functions are used in the workflow".format(name))
    return loaded
pass
| 26.486957 | 146 | 0.619173 |
ace4b5ecd7e6ea3389e2f4ca035c232ff5c97e3a | 500 | py | Python | tools/utils/data_loader.py | lemonviv/Pivot | 585b39e54cea3450221521e452f2e89ad5ac990a | [
"Apache-2.0"
] | 4 | 2021-08-04T08:25:53.000Z | 2021-08-11T17:04:26.000Z | tools/utils/data_loader.py | lemonviv/Pivot | 585b39e54cea3450221521e452f2e89ad5ac990a | [
"Apache-2.0"
] | 3 | 2021-07-18T11:25:28.000Z | 2021-07-18T11:25:28.000Z | tools/utils/data_loader.py | lemonviv/Pivot | 585b39e54cea3450221521e452f2e89ad5ac990a | [
"Apache-2.0"
] | 1 | 2022-02-19T15:37:33.000Z | 2022-02-19T15:37:33.000Z | from numpy import genfromtxt
from sklearn.model_selection import train_test_split
def load_from_csv(data_path, test_perc=0.2, delimiter=','):
'''
assume label on the last feature dimension
:param test_perc: percentage of data used for validation
:return:
'''
data = genfromtxt(data_path, delimiter=delimiter)
X, y = data[:, :-1], data[:, -1]
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=test_perc)
return X_train, y_train, X_test, y_test
| 35.714286 | 82 | 0.708 |
ace4b74bfd2238fbfea109cd688bd811d3161e19 | 1,197 | py | Python | setup.py | olxbr/kong-config-builder | 69dc8040eca449aba4557d0d89e96e2bfdbd4721 | [
"MIT"
] | 1 | 2020-06-16T03:10:42.000Z | 2020-06-16T03:10:42.000Z | setup.py | olxbr/kong-config-builder | 69dc8040eca449aba4557d0d89e96e2bfdbd4721 | [
"MIT"
] | 2 | 2020-06-19T18:52:29.000Z | 2020-08-03T19:48:03.000Z | setup.py | olxbr/kong-config-builder | 69dc8040eca449aba4557d0d89e96e2bfdbd4721 | [
"MIT"
] | 1 | 2021-04-09T20:51:56.000Z | 2021-04-09T20:51:56.000Z | from setuptools import setup, find_packages
libs = ["aws"]
extras = {"all": []}
with open("requirements.txt") as reqs:
requirements = reqs.read().split("\n")
for lib in libs:
with open(f"requirements_{lib}.txt") as reqs:
extras[lib] = reqs.read().split("\n")
extras["all"] = extras["all"] + extras[lib]
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="kong_config_builder",
version="DYNAMIC",
description="Kong declarative configuration builder",
long_description=long_description,
long_description_content_type="text/markdown",
author="Olx",
license='MIT',
include_package_data=True,
url='https://github.com/olxbr/kong-config-builder/',
download_url='https://github.com/olxbr/kong-config-builder/archive/master.zip',
install_requires=requirements,
extras_require=extras,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Libraries :: Application Frameworks"
],
packages=find_packages()
)
| 31.5 | 83 | 0.668338 |
ace4b7ef10648c23dc7246016012d2c2300dbf70 | 440 | py | Python | 10/eip.py | SxNade/THM_Buffer-Overflow-Prep | f4818a446c5ede939492a04f53cdb7398dbc0b8e | [
"BSD-3-Clause"
] | null | null | null | 10/eip.py | SxNade/THM_Buffer-Overflow-Prep | f4818a446c5ede939492a04f53cdb7398dbc0b8e | [
"BSD-3-Clause"
] | null | null | null | 10/eip.py | SxNade/THM_Buffer-Overflow-Prep | f4818a446c5ede939492a04f53cdb7398dbc0b8e | [
"BSD-3-Clause"
] | null | null | null | import socket
import sys
import time
print("[+] Nani???? offset!!\n")
buff = "A" * 537
EIP = "B" * 4
fill = "C" * 159
payload = buff + EIP + fill
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Connect to the Application
s.connect(('192.168.1.117', 1337))
s.recv(1024) #Recv the banner
#Finally the vulnerable command
s.send('OVERFLOW10 ' + payload + '\r\n')
s.send('EXIT\r\n')
s.close()
print("[+] Execution Finished")
| 15.172414 | 53 | 0.65 |
ace4b801c37fb018e980d12c945ccafb90df85b1 | 1,638 | py | Python | scripts/python_scripts/subquery_cli.py | stepanLav/subquery-nova | 1745b3c8b9be814f19ce05aeb24e2e9cd256b36f | [
"Apache-2.0"
] | 3 | 2021-12-02T08:23:42.000Z | 2022-03-07T07:56:13.000Z | scripts/python_scripts/subquery_cli.py | stepanLav/subquery-nova | 1745b3c8b9be814f19ce05aeb24e2e9cd256b36f | [
"Apache-2.0"
] | 4 | 2022-01-19T05:07:02.000Z | 2022-03-01T13:52:00.000Z | scripts/python_scripts/subquery_cli.py | stepanLav/subquery-nova | 1745b3c8b9be814f19ce05aeb24e2e9cd256b36f | [
"Apache-2.0"
] | 3 | 2022-02-24T05:00:22.000Z | 2022-03-07T06:54:26.000Z | import subprocess
import wget
import os
import zipfile
import os
import platform
def get_subquery_cli(subquery_cli_version):
    """Download and unpack the subquery CLI for the current platform.

    The platform-specific release zip is fetched from GitHub into ./temporary,
    extracted there, and made executable. Returns the temporary directory.

    :raises ValueError: if the operating system is not Linux/macOS/Windows
    """
    download_url = "https://github.com/fewensa/subquery-cli/releases/download/v" + subquery_cli_version
    temporary_path = "./temporary"
    current_platform = platform.system()
    if current_platform == "Linux":
        download_url += "/subquery-linux-x86_64.zip"
    elif current_platform == "Darwin":
        download_url += "/subquery-macos-x86_64.zip"
    elif current_platform == "Windows":
        download_url += "/subquery-windows-x86_64.zip"
    else:
        raise ValueError('Can\'t to recognize the operating system')
    try:
        os.makedirs(temporary_path, exist_ok=False)
        wget.download(download_url, out = temporary_path)
        for file in os.listdir(temporary_path):
            with zipfile.ZipFile(temporary_path+'/'+file) as item:
                item.extractall(temporary_path)
    except:
        # NOTE(review): this bare except swallows all errors -- presumably to
        # reuse an already-downloaded copy when ./temporary exists, but it
        # also hides genuine download/extract failures. Confirm intent.
        pass
    # NOTE(review): chmod -R 777 is very permissive; 755 would likely suffice.
    subprocess.call(['chmod', '-R', '777', temporary_path])
    return temporary_path
def use_subquery_cli(subquery_cli_version, *args):
    """Run the subquery CLI with ``args`` and return its decoded stdout."""
    cli_dir = get_subquery_cli(subquery_cli_version)
    raw_output = subprocess.check_output([cli_dir + '/subquery', *args])
    return raw_output.decode()
if __name__ == "__main__":
    # Manual smoke test: fill in token/project_key before running directly.
    # token = os.environ['SUBQUERY_TOKEN', '']
    token=''
    # project_key = os.environ['PROJECT_KEY', '']
    project_key = ''
    subquery_cli_version = '0.2.4'
    use_subquery_cli(subquery_cli_version, '--token', token, 'deployment', 'list', '-o', 'json', '--org', 'nova-wallet', '--key', project_key)
ace4b98277c9dd5b1ecb77aebef5bf959d6ae641 | 1,185 | py | Python | profiles_api/serializers.py | homabakhtiarian/profiles-rest-api | 0f05aae98cecd9f7ae8c78e794233133c6db1767 | [
"MIT"
] | null | null | null | profiles_api/serializers.py | homabakhtiarian/profiles-rest-api | 0f05aae98cecd9f7ae8c78e794233133c6db1767 | [
"MIT"
] | null | null | null | profiles_api/serializers.py | homabakhtiarian/profiles-rest-api | 0f05aae98cecd9f7ae8c78e794233133c6db1767 | [
"MIT"
] | null | null | null | from rest_framework import serializers
from profiles_api import models
class HelloSerializer(serializers.Serializer):
    """Serializes a name field for testing our APIView"""
    # Short free-text name, capped at 10 characters.
    name = serializers.CharField(max_length=10)
class UserProfileSerializer(serializers.ModelSerializer):
    """Serializes a user profile object"""
    class Meta:
        model = models.UserProfile
        fields = ('id', 'email', 'name', 'password')
        # Password is accepted on create but never echoed back in responses,
        # and rendered as a masked input in the browsable API.
        extra_kwargs = {
            'password': {
                'write_only': True,
                'style': {'input_type': 'password'}
            }
        }
    def create(self, validated_data):
        """Create and return a new user"""
        # create_user() hashes the password instead of storing it raw.
        user = models.UserProfile.objects.create_user(
            email=validated_data['email'],
            name=validated_data['name'],
            password=validated_data['password']
        )
        return user
class ProfileFeedItemSerializer(serializers.ModelSerializer):
    """Serializes profile feed item"""
    class Meta:
        model = models.ProfileFeedItem
        fields = ('id', 'user_profile', 'status_text', 'created_on')
        # The owning profile is set server-side from the authenticated user,
        # so clients cannot write it directly.
        extra_kwargs = {'user_profile': {'read_only': True}}
| 29.625 | 68 | 0.62616 |
ace4b9a869872820db09f275d3116f70b0e364a4 | 835 | py | Python | example/server/views.py | mikebryant/opentracing-python-django-jaeger-example | fe62b95a2560a340185a93385f8fb3a55fd279cf | [
"Apache-2.0"
] | 2 | 2020-05-04T08:40:07.000Z | 2020-06-08T08:52:46.000Z | example/server/views.py | mikebryant/opentracing-python-django-jaeger-example | fe62b95a2560a340185a93385f8fb3a55fd279cf | [
"Apache-2.0"
] | 1 | 2017-12-25T02:45:57.000Z | 2017-12-26T04:57:13.000Z | example/server/views.py | mikebryant/opentracing-python-django-jaeger-example | fe62b95a2560a340185a93385f8fb3a55fd279cf | [
"Apache-2.0"
] | 4 | 2018-05-30T13:56:59.000Z | 2022-01-20T11:18:01.000Z | from django.shortcuts import render
from django.http import HttpResponse
from django.conf import settings
import opentracing
# Create your views here.
def server_index(request):
    """Plain index view; not traced beyond any middleware defaults."""
    return HttpResponse("Hello, world. You're at the server index.")
def server_simple(request):
    """Minimal view used to demonstrate a simple traced request."""
    return HttpResponse("This is a simple traced request.")
def server_log(request):
    """Attach a log event to the request's active tracing span, if any."""
    span = settings.OPENTRACING_TRACER.get_span(request)
    # get_span() can return None when the request is not being traced.
    if span is not None:
        span.log_event("Hello, world!")
    return HttpResponse("Something was logged")
def server_child_span(request):
    """Create and immediately finish a child span under the request's span."""
    span = settings.OPENTRACING_TRACER.get_span(request)
    if span is not None:
        # Uses the underlying tracer (_tracer) to start a span parented to
        # the request span's context.
        child_span = settings.OPENTRACING_TRACER._tracer.start_span("child span", child_of=span.context)
        child_span.finish()
    return HttpResponse("A child span was created")
| 30.925926 | 104 | 0.749701 |
ace4b9d2fa2106553697ff1ec540fc2fb2985b01 | 566 | py | Python | LintCode/927.py | RENHANFEI/LintCode | d572dee248ba4c2a95b52cd737d76c7297f4e7b4 | [
"CNRI-Python"
] | null | null | null | LintCode/927.py | RENHANFEI/LintCode | d572dee248ba4c2a95b52cd737d76c7297f4e7b4 | [
"CNRI-Python"
] | null | null | null | LintCode/927.py | RENHANFEI/LintCode | d572dee248ba4c2a95b52cd737d76c7297f4e7b4 | [
"CNRI-Python"
] | null | null | null | class Solution:
"""
@param str: a string
@return: return a string
"""
def reverseWords(self, S):
words = S.split(" ")
return " ".join(words[::-1])
# class Solution:
# """
# @param str: a string
# @return: return a string
# """
# def reverseWords(self, S):
# S = list(S)
# S = S[::-1] + [" "]
# i = 0
# for j, ch in enumerate(S):
# if ch == " ":
# S[i:j] = S[i:j][::-1]
# i = j + 1
# return "".join(S[:-1])
| 20.962963 | 39 | 0.381625 |
ace4ba4a9567686b190950322696c405411c4e7a | 1,203 | py | Python | celery_app/plugins/pluginnormal/yunxiazi_fastjson.py | tiaotiaolong/piu | 8e464ab62464c15763476d591df4365d434f7341 | [
"MIT"
] | 2 | 2020-05-15T04:24:59.000Z | 2020-06-03T14:23:32.000Z | celery_app/plugins/pluginnormal/yunxiazi_fastjson.py | tiaotiaolong/piu | 8e464ab62464c15763476d591df4365d434f7341 | [
"MIT"
] | null | null | null | celery_app/plugins/pluginnormal/yunxiazi_fastjson.py | tiaotiaolong/piu | 8e464ab62464c15763476d591df4365d434f7341 | [
"MIT"
] | null | null | null | import requests
from celery_app.utils.utils import insert_vuln_db
from celery_app.utils.utils import get_dns_payload,have_record
#云匣子 Fastjson <= 1.2.47 反序列化远程代码执行漏洞 (Yunxiazi Fastjson <= 1.2.47 deserialization remote code execution vulnerability)
plugin_id=43
default_port_list=[80,443,8080]
def check(host, port=443):
    """Probe a Yunxiazi host for the Fastjson <= 1.2.47 deserialization RCE.

    Posts the java.net.InetAddress autotype payload to the authService
    config endpoints; exploitability is confirmed out-of-band by checking
    for a DNS callback to a generated subdomain. On success the finding is
    recorded via insert_vuln_db().

    :return: (True, host, target, payload_dns) if vulnerable, else False
    """
    # NOTE: substring match means any port containing "443" (incl. 8443)
    # is treated as HTTPS.
    scheme = 'https' if '443' in str(port) else 'http'
    target = '{}://{}:{}'.format(scheme, host, port)
    subdomain, payload_dns = get_dns_payload()
    uris = ['/3.0/authService/config', '/2.0/authService/config', '/1.0/authService/config']
    # Triggers a DNS lookup of payload_dns during deserialization.
    payload = {"c": {"@type": "java.net.InetAddress", "val": payload_dns}, "b": {}}
    try:
        with requests.Session() as session:
            requests.packages.urllib3.disable_warnings()
            targets = ['{}{}'.format(target, uri) for uri in uris]
            for target in targets:
                try:
                    # Responses are irrelevant; only the DNS side effect matters.
                    session.post(target, json=payload, timeout=5, verify=False)
                except:
                    pass
                finally:
                    if have_record(subdomain):
                        insert_vuln_db(host, target, payload_dns, plugin_id)
                        return True, host, target, payload_dns
            return False
    except:
        return False
ace4ba7147ab93f90c3497268bc3d83b8905fb0f | 3,293 | py | Python | language_formatters_pre_commit_hooks/pretty_format_kotlin.py | greggiacovelli/language-formatters-pre-commit-hooks | f6b82c7eae7b930d613fd20a2fcded0daa60cf3c | [
"Apache-2.0"
] | null | null | null | language_formatters_pre_commit_hooks/pretty_format_kotlin.py | greggiacovelli/language-formatters-pre-commit-hooks | f6b82c7eae7b930d613fd20a2fcded0daa60cf3c | [
"Apache-2.0"
] | null | null | null | language_formatters_pre_commit_hooks/pretty_format_kotlin.py | greggiacovelli/language-formatters-pre-commit-hooks | f6b82c7eae7b930d613fd20a2fcded0daa60cf3c | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import sys
import typing
from language_formatters_pre_commit_hooks import _get_default_version
from language_formatters_pre_commit_hooks.pre_conditions import java_required
from language_formatters_pre_commit_hooks.utils import download_url
from language_formatters_pre_commit_hooks.utils import run_command
def __download_kotlin_formatter_jar(version):  # pragma: no cover
    # type: (typing.Text) -> typing.Text
    """Download the ktlint release for ``version`` and return its local path.

    :raises RuntimeError: if the download fails (invalid version or network
        issue); the original error is attached as the cause.
    """

    def get_url(_version):
        # type: (typing.Text) -> typing.Text
        # Links extracted from https://github.com/pinterest/ktlint/
        return "https://github.com/pinterest/ktlint/releases/download/{version}/ktlint".format(
            version=_version,
        )

    # Build the URL once and reuse it (it was previously constructed twice).
    url_to_download = get_url(version)
    try:
        return download_url(url_to_download, "ktlint{version}.jar".format(version=version))
    except Exception as e:
        # Narrowed from a bare 'except': no longer swallows KeyboardInterrupt
        # or SystemExit, and the original failure is chained as the cause.
        raise RuntimeError(
            "Failed to download {url}. Probably the requested version, {version}, is "
            "not valid or you have some network issue.".format(
                url=url_to_download,
                version=version,
            ),
        ) from e
@java_required
def pretty_format_kotlin(argv=None):
    # type: (typing.Optional[typing.List[typing.Text]]) -> int
    """Pre-commit hook entry point: check (and optionally fix) Kotlin files.

    Runs ktlint in check mode over the given filenames; with --autofix, runs
    ktlint --format over the files that failed. Returns 0 when everything is
    properly formatted, 1 otherwise.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--autofix",
        action="store_true",
        dest="autofix",
        help="Automatically fixes encountered not-pretty-formatted files",
    )
    parser.add_argument(
        "--ktlint-version",
        dest="ktlint_version",
        default=_get_default_version("ktlint"),
        help="KTLint version to use (default %(default)s)",
    )
    parser.add_argument("filenames", nargs="*", help="Filenames to fix")
    args = parser.parse_args(argv)
    ktlint_jar = __download_kotlin_formatter_jar(
        args.ktlint_version,
    )
    # ktlint does not return exit-code!=0 if we're formatting them.
    # To workaround this limitation we do run ktlint in check mode only,
    # which provides the expected exit status and we run it again in format
    # mode if autofix flag is enabled
    check_status, check_output = run_command("java", "-jar", ktlint_jar, "--verbose", "--relative", "--", *args.filenames)
    not_pretty_formatted_files = set()  # type: typing.Set[typing.Text]
    if check_status != 0:
        # ktlint reports problems as "<file>:<line>:<col>: <message>".
        not_pretty_formatted_files.update(line.split(":", 1)[0] for line in check_output.splitlines())
    if args.autofix:
        print("Running ktlint format on {}".format(not_pretty_formatted_files))
        run_command("java", "-jar", ktlint_jar, "--verbose", "--relative", "--format", "--", *not_pretty_formatted_files)
    status = 0
    if not_pretty_formatted_files:
        status = 1
        print(
            "{}: {}".format(
                "The following files have been fixed by ktlint" if args.autofix else "The following files are not properly formatted",
                ", ".join(sorted(not_pretty_formatted_files)),
            ),
        )
    return status
sys.exit(pretty_format_kotlin())
| 36.186813 | 134 | 0.671424 |
ace4ba95462d3ebae1bcaef3c1042721f20a68a2 | 491 | py | Python | 1/1_13.py | kopsh/python_cookbook | 298c092cd20404a0755e2170776c44a04e8648ad | [
"CNRI-Python"
] | null | null | null | 1/1_13.py | kopsh/python_cookbook | 298c092cd20404a0755e2170776c44a04e8648ad | [
"CNRI-Python"
] | null | null | null | 1/1_13.py | kopsh/python_cookbook | 298c092cd20404a0755e2170776c44a04e8648ad | [
"CNRI-Python"
] | null | null | null | class Solution:
r"""
1.13 通过某个关键字排序一个字典列表
使用operater模块的itemgetter类(若是对象,可使用attrgetter类)
>>> l = [{"id": 1, "name": "c"}, {"id": 2, "name": "b"}, {"id": 3, "name": "a"}]
>>> Solution.solve(l)
[{'id': 3, 'name': 'a'}, {'id': 2, 'name': 'b'}, {'id': 1, 'name': 'c'}]
"""
@staticmethod
def solve(l):
from operator import itemgetter
return sorted(l, key=itemgetter('name'))
if __name__ == '__main__':
import doctest
doctest.testmod() | 27.277778 | 84 | 0.531568 |
ace4bb3e623eb4b7b0ef5f05de492868b7076617 | 39,267 | py | Python | sdk/python/kfp/v2/compiler/pipeline_spec_builder.py | iguazio/pipelines | b482ba83d8edf8e683f315bfcf3f700970b23129 | [
"Apache-2.0"
] | null | null | null | sdk/python/kfp/v2/compiler/pipeline_spec_builder.py | iguazio/pipelines | b482ba83d8edf8e683f315bfcf3f700970b23129 | [
"Apache-2.0"
] | 1 | 2021-05-13T19:35:09.000Z | 2021-05-13T19:35:09.000Z | sdk/python/kfp/v2/compiler/pipeline_spec_builder.py | iguazio/pipelines | b482ba83d8edf8e683f315bfcf3f700970b23129 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions for creating PipelineSpec proto objects."""
import json
from typing import List, Mapping, Optional, Tuple, Union
from google.protobuf import struct_pb2
from kfp.pipeline_spec import pipeline_spec_pb2
from kfp.v2.components import utils as component_utils
from kfp.v2.components import for_loop
from kfp.v2.components import pipeline_channel
from kfp.v2.components import pipeline_task
from kfp.v2.components import placeholders
from kfp.v2.components import structures
from kfp.v2.components import tasks_group
from kfp.v2.components.types import artifact_types
from kfp.v2.components.types import type_utils
_GroupOrTask = Union[tasks_group.TasksGroup, pipeline_task.PipelineTask]
def _additional_input_name_for_pipeline_channel(
        channel_or_name: Union[pipeline_channel.PipelineChannel, str]) -> str:
    """Returns the input name used for a compiler-injected input.

    Args:
        channel_or_name: Either a PipelineChannel (its full name is used) or
            an already-resolved channel name string.

    Returns:
        The channel name with the ``pipelinechannel--`` prefix prepended.
    """
    # The prefix reduces the chance of colliding with a user-declared
    # component input of the same name.
    if isinstance(channel_or_name, pipeline_channel.PipelineChannel):
        base_name = channel_or_name.full_name
    else:
        base_name = channel_or_name
    return 'pipelinechannel--' + base_name
def _to_protobuf_value(value: type_utils.PARAMETER_TYPES) -> struct_pb2.Value:
    """Creates a google.protobuf.struct_pb2.Value message out of a provided
    value.

    Args:
        value: The value to be converted to a Value message.

    Returns:
        A google.protobuf.struct_pb2.Value message.

    Raises:
        ValueError: If the given value is not one of the parameter types
            (str, bool, int, float, dict, list).
    """
    if isinstance(value, str):
        return struct_pb2.Value(string_value=value)
    elif isinstance(value, bool):
        # Bug fix: bool must be tested before (int, float). ``bool`` is a
        # subclass of ``int``, so the numeric branch used to swallow
        # True/False and emit number_value=1/0 instead of a bool_value.
        return struct_pb2.Value(bool_value=value)
    elif isinstance(value, (int, float)):
        return struct_pb2.Value(number_value=value)
    elif isinstance(value, dict):
        # Recursively convert mapping values into a Struct message.
        return struct_pb2.Value(
            struct_value=struct_pb2.Struct(
                fields={k: _to_protobuf_value(v) for k, v in value.items()}))
    elif isinstance(value, list):
        # Recursively convert sequence elements into a ListValue message.
        return struct_pb2.Value(
            list_value=struct_pb2.ListValue(
                values=[_to_protobuf_value(v) for v in value]))
    else:
        raise ValueError('Value must be one of the following types: '
                         'str, int, float, bool, dict, and list. Got: '
                         f'"{value}" of type "{type(value)}".')
def build_task_spec_for_task(
    task: pipeline_task.PipelineTask,
    parent_component_inputs: pipeline_spec_pb2.ComponentInputsSpec,
    tasks_in_current_dag: List[str],
    input_parameters_in_current_dag: List[str],
    input_artifacts_in_current_dag: List[str],
) -> pipeline_spec_pb2.PipelineTaskSpec:
    """Builds PipelineTaskSpec for a pipeline task.

    A task input may reference an output outside its immediate DAG.
    For instance::

        random_num = random_num_op(...)
        with dsl.Condition(random_num.output > 5):
            print_op('%s > 5' % random_num.output)

    In this example, `dsl.Condition` forms a subDAG with one task from `print_op`
    inside the subDAG. The task of `print_op` references output from `random_num`
    task, which is outside the sub-DAG. When compiling to IR, such cross DAG
    reference is disallowed. So we need to "punch a hole" in the sub-DAG to make
    the input available in the subDAG component inputs if it's not already there,
    Next, we can call this method to fix the tasks inside the subDAG to make them
    reference the component inputs instead of directly referencing the original
    producer task.

    Args:
        task: The task to build a PipelineTaskSpec for.
        parent_component_inputs: The task's parent component's input specs.
        tasks_in_current_dag: The list of tasks names for tasks in the same dag.
        input_parameters_in_current_dag: The list of input parameters in the DAG
            component.
        input_artifacts_in_current_dag: The list of input artifacts in the DAG
            component.

    Returns:
        A PipelineTaskSpec object representing the task.
    """
    pipeline_task_spec = pipeline_spec_pb2.PipelineTaskSpec()
    pipeline_task_spec.task_info.name = (
        task.task_spec.display_name or task.name)
    # Use task.name for component_ref.name because we may customize component
    # spec for individual tasks to work around the lack of optional inputs
    # support in IR.
    pipeline_task_spec.component_ref.name = (
        component_utils.sanitize_component_name(task.name))
    pipeline_task_spec.caching_options.enable_cache = (
        task.task_spec.enable_caching)

    for input_name, input_value in task.inputs.items():

        if isinstance(input_value, pipeline_channel.PipelineArtifactChannel):

            if input_value.task_name:
                # Value is produced by an upstream task.
                if input_value.task_name in tasks_in_current_dag:
                    # Dependent task within the same DAG.
                    pipeline_task_spec.inputs.artifacts[
                        input_name].task_output_artifact.producer_task = (
                            component_utils.sanitize_task_name(
                                input_value.task_name))
                    pipeline_task_spec.inputs.artifacts[
                        input_name].task_output_artifact.output_artifact_key = (
                            input_value.name)
                else:
                    # Dependent task not from the same DAG: the value must be
                    # routed through the parent component's inputs (the
                    # compiler-injected "hole").
                    component_input_artifact = (
                        _additional_input_name_for_pipeline_channel(input_value)
                    )
                    assert component_input_artifact in parent_component_inputs.artifacts, \
                        'component_input_artifact: {} not found. All inputs: {}'.format(
                            component_input_artifact, parent_component_inputs)
                    pipeline_task_spec.inputs.artifacts[
                        input_name].component_input_artifact = (
                            component_input_artifact)
            else:
                raise RuntimeError(
                    f'Artifacts must be produced by a task. Got {input_value}.')

        elif isinstance(input_value, pipeline_channel.PipelineParameterChannel):

            if input_value.task_name:
                # Value is produced by an upstream task.
                if input_value.task_name in tasks_in_current_dag:
                    # Dependent task within the same DAG.
                    pipeline_task_spec.inputs.parameters[
                        input_name].task_output_parameter.producer_task = (
                            component_utils.sanitize_task_name(
                                input_value.task_name))
                    pipeline_task_spec.inputs.parameters[
                        input_name].task_output_parameter.output_parameter_key = (
                            input_value.name)
                else:
                    # Dependent task not from the same DAG.
                    component_input_parameter = (
                        _additional_input_name_for_pipeline_channel(input_value)
                    )
                    assert component_input_parameter in parent_component_inputs.parameters, \
                        'component_input_parameter: {} not found. All inputs: {}'.format(
                            component_input_parameter, parent_component_inputs)
                    pipeline_task_spec.inputs.parameters[
                        input_name].component_input_parameter = (
                            component_input_parameter)
            else:
                # Value is from pipeline input.
                component_input_parameter = input_value.full_name
                if component_input_parameter not in parent_component_inputs.parameters:
                    component_input_parameter = (
                        _additional_input_name_for_pipeline_channel(input_value)
                    )
                pipeline_task_spec.inputs.parameters[
                    input_name].component_input_parameter = (
                        component_input_parameter)

        elif isinstance(input_value, for_loop.LoopArgument):

            component_input_parameter = (
                _additional_input_name_for_pipeline_channel(input_value))
            assert component_input_parameter in parent_component_inputs.parameters, \
                'component_input_parameter: {} not found. All inputs: {}'.format(
                    component_input_parameter, parent_component_inputs)
            pipeline_task_spec.inputs.parameters[
                input_name].component_input_parameter = (
                    component_input_parameter)

        elif isinstance(input_value, for_loop.LoopArgumentVariable):

            component_input_parameter = (
                _additional_input_name_for_pipeline_channel(
                    input_value.loop_argument))
            assert component_input_parameter in parent_component_inputs.parameters, \
                'component_input_parameter: {} not found. All inputs: {}'.format(
                    component_input_parameter, parent_component_inputs)
            pipeline_task_spec.inputs.parameters[
                input_name].component_input_parameter = (
                    component_input_parameter)
            # Select the subvariable out of the JSON-encoded loop item at
            # runtime.
            pipeline_task_spec.inputs.parameters[
                input_name].parameter_expression_selector = (
                    'parseJson(string_value)["{}"]'.format(
                        input_value.subvar_name))

        elif isinstance(input_value, str):
            # Handle extra input due to string concat
            pipeline_channels = (
                pipeline_channel.extract_pipeline_channels_from_any(input_value)
            )
            for channel in pipeline_channels:
                # value contains PipelineChannel placeholders which needs to be
                # replaced. And the input needs to be added to the task spec.

                # Form the name for the compiler injected input, and make sure it
                # doesn't collide with any existing input names.
                additional_input_name = (
                    _additional_input_name_for_pipeline_channel(channel))

                # We don't expect collision to happen because we prefix the name
                # of additional input with 'pipelinechannel--'. But just in case
                # collision did happend, throw a RuntimeError so that we don't
                # get surprise at runtime.
                for existing_input_name, _ in task.inputs.items():
                    if existing_input_name == additional_input_name:
                        raise RuntimeError(
                            'Name collision between existing input name '
                            '{} and compiler injected input name {}'.format(
                                existing_input_name, additional_input_name))

                additional_input_placeholder = (
                    placeholders.input_parameter_placeholder(
                        additional_input_name))
                input_value = input_value.replace(channel.pattern,
                                                  additional_input_placeholder)

                if channel.task_name:
                    # Value is produced by an upstream task.
                    if channel.task_name in tasks_in_current_dag:
                        # Dependent task within the same DAG.
                        pipeline_task_spec.inputs.parameters[
                            additional_input_name].task_output_parameter.producer_task = (
                                component_utils.sanitize_task_name(
                                    channel.task_name))
                        # Bug fix: this key must be set on the injected
                        # additional input (as producer_task above is), not on
                        # the original ``input_name``, whose oneof is later
                        # overwritten by the runtime_value constant below.
                        pipeline_task_spec.inputs.parameters[
                            additional_input_name].task_output_parameter.output_parameter_key = (
                                channel.name)
                    else:
                        # Dependent task not from the same DAG.
                        component_input_parameter = (
                            _additional_input_name_for_pipeline_channel(channel)
                        )
                        assert component_input_parameter in parent_component_inputs.parameters, \
                            'component_input_parameter: {} not found. All inputs: {}'.format(
                                component_input_parameter, parent_component_inputs)
                        pipeline_task_spec.inputs.parameters[
                            additional_input_name].component_input_parameter = (
                                component_input_parameter)
                else:
                    # Value is from pipeline input. (or loop?)
                    component_input_parameter = channel.full_name
                    if component_input_parameter not in parent_component_inputs.parameters:
                        component_input_parameter = (
                            _additional_input_name_for_pipeline_channel(channel)
                        )
                    pipeline_task_spec.inputs.parameters[
                        additional_input_name].component_input_parameter = (
                            component_input_parameter)

            # The (now placeholder-substituted) concatenated string itself is
            # passed as a runtime constant.
            pipeline_task_spec.inputs.parameters[
                input_name].runtime_value.constant.string_value = input_value

        elif isinstance(input_value, (str, int, float, bool, dict, list)):

            pipeline_task_spec.inputs.parameters[
                input_name].runtime_value.constant.CopyFrom(
                    _to_protobuf_value(input_value))

        else:
            raise ValueError(
                'Input argument supports only the following types: '
                'str, int, float, bool, dict, and list.'
                f'Got {input_value} of type {type(input_value)}.')

    return pipeline_task_spec
def build_component_spec_for_task(
        task: pipeline_task.PipelineTask) -> pipeline_spec_pb2.ComponentSpec:
    """Builds ComponentSpec for a pipeline task.

    Args:
        task: The task to build a ComponentSpec for.

    Returns:
        A ComponentSpec object for the task.
    """
    component_spec = pipeline_spec_pb2.ComponentSpec()
    component_spec.executor_label = component_utils.sanitize_executor_label(
        task.name)

    input_defs = component_spec.input_definitions
    for name, spec in (task.component_spec.inputs or {}).items():
        # Skip inputs without supplied arguments, as a workaround to support
        # optional inputs (IR has no native notion of them).
        if name not in task.inputs:
            continue
        if type_utils.is_parameter_type(spec.type):
            input_defs.parameters[name].parameter_type = (
                type_utils.get_parameter_type(spec.type))
        else:
            input_defs.artifacts[name].artifact_type.CopyFrom(
                type_utils.get_artifact_type_schema(spec.type))

    output_defs = component_spec.output_definitions
    for name, spec in (task.component_spec.outputs or {}).items():
        if type_utils.is_parameter_type(spec.type):
            output_defs.parameters[name].parameter_type = (
                type_utils.get_parameter_type(spec.type))
        else:
            output_defs.artifacts[name].artifact_type.CopyFrom(
                type_utils.get_artifact_type_schema(spec.type))

    return component_spec
def build_container_spec_for_task(
    task: pipeline_task.PipelineTask
) -> pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec:
    """Builds PipelineContainerSpec for a pipeline task.

    Args:
        task: The task to build a PipelineContainerSpec for.

    Returns:
        A PipelineContainerSpec object for the task.
    """
    container_spec = (
        pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec(
            image=task.container_spec.image,
            command=task.container_spec.commands,
            args=task.container_spec.arguments,
        ))

    if task.container_spec.env is not None:
        # Bug fix: direct assignment to a repeated proto field raises
        # AttributeError in protobuf Python; extend() is the supported API.
        container_spec.env.extend([
            pipeline_spec_pb2.PipelineDeploymentConfig.PipelineContainerSpec
            .EnvVar(name=name, value=value)
            for name, value in task.container_spec.env.items()
        ])

    if task.container_spec.resources is not None:
        # Bug fix: 'reources' typo corrected to 'resources'; setting an
        # unknown field on a proto message raises AttributeError at runtime.
        container_spec.resources.cpu_limit = (
            task.container_spec.resources.cpu_limit)
        container_spec.resources.memory_limit = (
            task.container_spec.resources.memory_limit)
        if task.container_spec.resources.accelerator_count is not None:
            container_spec.resources.accelerator.CopyFrom(
                pipeline_spec_pb2.PipelineDeploymentConfig
                .PipelineContainerSpec.ResourceSpec.AcceleratorConfig(
                    type=task.container_spec.resources.accelerator_type,
                    count=task.container_spec.resources.accelerator_count,
                ))

    return container_spec
def _fill_in_component_input_default_value(
    component_spec: pipeline_spec_pb2.ComponentSpec,
    input_name: str,
    default_value: Optional[type_utils.PARAMETER_TYPES],
) -> None:
    """Fills in the default value of a component input parameter.

    Args:
        component_spec: The ComponentSpec to update in place.
        input_name: The name of the input parameter.
        default_value: The default value of the input parameter; ``None``
            means no default, in which case the spec is left untouched.
    """
    if default_value is None:
        return

    parameter = component_spec.input_definitions.parameters[input_name]
    parameter_type = parameter.parameter_type

    if parameter_type in (
            pipeline_spec_pb2.ParameterType.NUMBER_INTEGER,
            pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE,
    ):
        # Integer and double defaults are both carried as number_value.
        parameter.default_value.number_value = default_value
    elif parameter_type == pipeline_spec_pb2.ParameterType.STRING:
        parameter.default_value.string_value = default_value
    elif parameter_type == pipeline_spec_pb2.ParameterType.BOOLEAN:
        parameter.default_value.bool_value = default_value
    elif parameter_type in (
            pipeline_spec_pb2.ParameterType.STRUCT,
            pipeline_spec_pb2.ParameterType.LIST,
    ):
        # Structured defaults are converted into a protobuf Value message.
        parameter.default_value.CopyFrom(_to_protobuf_value(default_value))
def build_component_spec_for_group(
    pipeline_channels: List[pipeline_channel.PipelineChannel],
    is_root_group: bool,
) -> pipeline_spec_pb2.ComponentSpec:
    """Builds ComponentSpec for a TasksGroup.

    Args:
        pipeline_channels: The list of pipeline channels referenced by the
            group.
        is_root_group: Whether the group is the pipeline's root dag. Root
            inputs keep their original channel names; nested groups use
            compiler-injected names.

    Returns:
        A ComponentSpec object representing the group.
    """
    component_spec = pipeline_spec_pb2.ComponentSpec()

    for channel in pipeline_channels:
        if is_root_group:
            input_name = channel.name
        else:
            input_name = _additional_input_name_for_pipeline_channel(channel)

        if isinstance(channel, pipeline_channel.PipelineArtifactChannel):
            component_spec.input_definitions.artifacts[
                input_name].artifact_type.CopyFrom(
                    type_utils.get_artifact_type_schema(channel.channel_type))
        else:
            # channel is one of PipelineParameterChannel, LoopArgument, or
            # LoopArgumentVariable.
            component_spec.input_definitions.parameters[
                input_name].parameter_type = type_utils.get_parameter_type(
                    channel.channel_type)

        # Only the root dag carries user-provided defaults.
        # TODO: should we fill in default value for all groups and tasks?
        if is_root_group:
            _fill_in_component_input_default_value(
                component_spec=component_spec,
                input_name=input_name,
                default_value=channel.value,
            )

    return component_spec
def _pop_input_from_task_spec(
    task_spec: pipeline_spec_pb2.PipelineTaskSpec,
    input_name: str,
) -> None:
    """Removes an input from a task spec's inputs.

    Args:
        task_spec: The pipeline task spec to update in place.
        input_name: The name of the input, which could be an artifact or a
            parameter.
    """
    # The name may live in either map; both pops are attempted.
    for input_map in (task_spec.inputs.artifacts,
                      task_spec.inputs.parameters):
        input_map.pop(input_name)

    # Drop the inputs message entirely once it carries no information.
    if task_spec.inputs == pipeline_spec_pb2.TaskInputsSpec():
        task_spec.ClearField('inputs')
def _update_task_spec_for_loop_group(
    group: tasks_group.ParallelFor,
    pipeline_task_spec: pipeline_spec_pb2.PipelineTaskSpec,
) -> None:
    """Updates PipelineTaskSpec for loop group.

    Args:
        group: The loop group to update task spec for.
        pipeline_task_spec: The pipeline task spec to update in place.
    """
    if group.items_is_pipeline_channel:
        # Looping over the runtime value of another channel (a task output or
        # a pipeline input) rather than a literal list.
        loop_items_channel = group.loop_argument.items_or_pipeline_channel
        input_parameter_name = _additional_input_name_for_pipeline_channel(
            loop_items_channel)
        loop_argument_item_name = _additional_input_name_for_pipeline_channel(
            group.loop_argument.full_name)

        loop_arguments_item = '{}-{}'.format(
            input_parameter_name, for_loop.LoopArgument.LOOP_ITEM_NAME_BASE)
        # Invariant: the loop argument's injected name is derived from the
        # items channel name plus the fixed loop-item suffix.
        assert loop_arguments_item == loop_argument_item_name

        # The iterator reads the full list from ``input_parameter_name`` and
        # feeds each element in through ``item_input``.
        pipeline_task_spec.parameter_iterator.items.input_parameter = (
            input_parameter_name)
        pipeline_task_spec.parameter_iterator.item_input = (
            loop_argument_item_name)

        # If the loop items itself is a loop arguments variable, handle the
        # subvar name.
        if isinstance(loop_items_channel, for_loop.LoopArgumentVariable):
            # Select the subvariable out of the JSON-encoded outer loop item
            # at runtime.
            pipeline_task_spec.inputs.parameters[
                input_parameter_name].parameter_expression_selector = (
                    'parseJson(string_value)["{}"]'.format(
                        loop_items_channel.subvar_name))
            pipeline_task_spec.inputs.parameters[
                input_parameter_name].component_input_parameter = (
                    _additional_input_name_for_pipeline_channel(
                        loop_items_channel.loop_argument))

        # NOTE(review): ``remove_input_name`` is never read afterwards — it
        # appears to be dead code; confirm before deleting.
        remove_input_name = loop_argument_item_name
    else:
        # Looping over a literal list of values embedded in the pipeline spec.
        input_parameter_name = _additional_input_name_for_pipeline_channel(
            group.loop_argument)
        raw_values = group.loop_argument.items_or_pipeline_channel

        # sort_keys keeps the serialized items deterministic across runs.
        pipeline_task_spec.parameter_iterator.items.raw = json.dumps(
            raw_values, sort_keys=True)
        pipeline_task_spec.parameter_iterator.item_input = (
            input_parameter_name)

    # The loop-item input is fed by the iterator itself, so it must not also
    # remain a regular task input.
    _pop_input_from_task_spec(
        task_spec=pipeline_task_spec,
        input_name=pipeline_task_spec.parameter_iterator.item_input)
def _resolve_condition_operands(
    left_operand: Union[str, pipeline_channel.PipelineChannel],
    right_operand: Union[str, pipeline_channel.PipelineChannel],
) -> Tuple[str, str]:
    """Resolves values and PipelineChannels for condition operands.

    Args:
        left_operand: The left operand of a condition expression.
        right_operand: The right operand of a condition expression.

    Returns:
        A tuple of the resolved operands values:
        (left_operand_value, right_operand_value).

    Raises:
        ValueError: If an operand is a non-scalar channel, or the operand
            types cannot be determined.
    """
    # Pre-scan the operands: only scalar comparisons are supported, so reject
    # STRUCT/LIST/unspecified-typed channels up front.
    for value_or_reference in [left_operand, right_operand]:
        if isinstance(value_or_reference, pipeline_channel.PipelineChannel):
            parameter_type = type_utils.get_parameter_type(
                value_or_reference.channel_type)
            if parameter_type in [
                    pipeline_spec_pb2.ParameterType.STRUCT,
                    pipeline_spec_pb2.ParameterType.LIST,
                    pipeline_spec_pb2.ParameterType
                    .PARAMETER_TYPE_ENUM_UNSPECIFIED,
            ]:
                input_name = _additional_input_name_for_pipeline_channel(
                    value_or_reference)
                raise ValueError('Conditional requires scalar parameter values'
                                 ' for comparison. Found input "{}" of type {}'
                                 ' in pipeline definition instead.'.format(
                                     input_name,
                                     value_or_reference.channel_type))

    parameter_types = set()
    for value_or_reference in [left_operand, right_operand]:
        if isinstance(value_or_reference, pipeline_channel.PipelineChannel):
            parameter_type = type_utils.get_parameter_type(
                value_or_reference.channel_type)
        else:
            parameter_type = type_utils.get_parameter_type(
                type(value_or_reference).__name__)

        parameter_types.add(parameter_type)

    if len(parameter_types) == 2:
        # Two different types being compared. The only possible types are
        # String, Boolean, Double and Integer. We'll promote the other type
        # using the following precedence:
        # String > Boolean > Double > Integer
        if pipeline_spec_pb2.ParameterType.STRING in parameter_types:
            canonical_parameter_type = pipeline_spec_pb2.ParameterType.STRING
        elif pipeline_spec_pb2.ParameterType.BOOLEAN in parameter_types:
            canonical_parameter_type = pipeline_spec_pb2.ParameterType.BOOLEAN
        else:
            # Must be a double and int, promote to double.
            assert pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE in parameter_types, \
                'Types: {} [{} {}]'.format(
                    parameter_types, left_operand, right_operand)
            assert pipeline_spec_pb2.ParameterType.NUMBER_INTEGER in parameter_types, \
                'Types: {} [{} {}]'.format(
                    parameter_types, left_operand, right_operand)
            canonical_parameter_type = pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE
    elif len(parameter_types) == 1:  # Both operands are the same type.
        canonical_parameter_type = parameter_types.pop()
    else:
        # Probably shouldn't happen.
        raise ValueError('Unable to determine operand types for'
                         ' "{}" and "{}"'.format(left_operand, right_operand))

    operand_values = []
    for value_or_reference in [left_operand, right_operand]:
        if isinstance(value_or_reference, pipeline_channel.PipelineChannel):
            input_name = _additional_input_name_for_pipeline_channel(
                value_or_reference)
            operand_value = "inputs.parameter_values['{input_name}']".format(
                input_name=input_name)
            parameter_type = type_utils.get_parameter_type(
                value_or_reference.channel_type)
            if parameter_type == pipeline_spec_pb2.ParameterType.NUMBER_INTEGER:
                operand_value = 'int({})'.format(operand_value)
        elif isinstance(value_or_reference, str):
            operand_value = "'{}'".format(value_or_reference)
            parameter_type = pipeline_spec_pb2.ParameterType.STRING
        elif isinstance(value_or_reference, bool):
            # Booleans need to be compared as 'true' or 'false' in CEL.
            operand_value = str(value_or_reference).lower()
            parameter_type = pipeline_spec_pb2.ParameterType.BOOLEAN
        elif isinstance(value_or_reference, int):
            operand_value = str(value_or_reference)
            parameter_type = pipeline_spec_pb2.ParameterType.NUMBER_INTEGER
        else:
            assert isinstance(value_or_reference, float), value_or_reference
            operand_value = str(value_or_reference)
            parameter_type = pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE

        if parameter_type != canonical_parameter_type:
            # Type-cast to so CEL does not complain.
            if canonical_parameter_type == pipeline_spec_pb2.ParameterType.STRING:
                assert parameter_type in [
                    pipeline_spec_pb2.ParameterType.BOOLEAN,
                    pipeline_spec_pb2.ParameterType.NUMBER_INTEGER,
                    pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE,
                ]
                operand_value = "'{}'".format(operand_value)
            elif canonical_parameter_type == pipeline_spec_pb2.ParameterType.BOOLEAN:
                assert parameter_type in [
                    pipeline_spec_pb2.ParameterType.NUMBER_INTEGER,
                    pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE,
                ]
                # Bug fix: zero is falsy. The previous code emitted 'true' for
                # 0 and 'false' for non-zero (inverted), and used int() which
                # raised ValueError on double literals such as '2.5'.
                # NOTE(review): a *channel* operand reaches this point as a
                # CEL expression string (e.g. "int(inputs...)") that float()
                # cannot parse — pre-existing limitation, confirm upstream.
                operand_value = 'true' if float(operand_value) != 0 else 'false'
            else:
                assert canonical_parameter_type == pipeline_spec_pb2.ParameterType.NUMBER_DOUBLE
                assert parameter_type == pipeline_spec_pb2.ParameterType.NUMBER_INTEGER
                operand_value = 'double({})'.format(operand_value)

        operand_values.append(operand_value)

    return tuple(operand_values)
def _update_task_spec_for_condition_group(
    group: tasks_group.Condition,
    pipeline_task_spec: pipeline_spec_pb2.PipelineTaskSpec,
) -> None:
    """Updates PipelineTaskSpec for a condition group.

    Args:
        group: The condition group to update the task spec for.
        pipeline_task_spec: The pipeline task spec to update in place.
    """
    condition = group.condition
    left_operand_value, right_operand_value = _resolve_condition_operands(
        condition.left_operand, condition.right_operand)

    # Render the condition as a CEL expression for the trigger policy.
    condition_string = (
        f'{left_operand_value} {condition.operator} {right_operand_value}')
    pipeline_task_spec.trigger_policy.CopyFrom(
        pipeline_spec_pb2.PipelineTaskSpec.TriggerPolicy(
            condition=condition_string))
def build_task_spec_for_exit_task(
    task: pipeline_task.PipelineTask,
    dependent_task: str,
    pipeline_inputs: pipeline_spec_pb2.ComponentInputsSpec,
) -> pipeline_spec_pb2.PipelineTaskSpec:
    """Builds PipelineTaskSpec for an exit handler's exit task.

    Args:
        task: The exit handler's exit task to build a task spec for.
        dependent_task: The dependent task name for the exit task, i.e. the
            name of the exit handler group.
        pipeline_inputs: The pipeline level input definitions.

    Returns:
        A PipelineTaskSpec object representing the exit task.
    """
    pipeline_task_spec = build_task_spec_for_task(
        task=task,
        parent_component_inputs=pipeline_inputs,
        tasks_in_current_dag=[],  # Irrelevant for an exit task.
        input_parameters_in_current_dag=pipeline_inputs.parameters.keys(),
        input_artifacts_in_current_dag=[],
    )
    pipeline_task_spec.dependent_tasks.append(dependent_task)
    # The exit task must fire no matter how the guarded tasks finished
    # (succeeded, failed, or skipped).
    trigger_strategy = (
        pipeline_spec_pb2.PipelineTaskSpec.TriggerPolicy.TriggerStrategy
        .ALL_UPSTREAM_TASKS_COMPLETED)
    pipeline_task_spec.trigger_policy.strategy = trigger_strategy
    return pipeline_task_spec
def build_task_spec_for_group(
    group: tasks_group.TasksGroup,
    pipeline_channels: List[pipeline_channel.PipelineChannel],
    tasks_in_current_dag: List[str],
    is_parent_component_root: bool,
) -> pipeline_spec_pb2.PipelineTaskSpec:
    """Builds PipelineTaskSpec for a group.

    Args:
        group: The group to build PipelineTaskSpec for.
        pipeline_channels: The list of pipeline channels referenced by the group.
        tasks_in_current_dag: The list of tasks names for tasks in the same dag.
        is_parent_component_root: Whether the parent component is the pipeline's
            root dag.

    Returns:
        A PipelineTaskSpec object representing the group.
    """
    pipeline_task_spec = pipeline_spec_pb2.PipelineTaskSpec()
    pipeline_task_spec.task_info.name = group.display_name or group.name
    pipeline_task_spec.component_ref.name = (
        component_utils.sanitize_component_name(group.name))

    for channel in pipeline_channels:

        # For a loop-variable channel, wire against the underlying loop
        # argument and remember which sub-variable to select at runtime.
        channel_full_name = channel.full_name
        subvar_name = None
        if isinstance(channel, for_loop.LoopArgumentVariable):
            channel_full_name = channel.loop_argument.full_name
            subvar_name = channel.subvar_name

        input_name = _additional_input_name_for_pipeline_channel(channel)

        channel_name = channel.name
        if subvar_name:
            # Select the subvariable out of the JSON-encoded loop item at
            # runtime.
            pipeline_task_spec.inputs.parameters[
                input_name].parameter_expression_selector = (
                    'parseJson(string_value)["{}"]'.format(subvar_name))
            if not channel.is_with_items_loop_argument:
                channel_name = channel.items_or_pipeline_channel.name

        if isinstance(channel, pipeline_channel.PipelineArtifactChannel):
            if channel.task_name and channel.task_name in tasks_in_current_dag:
                # Producer lives in the same DAG: reference its output
                # directly.
                pipeline_task_spec.inputs.artifacts[
                    input_name].task_output_artifact.producer_task = (
                        component_utils.sanitize_task_name(channel.task_name))
                pipeline_task_spec.inputs.artifacts[
                    input_name].task_output_artifact.output_artifact_key = (
                        channel_name)
            else:
                # Otherwise the value is routed through the parent
                # component's inputs (the compiler-injected "hole").
                pipeline_task_spec.inputs.artifacts[
                    input_name].component_input_artifact = (
                        channel_full_name
                        if is_parent_component_root else input_name)
        else:
            # channel is one of PipelineParameterChannel, LoopArgument, or
            # LoopArgumentVariable
            if channel.task_name and channel.task_name in tasks_in_current_dag:
                pipeline_task_spec.inputs.parameters[
                    input_name].task_output_parameter.producer_task = (
                        component_utils.sanitize_task_name(channel.task_name))
                pipeline_task_spec.inputs.parameters[
                    input_name].task_output_parameter.output_parameter_key = (
                        channel_name)
            else:
                # Root inputs keep the channel's own name; nested groups use
                # the compiler-injected name.
                pipeline_task_spec.inputs.parameters[
                    input_name].component_input_parameter = (
                        channel_full_name if is_parent_component_root else
                        _additional_input_name_for_pipeline_channel(
                            channel_full_name))

    # Loop and condition groups need additional iterator / trigger-policy
    # wiring on top of the plain task spec.
    if isinstance(group, tasks_group.ParallelFor):
        _update_task_spec_for_loop_group(
            group=group,
            pipeline_task_spec=pipeline_task_spec,
        )
    elif isinstance(group, tasks_group.Condition):
        _update_task_spec_for_condition_group(
            group=group,
            pipeline_task_spec=pipeline_task_spec,
        )

    return pipeline_task_spec
def populate_metrics_in_dag_outputs(
    tasks: List[pipeline_task.PipelineTask],
    task_name_to_parent_groups: Mapping[str, List[_GroupOrTask]],
    task_name_to_task_spec: Mapping[str, pipeline_spec_pb2.PipelineTaskSpec],
    task_name_to_component_spec: Mapping[str, pipeline_spec_pb2.ComponentSpec],
    pipeline_spec: pipeline_spec_pb2.PipelineSpec,
) -> None:
    """Populates metrics artifacts in DAG outputs.

    Args:
        tasks: The list of tasks that may produce metrics outputs.
        task_name_to_parent_groups: The dict of task name to parent groups.
            Key is the task's name. Value is a list of ancestor groups including
            the task itself. The list of a given op is sorted in a way that the
            farthest group is the first and the task itself is the last.
        task_name_to_task_spec: The dict of task name to PipelineTaskSpec.
        task_name_to_component_spec: The dict of task name to ComponentSpec.
        pipeline_spec: The pipeline_spec to update in-place.
    """
    for task in tasks:
        task_spec = task_name_to_task_spec[task.name]
        component_spec = task_name_to_component_spec[task.name]

        # Get the tuple of (component_name, task_name) of all its parent groups.
        parent_components_and_tasks = [('_root', '')]
        # skip the op itself and the root group which cannot be retrived via name.
        for group_name in task_name_to_parent_groups[task.name][1:-1]:
            parent_components_and_tasks.append(
                (component_utils.sanitize_component_name(group_name),
                 component_utils.sanitize_task_name(group_name)))
        # Reverse the order to make the farthest group in the end.
        parent_components_and_tasks.reverse()

        for output_name, artifact_spec in \
                component_spec.output_definitions.artifacts.items():

            # Only Metrics / ClassificationMetrics outputs are surfaced as
            # DAG-level outputs.
            if artifact_spec.artifact_type.WhichOneof(
                    'kind'
            ) == 'schema_title' and artifact_spec.artifact_type.schema_title in [
                    artifact_types.Metrics.TYPE_NAME,
                    artifact_types.ClassificationMetrics.TYPE_NAME,
            ]:
                # Prefix with the task name so outputs from sibling tasks
                # cannot collide at the group level.
                unique_output_name = '{}-{}'.format(task.name, output_name)

                sub_task_name = task.name
                sub_task_output = output_name
                # Bubble the output up through every ancestor DAG: each level
                # re-exports the child's output under the unique name, so the
                # (producer, key) pair shifts one level outward per iteration.
                for component_name, task_name in parent_components_and_tasks:
                    group_component_spec = (
                        pipeline_spec.root if component_name == '_root' else
                        pipeline_spec.components[component_name])
                    group_component_spec.output_definitions.artifacts[
                        unique_output_name].CopyFrom(artifact_spec)
                    group_component_spec.dag.outputs.artifacts[
                        unique_output_name].artifact_selectors.append(
                            pipeline_spec_pb2.DagOutputsSpec
                            .ArtifactSelectorSpec(
                                producer_subtask=sub_task_name,
                                output_artifact_key=sub_task_output,
                            ))
                    sub_task_name = task_name
                    sub_task_output = unique_output_name
| 45.712456 | 97 | 0.655996 |
ace4bb7b4c19199c48f20aa3e827b652f729ce33 | 6,679 | py | Python | cunumeric/utils.py | mferreravila/cunumeric | 87901174d0ecb1719bbccb98201dc19034973834 | [
"Apache-2.0"
] | null | null | null | cunumeric/utils.py | mferreravila/cunumeric | 87901174d0ecb1719bbccb98201dc19034973834 | [
"Apache-2.0"
] | null | null | null | cunumeric/utils.py | mferreravila/cunumeric | 87901174d0ecb1719bbccb98201dc19034973834 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021-2022 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations
import traceback
from functools import reduce
from string import ascii_lowercase, ascii_uppercase
from types import FrameType
from typing import Any, List, Sequence, Tuple, Union, cast
import numpy as np
# NumPy scalar types (plus the equivalent Python builtins) that cunumeric
# accepts as array dtypes; membership is checked by `is_supported_dtype`.
# Note: complex types and 8-bit integers are intentionally absent here.
_SUPPORTED_DTYPES = [
    np.float16,
    np.float32,
    np.float64,
    float,
    np.int16,
    np.int32,
    np.int64,
    int,
    np.uint16,
    np.uint32,
    np.uint64,
    np.bool_,
    bool,
]
def broadcast_shapes(*args: tuple[int, ...]) -> tuple[int, ...]:
    """Return the shape produced by broadcasting all given shapes together.

    Uses zero-itemsize placeholder arrays (``dtype=[]``) so no real data is
    allocated; ``np.broadcast`` then applies NumPy's broadcasting rules.
    """
    placeholders = (np.empty(shape, dtype=[]) for shape in args)
    return np.broadcast(*placeholders).shape
def is_advanced_indexing(key: Any) -> bool:
    """Return True when *key* would trigger NumPy "advanced" indexing.

    Scalars, slices, Ellipsis and None (np.newaxis) are basic indexing;
    a tuple is advanced iff any of its members is.  Everything else
    (arrays, lists, boolean masks, ...) counts as advanced.
    """
    if key is Ellipsis or key is None:  # np.newdim case
        return False
    if isinstance(key, tuple):
        return any(is_advanced_indexing(part) for part in key)
    if np.isscalar(key) or isinstance(key, slice):
        return False
    return True
def find_last_user_stacklevel() -> int:
    """Return the `stacklevel` (for warnings) of the first non-cunumeric caller.

    Walks outward from the current frame, counting frames whose module name
    starts with "cunumeric"; the first foreign frame stops the walk.
    """
    level = 1
    for frame, _lineno in traceback.walk_stack(None):
        module_name = frame.f_globals["__name__"]
        if not module_name.startswith("cunumeric"):
            break
        level += 1
    return level
def get_line_number_from_frame(frame: FrameType) -> str:
    """Format *frame* as "<filename>:<lineno>"."""
    return "{}:{}".format(frame.f_code.co_filename, frame.f_lineno)
def find_last_user_frames(top_only: bool = True) -> str:
    """Return "file:line" info for the first non-cunumeric caller.

    With ``top_only=True`` only that single frame is reported; otherwise all
    frames from it down to (but excluding) the legion_top.py driver frame are
    joined with "|".
    """
    # Walk outward until we leave cunumeric's own modules; `last` keeps the
    # frame we stopped on.
    for last, _lineno in traceback.walk_stack(None):
        if "__name__" not in last.f_globals:
            continue
        if not last.f_globals["__name__"].startswith("cunumeric"):
            break
    if top_only:
        return get_line_number_from_frame(last)
    user_frames: list[FrameType] = []
    frame: Union[FrameType, None] = last
    while frame is not None and "legion_top.py" not in frame.f_code.co_filename:
        user_frames.append(frame)
        frame = frame.f_back
    return "|".join(get_line_number_from_frame(f) for f in user_frames)
def is_supported_dtype(dtype: Any) -> bool:
    """Return True when *dtype* is one of the dtypes cunumeric supports.

    Raises:
        TypeError: if *dtype* is not an ``np.dtype`` instance.
    """
    if isinstance(dtype, np.dtype):
        return dtype.type in _SUPPORTED_DTYPES
    raise TypeError("expected a NumPy dtype")
def calculate_volume(shape: tuple[int, ...]) -> int:
    """Return the number of elements implied by *shape*.

    An empty shape yields 0 (not 1) by cunumeric convention.
    """
    if not shape:
        return 0
    volume = 1
    for extent in shape:
        volume *= extent
    return volume
def get_arg_dtype(dtype: np.dtype[Any]) -> np.dtype[Any]:
    """Build the aligned struct dtype {arg: int64, arg_value: *dtype*}."""
    struct_fields = [("arg", np.int64), ("arg_value", dtype)]
    return np.dtype(struct_fields, align=True)
def get_arg_value_dtype(dtype: np.dtype[Any]) -> np.dtype[Any]:
    """Extract the scalar type of the "arg_value" field of an arg dtype."""
    value_dtype = dtype.fields["arg_value"][0]  # type: ignore [index]
    return cast(Any, value_dtype.type)
Modes = Tuple[List[str], List[str], List[str]]
def dot_modes(a_ndim: int, b_ndim: int) -> Modes:
    """Compute einsum-style mode labels for a `dot` of two operands.

    `a` uses lowercase labels and `b` uppercase; the contracted dimension of
    `b` (its last, or second-to-last when b_ndim >= 2) is relabeled to match
    `a`'s last dimension.
    """
    a_modes = list(ascii_lowercase[:a_ndim])
    b_modes = list(ascii_uppercase[:b_ndim])
    if a_ndim == 0 or b_ndim == 0:
        # Scalar operand: nothing is contracted.
        return (a_modes, b_modes, a_modes + b_modes)
    if b_ndim == 1:
        b_modes[-1] = a_modes[-1]
        return (a_modes, b_modes, a_modes[:-1])
    b_modes[-2] = a_modes[-1]
    out_modes = a_modes[:-1] + b_modes[:-2] + [b_modes[-1]]
    return (a_modes, b_modes, out_modes)
def inner_modes(a_ndim: int, b_ndim: int) -> Modes:
    """Compute einsum-style mode labels for an `inner` product.

    Both operands contract over their last dimension; scalars contract
    nothing.
    """
    a_modes = list(ascii_lowercase[:a_ndim])
    b_modes = list(ascii_uppercase[:b_ndim])
    if a_ndim and b_ndim:
        b_modes[-1] = a_modes[-1]
        out_modes = a_modes[:-1] + b_modes[:-1]
    else:
        out_modes = a_modes + b_modes
    return (a_modes, b_modes, out_modes)
def matmul_modes(a_ndim: int, b_ndim: int) -> Modes:
    """Compute einsum-style mode labels for a `matmul`.

    Labels are taken from the *end* of the alphabet so leading (batch)
    dimensions of both operands share letters and broadcast together.
    The contracted pair is relabeled "A".

    Raises:
        ValueError: if either operand is a scalar.
    """
    if not (a_ndim and b_ndim):
        raise ValueError("Scalars not allowed in matmul")
    a_modes = list(ascii_lowercase[-a_ndim:])
    b_modes = list(ascii_lowercase[-b_ndim:])
    if b_ndim >= 2:
        # Mark the contracted dimension pair with a shared uppercase label.
        a_modes[-1] = "A"
        b_modes[-2] = "A"
    if b_ndim == 1:
        out_modes = a_modes[:-1]
    elif a_ndim == 1:
        out_modes = b_modes[:-2] + [b_modes[-1]]
    else:
        batch_modes = list(ascii_lowercase[-max(a_ndim, b_ndim):-2])
        out_modes = batch_modes + [a_modes[-2], b_modes[-1]]
    return (a_modes, b_modes, out_modes)
# A sequence of dimension indices into a single operand.
Axes = Sequence[int]
# Axes to contract, for the first and second operand respectively.
AxesPair = Tuple[Axes, Axes]
# Tuple forms accepted by `tensordot_modes` for its `axes` argument.
AxesPairLikeTuple = Union[
    Tuple[int, int],
    Tuple[int, Axes],
    Tuple[Axes, int],
    Tuple[Axes, Axes],
]
# Full `axes` argument: a bare int (contract that many trailing dims of `a`
# with leading dims of `b`) or one of the tuple forms above.
AxesPairLike = Union[int, AxesPairLikeTuple]
def tensordot_modes(a_ndim: int, b_ndim: int, axes: AxesPairLike) -> Modes:
    """Compute einsum-style mode labels for a `tensordot`.

    *axes* may be an int N (contract the last N dims of `a` against the
    first N dims of `b`), or a pair of ints/sequences naming the axes to
    contract on each operand.  Negative axes are normalized.

    Raises:
        ValueError: if the axis lists differ in length, repeat an axis, or
            reference dimensions outside either operand.
    """
    # Normalize `axes` into two explicit lists of axis indices.
    if isinstance(axes, int):
        raw_a: Axes = list(range(a_ndim - axes, a_ndim))
        raw_b: Axes = list(range(axes))
    else:
        first, second = axes
        raw_a = [first] if isinstance(first, int) else list(first)
        raw_b = [second] if isinstance(second, int) else list(second)
    a_axes = [ax + a_ndim if ax < 0 else ax for ax in raw_a]
    b_axes = [ax + b_ndim if ax < 0 else ax for ax in raw_b]
    valid = (
        len(a_axes) == len(b_axes)
        and len(a_axes) <= a_ndim
        and len(b_axes) <= b_ndim
        and len(a_axes) == len(set(a_axes))
        and len(b_axes) == len(set(b_axes))
        and all(0 <= ax < a_ndim for ax in a_axes)
        and all(0 <= ax < b_ndim for ax in b_axes)
    )
    if not valid:
        raise ValueError("Invalid axes argument")
    a_modes = list(ascii_lowercase[:a_ndim])
    b_modes = list(ascii_uppercase[:b_ndim])
    # Relabel each contracted dim of `b` with its partner's label from `a`.
    for a_i, b_i in zip(a_axes, b_axes):
        b_modes[b_i] = a_modes[a_i]
    a_out = [a_modes[i] for i in sorted(set(range(a_ndim)) - set(a_axes))]
    b_out = [b_modes[i] for i in sorted(set(range(b_ndim)) - set(b_axes))]
    return (a_modes, b_modes, a_out + b_out)
| 29.684444 | 78 | 0.625393 |
ace4bb8b428de35479b76ff0ccdc62631318bd3b | 2,823 | py | Python | tests/test_mdelta.py | yitistica/month | 4c143fd7b17f52407f80d5744564e82a0e3ea396 | [
"MIT"
] | 1 | 2020-06-19T13:23:31.000Z | 2020-06-19T13:23:31.000Z | tests/test_mdelta.py | yitistica/month | 4c143fd7b17f52407f80d5744564e82a0e3ea396 | [
"MIT"
] | null | null | null | tests/test_mdelta.py | yitistica/month | 4c143fd7b17f52407f80d5744564e82a0e3ea396 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""Tests for `month` package."""
from mock import patch, call
import pytest
from month import month
from month.month import MDelta
import pickle
# Parametrized MDelta-construction cases: (constructor args, expected).
# `expected` is the resulting total month count, or the string 'TypeError'
# when construction should raise on non-integer fields.
con_data = [({}, 0), # null case;
            (15, 15), # case without kwargs;
            ({'years': 2, 'months': 3}, 27),
            ({'years': 1}, 12),
            ({'months': 2}, 2),
            ({'years': 2.5, 'months': 1.2}, 'TypeError'),
            ]
@pytest.mark.parametrize("kwargs, expected", con_data)
@patch.object(month, '_check_int_field', wraps=month._check_int_field)
def test_mdelta_construct(check_int_field_func, kwargs, expected):
    """Construction validates int fields and converts years+months to months."""
    if isinstance(expected, int):  # expected, expected month is given;
        if isinstance(kwargs, dict):
            case = MDelta(**kwargs)
            # Derive which _check_int_field calls the constructor must make:
            # years (if given) is checked first, then months (default 0).
            if 'months' in kwargs:
                months = kwargs['months']
            else:
                months = 0
            if 'years' in kwargs:
                int_check_calls = [call(kwargs['years']), call(months)]
            else:
                int_check_calls = [call(months)]
        elif isinstance(kwargs, int):
            # optional is supplied, representing months;
            case = MDelta(kwargs)
            int_check_calls = [call(kwargs)]
        else:
            raise TypeError(f'check arg types.')
        # Order matters: fields must be validated in declaration order.
        check_int_field_func.assert_has_calls(int_check_calls, any_order=False)
        assert case.months == expected  # test conversion;
    else:  # test wrong input:
        if expected == 'TypeError':
            with pytest.raises(TypeError) as execinfo:
                MDelta(**kwargs)
            assert \
                execinfo.value.args[0] == 'integer argument expected, got float'
def test_delta_operations():
    """Exercise MDelta's repr, comparisons, arithmetic and pickling."""
    delta_1 = MDelta(years=2, months=5)   # 29 months total
    delta_2 = MDelta(months=29)           # equal to delta_1
    delta_3 = MDelta(months=30)
    delta_4 = MDelta(years=-1, months=5)  # -12 + 5 = -7 months
    # str & repr:
    assert str(delta_1) == '29months'
    assert repr(delta_1) == 'mdelta(29)'
    # equality & inequality
    assert delta_1 == delta_2
    assert not delta_1 == delta_3
    assert delta_1 <= delta_2
    assert not delta_1 < delta_2
    assert delta_1 < delta_3
    assert delta_1 >= delta_2
    assert not delta_1 > delta_2
    assert delta_3 > delta_1
    # operators:
    assert delta_1 + delta_3 == delta_3 + delta_1
    assert delta_1 * 2 == 2 * delta_1
    assert delta_1 + delta_3 == MDelta(59)
    assert delta_1 - delta_3 == MDelta(-1)
    assert +delta_1 == delta_1
    assert -delta_1 == MDelta(years=-2, months=-5)
    assert abs(delta_4) == MDelta(7)
    assert delta_1 * 2 == 2 * delta_1 == MDelta(58)
    # ints on the right-hand side are treated as month counts.
    assert delta_1 + 5 == MDelta(34)
    assert delta_1 - 5 == MDelta(24)
    # round-trip through pickle preserves the internal month count.
    assert pickle.loads(
        pickle.dumps(delta_1,
                     protocol=pickle.HIGHEST_PROTOCOL))._months == 29
| 30.031915 | 80 | 0.593695 |
ace4bc9db82ecdd3b0ef946d98a9ba788eecc41a | 1,244 | py | Python | bbc1/core/logger.py | ks91/bbc1-pub | 6b9c33c6c8aec7d410ba9b704eeeb8c3772012d0 | [
"Apache-2.0"
] | 89 | 2017-10-31T05:38:30.000Z | 2021-11-06T11:53:19.000Z | bbc1/core/logger.py | ks91/bbc1-pub | 6b9c33c6c8aec7d410ba9b704eeeb8c3772012d0 | [
"Apache-2.0"
] | 74 | 2017-11-07T13:06:33.000Z | 2021-05-06T14:26:19.000Z | bbc1/core/logger.py | ks91/bbc1-pub | 6b9c33c6c8aec7d410ba9b704eeeb8c3772012d0 | [
"Apache-2.0"
] | 56 | 2017-11-04T13:54:56.000Z | 2021-06-18T18:05:46.000Z | # -*- coding: utf-8 -*-
"""
Copyright (c) 2017 beyond-blockchain.org.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
def get_logger(key="", logname="-", level="none"):
    """Configure root logging and return the logger named *key*.

    Args:
        key: logger name; "" yields the root logger.
        logname: log file path, or "-" to log to stderr.
        level: one of 'all', 'debug', 'info', 'warning', 'error',
            'critical' or 'none' (99, i.e. effectively silent).  Unknown
            values fall back to NOTSET.

    Note: ``logging.basicConfig`` only takes effect if the root logger has
    no handlers yet, so later calls with different settings are no-ops.
    """
    level_map = {
        'all': logging.NOTSET,
        'debug': logging.DEBUG,
        'info': logging.INFO,
        'warning': logging.WARNING,
        'error': logging.ERROR,
        'critical': logging.CRITICAL,
        'none': 99,
    }
    destination = None if logname == "-" else logname
    logging.basicConfig(
        format='%(asctime)s| %(levelname)-8s| %(name)s| %(message)s',
        datefmt='%Y/%m/%d %H:%M:%S',
        filename=destination,
        level=level_map.get(level, logging.NOTSET),
    )
    return logging.getLogger(key)
| 29.619048 | 72 | 0.631833 |
ace4bcb200a2c22c84dbb075f69fd1a9ebbfb815 | 1,547 | py | Python | src/exceptionite/blocks/Environment.py | MasoniteFramework/exceptions | ce15da5e9f763c563e9d687771fb0599b875b83f | [
"MIT"
] | 6 | 2019-12-13T05:22:49.000Z | 2020-01-02T20:50:24.000Z | src/exceptionite/blocks/Environment.py | MasoniteFramework/exceptions | ce15da5e9f763c563e9d687771fb0599b875b83f | [
"MIT"
] | 7 | 2019-12-12T18:02:20.000Z | 2020-01-04T19:49:49.000Z | src/exceptionite/blocks/Environment.py | MasoniteFramework/exceptions | ce15da5e9f763c563e9d687771fb0599b875b83f | [
"MIT"
] | 3 | 2020-08-11T22:07:46.000Z | 2022-02-21T05:22:59.000Z | import sys
import platform
import socket
import os
from ..Block import Block
class Environment(Block):
    """Error-page block that reports interpreter, OS and network details."""

    id = "environment"
    name = "System Environment"
    icon = "TerminalIcon"

    def build(self):
        """Collect the key/value pairs rendered in this block."""
        version = sys.version_info
        python_version = "{}.{}.{}".format(
            version.major, version.minor, version.micro
        )
        os_name = platform.system()
        if os_name == "Darwin":
            os_name = "macOS"
        # Hostname resolution can fail while some VPN clients (notably on
        # macOS) are active.
        try:
            ip = socket.gethostbyname(socket.gethostname())
        except socket.gaierror:
            print(
                "Exceptionite did not manage to fetch the IP address. Disable you VPN or add "
                + "'127.0.0.1 YOUR_HOSTNAME' line in /etc/hosts file."
            )
            ip = "Error fetching the IP address (open your terminal)"
        return {
            "Python Version": python_version,
            "Python Interpreter": sys.executable,
            "Virtual env": os.getenv("VIRTUAL_ENV"),
            "Python argv": sys.argv,
            "Working Dir": os.getcwd(),
            "OS": os_name,
            "Arch": platform.architecture()[0],
            "Host Name": socket.gethostname(),
            "IP": ip,
            "File System Encoding": sys.getfilesystemencoding(),
            "Default Encoding": sys.getdefaultencoding(),
        }

    def has_content(self):
        """This block always has something to display."""
        return True
| 30.94 | 94 | 0.577893 |
ace4bcbc679a66f61d2a955985e4879b9bd64a90 | 486 | py | Python | scripts/fund_and_withdraw.py | Optimus-Goch1/Brownie-Fund-Me | d46c5d2a90657b319f37b621bb77c76352bea4a8 | [
"MIT"
] | null | null | null | scripts/fund_and_withdraw.py | Optimus-Goch1/Brownie-Fund-Me | d46c5d2a90657b319f37b621bb77c76352bea4a8 | [
"MIT"
] | null | null | null | scripts/fund_and_withdraw.py | Optimus-Goch1/Brownie-Fund-Me | d46c5d2a90657b319f37b621bb77c76352bea4a8 | [
"MIT"
] | null | null | null | from brownie import FundMe
from scripts.helpers import get_account
def fund():
    """Send the minimum entrance fee to the latest FundMe deployment.

    Uses the most recently deployed FundMe contract and the account
    resolved by ``get_account()``.
    """
    fund_me = FundMe[-1]  # most recent deployment of the FundMe contract
    account = get_account()
    # getEntranceFee() returns the minimum contribution (in wei).
    entrance_fee = fund_me.getEntranceFee()
    # The bare `print(entrance_fee)` debug line was removed: it duplicated
    # the formatted message below.
    print(f"The current entry fee is {entrance_fee}")
    print("Funding")
    fund_me.fund({"from": account, "value": entrance_fee})
def withdraw():
    """Withdraw the full balance of the latest FundMe deployment."""
    account = get_account()
    contract = FundMe[-1]
    contract.withdraw({"from": account})
def main():
    """Entry point: fund the contract, then withdraw the balance back."""
    fund()
    withdraw()
ace4bcdc4f2dd1637212bddbbfdb704e1ece27b7 | 26,861 | py | Python | chalice/cli/__init__.py | andrew-mcgrath/chalice | 5d28c0ea55fc7db12c536d0789c6707e8cd51c41 | [
"Apache-2.0"
] | null | null | null | chalice/cli/__init__.py | andrew-mcgrath/chalice | 5d28c0ea55fc7db12c536d0789c6707e8cd51c41 | [
"Apache-2.0"
] | null | null | null | chalice/cli/__init__.py | andrew-mcgrath/chalice | 5d28c0ea55fc7db12c536d0789c6707e8cd51c41 | [
"Apache-2.0"
] | null | null | null | """Command line interface for chalice.
Contains commands for deploying chalice.
"""
import logging
import os
import platform
import sys
import tempfile
import shutil
import traceback
import functools
import json
import botocore.exceptions
import click
from typing import Dict, Any, Optional # noqa
from chalice import __version__ as chalice_version
from chalice.app import Chalice # noqa
from chalice.awsclient import TypedAWSClient
from chalice.awsclient import ReadTimeout
from chalice.cli.factory import CLIFactory
from chalice.cli.factory import NoSuchFunctionError
from chalice.config import Config # noqa
from chalice.logs import display_logs, LogRetrieveOptions
from chalice.utils import create_zip_file
from chalice.deploy.validate import validate_routes, validate_python_version
from chalice.deploy.validate import ExperimentalFeatureError
from chalice.utils import getting_started_prompt, UI, serialize_to_json
from chalice.constants import CONFIG_VERSION, TEMPLATE_APP, GITIGNORE
from chalice.constants import DEFAULT_STAGE_NAME
from chalice.constants import DEFAULT_APIGATEWAY_STAGE_NAME
from chalice.local import LocalDevServer # noqa
from chalice.constants import DEFAULT_HANDLER_NAME
from chalice.invoke import UnhandledLambdaError
from chalice.deploy.swagger import TemplatedSwaggerGenerator
from chalice.deploy.planner import PlanEncoder
from chalice.deploy.appgraph import ApplicationGraphBuilder, GraphPrettyPrint
def _configure_logging(level, format_string=None):
    # type: (int, Optional[str]) -> None
    """Attach a stream handler at *level* to the root logger.

    A default timestamped format is used when *format_string* is None.
    """
    fmt = format_string
    if fmt is None:
        fmt = "%(asctime)s %(name)s [%(levelname)s] %(message)s"
    root = logging.getLogger('')
    root.setLevel(level)
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(level)
    stream_handler.setFormatter(logging.Formatter(fmt))
    root.addHandler(stream_handler)
def create_new_project_skeleton(project_name, profile=None):
    # type: (str, Optional[str]) -> None
    """Create the standard chalice project layout under *project_name*.

    Writes .chalice/config.json, an empty requirements.txt, a starter
    app.py, and a .gitignore.  *profile* is recorded in the config when
    provided.
    """
    chalice_dir = os.path.join(project_name, '.chalice')
    os.makedirs(chalice_dir)
    config_data = {
        'version': CONFIG_VERSION,
        'app_name': project_name,
        'stages': {
            DEFAULT_STAGE_NAME: {
                'api_gateway_stage': DEFAULT_APIGATEWAY_STAGE_NAME,
            }
        }
    }
    if profile is not None:
        config_data['profile'] = profile
    config_path = os.path.join(chalice_dir, 'config.json')
    with open(config_path, 'w') as f:
        f.write(serialize_to_json(config_data))
    # Empty requirements.txt so users have a place to add dependencies.
    with open(os.path.join(project_name, 'requirements.txt'), 'w'):
        pass
    with open(os.path.join(project_name, 'app.py'), 'w') as f:
        f.write(TEMPLATE_APP % project_name)
    with open(os.path.join(project_name, '.gitignore'), 'w') as f:
        f.write(GITIGNORE)
def get_system_info():
    # type: () -> str
    """Return "python X.Y.Z, <os> <release>" for the --version banner."""
    major, minor, micro = (sys.version_info[0],
                           sys.version_info[1],
                           sys.version_info[2])
    python_info = "python %s.%s.%s" % (major, minor, micro)
    platform_info = "%s %s" % (platform.system().lower(), platform.release())
    return "%s, %s" % (python_info, platform_info)
@click.group()
@click.version_option(version=chalice_version,
                      message='%(prog)s %(version)s, {}'
                      .format(get_system_info()))
@click.option('--project-dir',
              help='The project directory path (absolute or relative).'
              'Defaults to CWD')
@click.option('--debug/--no-debug',
              default=False,
              help='Print debug logs to stderr.')
@click.pass_context
def cli(ctx, project_dir, debug=False):
    # type: (click.Context, str, bool) -> None
    # Top-level command group: resolve the project directory, optionally
    # enable debug logging, and stash shared state on the click context
    # for every subcommand.
    if project_dir is None:
        project_dir = os.getcwd()
    elif not os.path.isabs(project_dir):
        project_dir = os.path.abspath(project_dir)
    if debug is True:
        _configure_logging(logging.DEBUG)
    ctx.obj['project_dir'] = project_dir
    ctx.obj['debug'] = debug
    ctx.obj['factory'] = CLIFactory(project_dir, debug, environ=os.environ)
    # Subcommands assume the CWD is the project root.
    os.chdir(project_dir)
@cli.command()
@click.option('--host', default='127.0.0.1')
@click.option('--port', default=8000, type=click.INT)
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help='Name of the Chalice stage for the local server to use.')
@click.option('--autoreload/--no-autoreload',
              default=True,
              help='Automatically restart server when code changes.')
@click.pass_context
def local(ctx, host='127.0.0.1', port=8000, stage=DEFAULT_STAGE_NAME,
          autoreload=True):
    # type: (click.Context, str, int, str, bool) -> None
    # Run a local development server, optionally under the reloader so the
    # app restarts when project files change.
    factory = ctx.obj['factory']  # type: CLIFactory
    from chalice.cli import reloader
    # We don't create the server here because that will bind the
    # socket and we only want to do this in the worker process.
    server_factory = functools.partial(
        create_local_server, factory, host, port, stage)
    # When running `chalice local`, a stdout logger is configured
    # so you'll see the same stdout logging as you would when
    # running in lambda. This is configuring the root logger.
    # The app-specific logger (app.log) will still continue
    # to work.
    logging.basicConfig(
        stream=sys.stdout, level=logging.INFO, format='%(message)s')
    if autoreload:
        project_dir = factory.create_config_obj(
            chalice_stage_name=stage).project_dir
        rc = reloader.run_with_reloader(
            server_factory, os.environ, project_dir)
        # Click doesn't sys.exit() with the RC this function. The
        # recommended way to do this is to use sys.exit() directly,
        # see: https://github.com/pallets/click/issues/747
        sys.exit(rc)
    run_local_server(factory, host, port, stage)
def create_local_server(factory, host, port, stage):
    # type: (CLIFactory, str, int, str) -> LocalDevServer
    """Build (but do not start) a LocalDevServer for the given stage."""
    config = factory.create_config_obj(chalice_stage_name=stage)
    app_obj = config.chalice_app
    # Apply the same route validation `chalice deploy` uses; serving an
    # undeployable app locally would be pointless.
    validate_routes(app_obj.routes)
    return factory.create_local_server(app_obj, config, host, port)
def run_local_server(factory, host, port, stage):
    # type: (CLIFactory, str, int, str) -> None
    """Create a local dev server and block, serving requests forever."""
    create_local_server(factory, host, port, stage).serve_forever()
@cli.command()
@click.option('--autogen-policy/--no-autogen-policy',
              default=None,
              help='Automatically generate IAM policy for app code.')
@click.option('--profile', help='Override profile at deploy time.')
@click.option('--api-gateway-stage',
              help='Name of the API gateway stage to deploy to.')
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help=('Name of the Chalice stage to deploy to. '
                    'Specifying a new chalice stage will create '
                    'an entirely new set of AWS resources.'))
@click.option('--connection-timeout',
              type=int,
              help=('Overrides the default botocore connection '
                    'timeout.'))
@click.pass_context
def deploy(ctx, autogen_policy, profile, api_gateway_stage, stage,
           connection_timeout):
    # type: (click.Context, Optional[bool], str, str, str, int) -> None
    # Deploy the application to AWS and print a report of what was created.
    # (No docstring on purpose: click would surface it as --help text.)
    factory = ctx.obj['factory']  # type: CLIFactory
    factory.profile = profile
    config = factory.create_config_obj(
        chalice_stage_name=stage, autogen_policy=autogen_policy,
        api_gateway_stage=api_gateway_stage,
    )
    session = factory.create_botocore_session(
        connection_timeout=connection_timeout)
    ui = UI()
    d = factory.create_default_deployer(session=session,
                                        config=config,
                                        ui=ui)
    deployed_values = d.deploy(config, chalice_stage_name=stage)
    reporter = factory.create_deployment_reporter(ui=ui)
    reporter.display_report(deployed_values)
@cli.group()
def dev():
    # type: () -> None
    """Development and debugging commands for chalice.
    All the commands under the "chalice dev" namespace are provided
    to help chalice developers introspect the internals of chalice.
    They are also useful for users to better understand the chalice
    deployment process.
    These commands are provided for informational purposes only.
    There is NO guarantee of backwards compatibility for any
    "chalice dev" commands. Do not rely on the output of these commands.
    These commands allow introspection of chalice internals, and the
    internals of chalice are subject to change as needed.
    """
    # Intentionally empty: subcommands register themselves via @dev.command().
@dev.command()
@click.option('--autogen-policy/--no-autogen-policy',
              default=None,
              help='Automatically generate IAM policy for app code.')
@click.option('--profile', help='Override profile at deploy time.')
@click.option('--api-gateway-stage',
              help='Name of the API gateway stage to deploy to.')
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help=('Name of the Chalice stage to deploy to. '
                    'Specifying a new chalice stage will create '
                    'an entirely new set of AWS resources.'))
@click.pass_context
def plan(ctx, autogen_policy, profile, api_gateway_stage, stage):
    # type: (click.Context, Optional[bool], str, str, str) -> None
    """Generate and display deployment plan.
    This command will calculate and pretty print the deployment plan
    without actually executing the plan. It's primarily used to better
    understand the chalice deployment process.
    """
    factory = ctx.obj['factory']  # type: CLIFactory
    factory.profile = profile
    config = factory.create_config_obj(
        chalice_stage_name=stage, autogen_policy=autogen_policy,
        api_gateway_stage=api_gateway_stage,
    )
    session = factory.create_botocore_session()
    ui = UI()
    # The plan-only deployer prints the plan through `ui` instead of
    # executing it (see the docstring above).
    d = factory.create_plan_only_deployer(
        session=session, config=config, ui=ui)
    d.deploy(config, chalice_stage_name=stage)
@dev.command()
@click.option('--autogen-policy/--no-autogen-policy',
              default=None,
              help='Automatically generate IAM policy for app code.')
@click.option('--profile', help='Override profile at deploy time.')
@click.option('--api-gateway-stage',
              help='Name of the API gateway stage to deploy to.')
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help=('Name of the Chalice stage to deploy to. '
                    'Specifying a new chalice stage will create '
                    'an entirely new set of AWS resources.'))
@click.pass_context
def appgraph(ctx, autogen_policy, profile, api_gateway_stage, stage):
    # type: (click.Context, Optional[bool], str, str, str) -> None
    """Generate and display the application graph."""
    factory = ctx.obj['factory']  # type: CLIFactory
    factory.profile = profile
    config = factory.create_config_obj(
        chalice_stage_name=stage, autogen_policy=autogen_policy,
        api_gateway_stage=api_gateway_stage,
    )
    # Build the resource graph from the config and pretty-print it.
    graph_build = ApplicationGraphBuilder()
    graph = graph_build.build(config, stage)
    ui = UI()
    GraphPrettyPrint(ui).display_graph(graph)
@cli.command('invoke')
@click.option('-n', '--name', metavar='NAME', required=True,
              help=('The name of the function to invoke. '
                    'This is the logical name of the function. If the '
                    'function is decorated by app.route use the name '
                    'api_handler instead.'))
@click.option('--profile', metavar='PROFILE',
              help='Override profile at deploy time.')
@click.option('--stage', metavar='STAGE', default=DEFAULT_STAGE_NAME,
              help=('Name of the Chalice stage to deploy to. '
                    'Specifying a new chalice stage will create '
                    'an entirely new set of AWS resources.'))
@click.pass_context
def invoke(ctx, name, profile, stage):
    # type: (click.Context, str, str, str) -> None
    """Invoke the deployed lambda function NAME.
    Reads payload from STDIN.
    """
    factory = ctx.obj['factory']  # type: CLIFactory
    factory.profile = profile
    # Errors are mapped to ClickExceptions: exit code 2 means "function
    # not found", exit code 1 means the invocation itself failed.
    try:
        invoke_handler = factory.create_lambda_invoke_handler(name, stage)
        payload = factory.create_stdin_reader().read()
        invoke_handler.invoke(payload)
    except NoSuchFunctionError as e:
        err = click.ClickException(
            "could not find a lambda function named %s." % e.name)
        err.exit_code = 2
        raise err
    except botocore.exceptions.ClientError as e:
        error = e.response['Error']
        err = click.ClickException(
            "got '%s' exception back from Lambda\n%s"
            % (error['Code'], error['Message']))
        err.exit_code = 1
        raise err
    except UnhandledLambdaError:
        err = click.ClickException(
            "Unhandled exception in Lambda function, details above.")
        err.exit_code = 1
        raise err
    except ReadTimeout as e:
        err = click.ClickException(e.message)
        err.exit_code = 1
        raise err
@cli.command('delete')
@click.option('--profile', help='Override profile at deploy time.')
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help='Name of the Chalice stage to delete.')
@click.pass_context
def delete(ctx, profile, stage):
    # type: (click.Context, str, str) -> None
    # Tear down the AWS resources for the given chalice stage.  Deletion
    # is implemented as a "deployment" of a deletion deployer, which is
    # why deploy() is invoked below.
    factory = ctx.obj['factory']  # type: CLIFactory
    factory.profile = profile
    config = factory.create_config_obj(chalice_stage_name=stage)
    session = factory.create_botocore_session()
    d = factory.create_deletion_deployer(session=session, ui=UI())
    d.deploy(config, chalice_stage_name=stage)
@cli.command()
@click.option('--num-entries', default=None, type=int,
              help='Max number of log entries to show.')
@click.option('--include-lambda-messages/--no-include-lambda-messages',
              default=False,
              help='Controls whether or not lambda log messages are included.')
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help='Name of the Chalice stage to get logs for.')
@click.option('-n', '--name',
              help='The name of the lambda function to retrieve logs from.',
              default=DEFAULT_HANDLER_NAME)
@click.option('-s', '--since',
              help=('Only display logs since the provided time. If the '
                    '-f/--follow option is specified, then this value will '
                    'default to 10 minutes from the current time. Otherwise '
                    'by default all log messages are displayed. This value '
                    'can either be an ISO8601 formatted timestamp or a '
                    'relative time. For relative times provide a number '
                    'and a single unit. Units can be "s" for seconds, '
                    '"m" for minutes, "h" for hours, "d" for days, and "w" '
                    'for weeks. For example "5m" would indicate to display '
                    'logs starting five minutes in the past.'),
              default=None)
@click.option('-f', '--follow/--no-follow',
              default=False,
              help=('Continuously poll for new log messages. Note that this '
                    'is a best effort attempt, and in certain cases can '
                    'miss log messages. This option is intended for '
                    'interactive usage only.'))
@click.option('--profile', help='The profile to use for fetching logs.')
@click.pass_context
def logs(ctx, num_entries, include_lambda_messages, stage,
         name, since, follow, profile):
    # type: (click.Context, int, bool, str, str, str, bool, str) -> None
    # Stream CloudWatch logs for one deployed lambda function to stdout.
    factory = ctx.obj['factory']  # type: CLIFactory
    factory.profile = profile
    config = factory.create_config_obj(stage, False)
    deployed = config.deployed_resources(stage)
    # NOTE(review): if `name` is not among the deployed resources this
    # command silently prints nothing — there is no else branch.
    if name in deployed.resource_names():
        lambda_arn = deployed.resource_values(name)['lambda_arn']
        session = factory.create_botocore_session()
        retriever = factory.create_log_retriever(
            session, lambda_arn, follow)
        options = LogRetrieveOptions.create(
            max_entries=num_entries,
            since=since,
            include_lambda_messages=include_lambda_messages,
        )
        display_logs(retriever, sys.stdout, options)
@cli.command('gen-policy')
@click.option('--filename',
              help='The filename to analyze. Otherwise app.py is assumed.')
@click.pass_context
def gen_policy(ctx, filename):
    # type: (click.Context, str) -> None
    # Statically analyze the app source and print a generated IAM policy
    # as JSON.
    from chalice import policy
    if filename is None:
        filename = os.path.join(ctx.obj['project_dir'], 'app.py')
    if not os.path.isfile(filename):
        click.echo("App file does not exist: %s" % filename, err=True)
        raise click.Abort()
    with open(filename) as f:
        contents = f.read()
        generated = policy.policy_from_source_code(contents)
        click.echo(serialize_to_json(generated))
@cli.command('new-project')
@click.argument('project_name', required=False)
@click.option('--profile', required=False)
def new_project(project_name, profile):
    # type: (str, str) -> None
    # Create a fresh project skeleton; prompts interactively when no
    # project name is given on the command line.
    if project_name is None:
        project_name = getting_started_prompt(click)
    if os.path.isdir(project_name):
        click.echo("Directory already exists: %s" % project_name, err=True)
        raise click.Abort()
    create_new_project_skeleton(project_name, profile)
    # Warn early if the running Python version is unsupported.
    validate_python_version(Config.create())
@cli.command('url')
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help='Name of the Chalice stage to get the deployed URL for.')
@click.pass_context
def url(ctx, stage):
    # type: (click.Context, str) -> None
    # Print the deployed Rest API URL, or exit with code 2 when no Rest
    # API has been recorded for the stage.
    factory = ctx.obj['factory']  # type: CLIFactory
    config = factory.create_config_obj(stage)
    deployed = config.deployed_resources(stage)
    if deployed is not None and 'rest_api' in deployed.resource_names():
        click.echo(deployed.resource_values('rest_api')['rest_api_url'])
    else:
        e = click.ClickException(
            "Could not find a record of a Rest API in chalice stage: '%s'"
            % stage)
        e.exit_code = 2
        raise e
@cli.command('generate-sdk')
@click.option('--sdk-type', default='javascript',
              type=click.Choice(['javascript']))
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help='Name of the Chalice stage to generate an SDK for.')
@click.argument('outdir')
@click.pass_context
def generate_sdk(ctx, sdk_type, stage, outdir):
    # type: (click.Context, str, str, str) -> None
    # Download a client SDK for the deployed Rest API into OUTDIR.
    factory = ctx.obj['factory']  # type: CLIFactory
    config = factory.create_config_obj(stage)
    session = factory.create_botocore_session()
    client = TypedAWSClient(session)
    deployed = config.deployed_resources(stage)
    if deployed is not None and 'rest_api' in deployed.resource_names():
        rest_api_id = deployed.resource_values('rest_api')['rest_api_id']
        api_gateway_stage = config.api_gateway_stage
        client.download_sdk(rest_api_id, outdir,
                            api_gateway_stage=api_gateway_stage,
                            sdk_type=sdk_type)
    else:
        # Without a deployed Rest API there is nothing to generate from.
        click.echo("Could not find API ID, has this application "
                   "been deployed?", err=True)
        raise click.Abort()
@cli.command('generate-models')
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help="Chalice Stage for which to generate models.")
@click.pass_context
def generate_models(ctx, stage):
    # type: (click.Context, str) -> None
    """Generate a model from Chalice routes.
    Currently only supports generating Swagger 2.0 models.
    """
    factory = ctx.obj['factory']  # type: CLIFactory
    config = factory.create_config_obj(stage)
    if not config.chalice_app.routes:
        click.echo('No REST API found to generate model from.')
        raise click.Abort()
    swagger_generator = TemplatedSwaggerGenerator()
    model = swagger_generator.generate_swagger(
        config.chalice_app,
    )
    ui = UI()
    # PlanEncoder handles the non-JSON-native values in the swagger doc.
    ui.write(json.dumps(model, indent=4, cls=PlanEncoder))
    ui.write('\n')
@cli.command('package')
@click.option('--pkg-format', default='cloudformation',
              help=('Specify the provisioning engine to use for '
                    'template output. Chalice supports both '
                    'CloudFormation and Terraform. Default '
                    'is CloudFormation.'),
              type=click.Choice(['cloudformation', 'terraform']))
@click.option('--stage', default=DEFAULT_STAGE_NAME,
              help="Chalice Stage to package.")
@click.option('--single-file', is_flag=True,
              default=False,
              help=("Create a single packaged file. "
                    "By default, the 'out' argument "
                    "specifies a directory in which the "
                    "package assets will be placed. If "
                    "this argument is specified, a single "
                    "zip file will be created instead. CloudFormation Only."))
@click.option('--merge-template',
              help=('Specify a JSON or YAML template to be merged '
                    'into the generated template. This is useful '
                    'for adding resources to a Chalice template or '
                    'modify values in the template. CloudFormation Only.'))
@click.option('--template-format', default='json',
              type=click.Choice(['json', 'yaml'], case_sensitive=False),
              help=('Specify if the generated template should be serialized '
                    'as either JSON or YAML. CloudFormation only.'))
@click.argument('out')
@click.pass_context
def package(ctx, single_file, stage, merge_template,
            out, pkg_format, template_format):
    # type: (click.Context, bool, str, str, str, str, str) -> None
    # Produce deployment artifacts (CloudFormation or Terraform) in OUT.
    factory = ctx.obj['factory']  # type: CLIFactory
    config = factory.create_config_obj(stage)
    packager = factory.create_app_packager(config, pkg_format, template_format,
                                           merge_template)
    # The CloudFormation-only options are rejected for terraform output.
    if pkg_format == 'terraform' and (merge_template or
                                      single_file or
                                      template_format != 'json'):
        # I don't see any reason we couldn't support --single-file for
        # terraform if we wanted to.
        click.echo((
            "Terraform format does not support "
            "--merge-template, --single-file, or --template-format"))
        raise click.Abort()
    if single_file:
        # Package into a temp dir first, then zip it up as a single file.
        dirname = tempfile.mkdtemp()
        try:
            packager.package_app(config, dirname, stage)
            create_zip_file(source_dir=dirname, outfile=out)
        finally:
            shutil.rmtree(dirname)
    else:
        packager.package_app(config, out, stage)
# `chalice generate-pipeline`: write a starter CI/CD CloudFormation template.
# (The function docstring below is shown verbatim by click as the command's
# help text, so it is left unchanged.)
@cli.command('generate-pipeline')
@click.option('-i', '--codebuild-image',
              help=("Specify default codebuild image to use. "
                    "This option must be provided when using a python "
                    "version besides 2.7."))
@click.option('-s', '--source', default='codecommit',
              type=click.Choice(['codecommit', 'github']),
              help=("Specify the input source. The default value of "
                    "'codecommit' will create a CodeCommit repository "
                    "for you. The 'github' value allows you to "
                    "reference an existing GitHub repository."))
@click.option('-b', '--buildspec-file',
              help=("Specify path for buildspec.yml file. "
                    "By default, the build steps are included in the "
                    "generated cloudformation template. If this option "
                    "is provided, a buildspec.yml will be generated "
                    "as a separate file and not included in the cfn "
                    "template. This allows you to make changes to how "
                    "the project is built without having to redeploy "
                    "a CloudFormation template. This file should be "
                    "named 'buildspec.yml' and placed in the root "
                    "directory of your app."))
@click.argument('filename')
@click.pass_context
def generate_pipeline(ctx, codebuild_image, source, buildspec_file, filename):
    # type: (click.Context, str, str, str, str) -> None
    """Generate a cloudformation template for a starter CD pipeline.
    This command will write a starter cloudformation template to
    the filename you provide.  It contains a CodeCommit repo,
    a CodeBuild stage for packaging your chalice app, and a
    CodePipeline stage to deploy your application using cloudformation.
    You can use any AWS SDK or the AWS CLI to deploy this stack.
    Here's an example using the AWS CLI:
    \b
        $ chalice generate-pipeline pipeline.json
        $ aws cloudformation deploy --stack-name mystack \b
            --template-file pipeline.json --capabilities CAPABILITY_IAM
    """
    # Local import: the pipeline module is only needed by this command.
    from chalice import pipeline
    factory = ctx.obj['factory']  # type: CLIFactory
    config = factory.create_config_obj()
    p = pipeline.CreatePipelineTemplate()
    params = pipeline.PipelineParameters(
        app_name=config.app_name,
        lambda_python_version=config.lambda_python_version,
        codebuild_image=codebuild_image,
        code_source=source,
    )
    output = p.create_template(params)
    if buildspec_file:
        # Pull the build steps out of the template into a standalone
        # buildspec.yml so build changes don't require a stack redeploy.
        extractor = pipeline.BuildSpecExtractor()
        buildspec_contents = extractor.extract_buildspec(output)
        with open(buildspec_file, 'w') as f:
            f.write(buildspec_contents)
    with open(filename, 'w') as f:
        f.write(serialize_to_json(output))
def main():
    # type: () -> int
    """CLI entry point.

    Runs the click command group and maps known failure modes to an exit
    code of 2 with a readable message instead of a raw traceback (except
    for truly unexpected errors, where the traceback is printed).
    """
    # click's dynamic attrs will allow us to pass through
    # 'obj' via the context object, so we're ignoring
    # these error messages from pylint because we know it's ok.
    # pylint: disable=unexpected-keyword-arg,no-value-for-parameter
    try:
        return cli(obj={})
    except botocore.exceptions.NoRegionError:
        # Fixed wording of the hint: "in our ~/.aws/config" -> "in your".
        click.echo("No region configured. "
                   "Either export the AWS_DEFAULT_REGION "
                   "environment variable or set the "
                   "region value in your ~/.aws/config file.", err=True)
        return 2
    except ExperimentalFeatureError as e:
        click.echo(str(e))
        return 2
    except Exception:
        # Top-level catch-all boundary: report the traceback on stderr and
        # exit non-zero rather than crashing with an unhandled exception.
        click.echo(traceback.format_exc(), err=True)
        return 2
| 41.709627 | 79 | 0.64938 |
ace4be8d914c84e750299fd1159ca6d21c69402e | 29,334 | py | Python | torchmetrics/functional/text/bert.py | hookSSi/metrics | a1116cb0edbe95db606912c9c05ae9c35fc983e2 | [
"Apache-2.0"
] | 2 | 2022-01-20T12:33:18.000Z | 2022-03-25T04:30:02.000Z | torchmetrics/functional/text/bert.py | hookSSi/metrics | a1116cb0edbe95db606912c9c05ae9c35fc983e2 | [
"Apache-2.0"
] | null | null | null | torchmetrics/functional/text/bert.py | hookSSi/metrics | a1116cb0edbe95db606912c9c05ae9c35fc983e2 | [
"Apache-2.0"
] | null | null | null | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
import math
import urllib
from collections import Counter, defaultdict
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
from warnings import warn
import torch
from torch import Tensor
from torch.utils.data import DataLoader, Dataset
from torchmetrics.utilities.imports import _TQDM_AVAILABLE, _TRANSFORMERS_AUTO_AVAILABLE
# Import the `transformers` auto classes only when the package is installed;
# otherwise mark the `bert_score` doctest (which needs a pretrained model) to
# be skipped.
if _TRANSFORMERS_AUTO_AVAILABLE:
    from transformers.models.auto import AutoModel, AutoTokenizer
else:
    __doctest_skip__ = ["bert_score"]

# `tqdm` is optional; it is only used for the `verbose=True` progress bar.
if _TQDM_AVAILABLE:
    import tqdm

# Default model recommended in the original implementation.
_DEFAULT_MODEL = "roberta-large"
def _preprocess_text(
text: List[str],
tokenizer: Any,
max_length: int = 512,
truncation: bool = True,
sort_according_length: bool = True,
own_tokenizer: bool = False,
) -> Dict[str, Tensor]:
"""Default text pre-processing function using `transformers` `AutoTokenizer` instance.
Args:
text:
An iterable of sentences.
tokenizer:
Either `AutoTokenizer` instance from `transformers` package, or a user's own tokenizer.
max_length:
A maximum sequence length.
truncation:
An indication of whether tokenized sequences should be padded only to the length of the longest sequence.
sort_according_length:
An indication of whether tokenized sequences should be sorted from shortest to longest. This is appropriate
to do for leveraging dynamic padding during embedding calculation and thereby to hasten inference.
own_tokenizer:
An indication of whether a non-default user's own tokenizer is used.
Return:
A dictionary of tokenized sentences including input_ids and attention_mask.
Raises:
BaseException:
If a tokenization with a user's own tokenizer is not successful.
"""
if not own_tokenizer:
tokenized_data = tokenizer(
text, padding="max_length", max_length=max_length, truncation=truncation, return_tensors="pt"
)
else:
try:
tokenized_data = tokenizer(text, max_length)
except BaseException as e:
raise BaseException(f"Tokenization was not successful: {e}")
input_ids, attention_mask = (
_sort_data_according_length(tokenized_data["input_ids"], tokenized_data["attention_mask"])
if sort_according_length
else (tokenized_data["input_ids"], tokenized_data["attention_mask"])
)
return {"input_ids": input_ids, "attention_mask": attention_mask}
def _process_attention_mask_for_special_tokens(attention_mask: Tensor) -> Tensor:
"""Process attention mask to be zero for special [CLS] and [SEP] tokens as they're not included in a
calculation for BERT score.
Args:
attention_mask: An attention mask to be returned, for example, by a `transformers` tokenizer.
Return:
A processed attention mask.
"""
# Make attention_mask zero for [CLS] token
attention_mask[:, 0] = 0
# Make attention_mask zero for [SEP] token
sep_token_position = (attention_mask - 0.1).cumsum(-1).argmax(-1)
attention_mask[torch.arange(attention_mask.size(0)).long(), sep_token_position] = 0
return attention_mask
def _sort_data_according_length(input_ids: Tensor, attention_mask: Tensor) -> Tuple[Tensor, Tensor]:
"""Sort tokenized sentence from the shortest to the longest one."""
sorted_indices = attention_mask.sum(1).argsort()
input_ids = input_ids[sorted_indices]
attention_mask = attention_mask[sorted_indices]
return input_ids, attention_mask
def _input_data_collator(
batch: Dict[str, Tensor], device: Optional[Union[str, torch.device]] = None
) -> Dict[str, Tensor]:
"""Helper function that trims model inputs to the longest sequence within the batch and put the input on the
proper device."""
max_len = int(batch["attention_mask"].sum(1).max().item())
input_ids = batch["input_ids"][:, :max_len].to(device)
attention_mask = batch["attention_mask"][:, :max_len].to(device)
batch.update({"input_ids": input_ids, "attention_mask": attention_mask})
return batch
def _output_data_collator(model_output: Tensor, attention_mask: Tensor, target_len: int) -> Tuple[Tensor, Tensor]:
"""Helper function that pads the model output and attention mask to the target length."""
zeros_shape = list(model_output.shape)
zeros_shape[2] = target_len - zeros_shape[2]
model_output = torch.cat(
[model_output, torch.zeros(zeros_shape, dtype=model_output.dtype).to(model_output.device)], dim=2
)
zeros = torch.zeros(zeros_shape[0], zeros_shape[2], dtype=attention_mask.dtype).to(attention_mask.device)
attention_mask = torch.cat([attention_mask, zeros], dim=1)
return model_output, attention_mask
class TextDataset(Dataset):
    """PyTorch dataset class for storing tokenized sentences and other properties used for BERT score
    calculation."""

    def __init__(
        self,
        text: List[str],
        tokenizer: Any,
        max_length: int = 512,
        preprocess_text_fn: Callable[[List[str], Any, int], Dict[str, Tensor]] = _preprocess_text,
        idf: bool = False,
        tokens_idf: Optional[Dict[int, float]] = None,
    ) -> None:
        """
        Args:
            text:
                An iterable of sentences.
            tokenizer:
                `AutoTokenizer` instance from `transformers` package.
            max_length:
                A maximum sequence length.
            preprocess_text_fn:
                A function used for processing the input sentences.
            idf:
                An indication of whether calculate token inverse document frequencies to weight the model embeddings.
            tokens_idf:
                Inverse document frequencies (these should be calculated on reference sentences).
        """
        # Tokenized text: a dict with "input_ids" and "attention_mask" tensors.
        self.text = preprocess_text_fn(text, tokenizer, max_length)
        # Actual sequence length after tokenization/padding.
        self.max_length = self.text["input_ids"].shape[1]
        self.num_sentences = len(text)
        self.idf = idf
        self.tokens_idf = {}
        if idf:
            # Reuse the pre-computed IDF weights (e.g. from the reference
            # corpus) when they are given; otherwise compute them here.
            self.tokens_idf = tokens_idf if tokens_idf is not None else self._get_tokens_idf()

    def __getitem__(self, idx: int) -> Dict[str, Tensor]:
        # Return one tokenized sentence; with `idf=True` also attach the
        # per-token IDF weights looked up from `self.tokens_idf`.
        input_ids = self.text["input_ids"][idx, :]
        attention_mask = self.text["attention_mask"][idx, :]
        inputs_dict = {"input_ids": input_ids, "attention_mask": attention_mask}
        if self.idf:
            input_ids_idf = torch.tensor([self.tokens_idf[input_idx] for input_idx in input_ids.tolist()])
            inputs_dict["input_ids_idf"] = input_ids_idf
        return inputs_dict

    def __len__(self) -> int:
        return self.num_sentences

    def _get_tokens_idf(self) -> Dict[int, float]:
        """Calculate token inverse document frequencies.

        Return:
            A python dictionary containing inverse document frequencies for token ids.
        """
        # Count, for each token id, in how many sentences it appears
        # (each sentence contributes its *set* of tokens once).
        token_counter: Counter = Counter()
        for tokens in map(self._set_of_tokens, self.text["input_ids"]):
            token_counter.update(tokens)

        # Smoothed IDF; unseen tokens fall back to `_get_tokens_idf_default_value`.
        tokens_idf: Dict[int, float] = defaultdict(self._get_tokens_idf_default_value)
        tokens_idf.update(
            {idx: math.log((self.num_sentences + 1) / (occurrence + 1)) for idx, occurrence in token_counter.items()}
        )
        return tokens_idf

    def _get_tokens_idf_default_value(self) -> float:
        """Helper function that ensures `defaultdict` to be pickled."""
        # IDF of a token with zero occurrences under the same smoothing.
        return math.log((self.num_sentences + 1) / 1)

    @staticmethod
    def _set_of_tokens(input_ids: Tensor) -> Set:
        """Return set of tokens from the `input_ids` `torch.Tensor`."""
        return set(input_ids.tolist())
class TokenizedDataset(TextDataset):
    """The child class of `TextDataset` class used with already tokenized data."""

    def __init__(
        self,
        input_ids: Tensor,
        attention_mask: Tensor,
        idf: bool = False,
        tokens_idf: Optional[Dict[int, float]] = None,
    ) -> None:
        """
        Args:
            input_ids:
                Input ids (`torch.Tensor`).
            attention_mask:
                Attention mask (`torch.Tensor`).
            idf:
                An indication of whether calculate token inverse document frequencies to weight the model embeddings.
            tokens_idf:
                Inverse document frequencies (these should be calculated on reference sentences).
        """
        # Sort sentences by length and trim the batch to its longest sequence,
        # mirroring what `TextDataset` obtains from `_preprocess_text`.
        self.text = dict(zip(["input_ids", "attention_mask"], _sort_data_according_length(input_ids, attention_mask)))
        self.text = _input_data_collator(self.text)
        self.num_sentences = len(self.text["input_ids"])
        self.max_length = self.text["input_ids"].shape[1]
        self.idf = idf
        self.tokens_idf = {}
        if idf:
            self.tokens_idf = tokens_idf if tokens_idf is not None else self._get_tokens_idf()
def _get_progress_bar(dataloader: DataLoader, verbose: bool = False) -> Union[DataLoader, "tqdm.auto.tqdm"]:
"""Helper function returning either the dataloader itself when `verbose = False`, or it wraps the dataloader with
`tqdm.auto.tqdm`, when `verbose = True` to display a progress bar during the embbeddings calculation."""
return tqdm.auto.tqdm(dataloader) if verbose else dataloader
def _check_shape_of_model_output(output: Tensor, input_ids: Tensor) -> None:
"""Check if the shape of the user's own model output."""
bs, seq_len = input_ids.shape[:2]
invalid_out_shape = len(output.shape) != 3 or output.shape[0] != bs or output.shape[1] != seq_len
if invalid_out_shape:
raise ValueError(
"The model output must be `torch.Tensor` of a shape `[batch_size, seq_len, model_dim]` "
f"i.e. [{bs}, {seq_len}. , `model_dim`], but got {output.shape}."
)
def _get_embeddings_and_idf_scale(
    dataloader: DataLoader,
    target_len: int,
    model: torch.nn.Module,
    device: Optional[Union[str, torch.device]] = None,
    num_layers: Optional[int] = None,
    all_layers: bool = False,
    idf: bool = False,
    verbose: bool = False,
    user_forward_fn: Callable[[torch.nn.Module, Dict[str, Tensor]], Tensor] = None,
) -> Tuple[Tensor, Tensor]:
    """Calculate sentence embeddings and the inverse-document-frequency scaling factor.

    Args:
        dataloader:
            `torch.utils.data.DataLoader` instance.
        target_len:
            A length of the longest sequence in the data. Used for padding the model output.
        model:
            BERT model.
        device:
            A device to be used for calculation.
        num_layers:
            The layer of representation to use.
        all_layers:
            An indication whether representation from all model layers should be used for BERTScore.
        idf:
            An Indication whether normalization using inverse document frequencies should be used.
        verbose:
            An indication of whether a progress bar to be displayed during the embeddings calculation.
        user_forward_fn:
            A user's own forward function used in a combination with `user_model`. This function must take `user_model`
            and a python dictionary of containing `"input_ids"` and `"attention_mask"` represented by `torch.Tensor`
            as an input and return the model's output represented by the single `torch.Tensor`.

    Return:
        A tuple of torch.Tensors containing the model's embeddings and the normalized tokens IDF.
        When `idf = False`, tokens IDF is not calculated, and a matrix of mean weights is returned instead.
        For a single sentence, `mean_weight = 1/seq_len`, where `seq_len` is a sum over the corresponding
        `attention_mask`.

    Raises:
        ValueError:
            If `all_layers = True` and a model, which is not from the `transformers` package, is used.
    """
    embeddings_list: List[Tensor] = []
    idf_scale_list: List[Tensor] = []
    for batch in _get_progress_bar(dataloader, verbose):
        with torch.no_grad():
            # Trim the batch to its longest sequence and move it to `device`.
            batch = _input_data_collator(batch, device)
            # Output shape: batch_size x num_layers OR 1 x sequence_length x bert_dim
            if not all_layers:
                if not user_forward_fn:
                    out = model(batch["input_ids"], batch["attention_mask"], output_hidden_states=True)
                    # Pick the requested hidden layer (default: the last one).
                    out = out.hidden_states[num_layers if num_layers is not None else -1]
                else:
                    out = user_forward_fn(model, batch)
                    _check_shape_of_model_output(out, batch["input_ids"])
                # Insert a singleton "layers" dimension so both branches share one layout.
                out = out.unsqueeze(1)
            else:
                if user_forward_fn:
                    raise ValueError(
                        "The option `all_layers=True` can be used only with default `transformers` models."
                    )
                out = model(batch["input_ids"], batch["attention_mask"], output_hidden_states=True)
                # Stack every hidden layer along dim 1.
                out = torch.cat([o.unsqueeze(1) for o in out.hidden_states], dim=1)

            out /= out.norm(dim=-1).unsqueeze(-1)  # normalize embeddings
            # Pad back to `target_len` so batches of different widths can be concatenated.
            out, attention_mask = _output_data_collator(out, batch["attention_mask"], target_len)
            # Exclude the special [CLS]/[SEP] positions from the score.
            processed_attention_mask = _process_attention_mask_for_special_tokens(attention_mask)
            # Multiply embeddings with attention_mask (b=batch_size, l=num_layers, s=seq_len, d=emb_dim)
            out = torch.einsum("blsd, bs -> blsd", out, processed_attention_mask)
            embeddings_list.append(out.cpu())

            # Calculate weighted (w.r.t. sentence length) input_ids IDF matrix
            input_ids_idf = (
                batch["input_ids_idf"] * processed_attention_mask if idf else processed_attention_mask.type(out.dtype)
            )
            # Normalize each row so the per-sentence weights sum to one.
            input_ids_idf /= input_ids_idf.sum(-1, keepdim=True)
            idf_scale_list.append(input_ids_idf)

    embeddings = torch.cat(embeddings_list)
    idf_scale = torch.cat(idf_scale_list)
    return embeddings, idf_scale
def _get_scaled_precision_or_recall(cos_sim: Tensor, metric: str, idf_scale: Tensor) -> Tensor:
"""Helper function that calculates precision or recall, transpose it and scale it with idf_scale factor."""
dim = 3 if metric == "precision" else 2
res = cos_sim.max(dim=dim).values
res = torch.einsum("bls, bs -> bls", res, idf_scale).sum(-1)
# We transpose the results and squeeze if possible to match the format of the original BERTScore implementation
res = res.transpose(0, 1).squeeze()
return res
def _get_precision_recall_f1(
    preds_embeddings: Tensor, target_embeddings: Tensor, preds_idf_scale: Tensor, target_idf_scale: Tensor
) -> Tuple[Tensor, Tensor, Tensor]:
    """Compute precision, recall and F1 score between candidate and reference sentences.

    Args:
        preds_embeddings: Embeddings of candidate sentences.
        target_embeddings: Embeddings of reference sentences.
        preds_idf_scale: An IDF scale factor for candidate sentences.
        target_idf_scale: An IDF scale factor for reference sentences.

    Return:
        Tensors containing precision, recall and F1 score, respectively.
    """
    # Pairwise token cosine similarities
    # (b = batch, l = layers, p = prediction tokens, r = reference tokens, d = embedding dim).
    similarity = torch.einsum("blpd, blrd -> blpr", preds_embeddings, target_embeddings)
    # Final metrics shape = (batch_size * num_layers | batch_size).
    precision = _get_scaled_precision_or_recall(similarity, "precision", preds_idf_scale)
    recall = _get_scaled_precision_or_recall(similarity, "recall", target_idf_scale)
    # Harmonic mean; a 0/0 case (both scores zero) is mapped to 0 instead of NaN.
    f1_score = (2 * precision * recall / (precision + recall)).masked_fill_(
        torch.isnan(2 * precision * recall / (precision + recall)), 0.0
    )
    return precision, recall, f1_score
def _get_hash(model_name_or_path: Optional[str] = None, num_layers: Optional[int] = None, idf: bool = False) -> str:
"""Compute `BERT_score`_ (copied and adjusted)"""
msg = f"{model_name_or_path}_L{num_layers}{'_idf' if idf else '_no-idf'}"
return msg
def _read_csv_from_local_file(baseline_path: str) -> Tensor:
"""Helper function which reads baseline the csv file from the local file.
This method implemented to avoid `pandas` dependency.
"""
with open(baseline_path) as fname:
csv_file = csv.reader(fname)
baseline_list = [[float(item) for item in row] for idx, row in enumerate(csv_file) if idx > 0]
baseline = torch.tensor(baseline_list)[:, 1:]
return baseline
def _read_csv_from_url(baseline_url: str) -> Tensor:
    """Read the baseline csv file from a URL.

    Implemented with the stdlib to avoid a `pandas` dependency.
    """
    # The module only does `import urllib` at the top, which does not
    # guarantee the `urllib.request` submodule is loaded; import it explicitly
    # so this function does not depend on another library importing it as a
    # side effect.
    import urllib.request

    with urllib.request.urlopen(baseline_url) as http_request:  # type: ignore
        # Skip the header row; each remaining line is a comma-separated row of floats.
        baseline_list = [
            [float(item) for item in row.strip().decode("utf-8").split(",")]
            for idx, row in enumerate(http_request)
            if idx > 0
        ]
    # Drop the leading index column.
    baseline = torch.tensor(baseline_list)[:, 1:]
    return baseline
def _load_baseline(
    lang: str = "en",
    model_name_or_path: Optional[str] = None,
    baseline_path: Optional[str] = None,
    baseline_url: Optional[str] = None,
) -> Optional[Tensor]:
    """Load the CSV baseline used for rescaling, preferring a local file, then
    an explicit URL, then the default baselines shipped with the original
    `bert-score` package."""
    baseline: Optional[Tensor]
    if baseline_path:
        baseline = _read_csv_from_local_file(baseline_path)
    elif baseline_url:
        baseline = _read_csv_from_url(baseline_url)
    elif lang and model_name_or_path:
        # Default baselines from the original `bert-score` package
        # https://github.com/Tiiiger/bert_score
        _URL_BASE = "https://raw.githubusercontent.com/Tiiiger/bert_score/master/bert_score/rescale_baseline"
        baseline = _read_csv_from_url(f"{_URL_BASE}/{lang}/{model_name_or_path}.tsv")
    else:
        warn("Baseline was not successfully loaded. No baseline is going to be used.")
        baseline = None
    return baseline
def _rescale_metrics_with_baseline(
precision: Tensor,
recall: Tensor,
f1_score: Tensor,
baseline: Tensor,
num_layers: Optional[int] = None,
all_layers: bool = False,
) -> Tuple[Tensor, Tensor, Tensor]:
"""Rescale the computed metrics with the pre-computed baseline."""
if num_layers is None and all_layers is False:
num_layers = -1
all_metrics = torch.stack([precision, recall, f1_score], dim=-1)
baseline_scale = baseline.unsqueeze(1) if all_layers else baseline[num_layers]
all_metrics = (all_metrics - baseline_scale) / (1 - baseline_scale)
return all_metrics[..., 0], all_metrics[..., 1], all_metrics[..., 2]
def bert_score(
    preds: Union[List[str], Dict[str, Tensor]],
    target: Union[List[str], Dict[str, Tensor]],
    model_name_or_path: Optional[str] = None,
    num_layers: Optional[int] = None,
    all_layers: bool = False,
    model: Optional[torch.nn.Module] = None,
    user_tokenizer: Any = None,
    user_forward_fn: Callable[[torch.nn.Module, Dict[str, Tensor]], Tensor] = None,
    verbose: bool = False,
    idf: bool = False,
    device: Optional[Union[str, torch.device]] = None,
    max_length: int = 512,
    batch_size: int = 64,
    num_threads: int = 4,
    return_hash: bool = False,
    lang: str = "en",
    rescale_with_baseline: bool = False,
    baseline_path: Optional[str] = None,
    baseline_url: Optional[str] = None,
) -> Dict[str, Union[List[float], str]]:
    """`Bert_score Evaluating Text Generation`_ leverages the pre-trained contextual embeddings from BERT and
    matches words in candidate and reference sentences by cosine similarity. It has been shown to correlate with
    human judgment on sentence-level and system-level evaluation. Moreover, BERTScore computes precision, recall,
    and F1 measure, which can be useful for evaluating different language generation tasks.

    This implementation follows the original implementation from `BERT_score`_

    Args:
        preds:
            Either an iterable of predicted sentences or a `Dict[str, torch.Tensor]` containing `input_ids` and
            `attention_mask` `torch.Tensor`.
        target:
            Either an iterable of target sentences or a `Dict[str, torch.Tensor]` containing `input_ids` and
            `attention_mask` `torch.Tensor`.
        model_name_or_path:
            A name or a model path used to load `transformers` pretrained model.
        num_layers:
            A layer of representation to use.
        all_layers:
            An indication of whether the representation from all model's layers should be used.
            If `all_layers = True`, the argument `num_layers` is ignored.
        model:
            A user's own model. Must be of `torch.nn.Module` instance.
        user_tokenizer:
            A user's own tokenizer used with the own model. This must be an instance with the `__call__` method.
            This method must take an iterable of sentences (`List[str]`) and must return a python dictionary
            containing `"input_ids"` and `"attention_mask"` represented by `torch.Tensor`. It is up to the user's model
            of whether `"input_ids"` is a `torch.Tensor` of input ids or embedding vectors.
            This tokenizer must prepend an equivalent of `[CLS]` token and append an equivalent of `[SEP]` token
            as `transformers` tokenizer does.
        user_forward_fn:
            A user's own forward function used in a combination with `user_model`. This function must take `user_model`
            and a python dictionary of containing `"input_ids"` and `"attention_mask"` represented by `torch.Tensor`
            as an input and return the model's output represented by the single `torch.Tensor`.
        verbose:
            An indication of whether a progress bar to be displayed during the embeddings calculation.
        idf:
            An indication of whether normalization using inverse document frequencies should be used.
        device:
            A device to be used for calculation.
        max_length:
            A maximum length of input sequences. Sequences longer than `max_length` are to be trimmed.
        batch_size:
            A batch size used for model processing.
        num_threads:
            A number of threads to use for a dataloader.
        return_hash:
            An indication of whether the corresponding `hash_code` should be returned.
        lang:
            A language of input sentences. It is used when the scores are rescaled with a baseline.
        rescale_with_baseline:
            An indication of whether bertscore should be rescaled with a pre-computed baseline.
            When a pretrained model from `transformers` model is used, the corresponding baseline is downloaded
            from the original `bert-score` package from `BERT_score`_ if available.
            In other cases, please specify a path to the baseline csv/tsv file, which must follow the formatting
            of the files from `BERT_score`_
        baseline_path:
            A path to the user's own local csv/tsv file with the baseline scale.
        baseline_url:
            A url path to the user's own csv/tsv file with the baseline scale.

    Returns:
        Python dictionary containing the keys `precision`, `recall` and `f1` with corresponding values.

    Raises:
        ValueError:
            If `len(preds) != len(target)`.
        ModuleNotFoundError:
            If `tqdm` package is required and not installed.
        ModuleNotFoundError:
            If ``transformers`` package is required and not installed.
        ValueError:
            If ``num_layer`` is larger than the number of the model layers.
        ValueError:
            If invalid input is provided.

    Example:
        >>> from torchmetrics.functional.text.bert import bert_score
        >>> preds = ["hello there", "general kenobi"]
        >>> target = ["hello there", "master kenobi"]
        >>> from pprint import pprint
        >>> pprint(bert_score(preds, target))  # doctest: +ELLIPSIS
        {'f1': [0.999..., 0.996...],
         'precision': [0.999..., 0.996...],
         'recall': [0.999..., 0.996...]}
    """
    if len(preds) != len(target):
        raise ValueError("Number of predicted and reference sententes must be the same!")
    if verbose and (not _TQDM_AVAILABLE):
        raise ModuleNotFoundError(
            "An argument `verbose = True` requires `tqdm` package be installed. Install with `pip install tqdm`."
        )
    # Resolve model and tokenizer: either a user-supplied pair or the default
    # `transformers` pretrained ones.
    if model is None:
        if not _TRANSFORMERS_AUTO_AVAILABLE:
            raise ModuleNotFoundError(
                "`bert_score` metric with default models requires `transformers` package be installed."
                " Either install with `pip install transformers>=4.0` or `pip install torchmetrics[text]`."
            )
        if model_name_or_path is None:
            warn(
                "The argument `model_name_or_path` was not specified while it is required when default"
                " `transformers` model are used."
                f"It is, therefore, used the default recommended model - {_DEFAULT_MODEL}."
            )
        tokenizer = AutoTokenizer.from_pretrained(model_name_or_path or _DEFAULT_MODEL)
        model = AutoModel.from_pretrained(model_name_or_path or _DEFAULT_MODEL)
    else:
        tokenizer = user_tokenizer
    # Put the model into evaluation mode and move it to the requested device.
    model.eval()
    model.to(device)

    try:
        if num_layers and num_layers > model.config.num_hidden_layers:  # type: ignore
            raise ValueError(
                f"num_layers={num_layers} is forbidden for {model_name_or_path}. "  # type: ignore
                f"Please use num_layers <= {model.config.num_hidden_layers}"  # type: ignore
            )
    except AttributeError:
        # A user-supplied model may not expose `config.num_hidden_layers`.
        warn("It was not possible to retrieve the parameter `num_layers` from the model specification.")

    # Classify the inputs: empty lists, lists of raw sentences, or
    # already-tokenized tensor dictionaries.
    _are_empty_lists = all(isinstance(text, list) and len(text) == 0 for text in (preds, target))
    _are_valid_lists = all(
        isinstance(text, list) and len(text) > 0 and isinstance(text[0], str) for text in (preds, target)
    )
    _are_valid_tensors = all(
        isinstance(text, dict) and isinstance(text["input_ids"], Tensor) for text in (preds, target)
    )
    if _are_empty_lists:
        warn("Predictions and references are empty.")
        output_dict: Dict[str, Union[List[float], str]] = {
            "precision": [0.0],
            "recall": [0.0],
            "f1": [0.0],
        }
        if return_hash:
            output_dict.update({"hash": _get_hash(model_name_or_path, num_layers, idf)})
        return output_dict

    # Load baselines if needed
    baseline = _load_baseline(lang, model_name_or_path, baseline_path, baseline_url) if rescale_with_baseline else None

    # We ignore mypy typing below as the proper typing is ensured by conditions above, only mypy cannot infer that.
    if _are_valid_lists:
        target_dataset = TextDataset(target, tokenizer, max_length, idf=idf)  # type: ignore
        # The candidate dataset reuses the IDF weights computed on the references.
        preds_dataset = TextDataset(
            preds,  # type: ignore
            tokenizer,
            max_length,
            idf=idf,
            tokens_idf=target_dataset.tokens_idf,
        )
    elif _are_valid_tensors:
        target_dataset = TokenizedDataset(**target, idf=idf)  # type: ignore
        preds_dataset = TokenizedDataset(**preds, idf=idf, tokens_idf=target_dataset.tokens_idf)  # type: ignore
    else:
        raise ValueError("Invalid input provided.")

    target_loader = DataLoader(target_dataset, batch_size=batch_size, num_workers=num_threads)
    preds_loader = DataLoader(preds_dataset, batch_size=batch_size, num_workers=num_threads)

    # Embed both sides with the same model and IDF setting.
    target_embeddings, target_idf_scale = _get_embeddings_and_idf_scale(
        target_loader, target_dataset.max_length, model, device, num_layers, all_layers, idf, verbose, user_forward_fn
    )
    preds_embeddings, preds_idf_scale = _get_embeddings_and_idf_scale(
        preds_loader, preds_dataset.max_length, model, device, num_layers, all_layers, idf, verbose, user_forward_fn
    )

    precision, recall, f1_score = _get_precision_recall_f1(
        preds_embeddings, target_embeddings, preds_idf_scale, target_idf_scale
    )

    if baseline is not None:
        precision, recall, f1_score = _rescale_metrics_with_baseline(
            precision, recall, f1_score, baseline, num_layers, all_layers
        )

    output_dict = {
        "precision": precision.tolist(),
        "recall": recall.tolist(),
        "f1": f1_score.tolist(),
    }
    if return_hash:
        output_dict.update({"hash": _get_hash(model_name_or_path, num_layers, idf)})
    return output_dict
| 44.111278 | 119 | 0.670689 |
ace4bf30897723671fba8a951614fa5f8da71c30 | 9,133 | py | Python | selfdrive/manager/manager.py | advpilot/advpilot | 0d8940cd678c34c243a8590afb998c49d88599d0 | [
"MIT"
] | null | null | null | selfdrive/manager/manager.py | advpilot/advpilot | 0d8940cd678c34c243a8590afb998c49d88599d0 | [
"MIT"
] | null | null | null | selfdrive/manager/manager.py | advpilot/advpilot | 0d8940cd678c34c243a8590afb998c49d88599d0 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import datetime
import os
import signal
import subprocess
import sys
import traceback
from typing import List, Tuple, Union
import cereal.messaging as messaging
import selfdrive.sentry as sentry
from common.basedir import BASEDIR
from common.params import Params, ParamKeyType
from common.text_window import TextWindow
from selfdrive.boardd.set_time import set_time
from selfdrive.hardware import HARDWARE, PC, EON
from selfdrive.manager.helpers import unblock_stdout
from selfdrive.manager.process import ensure_running
from selfdrive.manager.process_config import managed_processes
# from selfdrive.athena.registration import register, UNREGISTERED_DONGLE_ID
from selfdrive.swaglog import cloudlog, add_file_handler
from selfdrive.version import is_dirty, get_commit, get_version, get_origin, get_short_branch, \
terms_version, training_version
import json
sys.path.append(os.path.join(BASEDIR, "pyextra"))
def get_car_list() -> str:
  """Return a JSON string with all supported car model names.

  Walks the bundled car port directories and collects the model names that
  appear as keys in each port's ``FINGERPRINTS`` / ``FW_VERSIONS`` dicts.
  Directories that are not importable car ports are skipped. Returns e.g.
  ``'{"cars": ["MODEL A", "MODEL B", ...]}'`` with the names sorted.
  """
  attrs = ('FINGERPRINTS', 'FW_VERSIONS')
  # A set gives O(1) de-duplication instead of the O(n) list-membership scans.
  models = set()
  for car_folder in (x[0] for x in os.walk('/data/openpilot/selfdrive/car')):
    car_name = car_folder.split('/')[-1]
    if car_name == "mock":
      continue
    try:
      # Import the port's values module once and probe both attributes on it.
      values = __import__('selfdrive.car.%s.values' % car_name, fromlist=attrs)
      for attr in attrs:
        attr_values = getattr(values, attr, None)
        if isinstance(attr_values, dict):
          # Only the keys (model names) matter; the values hold fingerprint
          # / firmware data and are not needed here.
          models.update(attr_values.keys())
    except (ImportError, IOError, ValueError):
      # Not every directory under car/ is an importable car port.
      pass
  return json.dumps({"cars": sorted(models)})
def manager_init() -> None:
  """One-time setup before any managed process starts.

  Syncs the wall clock from the panda, initializes Params defaults,
  installs an SSH key on EON hardware, applies locale/timezone, publishes
  the supported-car list and version metadata, and initializes logging.
  """
  # update system time from panda
  set_time(cloudlog)

  # save boot log
  # if not EON:
  #   subprocess.call("./bootlog", cwd=os.path.join(BASEDIR, "selfdrive/loggerd"))

  params = Params()
  # drop every param flagged to be reset on manager start
  params.clear_all(ParamKeyType.CLEAR_ON_MANAGER_START)

  # fork-specific defaults (Taiwanese locale/timezone, stock-long on)
  default_params: List[Tuple[str, Union[str, bytes]]] = [
    ("CompletedTrainingVersion", "0"),
    ("DisengageOnAccelerator", "0"),
    ("HasAcceptedTerms", "0"),
    ("OpenpilotEnabledToggle", "1"),
    ("IsMetric", "1"),
    ("Licence", ""),
    ("CarList", ""),
    ("CarSelected", ""),
    ("Locale", "zh-TW"),
    ("Timezone", "Asia/Taipei"),
    ("UseOldPanda", "0"),
    ("UseStockLong", "1"),
  ]
  if not PC:
    default_params.append(("LastUpdateTime", datetime.datetime.utcnow().isoformat().encode('utf8')))

  # if params.get_bool("RecordFrontLock"):
  #   params.put_bool("RecordFront", True)
  #
  # if not params.get_bool("DisableRadar_Allow"):
  #   params.delete("DisableRadar")

  # set unset params (only fill defaults that are missing, never overwrite)
  for k, v in default_params:
    if params.get(k) is None:
      params.put(k, v)

  # install default ssh key on EON if none is present yet
  install_key = False
  if os.path.isfile("/EON"):
    os.system("setprop persist.neos.ssh 1")
    os.system("echo -n 1 > /data/params/d/SshEnabled")
    if not os.path.isfile("/data/params/d/GithubSshKeys"):
      install_key = True
    else:
      with open('/data/params/d/GithubSshKeys') as f:
        if f.read().strip() == "":
          install_key = True
  if install_key:
    os.system("echo -n openpilot > /data/params/d/GithubUsername")
    os.system("cp /data/data/com.termux/files/home/setup_keys /data/params/d/GithubSshKeys")

  # set language from the Android system locale, forcing the timezone
  if EON:
    language = subprocess.check_output(["getprop", "persist.sys.locale"], encoding='utf8').strip()
    if language != "":
      params.put("Locale", language)
    subprocess.call(['setprop', 'persist.sys.timezone', '"Asia/Taipei"'])

  # gen car list for the car-selection UI
  params.put("CarList", get_car_list())

  # is this dashcam?
  # if os.getenv("PASSIVE") is not None:
  #   params.put_bool("Passive", bool(int(os.getenv("PASSIVE", "0"))))

  # if params.get("Passive") is None:
  #   raise Exception("Passive must be set to continue")

  # Create folders needed for msgq
  try:
    os.mkdir("/dev/shm")
  except FileExistsError:
    pass
  except PermissionError:
    print("WARNING: failed to make /dev/shm")

  # this fork skips the training flow entirely
  params.put("CompletedTrainingVersion", training_version)

  # set version params
  params.put("Version", get_version())
  params.put("TermsVersion", terms_version)
  params.put("TrainingVersion", training_version)
  params.put("GitCommit", get_commit(default=""))
  params.put("GitBranch", get_short_branch(default=""))
  params.put("GitRemote", get_origin(default=""))

  # device serial doubles as the dongle id (registration is disabled)
  dongle_id = HARDWARE.get_serial()
  params.put("HardwareSerial", dongle_id)

  # set dongle id
  # reg_res = register(show_spinner=True)
  # if reg_res:
  #   dongle_id = reg_res
  # else:
  #   serial = params.get("HardwareSerial")
  #   raise Exception(f"Registration failed for device {serial}")
  # os.environ['DONGLE_ID'] = dongle_id  # Needed for swaglog
  #
  # if not is_dirty():
  #   os.environ['CLEAN'] = '1'

  # init logging
  sentry.init(sentry.SentryProject.SELFDRIVE)
  cloudlog.bind_global(dongle_id=dongle_id, version=get_version(), dirty=is_dirty(),
                       device=HARDWARE.get_device_type())
def manager_prepare() -> None:
  """Run the prepare step of every managed process (e.g. compile/caching)."""
  for proc in managed_processes.values():
    proc.prepare()
def manager_cleanup() -> None:
  """Stop every managed process: signal all first, then wait on each.

  The non-blocking pass lets all processes receive their stop signal
  before we start blocking on any single one, so shutdown is parallel.
  """
  for proc in managed_processes.values():
    proc.stop(block=False)

  # ensure all are killed
  for proc in managed_processes.values():
    proc.stop(block=True)

  cloudlog.info("everything is dead")
def manager_thread() -> None:
  """Main supervision loop.

  Keeps the managed processes matching the desired state (driving vs.
  idle), publishes `managerState`, and returns when an uninstall /
  shutdown / reboot param is set.
  """
  cloudlog.bind(daemon="manager")
  cloudlog.info("manager start")
  cloudlog.info({"environ": os.environ})

  params = Params()

  # processes that must never be (re)started this run
  ignore: List[str] = []
  # if params.get("DongleId", encoding='utf8') in (None, UNREGISTERED_DONGLE_ID):
  #   ignore += ["manage_athenad", "uploader"]
  if os.getenv("NOBOARD") is not None:
    ignore.append("pandad")
  # BLOCK is a comma-separated list of process names to suppress
  ignore += [x for x in os.getenv("BLOCK", "").split(",") if len(x) > 0]

  sm = messaging.SubMaster(['deviceState', 'carParams'], poll=['deviceState'])
  pm = messaging.PubMaster(['managerState'])

  # bring up the always-on processes before the first deviceState arrives
  ensure_running(managed_processes.values(), False, params=params, CP=sm['carParams'], not_run=ignore)

  while True:
    sm.update()

    started = sm['deviceState'].started
    ensure_running(managed_processes.values(), started, params=params, CP=sm['carParams'], not_run=ignore)

    # one-line status: green name = alive, red name = dead (ANSI colors)
    running = ' '.join("%s%s\u001b[0m" % ("\u001b[32m" if p.proc.is_alive() else "\u001b[31m", p.name)
                       for p in managed_processes.values() if p.proc)
    print(running)
    cloudlog.debug(running)

    # send managerState
    msg = messaging.new_message('managerState')
    msg.managerState.processes = [p.get_process_state_msg() for p in managed_processes.values()]
    pm.send('managerState', msg)

    # Exit main loop when uninstall/shutdown/reboot is needed
    shutdown = False
    for param in ("DoUninstall", "DoShutdown", "DoReboot"):
      if params.get_bool(param):
        shutdown = True
        params.put("LastManagerExitReason", param)
        cloudlog.warning(f"Shutting down manager - {param} set")

    if shutdown:
      break
def main() -> None:
  """Entry point: init, start UI early, prepare, then supervise.

  After the supervision loop ends, acts on whichever of
  DoUninstall/DoReboot/DoShutdown requested the exit.
  """
  prepare_only = os.getenv("PREPAREONLY") is not None

  manager_init()

  # Start UI early so prepare can happen in the background
  if not prepare_only:
    managed_processes['ui'].start()

  manager_prepare()

  if prepare_only:
    return

  # SystemExit on sigterm
  signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(1))

  try:
    manager_thread()
  except Exception:
    traceback.print_exc()
    sentry.capture_exception()
  finally:
    # always tear processes down, even after a crash
    manager_cleanup()

  params = Params()
  if params.get_bool("DoUninstall"):
    cloudlog.warning("uninstalling")
    HARDWARE.uninstall()
  elif params.get_bool("DoReboot"):
    cloudlog.warning("reboot")
    HARDWARE.reboot()
  elif params.get_bool("DoShutdown"):
    cloudlog.warning("shutdown")
    HARDWARE.shutdown()
if __name__ == "__main__":
  # On EON hardware, install CJK fonts once so the localized UI renders.
  if os.path.isfile("/EON"):
    if not os.path.isfile("/system/fonts/NotoSansCJKtc-Regular.otf"):
      os.system("mount -o remount,rw /system")
      os.system("rm -fr /system/fonts/NotoSansTC*.otf")
      os.system("rm -fr /system/fonts/NotoSansSC*.otf")
      os.system("rm -fr /system/fonts/NotoSansKR*.otf")
      os.system("rm -fr /system/fonts/NotoSansJP*.otf")
      os.system("cp -rf /data/openpilot/selfdrive/assets/fonts/NotoSansCJKtc-* /system/fonts/")
      os.system("cp -rf /data/openpilot/selfdrive/assets/fonts/fonts.xml /system/etc/fonts.xml")
      os.system("chmod 644 /system/etc/fonts.xml")
      os.system("chmod 644 /system/fonts/NotoSansCJKtc-*")
      # NOTE(review): remount option is "r", not the usual "ro" -- confirm intended.
      os.system("mount -o remount,r /system")

  unblock_stdout()

  try:
    main()
  except Exception:
    # log the failure, stop the UI, and show the traceback on screen
    add_file_handler(cloudlog)
    cloudlog.exception("Manager failed to start")

    try:
      managed_processes['ui'].stop()
    except Exception:
      pass

    # Show last 3 lines of traceback
    error = traceback.format_exc(-3)
    error = "Manager failed to start\n\n" + error

    with TextWindow(error) as t:
      t.wait_for_exit()

    raise

  # manual exit because we are forked
  sys.exit(0)
| 30.241722 | 106 | 0.672506 |
ace4bf39d2acc2400e08554d89364707bf667bb9 | 537 | py | Python | leetcode/python/maximumSubarray.py | yaoxuanw007/forfun | db50bd40852d49bd68bae03ceb43cb4a901c6d37 | [
"MIT"
] | null | null | null | leetcode/python/maximumSubarray.py | yaoxuanw007/forfun | db50bd40852d49bd68bae03ceb43cb4a901c6d37 | [
"MIT"
] | null | null | null | leetcode/python/maximumSubarray.py | yaoxuanw007/forfun | db50bd40852d49bd68bae03ceb43cb4a901c6d37 | [
"MIT"
] | null | null | null | # https://oj.leetcode.com/problems/maximum-subarray/
class Solution:
    """LeetCode 53 -- Maximum Subarray."""

    # @param A, a list of integers
    # @return an integer
    def maxSubArray(self, A):
        """Return the largest sum over all contiguous subarrays of A.

        Kadane's algorithm: `cur` is the best sum of a subarray ending at
        the current element; a negative running sum can never help, so it
        is dropped and restarted. O(n) time, O(1) space (the original
        kept a full O(n) DP table and used Python-2-only `xrange`).
        Returns 0 for an empty input instead of raising ValueError.
        """
        if not A:
            return 0
        best = cur = A[0]
        for x in A[1:]:
            # extend the running subarray, or restart it at x
            cur = x if cur < 0 else cur + x
            if cur > best:
                best = cur
        return best
# Smoke test: expected answer is 6 (subarray [4, -1, 2, 1]).
# Bugfix: the original used the Python 2 `print` statement, a syntax
# error on Python 3; print-as-function keeps the script runnable.
s = Solution()
print(s.maxSubArray([-2, 1, -3, 4, -1, 2, 1, -5, 4]), 6)
| 25.571429 | 63 | 0.571695 |
ace4bfb5180912eb42cae67fad36a88adbe6ca8f | 665 | py | Python | 02-buffering-and-streaming-data/client.py | MrelCode/socket | e80cd5a20eb1a287ccef0d4943569d69f5da6006 | [
"MIT"
] | null | null | null | 02-buffering-and-streaming-data/client.py | MrelCode/socket | e80cd5a20eb1a287ccef0d4943569d69f5da6006 | [
"MIT"
] | null | null | null | 02-buffering-and-streaming-data/client.py | MrelCode/socket | e80cd5a20eb1a287ccef0d4943569d69f5da6006 | [
"MIT"
] | null | null | null | import socket
HEADERSIZE = 10
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# s.connect((socket.namahostserver(), nomor port server))
s.connect((socket.gethostname(), 2000))
while True:
full_msg = ''
new_msg = True
while True:
msg = s.recv(31)
if new_msg:
print(f"new message length: {HEADERSIZE}")
msglen = int(msg[:HEADERSIZE])
new_msg = False
full_msg += msg.decode("utf-8")
if len(full_msg)-HEADERSIZE == msglen:
print("full message recvd")
print(full_msg[HEADERSIZE:])
new_msg = True
full_msg = ''
print(full_msg) | 23.75 | 57 | 0.581955 |
ace4c00819555713e7c35238a5ea5e707870655b | 12,050 | py | Python | custom_components/lyric/climate.py | balloob/lyric | cd640c137743c90adec067d66969efe48f1eb8b8 | [
"MIT"
] | 16 | 2019-10-29T10:18:50.000Z | 2021-01-09T23:43:51.000Z | custom_components/lyric/climate.py | balloob/lyric | cd640c137743c90adec067d66969efe48f1eb8b8 | [
"MIT"
] | 13 | 2019-06-26T12:20:04.000Z | 2021-03-01T11:07:10.000Z | custom_components/lyric/climate.py | balloob/lyric | cd640c137743c90adec067d66969efe48f1eb8b8 | [
"MIT"
] | 20 | 2019-08-19T15:03:55.000Z | 2022-02-13T14:59:28.000Z | """
Support for Honeywell Lyric thermostats.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/climate.lyric/
"""
import logging
from os import path
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
"""
replace custom_components.lyric with
homeassistant.components.lyric when not
placed in custom components
"""
from custom_components.lyric import DATA_LYRIC, CONF_FAN, CONF_AWAY_PERIODS, DOMAIN
from homeassistant.components.climate import ClimateDevice, PLATFORM_SCHEMA
from homeassistant.components.climate.const import (
STATE_AUTO, STATE_COOL, STATE_HEAT, STATE_ECO,
ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_HIGH, SUPPORT_TARGET_TEMPERATURE_LOW,
SUPPORT_OPERATION_MODE, SUPPORT_AWAY_MODE, SUPPORT_FAN_MODE)
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_TEMPERATURE, CONF_SCAN_INTERVAL,
STATE_ON, STATE_OFF, STATE_UNKNOWN, TEMP_CELSIUS,
TEMP_FAHRENHEIT)
DEPENDENCIES = ['lyric']
_LOGGER = logging.getLogger(__name__)
SERVICE_RESUME_PROGRAM = 'lyric_resume_program'
SERVICE_RESET_AWAY = 'lyric_reset_away'
STATE_HEAT_COOL = 'heat-cool'
HOLD_NO_HOLD = 'NoHold'
SUPPORT_FLAGS = (SUPPORT_TARGET_TEMPERATURE | SUPPORT_TARGET_TEMPERATURE_HIGH |
SUPPORT_TARGET_TEMPERATURE_LOW | SUPPORT_OPERATION_MODE |
SUPPORT_AWAY_MODE | SUPPORT_FAN_MODE)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_SCAN_INTERVAL):
vol.All(vol.Coerce(int), vol.Range(min=1))
})
RESUME_PROGRAM_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids
})
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Lyric thermostat platform.

    Creates one LyricThermostat entity per (location, device) pair known
    to the shared Lyric API object and registers the
    `lyric_resume_program` service.
    """
    if discovery_info is None:
        # this platform is discovery-only (set up via the lyric component)
        return

    _LOGGER.debug("climate discovery_info: %s" % discovery_info)
    _LOGGER.debug("climate config: %s" % config)

    temp_unit = hass.config.units.temperature_unit
    has_fan = discovery_info.get(CONF_FAN, False)
    away_periods = discovery_info.get(CONF_AWAY_PERIODS, [])

    _LOGGER.debug('Set up Lyric climate platform')

    devices = [LyricThermostat(location, device, hass, temp_unit, has_fan, away_periods)
               for location, device in hass.data[DATA_LYRIC].thermostats()]

    add_devices(devices, True)

    def resume_program_service(service):
        """Resume the program on the target thermostats."""
        entity_id = service.data.get(ATTR_ENTITY_ID)
        _LOGGER.debug('resume_program_service entity_id: %s' % entity_id)
        if entity_id:
            # restrict to the requested entities
            target_thermostats = [device for device in devices
                                  if device.entity_id in entity_id]
        else:
            # no entity_id given: act on all thermostats
            target_thermostats = devices
        for thermostat in target_thermostats:
            # clear any hold and drop the manual away override
            thermostat.set_hold_mode(HOLD_NO_HOLD)
            thermostat.away_override = False

    hass.services.register(
        DOMAIN, SERVICE_RESUME_PROGRAM, resume_program_service,
        schema=RESUME_PROGRAM_SCHEMA)
class LyricThermostat(ClimateDevice):
    """Representation of a Lyric thermostat.

    Wraps a python-lyric device object; `update()` caches its state into
    private attributes so all property accessors are cheap.
    """

    def __init__(self, location, device, hass, temp_unit, has_fan, away_periods):
        """Initialize the thermostat.

        Args:
            location: python-lyric location object.
            device: python-lyric thermostat object.
            hass: Home Assistant instance (used to fire override events).
            temp_unit: configured HA temperature unit.
            has_fan (bool): whether fan control is enabled for this setup.
            away_periods (list): schedule period names treated as "away".
        """
        self._unit = temp_unit
        self.location = location
        self.device = device
        self._hass = hass
        self._away_periods = away_periods
        _LOGGER.debug("away periods: %s" % away_periods)

        # Not all lyric devices support cooling and heating - remove unused
        self._operation_list = [STATE_OFF]

        # Add supported lyric thermostat features
        if self.device.can_heat:
            self._operation_list.append(STATE_HEAT)
        if self.device.can_cool:
            self._operation_list.append(STATE_COOL)
        if self.device.can_heat and self.device.can_cool:
            self._operation_list.append(STATE_AUTO)

        # feature of device
        self._has_fan = has_fan
        if self._has_fan and "fan" in self.device.settings:
            self._fan_list = self.device.settings["fan"].get("allowedModes")
        else:
            self._fan_list = None

        # data attributes, populated by update()
        self._away = None
        self._location = None
        self._name = None
        self._humidity = None
        self._target_temperature = None
        self._setpointStatus = None
        self._temperature = None
        self._temperature_scale = None
        self._target_temp_heat = None
        self._target_temp_cool = None
        self._dualSetpoint = None
        self._mode = None
        self._fan = None
        self._min_temperature = None
        self._max_temperature = None
        self._changeableValues = None
        self._scheduleType = None
        self._scheduleSubType = None
        self._scheduleCapabilities = None
        self._currentSchedulePeriod = None
        self._currentSchedulePeriodDay = None
        self._vacationHold = None
        # True once the user manually toggled away mode from HA
        self.away_override = False

    @property
    def name(self):
        """Return the name of the lyric, if any."""
        return self._name

    @property
    def supported_features(self):
        """Return the list of supported features."""
        return SUPPORT_FLAGS

    @property
    def temperature_unit(self):
        """Return the unit of measurement."""
        return self._temperature_scale

    @property
    def current_temperature(self):
        """Return the current temperature."""
        return self._temperature

    @property
    def current_operation(self):
        """Return current operation ie. heat, cool, idle."""
        if self._mode in [STATE_HEAT, STATE_COOL, STATE_OFF]:
            return self._mode
        elif self._mode == STATE_HEAT_COOL:
            # Lyric's 'heat-cool' maps to HA's auto mode
            return STATE_AUTO
        else:
            return STATE_UNKNOWN

    @property
    def target_temperature(self):
        """Return the temperature we try to reach (single-setpoint only)."""
        if not self._dualSetpoint:
            return self._target_temperature
        else:
            return None

    @property
    def target_temperature_low(self):
        """Return the lower bound temperature we try to reach."""
        if self._dualSetpoint:
            return self._target_temp_cool
        else:
            return None

    @property
    def target_temperature_high(self):
        """Return the upper bound temperature we try to reach."""
        if self._dualSetpoint:
            return self._target_temp_heat
        else:
            return None

    @property
    def is_away_mode_on(self):
        """Return if away mode is on.

        A manual override wins; otherwise, with a timed schedule and
        configured away periods, away is derived from the current period.
        """
        if self.away_override:
            return self._away
        elif self._scheduleType == 'Timed' and self._away_periods:
            return self._currentSchedulePeriod in self._away_periods
        else:
            return self._away

    def set_temperature(self, **kwargs):
        """Set new target temperature.

        Dual-setpoint devices need both ATTR_TARGET_TEMP_LOW and
        ATTR_TARGET_TEMP_HIGH; single-setpoint devices use
        ATTR_TEMPERATURE.
        """
        target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW)
        target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH)
        temp = None
        if self._dualSetpoint:
            if target_temp_low is not None and target_temp_high is not None:
                temp = (target_temp_low, target_temp_high)
        else:
            temp = kwargs.get(ATTR_TEMPERATURE)
        if temp is None:
            # Bugfix: previously `temp` was unbound when a dual-setpoint
            # device received only ATTR_TEMPERATURE (UnboundLocalError).
            _LOGGER.warning("No valid target temperature provided")
            return
        _LOGGER.debug("Lyric set_temperature-output-value=%s", temp)
        self.device.temperatureSetpoint = temp

    def set_operation_mode(self, operation_mode):
        """Set operation mode."""
        _LOGGER.debug(operation_mode)
        _LOGGER.debug(operation_mode.capitalize())
        if operation_mode in [STATE_HEAT, STATE_COOL, STATE_OFF]:
            device_mode = operation_mode
        elif operation_mode == STATE_AUTO:
            device_mode = STATE_HEAT_COOL
        else:
            # Bugfix: previously an unsupported mode left `device_mode`
            # unbound and raised UnboundLocalError.
            _LOGGER.error("Unsupported operation mode: %s", operation_mode)
            return
        self.device.operationMode = device_mode.capitalize()

    @property
    def operation_list(self):
        """List of available operation modes."""
        return self._operation_list

    def turn_away_mode_on(self):
        """Turn away on (manual override; survives schedule updates)."""
        self._away = True
        self.away_override = True
        self._hass.bus.fire('override_away_on', {
            'entity_id': self.entity_id
        })

    def turn_away_mode_off(self):
        """Turn away off (manual override; survives schedule updates)."""
        self._away = False
        self.away_override = True
        self._hass.bus.fire('override_away_off', {
            'entity_id': self.entity_id
        })

    @property
    def current_hold_mode(self):
        """Return current hold mode."""
        return self._setpointStatus

    def set_hold_mode(self, hold_mode):
        """Set hold mode (PermanentHold, HoldUntil, NoHold,
        VacationHold, etc.)."""
        self.device.thermostatSetpointStatus = hold_mode

    @property
    def current_fan_mode(self):
        """Return whether the fan is on."""
        if self._has_fan:
            # Return whether the fan is on
            return self._fan
        else:
            # No Fan available so disable slider
            return None

    @property
    def fan_list(self):
        """List of available fan modes."""
        return self._fan_list

    def set_fan_mode(self, fan):
        """Set fan state."""
        self.device.fan = fan

    @property
    def min_temp(self):
        """Identify min_temp in Lyric API or defaults if not available."""
        return self._min_temperature

    @property
    def max_temp(self):
        """Identify max_temp in Lyric API or defaults if not available."""
        return self._max_temperature

    @property
    def device_state_attributes(self):
        """Return device specific state attributes."""
        attrs = {"schedule": self._scheduleType, "away_override": self.away_override}
        if self._scheduleSubType:
            attrs["schedule_sub"] = self._scheduleSubType
        if self._vacationHold:
            attrs["vacation"] = self._vacationHold
        if self._currentSchedulePeriodDay:
            attrs["current_schedule_day"] = self._currentSchedulePeriodDay
        if self._currentSchedulePeriod:
            attrs["current_schedule_period"] = self._currentSchedulePeriod
        return attrs

    def update(self):
        """Cache value from python-lyric."""
        if self.device:
            self._location = self.device.where
            self._name = self.device.name
            self._humidity = self.device.indoorHumidity
            self._temperature = self.device.indoorTemperature
            self._mode = self.device.operationMode.lower()
            self._setpointStatus = self.device.thermostatSetpointStatus
            self._target_temperature = self.device.temperatureSetpoint
            self._target_temp_heat = self.device.heatSetpoint
            self._target_temp_cool = self.device.coolSetpoint
            self._dualSetpoint = self.device.hasDualSetpointStatus
            self._fan = self.device.fanMode
            if self.away_override == False:
                # only track device away state while not manually overridden
                self._away = self.device.away
            self._min_temperature = self.device.minSetpoint
            self._max_temperature = self.device.maxSetpoint
            # self._changeableValues = self.device.changeableValues
            self._scheduleType = self.device.scheduleType
            self._scheduleSubType = self.device.scheduleSubType
            # self._scheduleCapabilities = self.device.scheduleCapabilities
            self._vacationHold = self.device.vacationHold
            if self.device.currentSchedulePeriod:
                if 'period' in self.device.currentSchedulePeriod:
                    self._currentSchedulePeriod = self.device.currentSchedulePeriod['period']
                if 'day' in self.device.currentSchedulePeriod:
                    # Bugfix: the 'day' value used to be stored into
                    # _currentSchedulePeriod, clobbering the period and
                    # leaving _currentSchedulePeriodDay (read by
                    # device_state_attributes) forever None.
                    self._currentSchedulePeriodDay = self.device.currentSchedulePeriod['day']
            if self.device.units == 'Celsius':
                self._temperature_scale = TEMP_CELSIUS
            else:
                self._temperature_scale = TEMP_FAHRENHEIT
ace4c05ead673a94b02d9a7c536f7d3426ba5519 | 14,902 | py | Python | pyiotlib/app_sdk.py | aixiwang/iot_data_svr | 258f5ebb5475e1a2f422b3daea0a56f606569254 | [
"MIT"
] | null | null | null | pyiotlib/app_sdk.py | aixiwang/iot_data_svr | 258f5ebb5475e1a2f422b3daea0a56f606569254 | [
"MIT"
] | null | null | null | pyiotlib/app_sdk.py | aixiwang/iot_data_svr | 258f5ebb5475e1a2f422b3daea0a56f606569254 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#-----------------------------------------------------------
# Copyright (c) 2015 by Aixi Wang <aixi.wang@hotmail.com>
#-----------------------------------------------------------
import logging, random, time
import os
from RpcOnTcp import *
class app_sdk:
    """Thin RPC client for the IoT data server.

    Every public method builds a JSON-style request dict, sends it over a
    RpcOnTcp channel, and returns the server reply unchanged. Time-series
    records are addressed by keys of the form '<tag>:<name>:<timestamp>'.

    Bugfix: remove_log/remove_alarm/remove_jpg/remove_jpg2 previously
    built their key from an undefined name `t1` (NameError on every
    call); they now use their `t` parameter, matching remove_data.
    """

    def __init__(self, auth_key, server_ip='127.0.0.1', server_port=7777):
        """Open an RPC channel to the data server."""
        self.rpc = RpcOnTcp(auth_key, server_ip, server_port)

    # ------------------------------------------------------------------
    # internal helpers (shared request shapes)
    # ------------------------------------------------------------------
    def _call(self, json_in):
        """Send one request dict and return the server reply."""
        return self.rpc.call(json_in)

    def _save_ts(self, tag, name, data, t='now'):
        """Store one time-series value; omit 't' so the server stamps now."""
        json_in = {'cmd': 'set_ts_data', 'tag': tag, 'name': name, 'v': data}
        if t != 'now':
            json_in['t'] = str(t)
        return self._call(json_in)

    def _remove_ts(self, tag, name, t):
        """Delete the time-series record at timestamp t."""
        return self._call({'cmd': 'delete', 'k': tag + ':' + name + ':' + str(t)})

    def _get_ts(self, tag, name, t1, t2):
        """Fetch time-series values in [t1, t2]."""
        return self._call({'cmd': 'get_ts_datas', 'tag': tag,
                           'name': name, 't1': t1, 't2': t2})

    def _get_ts_keys(self, tag, name, t1, t2):
        """Fetch time-series keys in [t1, t2]."""
        return self._call({'cmd': 'get_ts_keys', 'tag': tag,
                           'name': name, 't1': t1, 't2': t2})

    # ------------------------------------------------------------------
    # plain key/value
    # ------------------------------------------------------------------
    def set(self, k, v):
        """Store a plain key/value pair."""
        return self._call({'cmd': 'set', 'k': k, 'v': v})

    def get(self, k):
        """Read a plain key."""
        return self._call({'cmd': 'get', 'k': k})

    def delete(self, k):
        """Delete a plain key."""
        return self._call({'cmd': 'delete', 'k': k})

    # ------------------------------------------------------------------
    # logs
    # ------------------------------------------------------------------
    def save_log(self, name, data):
        """Append a log entry (server-stamped time)."""
        return self._save_ts('log', name, data)

    def remove_log(self, name, t):
        """Delete the log entry at timestamp t (bugfix: was `t1`)."""
        return self._remove_ts('log', name, t)

    def get_logs(self, name, t1, t2):
        """Fetch log entries in [t1, t2]."""
        # NOTE(review): the hard-coded 'key' field looks like leftover
        # debug auth; kept for wire compatibility -- confirm server-side.
        return self._call({'key': '1234-5678', 'cmd': 'get_ts_datas',
                           'tag': 'log', 'name': name, 't1': t1, 't2': t2})

    def get_logs_keys(self, name, t1, t2):
        """Fetch log keys in [t1, t2]."""
        return self._get_ts_keys('log', name, t1, t2)

    # ------------------------------------------------------------------
    # data points
    # ------------------------------------------------------------------
    def save_data(self, name, data, t='now'):
        """Store a data point at time t ('now' = server-stamped)."""
        return self._save_ts('data', name, data, t)

    def remove_data(self, name, t):
        """Delete the data point at timestamp t."""
        return self._remove_ts('data', name, t)

    def get_datas(self, name, t1, t2):
        """Fetch data points in [t1, t2]."""
        return self._get_ts('data', name, t1, t2)

    def get_datas_keys(self, name, t1, t2):
        """Fetch data keys in [t1, t2]."""
        return self._get_ts_keys('data', name, t1, t2)

    # ------------------------------------------------------------------
    # statistics
    # ------------------------------------------------------------------
    def save_stats(self, name, time, data):
        """Store a statistics value at the given time."""
        return self._call({'cmd': 'set_stats_data', 'tag': 'data',
                           'name': name, 'time': time, 'v': data})

    def remove_stats(self, k):
        """Delete a statistics record by full key."""
        return self._call({'cmd': 'delete_stats', 'k': k})

    def get_stats(self, name, t1, t2):
        """Fetch statistics values in [t1, t2]."""
        return self._call({'cmd': 'get_stats_datas', 'tag': 'data',
                           'name': name, 't1': t1, 't2': t2})

    def get_stats_keys(self, name, t1, t2):
        """Fetch statistics keys in [t1, t2]."""
        return self._call({'cmd': 'get_stats_keys', 'tag': 'data',
                           'name': name, 't1': t1, 't2': t2})

    # ------------------------------------------------------------------
    # alarms
    # ------------------------------------------------------------------
    def save_alarm(self, name, data, t='now'):
        """Store an alarm at time t ('now' = server-stamped)."""
        return self._save_ts('alarm', name, data, t)

    def remove_alarm(self, name, t):
        """Delete the alarm at timestamp t (bugfix: was `t1`)."""
        return self._remove_ts('alarm', name, t)

    def get_alarms(self, name, t1, t2):
        """Fetch alarms in [t1, t2]."""
        return self._get_ts('alarm', name, t1, t2)

    def get_alarms_keys(self, name, t1, t2):
        """Fetch alarm keys in [t1, t2]."""
        return self._get_ts_keys('alarm', name, t1, t2)

    # ------------------------------------------------------------------
    # jpg / jpg2 images
    # ------------------------------------------------------------------
    def save_jpg(self, name, data):
        """Store a jpg blob (server-stamped time)."""
        return self._save_ts('jpg', name, data)

    def remove_jpg(self, name, t):
        """Delete the jpg at timestamp t (bugfix: was `t1`)."""
        return self._remove_ts('jpg', name, t)

    def get_jpgs(self, name, t1, t2):
        """Fetch jpgs in [t1, t2]."""
        return self._get_ts('jpg', name, t1, t2)

    def get_jpgs_keys(self, name, t1, t2):
        """Fetch jpg keys in [t1, t2]."""
        return self._get_ts_keys('jpg', name, t1, t2)

    def save_jpg2(self, name, data):
        """Store a jpg2 blob (server-stamped time)."""
        return self._save_ts('jpg2', name, data)

    def remove_jpg2(self, name, t):
        """Delete the jpg2 at timestamp t (bugfix: was `t1`)."""
        return self._remove_ts('jpg2', name, t)

    def get_jpgs2(self, name, t1, t2):
        """Fetch jpg2s in [t1, t2]."""
        return self._get_ts('jpg2', name, t1, t2)

    def get_jpgs2_keys(self, name, t1, t2):
        """Fetch jpg2 keys in [t1, t2]."""
        return self._get_ts_keys('jpg2', name, t1, t2)

    # ------------------------------------------------------------------
    # misc
    # ------------------------------------------------------------------
    def mqtt_pub(self, server_addr, server_port, username, password, topic, message):
        """Ask the server to publish one MQTT message."""
        return self._call({'cmd': 'mqtt_pub',
                           'server_addr': server_addr,
                           'server_port': server_port,
                           'username': username,
                           'password': password,
                           'topic': topic,
                           'message': message})

    def setfile(self, filename, content):
        """Upload file content to the server."""
        return self._call({'cmd': 'setfile', 'filename': filename, 'content': content})

    def getfile(self, filename):
        """Download file content from the server."""
        return self._call({'cmd': 'getfile', 'filename': filename})
#----------------------
# main
#----------------------
if __name__ == "__main__":
    # Manual smoke test (Python 2 syntax throughout: print statements and
    # str.encode('hex')). Requires a running iot_data_svr on
    # localhost:7777 accepting auth key '1234-1'.
    rpc = app_sdk('1234-1','127.0.0.1',7777)

    print '-------------------------------------'
    print 'test set'
    rpc.set('log','======asdfasdf=================')

    print '-------------------------------------'
    print 'test get'
    json_out = rpc.get('log')
    print json_out

    print '-------------------------------------'
    print 'test delete'
    json_out = rpc.delete('log')
    print json_out

    print '-------------------------------------'
    print 'kv_get again'
    # NOTE(review): get() returns a single reply object, yet it is
    # unpacked into two names here -- looks inconsistent; confirm what
    # RpcOnTcp.call actually returns.
    ret_code, v = rpc.get('log')
    print json_out

    print '-------------------------------------'
    print 'test save_log, get_logs'
    # test save_log, get_logs: write 10 entries then read back the last 3s
    i = 10
    while True:
        print 'i:',i
        rpc.save_log('test','adfasdfasdf')
        time.sleep(0.1)
        i -= 1
        if (i == 0):
            break;
    t = time.time()
    json_out = rpc.get_logs('test',t-3,t)
    print json_out

    print 'test save_data, get_datas'
    # test save_data, get_datas
    i = 10
    while True:
        print 'i:',i
        rpc.save_data('test','adfasdfasdf')
        time.sleep(0.1)
        i -= 1
        if (i == 0):
            break;
    t = time.time()
    json_out = rpc.get_datas('test',t-3,t)
    print json_out

    print '-------------------------------------'
    print 'test save_alarm, get_alarms'
    # test save_alarm, get_alarms
    i = 10
    while True:
        print 'i:',i
        rpc.save_alarm('test','adfasdfasdf')
        time.sleep(0.1)
        i -= 1
        if (i == 0):
            break;
    t = time.time()
    json_out = rpc.get_alarms('test',t-3,t)
    print json_out

    print '-------------------------------------'
    print 'test setfile'
    # hex-encode the payload before upload (Python 2 only encoding)
    content = 'test'.encode('hex')
    json_out = rpc.setfile('/test.txt',content)
    print json_out

    print '-------------------------------------'
    print 'test getfile'
    json_out = rpc.getfile('./test.txt')
    print json_out
| 30.536885 | 80 | 0.315729 |
ace4c0c70729e6d8b41ac112be51ea619c3da90b | 6,893 | py | Python | lib/cr/training.py | BoyanJIANG/4D-Compositional-Representation | 64d5f4bbd6b8e6bc3bfd8f76736f6d468c71a73c | [
"Apache-2.0"
] | 12 | 2021-06-07T08:38:56.000Z | 2022-03-08T02:16:50.000Z | lib/cr/training.py | BoyanJIANG/4D-Compositional-Representation | 64d5f4bbd6b8e6bc3bfd8f76736f6d468c71a73c | [
"Apache-2.0"
] | null | null | null | lib/cr/training.py | BoyanJIANG/4D-Compositional-Representation | 64d5f4bbd6b8e6bc3bfd8f76736f6d468c71a73c | [
"Apache-2.0"
] | 2 | 2021-06-24T03:40:57.000Z | 2021-12-05T12:52:28.000Z | import torch
import numpy as np
from torch.nn import functional as F
from lib.common import compute_iou
from lib.training import BaseTrainer
class Trainer(BaseTrainer):
r''' Trainer object for ONet 4D.
Onet 4D is trained with BCE. The Trainer object
obtains methods to perform a train and eval step as well as to visualize
the current training state.
Args:
model (nn.Module): Onet 4D model
optimizer (PyTorch optimizer): The optimizer that should be used
device (PyTorch device): the PyTorch device
input_type (string): The input type (e.g. 'img')
vis_dir (string): the visualisation directory
threshold (float): threshold value for decision boundary
'''
    def __init__(self, model, optimizer, device=None, input_type='img', threshold=0.4):
        """Store the model, optimizer and evaluation settings.

        Args:
            model (nn.Module): ONet 4D model being trained.
            optimizer: PyTorch optimizer stepping the model parameters.
            device: torch device tensors are moved to.
            input_type (str): input modality tag (e.g. 'img').
            threshold (float): occupancy probability cut-off used for IoU.
        """
        self.model = model
        self.optimizer = optimizer
        self.device = device
        self.input_type = input_type
        self.threshold = threshold
def train_step(self, data):
''' Performs a train step.
Args:
data (tensor): training data
'''
self.model.train()
self.optimizer.zero_grad()
loss = self.compute_loss(data)
loss.backward()
self.optimizer.step()
return loss.item()
    def eval_step(self, data):
        """Performs a validation step.

        Encodes the input point sequence without gradients and evaluates
        IoU against the ground-truth occupancies.

        Args:
            data (dict): validation data batch.
        Returns:
            dict: per-timestep IoU entries, mean 'iou', and 'loss'.
        """
        self.model.eval()
        device = self.device

        inputs = data.get('inputs', torch.empty(1, 1, 0)).to(device)
        batch_size, seq_len, n_pts, _ = inputs.size()

        eval_dict = {}
        loss = 0

        with torch.no_grad():
            # Encode inputs into three codes -- presumably pose (c_p),
            # motion (c_m) and identity (c_i); TODO confirm naming.
            c_p, c_m, c_i = self.model.encode_inputs(inputs)

            # IoU
            eval_dict_iou = self.eval_step_iou(data, c_m=c_m, c_p=c_p, c_i=c_i)
            for (k, v) in eval_dict_iou.items():
                eval_dict[k] = v
            # NOTE: 'loss' mirrors the mean IoU here (higher is better),
            # not a training loss.
            loss += eval_dict['iou']

        eval_dict['loss'] = loss.mean().item()
        return eval_dict
    def eval_step_iou(self, data, c_p=None, c_m=None, c_i=None):
        """Calculates the IoU for the evaluation step.

        Decodes occupancies at every evaluation timestep and compares the
        thresholded predictions against ground truth.

        Args:
            data (dict): evaluation batch with 'points_iou' fields.
            c_p: pose code at t=0.
            c_m: motion code conditioning the temporal transformation.
            c_i: identity code, repeated across timesteps.
        Returns:
            dict: mean 'iou' plus one 'iou_t<i>' entry per timestep.
        """
        device = self.device
        threshold = self.threshold
        eval_dict = {}

        pts_iou = data.get('points_iou').to(device)
        occ_iou = data.get('points_iou.occ')
        pts_iou_t = data.get('points_iou.time').to(device)

        batch_size, n_steps, n_pts, dim = pts_iou.shape

        p = pts_iou
        # identity code is constant over time: repeat along the step axis
        c_i = c_i.unsqueeze(0).repeat(1, n_steps, 1)
        # advance the pose code to every query time
        # (uses the first batch element's timestamps -- assumes all batch
        # items share the same time grid; TODO confirm)
        c_p_at_t = self.model.transform_to_t_eval(pts_iou_t[0], p=c_p, c_t=c_m)
        c_s_at_t = torch.cat([c_i, c_p_at_t], -1)
        c_s_at_t = c_s_at_t.view(batch_size * n_steps, c_s_at_t.shape[-1])

        # fold the time dimension into the batch for a single decode pass
        p = p.view(batch_size * n_steps, n_pts, -1)
        occ_iou = occ_iou.view(batch_size * n_steps, n_pts)

        occ_pred = self.model.decode(p, c=c_s_at_t)

        # binarize prediction and ground truth before IoU
        occ_pred = (occ_pred.probs > threshold).cpu().numpy()
        occ_gt = (occ_iou >= 0.5).numpy()

        iou = compute_iou(occ_pred, occ_gt)
        # average over the batch, keeping a value per timestep
        iou = iou.reshape(batch_size, -1).mean(0)
        eval_dict['iou'] = iou.sum() / len(iou)
        for i in range(len(iou)):
            eval_dict['iou_t%d' % i] = iou[i]

        return eval_dict
    def get_loss_recon_t(self, data, c_p=None, c_m=None, c_i=None, is_exchange=None):
        ''' Calculates the reconstruction loss at a sampled time t.

        Args:
            data (dict): training batch providing 'points_t[_ex]',
                'points_t[_ex].occ' and 'points_t[_ex].time' entries
            c_p (tensor): latent point code from encode_inputs
            c_m (tensor): latent motion/temporal code from encode_inputs
            c_i (tensor): latent identity code (possibly exchanged between
                the two sequences by compute_loss)
            is_exchange (bool): whether to read the exchanged point set
        Returns:
            tensor: mean binary cross-entropy occupancy loss
        '''
        device = self.device
        # Pick the exchanged or the regular point set for this sequence.
        if is_exchange:
            p_t = data.get('points_t_ex').to(device)
            occ_t = data.get('points_t_ex.occ').to(device)
            time_val = data.get('points_t_ex.time').to(device)
        else:
            p_t = data.get('points_t').to(device)
            occ_t = data.get('points_t.occ').to(device)
            time_val = data.get('points_t.time').to(device)
        batch_size, n_pts, _ = p_t.shape
        # Transform the point code to time t and condition the decoder on
        # the concatenation of identity and transformed point codes.
        c_p_at_t = self.model.transform_to_t(time_val, p=c_p, c_t=c_m)
        c_s_at_t = torch.cat([c_i, c_p_at_t], 1)
        p = p_t
        logits_pred = self.model.decode(p, c=c_s_at_t).logits
        loss_occ_t = F.binary_cross_entropy_with_logits(
            logits_pred, occ_t.view(batch_size, -1), reduction='none')
        loss_occ_t = loss_occ_t.mean()
        return loss_occ_t
def get_loss_recon_t0(self, data, c_p=None, c_i=None, is_exchange=None):
''' Calculates the reconstruction loss.
Args:
data (tensor): training data
c_t (tensor): temporal conditioned latent code
z (tensor): latent code
'''
if is_exchange:
p_t0 = data.get('points_ex')
occ_t0 = data.get('points_ex.occ')
else:
p_t0 = data.get('points')
occ_t0 = data.get('points.occ')
batch_size, n_pts, _ = p_t0.shape
device = self.device
batch_size = p_t0.shape[0]
c_s_at_t0 = torch.cat([c_i, c_p], 1)
p = p_t0
logits_t0 = self.model.decode(p.to(device), c=c_s_at_t0).logits
loss_occ_t0 = F.binary_cross_entropy_with_logits(
logits_t0, occ_t0.view(batch_size, -1).to(device),
reduction='none')
loss_occ_t0 = loss_occ_t0.mean()
return loss_occ_t0
    def compute_loss(self, data):
        ''' Calculates the combined reconstruction loss for a pair of
        sequences, randomly exchanging their identity codes.

        Args:
            data (tuple): pair of sequence batches (seq1, seq2)
        Returns:
            tensor: mean of the t and t=0 reconstruction losses,
                averaged over both sequences
        '''
        device = self.device
        seq1, seq2 = data
        # Encode inputs
        inputs1 = seq1.get('inputs', torch.empty(1, 1, 0)).to(device)
        inputs2 = seq2.get('inputs', torch.empty(1, 1, 0)).to(device)
        c_p_1, c_m_1, c_i_1 = self.model.encode_inputs(inputs1)
        c_p_2, c_m_2, c_i_2 = self.model.encode_inputs(inputs2)
        # Randomly (p=0.5) swap identity codes between the two sequences;
        # the same flag also selects the exchanged GT point sets below.
        is_exchange = np.random.randint(2)
        if is_exchange:
            in_c_i_1 = c_i_2
            in_c_i_2 = c_i_1
        else:
            in_c_i_1 = c_i_1
            in_c_i_2 = c_i_2
        loss_recon_t_1 = self.get_loss_recon_t(seq1, c_m=c_m_1, c_p=c_p_1, c_i=in_c_i_1, is_exchange=is_exchange)
        loss_recon_t0_1 = self.get_loss_recon_t0(seq1, c_p=c_p_1, c_i=in_c_i_1, is_exchange=is_exchange)
        loss_recon_t_2 = self.get_loss_recon_t(seq2, c_m=c_m_2, c_p=c_p_2, c_i=in_c_i_2, is_exchange=is_exchange)
        loss_recon_t0_2 = self.get_loss_recon_t0(seq2, c_p=c_p_2, c_i=in_c_i_2, is_exchange=is_exchange)
        # Average each loss term over the two sequences, then sum.
        loss_recon_t = (loss_recon_t_1 + loss_recon_t_2) / 2.0
        loss_recon_t0 = (loss_recon_t0_1 + loss_recon_t0_2) / 2.0
        loss = loss_recon_t + loss_recon_t0
        return loss
| 31.619266 | 113 | 0.596547 |
ace4c0eed5f512c7f15a210e5e5177cff43b69d1 | 4,090 | py | Python | test/test_forecast.py | gianpDomiziani/FLAML | 8eceda06cd59921be6915edb1495801a01bca1ec | [
"MIT"
] | 1 | 2021-09-08T14:38:29.000Z | 2021-09-08T14:38:29.000Z | test/test_forecast.py | popolee0513/FLAML | 339eb80f4404c0a5968c4170e796848d08ee88ba | [
"MIT"
] | null | null | null | test/test_forecast.py | popolee0513/FLAML | 339eb80f4404c0a5968c4170e796848d08ee88ba | [
"MIT"
] | 1 | 2021-10-04T09:52:58.000Z | 2021-10-04T09:52:58.000Z | import numpy as np
from flaml import AutoML
def test_forecast_automl(budget=5):
    """End-to-end forecast test on the statsmodels CO2 dataset.

    Fits AutoML via both the dataframe API and the X_train/y_train API,
    falling back to arima/sarimax when FBProphet is not installed.

    Args:
        budget: total AutoML running time in seconds.
    """
    # using dataframe
    import statsmodels.api as sm
    data = sm.datasets.co2.load_pandas().data['co2'].resample('MS').mean()
    data = data.fillna(data.bfill()).to_frame().reset_index().rename(
        columns={'index': 'ds', 'co2': 'y'})
    # Hold out the last `time_horizon` months for testing.
    num_samples = data.shape[0]
    time_horizon = 12
    split_idx = num_samples - time_horizon
    df = data[:split_idx]
    X_test = data[split_idx:]['ds']
    y_test = data[split_idx:]['y']
    automl = AutoML()
    settings = {
        "time_budget": budget,  # total running time in seconds
        "metric": 'mape',  # primary metric
        "task": 'forecast',  # task type
        "log_file_name": 'CO2_forecast.log',  # flaml log file
        "eval_method": "holdout",
        "label": ('ds', 'y'),  # (timestamp column, value column)
    }
    '''The main flaml automl API'''
    try:
        automl.fit(dataframe=df, **settings, period=time_horizon)
    except ImportError:
        # FBProphet missing: restrict to statsmodels-based estimators.
        print("not using FBProphet due to ImportError")
        automl.fit(dataframe=df, **settings, estimator_list=[
            'arima', 'sarimax'], period=time_horizon)
    ''' retrieve best config and best learner'''
    print('Best ML leaner:', automl.best_estimator)
    print('Best hyperparmeter config:', automl.best_config)
    print(f'Best mape on validation data: {automl.best_loss}')
    print(f'Training duration of best run: {automl.best_config_train_time}s')
    print(automl.model.estimator)
    ''' pickle and save the automl object '''
    import pickle
    with open('automl.pkl', 'wb') as f:
        pickle.dump(automl, f, pickle.HIGHEST_PROTOCOL)
    ''' compute predictions of testing dataset '''
    y_pred = automl.predict(X_test)
    print('Predicted labels', y_pred)
    print('True labels', y_test)
    ''' compute different metric values on testing dataset'''
    from flaml.ml import sklearn_metric_loss_score
    print('mape', '=', sklearn_metric_loss_score('mape', y_pred, y_test))
    # Replay the search history recorded in the flaml log file.
    from flaml.data import get_output_from_log
    time_history, best_valid_loss_history, valid_loss_history, config_history, metric_history = \
        get_output_from_log(filename=settings['log_file_name'], time_budget=budget)
    for config in config_history:
        print(config)
    print(automl.prune_attr)
    print(automl.max_resource)
    print(automl.min_resource)
    # Same fit through the X_train/y_train API instead of a dataframe.
    X_train = df['ds']
    y_train = df['y']
    automl = AutoML()
    try:
        automl.fit(X_train=X_train, y_train=y_train, **settings, period=time_horizon)
    except ImportError:
        print("not using FBProphet due to ImportError")
        automl.fit(X_train=X_train, y_train=y_train, **settings, estimator_list=[
            'arima', 'sarimax'], period=time_horizon)
def test_numpy():
    """Run the forecast task end to end on raw numpy arrays."""
    timestamps = np.arange('2014-01', '2021-01', dtype='datetime64[M]')
    values = np.random.random(size=72)
    learner = AutoML()
    try:
        # First attempt uses the default estimators, which may need
        # FBProphet to be installed.
        learner.fit(
            X_train=timestamps[:60],  # a single column of timestamp
            y_train=values,  # value for each timestamp
            period=12,  # time horizon to forecast, e.g., 12 months
            task='forecast', time_budget=3,  # time budget in seconds
            log_file_name="test/forecast.log")
        print(learner.predict(timestamps[60:]))
        print(learner.predict(12))
    except ValueError:
        print("ValueError for FBProphet is raised as expected.")
    except ImportError:
        print("not using FBProphet due to ImportError")
    # Second run is restricted to the statsmodels-based estimators.
    learner = AutoML()
    learner.fit(
        X_train=timestamps[:72],  # a single column of timestamp
        y_train=values,  # value for each timestamp
        period=12,  # time horizon to forecast, e.g., 12 months
        task='forecast', time_budget=1,  # time budget in seconds
        estimator_list=['arima', 'sarimax'],
        log_file_name="test/forecast.log")
    print(learner.predict(timestamps[72:]))
    # Passing an integer is an alternative way to specify predict steps
    # for arima/sarimax.
    print(learner.predict(12))
if __name__ == "__main__":
    # When run as a script, use a longer (60s) AutoML budget than the
    # default used under pytest.
    test_forecast_automl(60)
| 40.098039 | 97 | 0.648655 |
ace4c1079b48f501f9bd81c5a03dbc61ccd78435 | 4,366 | py | Python | simtbx/diffBragg/tests/tst_diffBragg_lambda_coefficients.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
] | 155 | 2016-11-23T12:52:16.000Z | 2022-03-31T15:35:44.000Z | simtbx/diffBragg/tests/tst_diffBragg_lambda_coefficients.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
] | 590 | 2016-12-10T11:31:18.000Z | 2022-03-30T23:10:09.000Z | simtbx/diffBragg/tests/tst_diffBragg_lambda_coefficients.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
] | 115 | 2016-11-15T08:17:28.000Z | 2022-02-09T15:30:14.000Z |
"""
This test checks the lambda coefficients property and derivatives
"""
from __future__ import division
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument("--cuda", action="store_true")
parser.add_argument("--plot", action='store_true')
parser.add_argument("--idx", type=int, help="coefficient index (0 or 1)", default=0, choices=[0,1])
args = parser.parse_args()
if args.cuda:
import os
os.environ["DIFFBRAGG_USE_CUDA"]="1"
import numpy as np
import pylab as plt
from scipy.stats import linregress
from scipy.spatial.transform import Rotation
from simtbx.nanoBragg import sim_data
from scitbx.matrix import sqr, rec
from cctbx import uctbx
from dxtbx.model import Crystal
ucell = (70, 60, 50, 90.0, 110, 90.0)
symbol = "C121"
a_real, b_real, c_real = sqr(uctbx.unit_cell(ucell).orthogonalization_matrix()).transpose().as_list_of_lists()
C = Crystal(a_real, b_real, c_real, symbol)
# random raotation
rotation = Rotation.random(num=1, random_state=101)[0]
Q = rec(rotation.as_quat(), n=(4, 1))
rot_ang, rot_axis = Q.unit_quaternion_as_axis_and_angle()
C.rotate_around_origin(rot_axis, rot_ang)
S = sim_data.SimData(use_default_crystal=True)
S.crystal.dxtbx_crystal = C
spectrum = S.beam.spectrum
wave, flux = spectrum[0]
Nwave = 5
waves = np.linspace(wave-wave*0.002, wave+wave*0.002, Nwave)
fluxes = np.ones(Nwave) * flux / Nwave
lambda0_GT = 0
lambda1_GT = 1
S.beam.spectrum = list(zip(waves, fluxes))
S.detector = sim_data.SimData.simple_detector(180, 0.1, (1024, 1024))
S.instantiate_diffBragg(verbose=0, oversample=0, auto_set_spotscale=True)
S.D.lambda_coefficients = lambda0_GT, lambda1_GT
S.D.spot_scale = 100000
S.D.Ncells_abc = 12
if args.idx == 0:
S.D.refine(12)
else:
S.D.refine(13)
S.D.initialize_managers()
S.D.region_of_interest = ((0, 1023), (0, 1023))
S.D.add_diffBragg_spots()
img = S.D.raw_pixels.as_numpy_array()
derivs = S.D.get_lambda_derivative_pixels()
deriv = derivs[0].as_numpy_array().reshape(img.shape)
S.D.raw_pixels *= 0
S.D.use_lambda_coefficients = False
S.D.add_diffBragg_spots()
test_img = S.D.raw_pixels.as_numpy_array()
assert np.allclose(img, test_img)
S.D.use_lambda_coefficients = True
S.D.raw_pixels *= 0
print("OK")
bragg = img > 1e-1 # select bragg scattering regions
all_error = []
all_error2 = []
shifts = []
shifts2 = []
from scipy import constants
ENERGY_CONV = 1e10*constants.c*constants.h / constants.electron_volt
energy_shifts = 0.1, .3, .5, 1, 3, 5, 10 # in electron volt
b_percs = 0.001, 0.002, 0.004, 0.008, 0.016, 0.032, 0.064
reference_energy = ENERGY_CONV / wave
for i_shift, en_shift in enumerate(energy_shifts):
wave_shifted = ENERGY_CONV / (reference_energy + en_shift)
wave_shift = wave - wave_shifted
delta_a = wave_shift
delta_b = lambda1_GT*b_percs[i_shift]
if args.idx == 0:
shift = b_percs[i_shift]*0.01
new_waves = waves*lambda1_GT + lambda0_GT+shift
else:
shift = b_percs[i_shift]*0.01
new_waves = waves*(lambda1_GT+shift) + lambda0_GT
en = np.mean(ENERGY_CONV/new_waves)
if args.idx == 0:
S.D.lambda_coefficients = lambda0_GT + shift, lambda1_GT
shifts.append(shift)
else:
S.D.lambda_coefficients = lambda0_GT, lambda1_GT + shift
shifts.append(shift)
S.D.raw_pixels *= 0
S.D.region_of_interest = ((0, 1023), (0, 1023))
S.D.add_diffBragg_spots()
img2 = S.D.raw_pixels.as_numpy_array()
fdiff = (img2 - img) / shift
if args.idx == 0:
error = np.abs(fdiff[bragg] - deriv[bragg]).mean()
else:
error = np.abs(fdiff[bragg] - deriv[bragg]).mean()
all_error.append(error)
print ("error=%f, step=%f, energy=%f" % (error, delta_a, en))
#if args.plot:
# plt.subplot(121)
# plt.imshow(fdiff)
# plt.title("finite diff")
# plt.subplot(122)
# plt.imshow(deriv)
# plt.title("analytical")
# plt.draw()
# plt.suptitle("Shift %d / %d"
# % (i_shift + 1, len(perc)))
# plt.pause(0.8)
if args.plot:
#plt.close()
plt.plot(shifts, all_error, 'o')
plt.show()
#if args.curvatures:
# plt.plot(shifts2, all_error2, 'o')
# plt.show()
l = linregress(shifts, all_error)
assert l.rvalue > .9999 # this is definitely a line!
assert l.slope > 0
assert l.pvalue < 1e-6
print("OK!")
| 28.167742 | 110 | 0.684837 |
ace4c1673b41800da78b365ffee98189370f5f99 | 33,629 | py | Python | confidant_client/__init__.py | fpiedrah/python-confidant-client | e28be04308f60f2bb4301c70023e454c15cf1259 | [
"Apache-2.0"
] | null | null | null | confidant_client/__init__.py | fpiedrah/python-confidant-client | e28be04308f60f2bb4301c70023e454c15cf1259 | [
"Apache-2.0"
] | null | null | null | confidant_client/__init__.py | fpiedrah/python-confidant-client | e28be04308f60f2bb4301c70023e454c15cf1259 | [
"Apache-2.0"
] | null | null | null | """A client module for Confidant."""
from __future__ import absolute_import
import logging
import json
import base64
import os
import yaml
# Import third party libs
import requests
import boto3
import kmsauth
import six
from cryptography.fernet import Fernet
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util import Retry
import confidant_client.services
from confidant_client.lib import cryptolib
# shut up requests module
logging.getLogger('requests').setLevel(logging.WARNING)
logging.getLogger('urllib3').setLevel(logging.WARNING)
# shut up boto3 and botocore
boto3.set_stream_logger(level=logging.WARNING)
logging.getLogger('botocore').setLevel(logging.WARNING)
# Client release version.
VERSION = '1.5.5'
# Headers for JSON request bodies sent to the confidant API.
JSON_HEADERS = {'Content-type': 'application/json', 'Accept': 'text/plain'}
# NOTE(review): TOKEN_SKEW and TIME_FORMAT are not referenced in this
# portion of the file; presumably token-lifetime skew (minutes) and the
# kmsauth timestamp format - confirm against the rest of the module.
TOKEN_SKEW = 3
TIME_FORMAT = "%Y%m%dT%H%M%SZ"
def ensure_bytes(str_or_bytes, encoding='utf-8', errors='strict'):
    """Return the input as bytes, encoding text if necessary.

    Byte strings pass through untouched; text (``six.text_type``) is
    encoded with the given codec and error policy.
    """
    if not isinstance(str_or_bytes, six.text_type):
        return str_or_bytes
    return str_or_bytes.encode(encoding, errors)
class ConfidantClient(object):
"""A class that represents a confidant client."""
def __init__(
self,
url=None,
auth_key=None,
auth_context=None,
token_lifetime=None,
token_version=None,
token_cache_file=None,
assume_role=None,
mfa_pin=None,
region=None,
retries=None,
backoff=None,
config_files=None,
profile=None,
verify=None
):
"""Create a ConfidantClient object.
Args:
url: URL of confidant server. Default: None
auth_key: The KMS key ARN or alias to use for authentication.
Default: None
auth_context: The KMS encryption context to use for authentication.
Default: None
token_lifetime: Lifetime of the authentication token generated.
Default: 10
token_version: The version of the authentication token. Default: 2
token_cache_file: The location to use for caching the auth token.
If set to empty string, no cache will be used. Default:
/dev/shm/confidant/confidant_token
assume_role: IAM role to assume for getting KMS auth token.
Default: None
mfa_pin: pin to use when assuming a role or getting an MFA session.
Default: None
region: AWS region to connect to. Default: None.
retries: Number of retries to use on failed requests. Default: 0
backoff: Backoff factor for retries. See urllib3's Retry helper.
Default: 1
config_files: A list of config files to attempt to load
configuration from. First file found will be used. Default:
['~/.confidant', '/etc/confidant/config']
profile: profile to read config values from.
verify: Whether we verify the servers TLS certificate.
"""
# Set defaults
self.config = {
'url': None,
'auth_key': None,
'auth_context': {},
'token_lifetime': 10,
'token_version': 2,
'token_cache_file': '/dev/shm/confidant/confidant_token',
'assume_role': None,
'region': None,
'retries': 0,
'backoff': 1,
'verify': True
}
if config_files is None:
config_files = ['~/.confidant', '/etc/confidant/config']
if profile is None:
profile = 'default'
# Override defaults from config file
self.config.update(self._load_config(config_files, profile))
# Override config from passed-in args
args_config = {
'url': url,
'auth_key': auth_key,
'auth_context': auth_context,
'token_lifetime': token_lifetime,
'token_version': token_version,
'token_cache_file': token_cache_file,
'region': region,
'backoff': backoff,
'assume_role': assume_role,
'verify': verify
}
for key, val in args_config.items():
if val is not None:
self.config[key] = val
# Use session to re-try failed requests.
self.request_session = requests.Session()
self.request_session.verify = self.config['verify']
for proto in ['http://', 'https://']:
self.request_session.mount(
proto,
HTTPAdapter(
max_retries=Retry(
total=self.config['retries'],
status_forcelist=[500, 503],
backoff_factor=self.config['backoff']
)
)
)
self.iam_client = confidant_client.services.get_boto_client(
'iam',
region=self.config['region']
)
self._load_user_auth_context()
self._validate_client()
self.sts_client = confidant_client.services.get_boto_client(
'sts',
region=self.config['region']
)
self.kms_client = confidant_client.services.get_boto_client(
'kms',
region=self.config['region']
)
if self.config['assume_role']:
self.aws_creds = self._get_assume_role_creds(
self.config['assume_role'],
mfa_pin
)
elif mfa_pin:
self.aws_creds = self._get_mfa_creds(mfa_pin)
else:
self.aws_creds = None
try:
self.generator = kmsauth.KMSTokenGenerator(
self.config['auth_key'],
self.config['auth_context'],
self.config['region'],
token_version=self.config['token_version'],
token_cache_file=self.config['token_cache_file'],
token_lifetime=self.config['token_lifetime'],
aws_creds=self.aws_creds
)
except kmsauth.ConfigurationError:
raise ClientConfigurationError('Error configuring kmsauth client.')
def _load_config(self, config_files, profile):
"""Initialize client settings from config."""
for filename in config_files:
try:
with open(os.path.expanduser(filename), 'r') as f:
config = yaml.safe_load(f.read())
return config.get(profile, {})
except IOError:
logging.debug('{0} config file not found.'.format(filename))
pass
except yaml.YAMLError as e:
msg = 'Failed to parse {0}: {1}'.format(filename, e)
logging.error(msg)
raise ClientConfigurationError(msg)
# No file found
return {}
def _load_user_auth_context(self):
"""Conditionally load from auth context for users."""
if self.config['auth_context'].get('user_type') == 'user':
if not self.config['auth_context'].get('from'):
try:
username = self.iam_client.get_user()['User']['UserName']
self.config['auth_context']['from'] = username
except Exception:
logging.warning(
'Could not set from auth_context from get_user.'
)
def _validate_client(self):
"""Ensure the configuration passed into init is valid."""
if not self.config['url']:
raise ClientConfigurationError('url not provided.')
if not self.config['auth_key']:
raise ClientConfigurationError('auth_key not provided.')
if not self.config['auth_context']:
raise ClientConfigurationError('auth_context not provided.')
def get_config(self):
return self.config
def _get_username(self):
"""Get a username formatted for a specific token version."""
return self.generator.get_username()
    def _get_assume_role_creds(self, role, mfa_pin=None):
        """Get AWS credentials for the specified role.

        Args:
            role: either a full role ARN ('arn:aws...') or a bare role
                name, which is resolved against the current user's account.
            mfa_pin: optional MFA token code; when given, the caller's MFA
                device serial is included in the assume-role call.
        Returns:
            dict: the STS 'Credentials' structure for the assumed role.
        """
        # A full ARN is passed in
        if role.startswith('arn:aws'):
            base_arn = role.rsplit(':', 1)[0]
            role_name = role.rsplit('/', 1)[1]
            role_arn = role
            user = None
        # A role name is passed in
        else:
            user = self.iam_client.get_user()
            base_arn = user['User']['Arn'].rsplit(':', 1)[0]
            role_name = role
            role_arn = '{0}:role/{1}'.format(base_arn, role)
        if mfa_pin:
            if user is None:
                user = self.iam_client.get_user()
            username = user['User']['UserName']
            # NOTE(review): when a full role ARN is passed, base_arn is
            # derived from the ROLE's account, so for cross-account roles
            # the MFA serial may point at the wrong account - confirm.
            mfa_arn = '{0}:mfa/{1}'.format(base_arn, username)
            return self.sts_client.assume_role(
                RoleArn=role_arn,
                RoleSessionName='{0}_confidant'.format(role_name),
                SerialNumber=mfa_arn,
                TokenCode=mfa_pin
            )['Credentials']
        else:
            return self.sts_client.assume_role(
                RoleArn=role_arn,
                RoleSessionName='{0}_confidant'.format(role_name)
            )['Credentials']
def _get_mfa_creds(self, mfa_pin):
"""Get an AWS session token credentials, assumed with MFA."""
user = self.iam_client.get_user()
base_arn = user['User']['Arn'].rsplit(':', 1)[0]
mfa_arn = '{0}:mfa/{1}'.format(base_arn, user['User']['UserName'])
return self.sts_client.get_session_token(
SerialNumber=mfa_arn,
TokenCode=mfa_pin
)['Credentials']
def _get_token(self):
"""Get an authentication token."""
return self.generator.get_token()
def _check_response_code(self, response, expected=None):
if expected is None:
expected = [200]
if response.status_code not in expected:
logging.error('API error (response code {0}): {1}'.format(
response.status_code,
response.text
))
return False
return True
    def get_service(self, service, decrypt_blind=False):
        """Get a service's metadata and secrets.

        Args:
            service: name/ID of the service to fetch.
            decrypt_blind: when True, locally decrypt the service's blind
                credentials and attach the plaintext pairs.
        Returns:
            dict: {'result': bool, 'service': dict (on success)}.  A 404
            yields {'result': True} with no 'service' key ("not found" is
            treated as a successful lookup here).
        """
        # Return a dict, always with an attribute that specifies whether or not
        # the function was able to successfully get a result.
        ret = {'result': False}
        # Make a request to confidant with the provided url, to fetch the
        # service providing the service name and base64 encoded
        # token for authentication.
        try:
            response = self._execute_request(
                'get',
                '{0}/v1/services/{1}'.format(self.config['url'], service),
                expected_return_codes=[200, 404]
            )
        except RequestExecutionError:
            logging.exception('Error with executing request')
            return ret
        if response.status_code == 404:
            logging.debug('Service not found in confidant.')
            ret['result'] = True
            return ret
        try:
            data = response.json()
            if decrypt_blind:
                data['blind_credentials'] = self._decrypt_blind_credentials(
                    data['blind_credentials']
                )
        except ValueError:
            logging.exception(
                'Received badly formatted json data from confidant.'
            )
            return ret
        ret['service'] = data
        ret['result'] = True
        return ret
    def get_blind_credential(self, id, decrypt_blind=False):
        """Get a blind credential from ID.

        Args:
            id: ID of the blind credential to fetch.
            decrypt_blind: when True, locally decrypt the credential pairs
                and attach them under 'decrypted_credential_pairs'.
        Returns:
            dict: {'result': bool, 'blind_credential': dict (on success)}.
            Unlike get_service, a 404 here yields {'result': False}.
        """
        # Return a dict, always with an attribute that specifies whether or not
        # the function was able to successfully get a result.
        ret = {'result': False}
        # Make a request to confidant with the provided url, to fetch the
        # service providing the service name and base64 encoded
        # token for authentication.
        try:
            response = self._execute_request(
                'get',
                '{0}/v1/blind_credentials/{1}'.format(self.config['url'], id),
                expected_return_codes=[200, 404]
            )
        except RequestExecutionError:
            logging.exception('Error with executing request')
            return ret
        if response.status_code == 404:
            logging.debug('Blind credential not found in confidant.')
            ret['result'] = False
            return ret
        try:
            data = response.json()
            if decrypt_blind:
                data['decrypted_credential_pairs'] = self._get_decrypted_pairs(
                    data
                )
        except ValueError:
            logging.error('Received badly formatted json data from confidant.')
            return ret
        ret['blind_credential'] = data
        ret['result'] = True
        return ret
def _decrypt_blind_credentials(self, blind_credentials):
_blind_credentials = []
for blind_credential in blind_credentials:
decrypted_pairs = self._get_decrypted_pairs(
blind_credential
)
blind_credential['decrypted_credential_pairs'] = decrypted_pairs
_blind_credentials.append(blind_credential)
return _blind_credentials
    def _get_decrypted_pairs(self, credential):
        """
        From credential, get decrypted blind credential pairs.

        Given a region => data_key dict of data keys, a region => context dict
        of KMS encryption context, a dict of encrypted credential pairs, a
        cipher and a cipher version, return decrypted credential_pairs.

        Only the entries for the client's configured region are used.
        """
        region = self.config['region']
        _context = credential['metadata']['context'][region]
        # Prefer explicitly obtained (assumed-role/MFA) credentials for KMS;
        # otherwise fall back to the client's default KMS client.
        if self.aws_creds:
            _kms_client = confidant_client.services.get_boto_client(
                'kms',
                region=self.config['region'],
                aws_access_key_id=self.aws_creds['AccessKeyId'],
                aws_secret_access_key=self.aws_creds['SecretAccessKey'],
                aws_session_token=self.aws_creds['SessionToken']
            )
        else:
            _kms_client = self.kms_client
        # The stored data key is base64-encoded KMS ciphertext; decrypting
        # it yields the Fernet key for this region.
        _data_key = cryptolib.decrypt_datakey(
            base64.b64decode(
                ensure_bytes(credential['data_key'][region])
            ),
            _context,
            _kms_client
        )
        _credential_pair = credential['credential_pairs'][region]
        f = Fernet(_data_key)
        # Fernet ciphertext is ASCII-safe text; decrypting returns the
        # JSON-serialized credential pairs.
        return json.loads(f.decrypt(_credential_pair.encode('utf-8')))
    def _get_keys_and_encrypted_pairs(
        self,
        blind_keys,
        context,
        credential_pairs,
        cipher_type,
        cipher_version
    ):
        """
        Get data keys and encrypted credential_pairs.

        Given a region => kms key dict of blind keys, a region => context dict
        of KMS encryption context, a dict of credential pairs, a cipher and a
        cipher version, generate a dict of region => data keys and a dict of
        region => encrypted credential_pairs and return both in a tuple.
        """
        data_keys = {}
        _credential_pairs = {}
        for region, blind_key in six.iteritems(blind_keys):
            # Build a per-region boto session, using the explicitly
            # obtained credentials when available.
            if self.aws_creds:
                session = confidant_client.services.get_boto_session(
                    region=region,
                    aws_access_key_id=self.aws_creds['AccessKeyId'],
                    aws_secret_access_key=self.aws_creds['SecretAccessKey'],
                    aws_session_token=self.aws_creds['SessionToken']
                )
            else:
                session = confidant_client.services.get_boto_session(
                    region=region
                )
            _kms = session.client('kms')
            # Generate a fresh KMS data key per region; the ciphertext is
            # what gets stored server-side (base64, ASCII-safe).
            data_key = cryptolib.create_datakey(
                context[region],
                blind_key,
                _kms
            )
            data_keys[region] = base64.b64encode(
                ensure_bytes(data_key['ciphertext'])
            ).decode('ascii')
            # TODO: this crypto code needs to come from a library. Right now we
            # only support fernet and cipher_version 2, so we're hardcoding it
            # and ignoring the arguments.
            f = Fernet(data_key['plaintext'])
            # For paranoia sake, let's purposely purge plaintext from the
            # data_key, incase someone decides later to include the data_key
            # directly into the return.
            del data_key['plaintext']
            _credential_pairs[region] = f.encrypt(
                json.dumps(credential_pairs).encode('utf-8')
            ).decode('ascii')
        return data_keys, _credential_pairs
    def revert_credential(
        self,
        id,
        revision=None
    ):
        """Reverts a credential to a previous revision.

        Args:
            id: The ID of the credential.
            revision: The revision number to revert to, or None to revert to
                the immediately previous revision.
        Returns:
            dict: {'result': bool, 'credential': dict (on success)}.
        """
        # Return a dict, always with an attribute that specifies whether or not
        # the function was able to successfully get a result.
        ret = {'result': False}
        # Find the current revision
        try:
            response = self._execute_request(
                'get',
                '{0}/v1/credentials/{1}'.format(self.config['url'], id)
            )
        except RequestExecutionError:
            logging.exception('Error with executing request')
            return ret
        current_cred_revision = response.json()
        if current_cred_revision['revision'] == 1:
            logging.error('This credential has no previous revision')
            return ret
        if revision:
            if revision == current_cred_revision['revision']:
                logging.error('Revision number is the same as current revision')
                return ret
        else:
            # Set revision to the second most recent.
            revision = current_cred_revision['revision'] - 1
        logging.info(
            'Attempting to revert credential to revision {}'.format(revision)
        )
        # Fetch the target revision via the '<id>-<revision>' form.
        try:
            response = self._execute_request(
                'get',
                '{0}/v1/credentials/{1}-{2}'.format(
                    self.config['url'], id, revision
                )
            )
        except RequestExecutionError:
            logging.exception('Error with executing request')
            return ret
        cred_revision = response.json()
        # Refuse a no-op revert: reverting to an identical revision would
        # only bump the revision counter.
        if self._identical_fields(
                current_cred_revision, cred_revision,
                ['name', 'credential_pairs', 'metadata', 'enabled']):
            logging.error(
                'Cannot revert to revision {}. No difference between '
                'it and current revision.'.format(revision))
            return ret
        try:
            response = self._execute_request(
                'put',
                '{0}/v1/credentials/{1}'.format(self.config['url'], id),
                headers=JSON_HEADERS,
                data=json.dumps(cred_revision)
            )
        except RequestExecutionError:
            logging.exception('Error with executing request')
            return ret
        try:
            data = response.json()
        except ValueError:
            logging.error('Received badly formatted json data from confidant.')
            return ret
        ret['credential'] = data
        ret['result'] = True
        return ret
    def revert_service(
        self,
        id,
        revision=None
    ):
        """Reverts a service to a previous revision.

        Args:
            id: The ID of the service.
            revision: The revision number to revert to, or None to revert to
                the immediately previous revision.
        Returns:
            dict: {'result': bool, 'service': dict (on success)}.
        """
        # Return a dict, always with an attribute that specifies whether or not
        # the function was able to successfully get a result.
        ret = {'result': False}
        # Find the current revision
        try:
            response = self._execute_request(
                'get',
                '{0}/v1/archive/services/{1}'.format(self.config['url'], id)
            )
        except RequestExecutionError:
            logging.exception('Error with executing request')
            return ret
        # The archive endpoint returns revisions newest-first; index 0 is
        # the current revision.
        service_revisions = response.json()['revisions']
        current_service_revision = service_revisions[0]
        if current_service_revision['revision'] == 1:
            logging.error('This service has no previous revision')
            return ret
        if revision:
            if revision == current_service_revision['revision']:
                logging.error('Revision number is the same as current revision')
                return ret
        else:
            # Set revision to the second most recent.
            revision = current_service_revision['revision'] - 1
        logging.info(
            'Attempting to revert service to revision {}'.format(revision)
        )
        # Locate the requested revision in the archive list.
        service_revision = None
        for r in service_revisions:
            if r['revision'] == revision:
                service_revision = r
                break
        if not service_revision:
            logging.error('Cannot find revision {}'.format(revision))
            return ret
        # Refuse a no-op revert.
        if self._identical_fields(
                current_service_revision, service_revision,
                ['credentials', 'blind_credentials', 'enabled']):
            logging.error(
                'Cannot revert to revision {}. No difference between '
                'it and current revision.'.format(revision))
            return ret
        try:
            response = self._execute_request(
                'put',
                '{0}/v1/services/{1}'.format(self.config['url'], id),
                headers=JSON_HEADERS,
                data=json.dumps(service_revision)
            )
        except RequestExecutionError:
            logging.exception('Error with executing request')
            return ret
        try:
            data = response.json()
        except ValueError:
            logging.error('Received badly formatted json data from confidant.')
            return ret
        ret['service'] = data
        ret['result'] = True
        return ret
    def revert_blind_credential(
        self,
        id,
        revision=None
    ):
        """Reverts a blind credential to a previous revision.

        Args:
            id: The ID of the blind credential.
            revision: The revision number to revert to, or None to revert to
                the immediately previous revision.
        Returns:
            dict: {'result': bool, 'blind_credential': dict (on success)}.
        """
        # Return a dict, always with an attribute that specifies whether or not
        # the function was able to successfully get a result.
        ret = {'result': False}
        # Find the current revision
        try:
            response = self._execute_request(
                'get',
                '{0}/v1/blind_credentials/{1}'.format(self.config['url'], id)
            )
        except RequestExecutionError:
            logging.exception('Error with executing request')
            return ret
        current_cred_revision = response.json()
        if current_cred_revision['revision'] == 1:
            logging.error('This blind credential has no previous revision')
            return ret
        if revision:
            if revision == current_cred_revision['revision']:
                logging.error('Revision number is the same as current revision')
                return ret
        else:
            # Set revision to the second most recent.
            revision = current_cred_revision['revision'] - 1
        logging.info(
            'Attempting to revert credential to revision {}'.format(revision)
        )
        # Fetch the target revision via the '<id>-<revision>' form.
        try:
            response = self._execute_request(
                'get',
                '{0}/v1/blind_credentials/{1}-{2}'.format(
                    self.config['url'], id, revision
                )
            )
        except RequestExecutionError:
            logging.exception('Error with executing request')
            return ret
        cred_revision = response.json()
        # Refuse a no-op revert.
        if self._identical_fields(
                current_cred_revision, cred_revision,
                ['name', 'credential_keys', 'credential_pairs', 'metadata',
                 'enabled']):
            logging.error(
                'Cannot revert to revision {}. No difference between '
                'it and current revision.'.format(revision))
            return ret
        try:
            response = self._execute_request(
                'put',
                '{0}/v1/blind_credentials/{1}'.format(self.config['url'], id),
                headers=JSON_HEADERS,
                data=json.dumps(cred_revision)
            )
        except RequestExecutionError:
            logging.exception('Error with executing request')
            return ret
        try:
            data = response.json()
        except ValueError:
            logging.error('Received badly formatted json data from confidant.')
            return ret
        ret['blind_credential'] = data
        ret['result'] = True
        return ret
def _identical_fields(self, a, b, fields):
for field in fields:
if a.get(field) != b.get(field):
return False
return True
    def create_blind_credential(
        self,
        blind_keys,
        contexts,
        name,
        credential_pairs,
        metadata=None,
        cipher_type='fernet',
        cipher_version=2,
        store_keys=True,
        enabled=True,
        documentation=None
    ):
        """Create a server blinded credential and store it in Confidant.

        Args:
            blind_keys: region => KMS key mapping used to wrap data keys.
            contexts: region => KMS encryption context mapping.
            name: display name for the credential.
            credential_pairs: plaintext key/value secrets to encrypt.
            metadata: optional extra metadata; note the dict is mutated in
                place ('context' key is set on it).
            cipher_type/cipher_version: currently only fernet/2 are
                honored (see _get_keys_and_encrypted_pairs).
            store_keys: when True, the (non-secret) credential key names
                are stored server-side for discoverability.
            enabled: whether the credential is active.
            documentation: optional free-form documentation string.
        Returns:
            dict: {'result': bool, 'blind_credential': dict (on success)}.
        """
        # Return a dict, always with an attribute that specifies whether or not
        # the function was able to successfully get a result.
        ret = {'result': False}
        if metadata is None:
            metadata = {}
        metadata['context'] = contexts
        # Encrypt the pairs client-side; the server only ever sees
        # ciphertext and the wrapped data keys.
        data_keys, _credential_pairs = self._get_keys_and_encrypted_pairs(
            blind_keys,
            contexts,
            credential_pairs,
            cipher_type,
            cipher_version
        )
        data = {
            'name': name,
            'credential_pairs': _credential_pairs,
            'data_key': data_keys,
            'metadata': metadata,
            'cipher_type': cipher_type,
            'cipher_version': cipher_version,
            'enabled': enabled,
            'documentation': documentation
        }
        if store_keys:
            data['credential_keys'] = list(credential_pairs.keys())
        try:
            response = self._execute_request(
                'post',
                '{0}/v1/blind_credentials'.format(self.config['url']),
                timeout=5,
                headers=JSON_HEADERS,
                data=json.dumps(data),
            )
        except RequestExecutionError:
            logging.exception('Error with executing request')
            return ret
        try:
            data = response.json()
        except ValueError:
            logging.error('Received badly formatted json data from confidant.')
            return ret
        ret['blind_credential'] = data
        ret['result'] = True
        return ret
    def update_blind_credential(
        self,
        id,
        blind_keys=None,
        contexts=None,
        name=None,
        credential_pairs=None,
        metadata=None,
        cipher_type=None,
        cipher_version=None,
        store_keys=True,
        enabled=None,
        documentation=None
    ):
        """Update a server blinded credential in Confidant.

        Fetches the current credential, overlays every argument that is
        not None onto it, and PUTs the merged document back. Returns a
        dict with a boolean 'result' key; on success 'blind_credential'
        holds the server's response.
        """
        # Return a dict, always with an attribute that specifies whether or not
        # the function was able to successfully get a result.
        ret = {'result': False}
        # Start from the currently stored credential; args override fields.
        cred = self.get_blind_credential(id)
        if not cred['result']:
            return ret
        data = cred['blind_credential']
        # Server-managed fields must not be echoed back in the PUT body.
        del data['revision']
        del data['modified_by']
        del data['modified_date']
        if name is not None:
            data['name'] = name
        if metadata is not None:
            # Replacing metadata must not lose the stored encryption
            # context, so carry it over into the new metadata dict.
            _context = data['metadata']['context']
            data['metadata'] = metadata
            data['metadata']['context'] = _context
        if documentation is not None:
            data['documentation'] = documentation
        if credential_pairs is not None:
            # Re-encrypt only when new pairs are supplied. Any of
            # contexts/cipher_type/cipher_version left as None falls back
            # to the value stored on the existing credential.
            if contexts is not None:
                data['metadata']['context'] = contexts
            else:
                contexts = data['metadata']['context']
            if cipher_type is not None:
                data['cipher_type'] = cipher_type
            else:
                cipher_type = data['cipher_type']
            if cipher_version is not None:
                data['cipher_version'] = cipher_version
            else:
                cipher_version = data['cipher_version']
            # NOTE(review): blind_keys may still be None here — presumably
            # callers supplying credential_pairs also supply blind_keys;
            # confirm _get_keys_and_encrypted_pairs tolerates None.
            data_keys, _credential_pairs = self._get_keys_and_encrypted_pairs(
                blind_keys,
                contexts,
                credential_pairs,
                cipher_type,
                cipher_version
            )
            data['data_key'] = data_keys
            data['credential_pairs'] = _credential_pairs
            if store_keys:
                data['credential_keys'] = list(credential_pairs.keys())
        if enabled is not None:
            data['enabled'] = enabled
        try:
            response = self._execute_request(
                'put',
                '{0}/v1/blind_credentials/{1}'.format(self.config['url'], id),
                timeout=5,
                headers=JSON_HEADERS,
                data=json.dumps(data)
            )
        except RequestExecutionError:
            logging.exception('Error with executing request')
            return ret
        try:
            data = response.json()
        except ValueError:
            logging.error('Received badly formatted json data from confidant.')
            return ret
        ret['blind_credential'] = data
        ret['result'] = True
        return ret
def list_blind_credentials(self):
"""Get a list of blind credentials."""
# Return a dict, always with an attribute that specifies whether or not
# the function was able to successfully get a result.
ret = {'result': False}
# Make a request to confidant with the provided url, to fetch the
# service providing the service name and base64 encoded
# token for authentication.
try:
response = self._execute_request(
'get',
'{0}/v1/blind_credentials'.format(self.config['url'])
)
except RequestExecutionError:
logging.exception('Error with executing request')
return ret
try:
data = response.json()
except ValueError:
logging.error('Received badly formatted json data from confidant.')
return ret
ret['blind_credentials'] = data['blind_credentials']
ret['result'] = True
return ret
def _execute_request(
self,
method,
url,
expected_return_codes=[200],
timeout=2,
**kwargs
):
try:
if method == 'get':
response = self.request_session.get(
url,
auth=(self._get_username(), self._get_token()),
allow_redirects=False,
timeout=timeout,
**kwargs
)
elif method == 'post':
response = self.request_session.post(
url,
auth=(self._get_username(), self._get_token()),
allow_redirects=False,
timeout=timeout,
**kwargs
)
elif method == 'put':
response = self.request_session.put(
url,
auth=(self._get_username(), self._get_token()),
allow_redirects=False,
timeout=timeout,
**kwargs
)
else:
raise ValueError('Unexpected method: {}'.format(method))
except requests.ConnectionError:
raise RequestExecutionError('Failed to connect to confidant.')
except requests.Timeout:
raise RequestExecutionError('Confidant request timed out.')
if not self._check_response_code(
response, expected=expected_return_codes):
raise RequestExecutionError('Unexpected return code')
return response
class TokenCreationError(Exception):
    """Raised when an authentication token could not be created."""
class ClientConfigurationError(Exception):
    """Raised when the client's configuration is invalid."""
class RequestExecutionError(Exception):
    """Raised when a request to the Confidant service failed."""
| 37.407119 | 80 | 0.557198 |
ace4c2bfe4e7f9f4b6d48b8c588fcd4557175732 | 10,220 | py | Python | from_3b1b/on_hold/eop/reusables/histograms.py | sanjaydatasciencedojo/manim | 603a1a21dbb5eca325ed670f46ea72401a8edf1d | [
"MIT"
] | null | null | null | from_3b1b/on_hold/eop/reusables/histograms.py | sanjaydatasciencedojo/manim | 603a1a21dbb5eca325ed670f46ea72401a8edf1d | [
"MIT"
] | null | null | null | from_3b1b/on_hold/eop/reusables/histograms.py | sanjaydatasciencedojo/manim | 603a1a21dbb5eca325ed670f46ea72401a8edf1d | [
"MIT"
] | null | null | null | from random import *
from manimlib.imports import *
def text_range(start,stop,step): # a range as a list of strings
numbers = np.arange(start,stop,step)
labels = []
for x in numbers:
labels.append(str(x))
return labels
class Histogram(VMobject):
    # A bar-chart mobject: rectangles colored along a gradient from
    # start_color (left) to end_color (right), with optional x/y labels
    # and a polygonal outline around the bar tops.
    CONFIG = {
        "start_color" : RED,
        "end_color" : BLUE,
        "x_scale" : 1.0,
        "y_scale" : 1.0,
        "x_labels" : "auto", # widths, mids, auto, none, [...]
        "y_labels" : "auto", # auto, none, [...]
        "y_label_position" : "top", # "center"
        "x_min" : 0,
        "bar_stroke_width" : 5,
        "outline_stroke_width" : 0,
        "stroke_color" : WHITE
    }
    def __init__(self, x_values, y_values, mode = "widths", **kwargs):
        """Build a histogram from bar extents and bar heights.

        mode = "widths": x_values are the widths of the bars
        mode = "posts":  x_values are the boundaries between the bars
        """
        # mode = "widths" : x_values means the widths of the bars
        # mode = "posts" : x_values means the delimiters btw the bars
        digest_config(self, kwargs)
        if mode == "widths" and len(x_values) != len(y_values):
            raise Exception("Array lengths do not match up!")
        elif mode == "posts" and len(x_values) != len(y_values) + 1:
            raise Exception("Array lengths do not match up!")
        self.y_values = y_values
        self.x_values = x_values
        self.mode = mode
        self.process_values()
        VMobject.__init__(self, **kwargs)
    def process_values(self):
        """Derive posts, widths, midpoints and their scaled versions
        from the raw inputs, according to self.mode."""
        # preliminaries
        self.y_values = np.array(self.y_values)
        if self.mode == "widths":
            # Cumulative sum of widths gives the bar boundaries ("posts"),
            # shifted so the first post sits at x_min.
            self.widths = self.x_values
            self.posts = np.cumsum(self.widths)
            self.posts = np.insert(self.posts, 0, 0)
            self.posts += self.x_min
            self.x_max = self.posts[-1]
        elif self.mode == "posts":
            # Boundaries are given directly; widths are their differences.
            self.posts = self.x_values
            self.widths = self.x_values[1:] - self.x_values[:-1]
            self.x_min = self.posts[0]
            self.x_max = self.posts[-1]
        else:
            raise Exception("Invalid mode or no mode specified!")
        # Bar midpoints, used for the "auto"/"mids" x-label modes.
        self.x_mids = 0.5 * (self.posts[:-1] + self.posts[1:])
        self.widths_scaled = self.x_scale * self.widths
        self.posts_scaled = self.x_scale * self.posts
        self.x_min_scaled = self.x_scale * self.x_min
        self.x_max_scaled = self.x_scale * self.x_max
        self.y_values_scaled = self.y_scale * self.y_values
    def generate_points(self):
        """Create the bar rectangles, labels and the outline polygon,
        replacing any previously generated submobjects."""
        self.process_values()
        for submob in self.submobjects:
            self.remove(submob)
        def empty_string_array(n):
            # n empty strings: suppresses labels for the "none" modes.
            arr = []
            for i in range(n):
                arr.append("")
            return arr
        def num_arr_to_string_arr(arr): # converts number array to string array
            ret_arr = []
            for x in arr:
                # Render whole numbers without a trailing ".0".
                if x == np.floor(x):
                    new_x = int(np.floor(x))
                else:
                    new_x = x
                ret_arr.append(str(new_x))
            return ret_arr
        previous_bar = ORIGIN
        self.bars = VGroup()
        self.x_labels_group = VGroup()
        self.y_labels_group = VGroup()
        outline_points = []
        # Resolve the label-mode keywords into concrete string arrays.
        if self.x_labels == "widths":
            self.x_labels = num_arr_to_string_arr(self.widths)
        elif self.x_labels == "mids":
            self.x_labels = num_arr_to_string_arr(self.x_mids)
        elif self.x_labels == "auto":
            self.x_labels = num_arr_to_string_arr(self.x_mids)
        elif self.x_labels == "none":
            self.x_labels = empty_string_array(len(self.widths))
        if self.y_labels == "auto":
            self.y_labels = num_arr_to_string_arr(self.y_values)
        elif self.y_labels == "none":
            self.y_labels = empty_string_array(len(self.y_values))
        for (i,x) in enumerate(self.x_mids):
            bar = Rectangle(
                width = self.widths_scaled[i],
                height = self.y_values_scaled[i],
                stroke_width = self.bar_stroke_width,
                stroke_color = self.stroke_color,
            )
            if bar.height == 0:
                # A zero-height rectangle has degenerate anchors; give it
                # a sliver of height and regenerate its points.
                bar.height = 0.01
                bar.generate_points()
            # Interpolate the fill color along the chart's x extent.
            t = float(x - self.x_min)/(self.x_max - self.x_min)
            bar_color = interpolate_color(
                self.start_color,
                self.end_color,
                t
            )
            bar.set_fill(color = bar_color, opacity = 1)
            # Stack bars flush left-to-right, bottom-aligned.
            bar.next_to(previous_bar,RIGHT,buff = 0, aligned_edge = DOWN)
            self.bars.add(bar)
            x_label = TextMobject(self.x_labels[i])
            x_label.next_to(bar,DOWN)
            self.x_labels_group.add(x_label)
            y_label = TextMobject(self.y_labels[i])
            if self.y_label_position == "top":
                y_label.next_to(bar, UP)
            elif self.y_label_position == "center":
                y_label.move_to(bar)
            else:
                raise Exception("y_label_position must be top or center")
            self.y_labels_group.add(y_label)
            if i == 0:
                # start with the lower left
                outline_points.append(bar.get_anchors()[-2])
            # upper two points of each bar
            outline_points.append(bar.get_anchors()[0])
            outline_points.append(bar.get_anchors()[1])
            previous_bar = bar
        # close the outline
        # lower right
        outline_points.append(bar.get_anchors()[2])
        # lower left
        outline_points.append(outline_points[0])
        self.outline = Polygon(*outline_points,
            stroke_width = self.outline_stroke_width,
            stroke_color = self.stroke_color)
        self.add(self.bars, self.x_labels_group, self.y_labels_group, self.outline)
        self.move_to(ORIGIN)
    def get_lower_left_point(self):
        # The second-to-last anchor of the first bar is its lower-left
        # corner (see the outline construction above).
        return self.bars[0].get_anchors()[-2]
class BuildUpHistogram(Animation):
    # NOTE(review): __init__ never calls Animation.__init__ and discards
    # **kwargs, so this class only records the histogram and is not a
    # runnable Animation as written — presumably unfinished; confirm.
    def __init__(self, hist, **kwargs):
        self.histogram = hist
class FlashThroughHistogram(Animation):
    # Animation that flashes a highlight cell through every unit cell of
    # a Histogram, either row by row ("horizontal") or column by column
    # ("vertical"), in sequential or random order.
    CONFIG = {
        "cell_color" : WHITE,
        "cell_opacity" : 0.8,
        "hist_opacity" : 0.2
    }
    def __init__(self, mobject,
        direction = "horizontal",
        mode = "random",
        **kwargs):
        """Prepare the prototype highlight cell and the visit order.

        mobject: the Histogram to flash through.
        direction: "horizontal" or "vertical" cell subdivision.
        mode: "random" shuffles the visit order; anything else keeps
        the natural (bar-by-bar) order.
        """
        digest_config(self, kwargs)
        self.cell_height = mobject.y_scale
        # One reusable rectangle, resized/moved per flash step.
        self.prototype_cell = Rectangle(
            width = 1,
            height = self.cell_height,
            fill_color = self.cell_color,
            fill_opacity = self.cell_opacity,
            stroke_width = 0,
        )
        x_values = mobject.x_values
        y_values = mobject.y_values
        self.mode = mode
        self.direction = direction
        self.generate_cell_indices(x_values,y_values)
        Animation.__init__(self,mobject,**kwargs)
    def generate_cell_indices(self,x_values,y_values):
        """Enumerate (bar index, cell index) pairs — one unit cell per
        whole unit of bar height — and fix the visit order."""
        self.cell_indices = []
        for (i,x) in enumerate(x_values):
            # Each bar contributes floor(height) unit cells.
            nb_cells = int(np.floor(y_values[i]))
            for j in range(nb_cells):
                self.cell_indices.append((i, j))
        # NOTE: this is the same list object; shuffle below also
        # reorders self.cell_indices.
        self.reordered_cell_indices = self.cell_indices
        if self.mode == "random":
            shuffle(self.reordered_cell_indices)
    def cell_for_index(self,i,j):
        """Return a Rectangle covering cell j of bar i, positioned
        relative to the histogram's lower-left corner."""
        if self.direction == "vertical":
            # Unit cells stacked upward inside each bar.
            width = self.mobject.x_scale
            height = self.mobject.y_scale
            x = (i + 0.5) * self.mobject.x_scale
            y = (j + 0.5) * self.mobject.y_scale
            center = self.mobject.get_lower_left_point() + x * RIGHT + y * UP
        elif self.direction == "horizontal":
            # Each bar split into y_values[i] vertical slivers spanning
            # the full bar height.
            width = self.mobject.x_scale / self.mobject.y_values[i]
            height = self.mobject.y_scale * self.mobject.y_values[i]
            x = i * self.mobject.x_scale + (j + 0.5) * width
            y = height / 2
            center = self.mobject.get_lower_left_point() + x * RIGHT + y * UP
        cell = Rectangle(width = width, height = height)
        cell.move_to(center)
        return cell
    def interpolate_mobject(self,t):
        """At animation time t, move the prototype cell onto the
        corresponding cell in the visit order."""
        if t == 0:
            self.mobject.add(self.prototype_cell)
        # Map t in [0, 1] onto an index into the visit order.
        flash_nb = int(t * (len(self.cell_indices))) - 1
        (i,j) = self.reordered_cell_indices[flash_nb]
        cell = self.cell_for_index(i,j)
        self.prototype_cell.width = cell.get_width()
        self.prototype_cell.height = cell.get_height()
        self.prototype_cell.generate_points()
        self.prototype_cell.move_to(cell.get_center())
        if t == 1:
            self.mobject.remove(self.prototype_cell)
    def clean_up_from_scene(self, scene = None):
        """Finish the animation and add/remove the highlight cell from
        the scene depending on whether this animation is a remover."""
        Animation.clean_up_from_scene(self, scene)
        self.update(1)
        if scene is not None:
            if self.is_remover():
                scene.remove(self.prototype_cell)
            else:
                scene.add(self.prototype_cell)
        return self
class OutlineableBars(VGroup):
    # A group of bars (rectangles), together with
    # a method that draws an outline around them,
    # assuming the bars are arranged in a histogram
    # (aligned at the bottom without gaps).
    # We use this to morph a row of bricks into a histogram.
    CONFIG = {
        "outline_stroke_width" : 3,
        "stroke_color" : WHITE
    }
    def create_outline(self, animated = False, **kwargs):
        """Build (and store as self.outline) a Polygon tracing the top
        silhouette of the bars; returns the outline.

        NOTE(review): the animated=True branch calls self.play, which is
        a Scene method, not a VGroup method — this likely raises
        AttributeError; confirm whether any caller uses animated=True.
        """
        outline_points = []
        # Walk the bars left to right collecting the silhouette corners.
        for (i, bar) in enumerate(self.submobjects):
            if i == 0:
                # start with the lower left
                outline_points.append(bar.get_corner(DOWN + LEFT))
            # upper two points of each bar
            outline_points.append(bar.get_corner(UP + LEFT))
            outline_points.append(bar.get_corner(UP + RIGHT))
            previous_bar = bar
        # close the outline
        # lower right
        outline_points.append(previous_bar.get_corner(DOWN + RIGHT))
        # lower left
        outline_points.append(outline_points[0])
        self.outline = Polygon(*outline_points,
            stroke_width = self.outline_stroke_width,
            stroke_color = self.stroke_color)
        if animated:
            self.play(FadeIn(self.outline, **kwargs))
        return self.outline
| 29.116809 | 83 | 0.567808 |
ace4c2d459c5f145c8b70d5489ae9db5130fedae | 68,585 | py | Python | benchmarks/SimResults/micro_pinned_train_combos/cmpA_zeusmpxalancbmknamdgobmk/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | benchmarks/SimResults/micro_pinned_train_combos/cmpA_zeusmpxalancbmknamdgobmk/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | benchmarks/SimResults/micro_pinned_train_combos/cmpA_zeusmpxalancbmknamdgobmk/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.00121189,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.553979,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.959291,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.550181,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 2.06345,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.547399,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.96717,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.000228952,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0200822,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.14517,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.14852,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.145399,
'Execution Unit/Register Files/Runtime Dynamic': 0.168602,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.35079,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.976047,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.81617,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00814503,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00814503,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00706481,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00271877,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.0021335,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0254884,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0791477,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.142776,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.333556,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.484932,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.0659,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.21939,
'L2/Runtime Dynamic': 0.0586636,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 5.60513,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.13433,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.141316,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.141316,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 6.27517,
'Load Store Unit/Runtime Dynamic': 2.97256,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.34846,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.69692,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.12367,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.126713,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0554277,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.772294,
'Memory Management Unit/Runtime Dynamic': 0.182141,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 26.7645,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.000799445,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.028337,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.27994,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.309076,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 8.40451,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.118459,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.295732,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.579659,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.279585,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.45096,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.22763,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.958175,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.230894,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.33056,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.10951,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0117271,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.131592,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0867287,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.241102,
'Execution Unit/Register Files/Runtime Dynamic': 0.0984558,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.30685,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.708087,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.46583,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000624997,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000624997,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000544139,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000210517,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00124587,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00304,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00600075,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0833746,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.30334,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.183387,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.283178,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.77923,
'Instruction Fetch Unit/Runtime Dynamic': 0.55898,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.114301,
'L2/Runtime Dynamic': 0.0289234,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.064,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.951629,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.059104,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0591041,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.3431,
'Load Store Unit/Runtime Dynamic': 1.30221,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.14574,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.291481,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0517237,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0534339,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.329742,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0300833,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.574703,
'Memory Management Unit/Runtime Dynamic': 0.0835172,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 20.7314,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.28807,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0161199,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.138626,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.442817,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.88228,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.20328,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.362354,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.12018,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.358309,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.577939,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.291724,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.22797,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.238062,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.24978,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.211626,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0150291,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.183873,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.111149,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.395499,
'Execution Unit/Register Files/Runtime Dynamic': 0.126178,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.4382,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.946516,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 3.0684,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000762169,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000762169,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000664944,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00025801,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00159667,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00378595,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00726845,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.106851,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.222276,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.362913,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96396,
'Instruction Fetch Unit/Runtime Dynamic': 0.703094,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0498701,
'L2/Runtime Dynamic': 0.0106149,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.84563,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.25671,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0843914,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0843913,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.24414,
'Load Store Unit/Runtime Dynamic': 1.75729,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.208095,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.416189,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0738535,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0746022,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0364395,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.682971,
'Memory Management Unit/Runtime Dynamic': 0.111042,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 23.7802,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.556691,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0229407,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.170834,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.750466,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 6.40091,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 9.4469e-07,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.20269,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 2.02403e-05,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.183331,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.295706,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.149262,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.628298,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.209675,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.2332,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 3.82383e-06,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00768971,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0556064,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0568701,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0556102,
'Execution Unit/Register Files/Runtime Dynamic': 0.0645598,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.117147,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.314474,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.6154,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00213739,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00213739,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00192708,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000781778,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000816944,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00701881,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0181561,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0546707,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.47753,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.174292,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.185686,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.86481,
'Instruction Fetch Unit/Runtime Dynamic': 0.439824,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0328463,
'L2/Runtime Dynamic': 0.00881766,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.70928,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.720219,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0476278,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0476278,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.93419,
'Load Store Unit/Runtime Dynamic': 1.00273,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.117442,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.234884,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0416805,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0419578,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.21622,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0292126,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.443929,
'Memory Management Unit/Runtime Dynamic': 0.0711704,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.0984,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 9.58302e-06,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00827149,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0941691,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.10245,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.24039,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 1.8926517721156508,
'Runtime Dynamic': 1.8926517721156508,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.134287,
'Runtime Dynamic': 0.0902045,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 88.5088,
'Peak Power': 121.621,
'Runtime Dynamic': 23.0183,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 88.3745,
'Total Cores/Runtime Dynamic': 22.9281,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.134287,
'Total L3s/Runtime Dynamic': 0.0902045,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}} | 75.038293 | 124 | 0.681913 |
ace4c39b54fd7a60e1f777d42547d48e7d412445 | 650 | py | Python | classical/fields/schematics.py | altvod/classical | 8b9bad7350a854cf43af08277bb1b32d29abafe8 | [
"MIT"
] | null | null | null | classical/fields/schematics.py | altvod/classical | 8b9bad7350a854cf43af08277bb1b32d29abafe8 | [
"MIT"
] | null | null | null | classical/fields/schematics.py | altvod/classical | 8b9bad7350a854cf43af08277bb1b32d29abafe8 | [
"MIT"
] | null | null | null | import schematics
from classical.fields.base import ClassField, FieldInspector, FieldSchema
class SchematicsFieldInspector(FieldInspector[ClassField]):
    """Field inspector for classes based on ``schematics.Model``."""

    @classmethod
    def _validate_cls(cls, insp_cls: type) -> None:
        # Reject anything that is not a schematics model.
        if not issubclass(insp_cls, schematics.Model):
            cls._raise_unsupported_field_class(insp_cls=insp_cls)

    @classmethod
    def _get_class_fields(cls, insp_cls: type) -> FieldSchema[ClassField]:
        # One ClassField per declared schematics field; the constructor
        # argument name and the attribute name coincide for schematics models.
        cls._validate_cls(insp_cls)
        result = FieldSchema()
        # _schema.fields is schematics' internal field registry (hence the noqa).
        for name in insp_cls._schema.fields:  # noqa
            result.append(ClassField(init_name=name, attr_name=name))
        return result
| 34.210526 | 74 | 0.72 |
ace4c401f2ba84662d4cafcb580b04aa29dcab2e | 306 | py | Python | gn/highest_version_dir.py | ndsol/subskia | 9a8f6e5ffc6676281a4389aa1503ba6c4352eaca | [
"BSD-3-Clause"
] | null | null | null | gn/highest_version_dir.py | ndsol/subskia | 9a8f6e5ffc6676281a4389aa1503ba6c4352eaca | [
"BSD-3-Clause"
] | null | null | null | gn/highest_version_dir.py | ndsol/subskia | 9a8f6e5ffc6676281a4389aa1503ba6c4352eaca | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
#
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import re
import sys
# CLI usage: highest_version_dir.py <directory> <regex>
dirpath = sys.argv[1]
regex = re.compile(sys.argv[2])
# Print the greatest directory entry matching the regex. NOTE: ordering is
# plain string sort, so "versions" compare lexicographically ("10" < "9"),
# not numerically.
print(sorted(filter(regex.match, os.listdir(dirpath)))[-1])
| 19.125 | 72 | 0.722222 |
ace4c42756f55b8294c68a2e04e6fda1c0531d73 | 1,155 | py | Python | flarepy/examples/tutorials/OLD/slider_demo.py | Alex-Ian-Hamilton/flarepy | e441fcfebb6bf68bfb0070155b8659eb86d26571 | [
"BSD-3-Clause"
] | 1 | 2019-08-30T06:47:21.000Z | 2019-08-30T06:47:21.000Z | flarepy/examples/tutorials/OLD/slider_demo.py | Alex-Ian-Hamilton/flarepy | e441fcfebb6bf68bfb0070155b8659eb86d26571 | [
"BSD-3-Clause"
] | null | null | null | flarepy/examples/tutorials/OLD/slider_demo.py | Alex-Ian-Hamilton/flarepy | e441fcfebb6bf68bfb0070155b8659eb86d26571 | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider, Button, RadioButtons
# Figure with enlarged margins so the sliders/buttons fit below and left of the plot.
fig, ax = plt.subplots()
plt.subplots_adjust(left=0.25, bottom=0.25)
# Initial sine curve: amplitude a0, frequency f0, sampled over one second.
t = np.arange(0.0, 1.0, 0.001)
a0 = 5
f0 = 3
s = a0*np.sin(2*np.pi*f0*t)
l, = plt.plot(t, s, lw=2, color='red')
plt.axis([0, 1, -10, 10])
# Dedicated axes for the two sliders controlling frequency and amplitude.
axcolor = 'lightgoldenrodyellow'
axfreq = plt.axes([0.25, 0.1, 0.65, 0.03])
axamp = plt.axes([0.25, 0.15, 0.65, 0.03])
sfreq = Slider(axfreq, 'Freq', 0.1, 30.0, valinit=f0)
samp = Slider(axamp, 'Amp', 0.1, 10.0, valinit=a0)
def update(val):
    # Redraw the curve using the current slider values; val (the changed
    # slider's value) is unused because both sliders are re-read here.
    amp = samp.val
    freq = sfreq.val
    l.set_ydata(amp*np.sin(2*np.pi*freq*t))
    fig.canvas.draw_idle()


# Both sliders share the same redraw callback.
sfreq.on_changed(update)
samp.on_changed(update)
# Reset button restores both sliders to their initial values.
resetax = plt.axes([0.8, 0.025, 0.1, 0.04])
button = Button(resetax, 'Reset', color=axcolor, hovercolor='0.975')


def reset(event):
    # Slider.reset presumably fires on_changed, so the plot redraws too —
    # confirm against the matplotlib widgets API.
    sfreq.reset()
    samp.reset()


button.on_clicked(reset)
# Radio buttons to pick the line color.
rax = plt.axes([0.025, 0.5, 0.15, 0.15], facecolor=axcolor)
radio = RadioButtons(rax, ('red', 'blue', 'green'), active=0)


def colorfunc(label):
    # label is the selected option's text, used directly as a color name.
    l.set_color(label)
    fig.canvas.draw_idle()


radio.on_clicked(colorfunc)
plt.show()
| 23.571429 | 68 | 0.662338 |
ace4c4362065b4407646d62eac03946f050cb73d | 474 | py | Python | tests/test_sound.py | Hari-07/manim | bbe113e7d33636c8901d6c7cee81cb2f4b69cc8b | [
"MIT"
] | 1 | 2021-12-05T15:26:35.000Z | 2021-12-05T15:26:35.000Z | tests/test_sound.py | Hari-07/manim | bbe113e7d33636c8901d6c7cee81cb2f4b69cc8b | [
"MIT"
] | 3 | 2020-07-14T02:46:11.000Z | 2020-09-09T15:15:55.000Z | tests/test_sound.py | Hari-07/manim | bbe113e7d33636c8901d6c7cee81cb2f4b69cc8b | [
"MIT"
] | null | null | null | import os
import struct
import wave
from manim import Scene
def test_add_sound():
    """Scene.add_sound should accept a freshly generated WAV file."""
    # Build a half-second, two-channel, 16-bit 44.1 kHz fixture filled with
    # one constant sample value.
    sample = struct.pack("h", 14242)
    wav = wave.open("noise.wav", "w")
    wav.setparams((2, 2, 44100, 0, "NONE", "not compressed"))
    for _ in range(22050):
        wav.writeframes(sample)
        wav.writeframes(sample)
    wav.close()

    # Attach the sound to a scene, then clean up the temporary file.
    scene = Scene()
    scene.add_sound("noise.wav")
    os.remove("noise.wav")
| 20.608696 | 59 | 0.630802 |
ace4c46f566dea6e2432f75c5643d2a9bedfcffa | 16,133 | py | Python | qiskit/aqua/_discover.py | pistoia/qiskit-aqua | c7900ffdabc1499145739bfab29a392709bee1a0 | [
"Apache-2.0"
] | null | null | null | qiskit/aqua/_discover.py | pistoia/qiskit-aqua | c7900ffdabc1499145739bfab29a392709bee1a0 | [
"Apache-2.0"
] | null | null | null | qiskit/aqua/_discover.py | pistoia/qiskit-aqua | c7900ffdabc1499145739bfab29a392709bee1a0 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2018 IBM.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""
Methods for pluggable objects discovery, registration, information
"""
import logging
import sys
import os
import pkgutil
import importlib
import inspect
import copy
from collections import namedtuple
from enum import Enum
from qiskit.aqua import AquaError
import pkg_resources
logger = logging.getLogger(__name__)
PLUGGABLES_ENTRY_POINT = 'qiskit.aqua.pluggables'
class PluggableType(Enum):
    """Every category of pluggable component known to Aqua.

    The string values are the names used in configuration dictionaries.
    """
    ALGORITHM = 'algorithm'
    OPTIMIZER = 'optimizer'
    VARIATIONAL_FORM = 'variational_form'
    INITIAL_STATE = 'initial_state'
    IQFT = 'iqft'
    QFT = 'qft'
    ORACLE = 'oracle'
    FEATURE_MAP = 'feature_map'
    MULTICLASS_EXTENSION = 'multiclass_extension'
    UNCERTAINTY_PROBLEM = 'uncertainty_problem'
    UNCERTAINTY_MODEL = 'uncertainty_model'
    INPUT = 'input'
    EIGENVALUES = 'eigs'
    RECIPROCAL = 'reciprocal'
def _get_pluggables_types_dictionary():
    """
    Gets all the pluggables types

    Any new pluggable type should be added here

    Returns:
        dict: PluggableType -> abstract base class for that type
    """
    # Imports are done inside the function, presumably to avoid circular
    # imports while this module is being loaded — confirm before hoisting.
    from qiskit.aqua.components.uncertainty_problems import UncertaintyProblem
    from qiskit.aqua.components.random_distributions import RandomDistribution
    from qiskit.aqua.components.optimizers import Optimizer
    from qiskit.aqua.algorithms.quantum_algorithm import QuantumAlgorithm
    from qiskit.aqua.components.variational_forms import VariationalForm
    from qiskit.aqua.components.initial_states import InitialState
    from qiskit.aqua.components.iqfts import IQFT
    from qiskit.aqua.components.qfts import QFT
    from qiskit.aqua.components.oracles import Oracle
    from qiskit.aqua.components.feature_maps import FeatureMap
    from qiskit.aqua.components.multiclass_extensions import MulticlassExtension
    from qiskit.aqua.input import AlgorithmInput
    from qiskit.aqua.components.eigs import Eigenvalues
    from qiskit.aqua.components.reciprocals import Reciprocal

    return {
        PluggableType.ALGORITHM: QuantumAlgorithm,
        PluggableType.OPTIMIZER: Optimizer,
        PluggableType.VARIATIONAL_FORM: VariationalForm,
        PluggableType.INITIAL_STATE: InitialState,
        PluggableType.IQFT: IQFT,
        PluggableType.QFT: QFT,
        PluggableType.ORACLE: Oracle,
        PluggableType.FEATURE_MAP: FeatureMap,
        PluggableType.MULTICLASS_EXTENSION: MulticlassExtension,
        PluggableType.UNCERTAINTY_PROBLEM: UncertaintyProblem,
        PluggableType.UNCERTAINTY_MODEL: RandomDistribution,
        PluggableType.INPUT: AlgorithmInput,
        PluggableType.EIGENVALUES: Eigenvalues,
        PluggableType.RECIPROCAL: Reciprocal
    }
# Module files / folders skipped during local filesystem discovery.
_NAMES_TO_EXCLUDE = [os.path.basename(__file__)]
_FOLDERS_TO_EXCLUDE = ['__pycache__']
# Registry record: a pluggable's name, its class, and a deep copy of its
# CONFIGURATION dict.
RegisteredPluggable = namedtuple(
    'RegisteredPluggable', ['name', 'cls', 'configuration'])
# PluggableType -> {pluggable_name: RegisteredPluggable}
_REGISTERED_PLUGGABLES = {}
# True once discovery has run (see _discover_on_demand / refresh_pluggables).
_DISCOVERED = False
def refresh_pluggables():
    """
    Attempts to rediscover all pluggable modules
    """
    # Wipe the registry, then re-run both discovery mechanisms.
    global _REGISTERED_PLUGGABLES
    _REGISTERED_PLUGGABLES = {}
    global _DISCOVERED
    _DISCOVERED = True
    _discover_local_pluggables()
    _discover_entry_point_pluggables()
    if logger.isEnabledFor(logging.DEBUG):
        for ptype in local_pluggables_types():
            logger.debug("Found: '{}' has pluggables {} ".format(ptype.value, local_pluggables(ptype)))
def _discover_on_demand():
    """
    Attempts to discover pluggable modules, if not already discovered
    """
    global _DISCOVERED
    if not _DISCOVERED:
        # Set the flag before discovering so that re-entrant calls made while
        # registering (register_pluggable -> _discover_on_demand) do not recurse.
        _DISCOVERED = True
        _discover_local_pluggables()
        _discover_entry_point_pluggables()
        if logger.isEnabledFor(logging.DEBUG):
            for ptype in local_pluggables_types():
                logger.debug("Found: '{}' has pluggables {} ".format(ptype.value, local_pluggables(ptype)))
def _discover_entry_point_pluggables():
    """
    Discovers the pluggable modules defined by entry_points in setup
    and attempts to register them. Pluggable modules should subclass Pluggable Base classes.
    """
    for entry_point in pkg_resources.iter_entry_points(PLUGGABLES_ENTRY_POINT):
        try:
            ep = entry_point.load()
            _registered = False
            # Match the loaded class against each known pluggable base class.
            for pluggable_type, c in _get_pluggables_types_dictionary().items():
                if not inspect.isabstract(ep) and issubclass(ep, c):
                    _register_pluggable(pluggable_type, ep)
                    _registered = True
                    # print("Registered entry point pluggable type '{}' '{}' class '{}'".format(pluggable_type.value, entry_point, ep))
                    logger.debug("Registered entry point pluggable type '{}' '{}' class '{}'".format(pluggable_type.value, entry_point, ep))
                    break

            if not _registered:
                # print("Unknown entry point pluggable '{}' class '{}'".format(entry_point, ep))
                logger.debug("Unknown entry point pluggable '{}' class '{}'".format(entry_point, ep))
        except Exception as e:
            # Ignore entry point that could not be initialized.
            # print("Failed to load entry point '{}' error {}".format(entry_point, str(e)))
            logger.debug("Failed to load entry point '{}' error {}".format(entry_point, str(e)))
def _discover_local_pluggables_in_dirs(directory,
                                       parentname,
                                       names_to_exclude=_NAMES_TO_EXCLUDE,
                                       folders_to_exclude=_FOLDERS_TO_EXCLUDE):
    """
    Imports every non-package module directly under ``directory`` and
    registers any concrete pluggable subclasses defined in it, then recurses
    into subdirectories.
    """
    # First pass: plain modules in this directory (packages are skipped here;
    # their contents are reached through the directory recursion below).
    for _, name, ispackage in pkgutil.iter_modules([directory]):
        if ispackage:
            continue

        # Iterate through the modules
        if name not in names_to_exclude:  # skip those modules
            try:
                fullname = parentname + '.' + name
                modspec = importlib.util.find_spec(fullname)
                mod = importlib.util.module_from_spec(modspec)
                modspec.loader.exec_module(mod)
                for _, cls in inspect.getmembers(mod, inspect.isclass):
                    # Iterate through the classes defined on the module.
                    try:
                        # Only consider classes defined by this very module
                        # (not names it merely re-exports).
                        if cls.__module__ == modspec.name:
                            for pluggable_type, c in _get_pluggables_types_dictionary().items():
                                if not inspect.isabstract(cls) and issubclass(cls, c):
                                    _register_pluggable(pluggable_type, cls)
                                    importlib.import_module(fullname)
                                    break
                    except Exception as e:
                        # Ignore pluggables that could not be initialized.
                        # print('Failed to load pluggable {} error {}'.format(fullname, str(e)))
                        logger.debug('Failed to load pluggable {} error {}'.format(fullname, str(e)))
            except Exception as e:
                # Ignore pluggables that could not be initialized.
                # print('Failed to load {} error {}'.format(fullname, str(e)))
                logger.debug('Failed to load {} error {}'.format(fullname, str(e)))

    # Second pass: recurse into subdirectories, skipping caches and
    # macOS debug-symbol bundles.
    for item in sorted(os.listdir(directory)):
        fullpath = os.path.join(directory, item)
        if item not in folders_to_exclude and not item.endswith('dSYM') and os.path.isdir(fullpath):
            _discover_local_pluggables_in_dirs(
                fullpath, parentname + '.' + item, names_to_exclude, folders_to_exclude)
def _discover_local_pluggables(directory=os.path.dirname(__file__),
                               parentname=os.path.splitext(__name__)[0],
                               names_to_exclude=_NAMES_TO_EXCLUDE,
                               folders_to_exclude=_FOLDERS_TO_EXCLUDE):
    """
    Discovers the pluggable modules on the directory and subdirectories of the current module
    and attempts to register them. Pluggable modules should subclass Pluggable Base classes.
    Args:
        directory (str, optional): Directory to search for pluggable. Defaults
            to the directory of this module.
        parentname (str, optional): Module parent name. Defaults to current directory name
    """

    def _get_sys_path(directory):
        # Collect this directory and all its subdirectories (except caches
        # and dSYM bundles) so their modules are importable during discovery.
        syspath = [os.path.abspath(directory)]
        for item in os.listdir(directory):
            fullpath = os.path.join(directory, item)
            if item != '__pycache__' and not item.endswith('dSYM') and os.path.isdir(fullpath):
                syspath += _get_sys_path(fullpath)
        return syspath

    # Temporarily extend sys.path for the duration of the discovery walk,
    # restoring it even if discovery raises.
    syspath_save = sys.path
    sys.path = sys.path + _get_sys_path(directory)
    try:
        _discover_local_pluggables_in_dirs(directory, parentname)
    finally:
        sys.path = syspath_save
def register_pluggable(cls):
    """
    Registers a pluggable class.

    The pluggable type is inferred by matching ``cls`` against the known
    pluggable base classes.

    Args:
        cls (object): Pluggable class.
    Returns:
        name: pluggable name
    Raises:
        AquaError: if ``cls`` is not a subclass of any known pluggable base
            class, or registration fails downstream.
    """
    _discover_on_demand()
    pluggable_type = None
    # Renamed from 'type' to avoid shadowing the builtin of the same name.
    for ptype, base_cls in _get_pluggables_types_dictionary().items():
        if issubclass(cls, base_cls):
            pluggable_type = ptype
            break

    if pluggable_type is None:
        raise AquaError(
            'Could not register class {} is not subclass of any known pluggable'.format(cls))

    return _register_pluggable(pluggable_type, cls)
# Module-level slot written by the exec() trick in _register_pluggable, which
# re-imports a pluggable class by name to work around pickling problems.
global_class = None
def _register_pluggable(pluggable_type, cls):
    """
    Registers a pluggable class
    Args:
        pluggable_type(PluggableType): The pluggable type
        cls (object): Pluggable class.
    Returns:
        name: pluggable name
    Raises:
        AquaError: if the class is already registered or could not be registered
    """
    if pluggable_type not in _REGISTERED_PLUGGABLES:
        _REGISTERED_PLUGGABLES[pluggable_type] = {}

    # fix pickle problems
    # NOTE(review): this exec() builds an import statement from the class's
    # own __module__/__qualname__ and rebinds 'cls' to the freshly imported
    # object via the module-level 'global_class' slot.
    method = 'from {} import {}\nglobal global_class\nglobal_class = {}'.format(cls.__module__, cls.__qualname__, cls.__qualname__)
    exec(method)
    cls = global_class

    # Verify that the pluggable is not already registered.
    registered_classes = _REGISTERED_PLUGGABLES[pluggable_type]
    if cls in [pluggable.cls for pluggable in registered_classes.values()]:
        raise AquaError(
            'Could not register class {} is already registered'.format(cls))

    # Verify that it has a minimal valid configuration.
    try:
        pluggable_name = cls.CONFIGURATION['name']
    except (LookupError, TypeError):
        raise AquaError('Could not register pluggable: invalid configuration')

    # Verify that the pluggable is valid
    check_pluggable_valid = getattr(cls, 'check_pluggable_valid', None)
    if check_pluggable_valid is not None:
        try:
            check_pluggable_valid()
        except Exception as e:
            logger.debug(str(e))
            raise AquaError('Could not register class {}. Name {} is not valid'.format(cls, pluggable_name)) from e

    # Reject duplicate names within the same pluggable type.
    if pluggable_name in _REGISTERED_PLUGGABLES[pluggable_type]:
        raise AquaError('Could not register class {}. Name {} {} is already registered'.format(cls,
                        pluggable_name, _REGISTERED_PLUGGABLES[pluggable_type][pluggable_name].cls))

    # Append the pluggable to the `registered_classes` dict.
    _REGISTERED_PLUGGABLES[pluggable_type][pluggable_name] = RegisteredPluggable(
        pluggable_name, cls, copy.deepcopy(cls.CONFIGURATION))
    return pluggable_name
def deregister_pluggable(pluggable_type, pluggable_name):
    """
    Deregisters a pluggable class
    Args:
        pluggable_type(PluggableType): The pluggable type
        pluggable_name (str): The pluggable name
    Raises:
        AquaError: if the class is not registered
    """
    _discover_on_demand()

    # Unknown pluggable type.
    if pluggable_type not in _REGISTERED_PLUGGABLES:
        raise AquaError('Could not deregister {} {} not registered'.format(
            pluggable_type, pluggable_name))

    # Unknown name under that type.
    if pluggable_name not in _REGISTERED_PLUGGABLES[pluggable_type]:
        raise AquaError('Could not deregister {} {} not registered'.format(
            pluggable_type, pluggable_name))

    _REGISTERED_PLUGGABLES[pluggable_type].pop(pluggable_name)
def get_pluggable_class(pluggable_type, pluggable_name):
    """
    Accesses pluggable class
    Args:
        pluggable_type(PluggableType or str): The pluggable type
        pluggable_name (str): The pluggable name
    Returns:
        cls: pluggable class
    Raises:
        AquaError: if the class is not registered
    """
    _discover_on_demand()

    # Accept either the PluggableType enum or its string value.
    if isinstance(pluggable_type, str):
        for ptype in PluggableType:
            if ptype.value == pluggable_type:
                pluggable_type = ptype
                break

    if not isinstance(pluggable_type, PluggableType):
        raise AquaError('Invalid pluggable type {} {}'.format(
            pluggable_type, pluggable_name))

    if pluggable_type not in _REGISTERED_PLUGGABLES:
        raise AquaError('{} {} not registered'.format(
            pluggable_type, pluggable_name))

    if pluggable_name not in _REGISTERED_PLUGGABLES[pluggable_type]:
        raise AquaError('{} {} not registered'.format(
            pluggable_type, pluggable_name))

    return _REGISTERED_PLUGGABLES[pluggable_type][pluggable_name].cls
def get_pluggable_configuration(pluggable_type, pluggable_name):
    """
    Accesses pluggable configuration
    Args:
        pluggable_type(PluggableType or str): The pluggable type
        pluggable_name (str): The pluggable name
    Returns:
        configuration: pluggable configuration (a deep copy, safe to mutate)
    Raises:
        AquaError: if the class is not registered
    """
    _discover_on_demand()

    # Accept either the PluggableType enum or its string value.
    if isinstance(pluggable_type, str):
        for ptype in PluggableType:
            if ptype.value == pluggable_type:
                pluggable_type = ptype
                break

    if not isinstance(pluggable_type, PluggableType):
        raise AquaError('Invalid pluggable type {} {}'.format(
            pluggable_type, pluggable_name))

    if pluggable_type not in _REGISTERED_PLUGGABLES:
        raise AquaError('{} {} not registered'.format(
            pluggable_type, pluggable_name))

    if pluggable_name not in _REGISTERED_PLUGGABLES[pluggable_type]:
        raise AquaError('{} {} not registered'.format(
            pluggable_type, pluggable_name))

    # Deep copy so callers cannot mutate the registered configuration.
    return copy.deepcopy(_REGISTERED_PLUGGABLES[pluggable_type][pluggable_name].configuration)
def local_pluggables_types():
    """Return the pluggable types that currently have a registry entry."""
    # Make sure discovery has populated the registry first.
    _discover_on_demand()
    return [ptype for ptype in _REGISTERED_PLUGGABLES]
def local_pluggables(pluggable_type):
    """
    Accesses pluggable names
    Args:
        pluggable_type(PluggableType or str): The pluggable type
    Returns:
        names: pluggable names
    Raises:
        AquaError: if the type is not registered
    """
    _discover_on_demand()

    # Accept either the PluggableType enum or its string value.
    if isinstance(pluggable_type, str):
        for ptype in PluggableType:
            if ptype.value == pluggable_type:
                pluggable_type = ptype
                break

    if not isinstance(pluggable_type, PluggableType):
        raise AquaError(
            'Invalid pluggable type {}'.format(pluggable_type))

    if pluggable_type not in _REGISTERED_PLUGGABLES:
        raise AquaError('{} not registered'.format(pluggable_type))
| 37.258661 | 171 | 0.663485 |
ace4c4a9d0b6bb9b52546f70c189ac6a27413266 | 173 | py | Python | code/pyTankBot/calculateDistance.py | henkkx/pyTankBot | 8450640dba79480c0fe4098ee1125dba1902abba | [
"MIT"
] | null | null | null | code/pyTankBot/calculateDistance.py | henkkx/pyTankBot | 8450640dba79480c0fe4098ee1125dba1902abba | [
"MIT"
] | null | null | null | code/pyTankBot/calculateDistance.py | henkkx/pyTankBot | 8450640dba79480c0fe4098ee1125dba1902abba | [
"MIT"
] | null | null | null | import math
def calculate_distance(ownX, ownY, otherX, otherY):
    """Return the Euclidean distance from (ownX, ownY) to (otherX, otherY).

    Args:
        ownX, ownY: coordinates of this tank.
        otherX, otherY: coordinates of the other object.
    Returns:
        float: straight-line distance between the two points.
    """
    # math.hypot computes sqrt(dx*dx + dy*dy) without intermediate
    # overflow/underflow, replacing the manual squaring-and-sqrt version.
    return math.hypot(otherX - ownX, otherY - ownY)
| 19.222222 | 51 | 0.687861 |
ace4c4b62f5e382204ccbc58ae334a62c596e11d | 6,729 | py | Python | config/settings/production.py | underchemist/qc-timelimit-duel-draft | 7c919d65d5b8a3e34d57da13d80e3d47c5222b3a | [
"MIT"
] | null | null | null | config/settings/production.py | underchemist/qc-timelimit-duel-draft | 7c919d65d5b8a3e34d57da13d80e3d47c5222b3a | [
"MIT"
] | null | null | null | config/settings/production.py | underchemist/qc-timelimit-duel-draft | 7c919d65d5b8a3e34d57da13d80e3d47c5222b3a | [
"MIT"
] | null | null | null | import logging
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.logging import LoggingIntegration
from .base import * # noqa
from .base import env
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = env("DJANGO_SECRET_KEY")
# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["example.com"])
# DATABASES
# ------------------------------------------------------------------------------
DATABASES["default"] = env.db("DATABASE_URL") # noqa F405
DATABASES["default"]["ATOMIC_REQUESTS"] = True # noqa F405
DATABASES["default"]["CONN_MAX_AGE"] = env.int("CONN_MAX_AGE", default=60) # noqa F405
# CACHES
# ------------------------------------------------------------------------------
# Redis-backed default cache shared by all web processes.
CACHES = {
    "default": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": env("REDIS_URL"),
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis.client.DefaultClient",
            # Mimicing memcache behavior.
            # http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
            # NOTE(review): with IGNORE_EXCEPTIONS the app degrades silently
            # (cache misses) when Redis is unreachable rather than erroring.
            "IGNORE_EXCEPTIONS": True,
        },
    }
}
# SECURITY
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-proxy-ssl-header
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-ssl-redirect
SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
# https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-secure
SESSION_COOKIE_SECURE = True
# https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-secure
CSRF_COOKIE_SECURE = True
# https://docs.djangoproject.com/en/dev/topics/security/#ssl-https
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-seconds
# TODO: set this to 60 seconds first and then to 518400 once you prove the former works
SECURE_HSTS_SECONDS = 60
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-include-subdomains
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
"DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True
)
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-preload
SECURE_HSTS_PRELOAD = env.bool("DJANGO_SECURE_HSTS_PRELOAD", default=True)
# https://docs.djangoproject.com/en/dev/ref/middleware/#x-content-type-options-nosniff
SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
"DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True
)
# STATIC
# ------------------------
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
# MEDIA
# ------------------------------------------------------------------------------
# TEMPLATES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES[0]["OPTIONS"]["loaders"] = [ # noqa F405
(
"django.template.loaders.cached.Loader",
[
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
)
]
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#default-from-email
DEFAULT_FROM_EMAIL = env(
"DJANGO_DEFAULT_FROM_EMAIL", default="qc-timelimit-duel-draft <noreply@example.com>"
)
# https://docs.djangoproject.com/en/dev/ref/settings/#server-email
SERVER_EMAIL = env("DJANGO_SERVER_EMAIL", default=DEFAULT_FROM_EMAIL)
# https://docs.djangoproject.com/en/dev/ref/settings/#email-subject-prefix
EMAIL_SUBJECT_PREFIX = env(
"DJANGO_EMAIL_SUBJECT_PREFIX", default="[qc-timelimit-duel-draft]"
)
# ADMIN
# ------------------------------------------------------------------------------
# Django Admin URL regex.
ADMIN_URL = env("DJANGO_ADMIN_URL")
# Anymail (Mailgun)
# ------------------------------------------------------------------------------
# https://anymail.readthedocs.io/en/stable/installation/#installing-anymail
INSTALLED_APPS += ["anymail"] # noqa F405
EMAIL_BACKEND = "anymail.backends.mailgun.EmailBackend"
# https://anymail.readthedocs.io/en/stable/installation/#anymail-settings-reference
ANYMAIL = {
"MAILGUN_API_KEY": env("MAILGUN_API_KEY"),
"MAILGUN_SENDER_DOMAIN": env("MAILGUN_DOMAIN"),
"MAILGUN_API_URL": env("MAILGUN_API_URL", default="https://api.mailgun.net/v3"),
}
# Gunicorn
# ------------------------------------------------------------------------------
INSTALLED_APPS += ["gunicorn"] # noqa F405
# WhiteNoise
# ------------------------------------------------------------------------------
# http://whitenoise.evans.io/en/latest/django.html#enable-whitenoise
MIDDLEWARE.insert(1, "whitenoise.middleware.WhiteNoiseMiddleware") # noqa F405
# LOGGING
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#logging
# See https://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
# Everything goes to a single stderr/console handler (picked up by the
# platform's log aggregation); noisy loggers are capped at ERROR.
LOGGING = {
    "version": 1,
    "disable_existing_loggers": True,
    "formatters": {
        "verbose": {
            "format": "%(levelname)s %(asctime)s %(module)s "
            "%(process)d %(thread)d %(message)s"
        }
    },
    "handlers": {
        "console": {
            "level": "DEBUG",
            "class": "logging.StreamHandler",
            "formatter": "verbose",
        }
    },
    "root": {"level": "INFO", "handlers": ["console"]},
    "loggers": {
        # SQL query logging only on error.
        "django.db.backends": {
            "level": "ERROR",
            "handlers": ["console"],
            "propagate": False,
        },
        # Errors logged by the SDK itself
        "sentry_sdk": {"level": "ERROR", "handlers": ["console"], "propagate": False},
        # Host-header probes are noise; keep them out of Sentry via no propagation.
        "django.security.DisallowedHost": {
            "level": "ERROR",
            "handlers": ["console"],
            "propagate": False,
        },
    },
}
# Sentry
# ------------------------------------------------------------------------------
# Sentry configuration: INFO-and-above become breadcrumbs, ERROR-and-above
# become reported events.
SENTRY_DSN = env("SENTRY_DSN")
SENTRY_LOG_LEVEL = env.int("DJANGO_SENTRY_LOG_LEVEL", logging.INFO)

sentry_logging = LoggingIntegration(
    level=SENTRY_LOG_LEVEL,  # Capture info and above as breadcrumbs
    event_level=logging.ERROR,  # Send errors as events
)
sentry_sdk.init(dsn=SENTRY_DSN, integrations=[sentry_logging, DjangoIntegration()])
# Your stuff...
# ------------------------------------------------------------------------------
| 38.451429 | 89 | 0.58077 |
ace4c7d9ffd920abee9fd2d300e105122c3ddbc1 | 4,913 | py | Python | kochat/proc/torch_processor.py | leebs0521/AI_TeamProject | e420795159554411ae1b542b6ac05520163c87eb | [
"Apache-2.0"
] | null | null | null | kochat/proc/torch_processor.py | leebs0521/AI_TeamProject | e420795159554411ae1b542b6ac05520163c87eb | [
"Apache-2.0"
] | null | null | null | kochat/proc/torch_processor.py | leebs0521/AI_TeamProject | e420795159554411ae1b542b6ac05520163c87eb | [
"Apache-2.0"
] | null | null | null | """
@author : Hyunwoong
@when : 5/9/2020
@homepage : https://github.com/gusdnd852
"""
import os
from abc import abstractmethod
from time import time
from typing import List
import torch
from torch import nn
from torch import Tensor
from torch.nn.parameter import Parameter
from torch.optim import Adam
from torch.optim.lr_scheduler import ReduceLROnPlateau
from kochat.proc.base_processor import BaseProcessor
from kochat.utils.metrics import Metrics
from kochat.utils.visualizer import Visualizer
class TorchProcessor(BaseProcessor):
    """Processor that drives training, testing and inference of a PyTorch model."""

    def __init__(self, model: nn.Module, parameters: Parameter or List[Parameter]):
        """
        Processor class that manages training, testing and inference
        of a PyTorch model.

        :param model: the PyTorch model to manage
        :param parameters: trainable parameters handed to the optimizer.
            NOTE(review): the ``Parameter or List[Parameter]`` annotation
            evaluates to just ``Parameter`` at runtime — documentation only.
        """
        super().__init__(model)
        # self.visualizer = Visualizer(self.model_dir, self.model_file)
        self.metrics = Metrics(self.logging_precision)
        self.model = model.to(self.device)
        self.__initialize_weights(self.model)
        # Use Adam as the model optimizer.
        self.optimizers = [Adam(
            params=parameters,
            lr=self.model_lr,
            weight_decay=self.weight_decay)]
        # Reduce the learning rate when the loss plateaus.
        self.lr_scheduler = ReduceLROnPlateau(
            optimizer=self.optimizers[0],
            verbose=True,
            factor=self.lr_scheduler_factor,
            min_lr=self.lr_scheduler_min_lr,
            patience=self.lr_scheduler_patience)

    def fit(self, dataset: tuple, test: bool = True):
        """
        Train (and optionally test) the model, periodically saving it.
        Visualization calls are currently commented out.

        :param dataset: tuple of (train, test[, ood_train, ood_test]) data
        :param test: whether to run a test epoch after each training epoch
        """
        # Unpack the dataset tuple.
        self.train_data = dataset[0]
        self.test_data = dataset[1]
        if len(dataset) > 2:
            self.ood_train = dataset[2]
            self.ood_test = dataset[3]
        for i in range(self.epochs + 1):
            eta = time()
            loss, label, predict = self._train_epoch(i)
            # self.__visualize(loss, label, predict, mode='train')
            # training epoch + visualization
            if test:
                loss, label, predict = self._test_epoch(i)
                # self.__visualize(loss, label, predict, mode='test')
                # testing epoch + visualization
            # Only step the LR scheduler after the warm-up epochs.
            # NOTE(review): when ``test`` is True the scheduler steps on the
            # test loss, otherwise on the train loss — confirm intended.
            if i > self.lr_scheduler_warm_up:
                self.lr_scheduler.step(loss)
            if i % self.save_epoch == 0:
                self._save_model()
            self._print('Epoch : {epoch}, ETA : {eta} sec '
                        .format(epoch=i, eta=round(time() - eta, 4)))

    @abstractmethod
    def _train_epoch(self, epoch: int):
        """Run one training epoch; must return (loss, labels, predictions)."""
        raise NotImplementedError

    @abstractmethod
    def _test_epoch(self, epoch: int):
        """Run one test epoch; must return (loss, labels, predictions)."""
        raise NotImplementedError

    def _load_model(self):
        """
        Load saved model weights from disk (at most once per instance).
        """
        if not os.path.exists(self.model_dir):
            # (message: "the model cannot be loaded")
            raise Exception("모델을 불러올 수 없습니다.")
        if not self.model_loaded:
            self.model_loaded = True
            self.model.load_state_dict(torch.load(self.model_file + '.pth'))

    def _save_model(self):
        """
        Save the model weights to disk, creating the directory if needed.
        """
        if not os.path.exists(self.model_dir):
            os.makedirs(self.model_dir)
        torch.save(self.model.state_dict(), self.model_file + '.pth')

    def __initialize_weights(self, model: nn.Module):
        """
        Initialize the model's weights (He initialization by default).

        NOTE(review): this inspects only the module that is passed in —
        submodules are untouched (``model.apply`` would recurse), and
        ``nn.init.kaiming_uniform`` is the deprecated alias of
        ``kaiming_uniform_`` — confirm before changing training behavior.

        :param model: module whose weight should be initialized
        """
        if hasattr(model, 'weight') and model.weight.dim() > 1:
            nn.init.kaiming_uniform(model.weight.data)

    def __visualize(self, loss: Tensor, label: Tensor, predict: Tensor, mode: str):
        """
        Visualize the model's feed-forward results in several ways.
        (Currently unused: the calls in ``fit`` are commented out and
        ``self.visualizer`` is never created in ``__init__``.)

        :param loss: loss of this epoch
        :param label: dataset labels
        :param predict: model predictions
        :param mode: train or test
        """
        # Compute the evaluation results and persist them.
        eval_dict = self.metrics.evaluate(label, predict, mode=mode)
        report, matrix = self.metrics.report(self.label_dict, mode)
        self.visualizer.save_result(loss, eval_dict, mode=mode)
        # Render the results (confusion matrix, report, loss graphs).
        self.visualizer.draw_matrix(matrix, list(self.label_dict), mode)
        self.visualizer.draw_report(report, mode=mode)
        self.visualizer.draw_graphs()

    @abstractmethod
    def _forward(self, feats: Tensor, labels: Tensor = None, lengths: Tensor = None):
        raise NotImplementedError

    def _backward(self, loss: Tensor):
        """
        Backpropagate through every trainable parameter.

        :param loss: loss before backprop
        :return: the same loss, after backprop
        """
        for opt in self.optimizers: opt.zero_grad()
        loss.backward()
        for opt in self.optimizers: opt.step()
        return loss
| 29.071006 | 85 | 0.607775 |
ace4c81383b7310d731c5185d3c5248e83a41f08 | 5,142 | py | Python | django_youtube/models.py | laplacesdemon/django-youtube | 45b0a9d4b60e3b85c84e9106b2d27758e89d0470 | [
"BSD-3-Clause"
] | 39 | 2015-02-26T04:01:02.000Z | 2022-01-13T07:00:53.000Z | django_youtube/models.py | laplacesdemon/django-youtube | 45b0a9d4b60e3b85c84e9106b2d27758e89d0470 | [
"BSD-3-Clause"
] | 4 | 2015-10-09T10:31:54.000Z | 2020-06-05T16:56:03.000Z | django_youtube/models.py | laplacesdemon/django-youtube | 45b0a9d4b60e3b85c84e9106b2d27758e89d0470 | [
"BSD-3-Clause"
] | 17 | 2015-03-16T22:51:18.000Z | 2022-03-01T20:14:54.000Z | from django.db import models
from django_youtube.api import AccessControl, Api
import django.dispatch
from django.utils.translation import ugettext as _
from django.conf import settings
class Video(models.Model):
    """
    Local mirror of a YouTube video.  ``save``/``delete`` keep the record in
    sync with YouTube through the ``Api`` wrapper.
    """
    user = models.ForeignKey(settings.AUTH_USER_MODEL)
    video_id = models.CharField(max_length=255, unique=True, null=True,
                                help_text=_("The Youtube id of the video"))
    title = models.CharField(max_length=200, null=True, blank=True)
    description = models.TextField(null=True, blank=True)
    keywords = models.CharField(max_length=200, null=True, blank=True,
                                help_text=_("Comma seperated keywords"))
    youtube_url = models.URLField(max_length=255, null=True, blank=True)
    swf_url = models.URLField(max_length=255, null=True, blank=True)
    # Visibility on YouTube; values come from api.AccessControl.
    access_control = models.SmallIntegerField(max_length=1,
                                              choices=(
                                                  (AccessControl.Public,
                                                   "Public"),
                                                  (AccessControl.Unlisted,
                                                   "Unlisted"),
                                                  (AccessControl.Private,
                                                   "Private"),
                                              ),
                                              default=AccessControl.Public)

    def __unicode__(self):
        return self.title

    def get_absolute_url(self):
        """
        Returns the swf url
        """
        return self.swf_url

    def entry(self):
        """
        Connects to the Youtube Api and retrieves the video entry object.
        Note: authenticates on every call.

        Return:
            gdata.youtube.YouTubeVideoEntry
        """
        api = Api()
        api.authenticate()
        return api.fetch_video(self.video_id)

    def save(self, *args, **kwargs):
        """
        Synchronize the video information on db with the video on Youtube.
        The reason that I didn't use signals is to avoid saving the video
        instance twice.
        """
        # if this is a new instance add details from api
        if not self.id:
            # Connect to api and get the details
            entry = self.entry()
            # Set the details
            self.title = entry.media.title.text
            self.description = entry.media.description.text
            self.keywords = entry.media.keywords.text
            self.youtube_url = entry.media.player.url
            self.swf_url = entry.GetSwfUrl()
            if entry.media.private:
                self.access_control = AccessControl.Private
            else:
                self.access_control = AccessControl.Public
            # Save the instance
            super(Video, self).save(*args, **kwargs)
            # Persist the thumbnails reported by the API for this video.
            for thumbnail in entry.media.thumbnail:
                t = Thumbnail()
                t.url = thumbnail.url
                t.video = self
                t.save()
        else:
            # updating the video instance
            # Connect to API and update video on youtube
            api = Api()
            # update method needs authentication
            api.authenticate()
            # Update the info on youtube, raise error on failure
            api.update_video(self.video_id, self.title, self.description,
                             self.keywords, self.access_control)
            # Save the model
            return super(Video, self).save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        """
        Deletes the video from youtube

        Raises:
            OperationError
        """
        api = Api()
        # Authentication is required for deletion
        api.authenticate()
        # Send API request, raises OperationError on unsuccessful deletion
        api.delete_video(self.video_id)
        # Call the super method
        return super(Video, self).delete(*args, **kwargs)

    def default_thumbnail(self):
        """
        Returns the 1st thumbnail in thumbnails.
        This method can be updated as adding a default attribute to the
        Thumbnail model and returning it.

        NOTE(review): raises IndexError if the video has no thumbnails.

        Returns:
            Thumbnail object
        """
        return self.thumbnail_set.all()[0]
class Thumbnail(models.Model):
    """A thumbnail image URL that YouTube reported for a Video."""
    video = models.ForeignKey(Video, null=True)
    url = models.URLField(max_length=255)

    def __unicode__(self):
        return self.url

    def get_absolute_url(self):
        # The thumbnail lives on YouTube's servers; the URL is the resource.
        return self.url
class UploadedVideo(models.Model):
    """
    temporary video object that is uploaded to use in direct upload
    """
    file_on_server = models.FileField(upload_to='videos', null=True,
                                      help_text=_("Temporary file on server for \
using in `direct upload` from \
your server to youtube"))

    def __unicode__(self):
        """string representation"""
        return self.file_on_server.url
#
# Signal Definitions
#
# Fired after a video record is created; receivers get the ``video``
# instance as a keyword argument.
video_created = django.dispatch.Signal(providing_args=["video"])
| 33.607843 | 96 | 0.554842 |
ace4c99ce419f0345bc660586c07e089d3f133d6 | 608 | py | Python | scripts/test_template_bank.py | rhysjaques/ringdown | eca49a2d0da37e4d95e5b2dfa5a454c534e73ebe | [
"MIT"
] | 2 | 2020-11-12T01:51:08.000Z | 2021-08-23T11:47:39.000Z | scripts/test_template_bank.py | rhysjaques/ringdown | eca49a2d0da37e4d95e5b2dfa5a454c534e73ebe | [
"MIT"
] | null | null | null | scripts/test_template_bank.py | rhysjaques/ringdown | eca49a2d0da37e4d95e5b2dfa5a454c534e73ebe | [
"MIT"
] | 1 | 2021-01-13T14:35:20.000Z | 2021-01-13T14:35:20.000Z | """
Show how to create a template bank equivalent to that in Fig. 7.5 (p. 78) of
arXiv:0908.2085.
"""
from ringdown import RingdownTemplateBank
from matplotlib import pyplot as pl
frange = [50, 2000]  # frequency range (Hz)
qrange = [2, 20]  # Quality factor ranges
mm = 0.03  # maximum mismatch

# Build the ringdown template bank over the (frequency, Q) plane.
tb = RingdownTemplateBank(frange, qrange=qrange, mm=mm)
print("Number of templates is {}".format(len(tb)))

# Scatter the templates: log-frequency on x, quality factor on y.
fig, ax = pl.subplots()
ax.semilogx(tb.bank_freqs, tb.bank_qs, '.', color="b", ls="None")
ax.set_xlabel("Frequency (Hz)")
ax.set_ylabel("Q")
ax.grid(True, which="both", linestyle="dotted")
pl.show()
ace4c9bf5187f596eddf1211b34a76676542ca74 | 3,262 | py | Python | imdb_episode_ratings/scraper.py | elishahyousaf/Awesome-Python-Scripts | d516584517de2d94de60852f73d8f1831524fa19 | [
"MIT"
] | 1,026 | 2018-10-02T18:51:12.000Z | 2022-03-31T13:45:14.000Z | imdb_episode_ratings/scraper.py | elishahyousaf/Awesome-Python-Scripts | d516584517de2d94de60852f73d8f1831524fa19 | [
"MIT"
] | 164 | 2018-10-02T18:37:40.000Z | 2021-11-18T13:29:54.000Z | imdb_episode_ratings/scraper.py | elishahyousaf/Awesome-Python-Scripts | d516584517de2d94de60852f73d8f1831524fa19 | [
"MIT"
] | 521 | 2018-10-02T18:15:40.000Z | 2022-03-26T12:10:15.000Z | import requests
from bs4 import BeautifulSoup as BS
import xlwt
import time
def get_static_html(search_url, retries=3, delay=5):
    """Fetch ``search_url`` and return a BeautifulSoup object for the page.

    Retries up to ``retries`` times, sleeping ``delay`` seconds between
    attempts, and re-raises the last connection error if every attempt
    fails.  (The previous version swallowed the exception and then crashed
    with a NameError because ``r_page`` was never assigned.)

    :param search_url: URL to fetch
    :param retries: number of attempts before giving up
    :param delay: seconds to sleep between attempts
    :return: parsed ``BeautifulSoup`` object for the page
    :raises requests.RequestException: when all attempts fail
    """
    last_error = None
    for _ in range(retries):
        try:
            r_page = requests.get(search_url)
            # print(soup_object.prettify()) would dump the parsed page here.
            return BS(r_page.content, 'html.parser')
        except requests.RequestException as exc:
            last_error = exc
            print("Connection refused by the server..")
            time.sleep(delay)
    raise last_error
def get_url():
    """Prompt the user for a show name and build the IMDb search URL.

    :return: ``(search_url, query)`` tuple, where ``query`` is the show
        name with whitespace runs collapsed to ``+`` separators.
    """
    raw_name = input(" Enter show name ")
    query = '+'.join(raw_name.split())
    search_url = (
        "https://www.imdb.com/find?ref_=nv_sr_fn&q=" + query + "&s=all"
    )
    return search_url, query
def get_new_url(soup_object):
    """Locate the first TV-series hit on an IMDb search-results page.

    Scans the result rows for the first one whose text contains
    "(TV Series)", follows its link, and returns the base URL of the
    show's episode listing (append a season number to get one season).
    Exits the program when no TV-series result is present.

    :param soup_object: BeautifulSoup of the IMDb search-results page
    :return: episodes URL ending in ``episodes?season=``
    """
    # List of possible search results on the page.
    list_queries = soup_object.find_all('td', class_="result_text")
    show_final = None
    # Find the first TV show listing among the relevant searches.
    for show in list_queries:
        if "(TV Series)" in show.text:
            show_final = show
            break
    if show_final is None:
        print(" No relevant search ")
        exit()
    # Follow the hyperlink of the matched result.  The href already starts
    # with "/", so join against the bare host — the old
    # "https://www.imdb.com/" + href concatenation produced a double slash.
    hyperlink = show_final.find('a')
    url_change = hyperlink['href']
    show_url = "https://www.imdb.com" + url_change + "episodes?season="
    return show_url
def start():
    """Scrape every season's episode data for a show and save it to an
    ``.xls`` workbook (one sheet per season, one row per episode)."""
    search_url , show_name = get_url()
    soup_object = get_static_html(search_url)
    show_url = get_new_url(soup_object)
    result_file = xlwt.Workbook()
    season_number = 1
    while True :
        soup_object = get_static_html( show_url + str(season_number) )
        ## verify if extra season exists: IMDb serves the last real season
        ## for out-of-range numbers, so compare the page's season heading.
        verify_season = soup_object.find('h3' , attrs = {'id' :'episode_top'})
        curr_season = int ( verify_season.text[6:] )
        if not season_number == curr_season :
            break
        print ("Season - ", season_number , " information extracted " )
        ## excel file: one sheet per season, with a header row.
        result_sheet = result_file.add_sheet( verify_season.text , cell_overwrite_ok=True)
        result_sheet.write( 0 , 0 , " Name " )
        result_sheet.write( 0 , 1 , " Rating " )
        result_sheet.write( 0 , 2 , " Total votes " )
        result_sheet.write( 0 , 3 , " Summary " )
        result_sheet.col(3).width = 21000
        result_sheet.col(0).width = 10000
        episodes_season = soup_object.find_all('div' , class_ = 'info' )
        curr_episode = 1
        for episode in episodes_season :
            ## get the name of the episode
            name_episode = episode.find('strong')
            ## get the rating of the episode
            rating_episode = episode.find('span' , class_ = 'ipl-rating-star__rating' )
            ## total votes (wrapped in parentheses on the page, hence [1:-1])
            votes_episode = episode.find('span' , class_ = 'ipl-rating-star__total-votes' )
            ## summary
            summary_episode = episode.find('div' , class_ = 'item_description' )
            ## write to the excel file; each field may be missing per episode.
            if name_episode :
                result_sheet.write( curr_episode , 0 , name_episode.text )
            if rating_episode :
                result_sheet.write( curr_episode , 1 , rating_episode.text )
            if votes_episode :
                result_sheet.write( curr_episode , 2 , votes_episode.text[1:-1] )
            if summary_episode :
                result_sheet.write( curr_episode , 3 , summary_episode.text )
            curr_episode = curr_episode + 1
        season_number = season_number + 1
    print ( " Finished ")
    result_file.save( show_name.replace('+' , '_') + '.xls')
start() | 30.485981 | 84 | 0.680258 |
ace4ca095cd01d74c4569973affda4bdcb22dac2 | 1,389 | py | Python | tests/browser/pages/domestic/domestic_eu_exit_contact_us_thank_you.py | mayank-sfdc/directory-tests | 6e978bc1a27c19389e99e454143122aa27e47b85 | [
"MIT"
] | 4 | 2017-06-02T09:09:10.000Z | 2018-01-25T19:06:12.000Z | tests/browser/pages/domestic/domestic_eu_exit_contact_us_thank_you.py | mayank-sfdc/directory-tests | 6e978bc1a27c19389e99e454143122aa27e47b85 | [
"MIT"
] | 53 | 2016-10-27T22:31:03.000Z | 2022-03-07T11:18:25.000Z | tests/browser/pages/domestic/domestic_eu_exit_contact_us_thank_you.py | mayank-sfdc/directory-tests | 6e978bc1a27c19389e99e454143122aa27e47b85 | [
"MIT"
] | 3 | 2017-11-22T11:42:40.000Z | 2022-02-21T01:20:04.000Z | # -*- coding: utf-8 -*-
"""Domestic - Domestic EU Exit Contact us - Thank you for your enquiry."""
import logging
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.webdriver import WebDriver
from directory_tests_shared import URLs
from directory_tests_shared.enums import PageType, Service
from pages.common_actions import Selector, check_url
NAME = "Brexit help"
SERVICE = Service.DOMESTIC
TYPE = PageType.THANK_YOU
URL = URLs.CONTACT_US_DOMESTIC_BREXIT_CONTACT_SUCCESS.absolute
PAGE_TITLE = "Welcome to great.gov.uk"
PDF_LINKS = Selector(By.CSS_SELECTOR, "#documents-section a.link")
SELECTORS = {
"beta bar": {
"self": Selector(By.ID, "header-beta-bar"),
"beta bar": Selector(By.CSS_SELECTOR, "#header-beta-bar strong"),
"feedback": Selector(By.CSS_SELECTOR, "#header-beta-bar a"),
},
"confirmation": {
"itself": Selector(By.ID, "confirmation-section"),
"heading": Selector(
By.CSS_SELECTOR, "#confirmation-section div.heading-container"
),
},
"report this page": {
"self": Selector(By.CSS_SELECTOR, "section.error-reporting"),
"report link": Selector(By.CSS_SELECTOR, "section.error-reporting a"),
},
}
def should_be_here(driver: WebDriver):
check_url(driver, URL, exact_match=True)
logging.debug(f"All expected elements are visible on '{URL}'")
| 33.878049 | 78 | 0.700504 |
ace4cbd5d068dfdb88a10d3245bdb3011d0852ef | 252 | py | Python | Exercicios mundo 1/ex034.py | prc3333/Exercicios--de-Phyton- | a4b54af45f6bb3a89a205b570e1cf1164e505e29 | [
"MIT"
] | null | null | null | Exercicios mundo 1/ex034.py | prc3333/Exercicios--de-Phyton- | a4b54af45f6bb3a89a205b570e1cf1164e505e29 | [
"MIT"
] | null | null | null | Exercicios mundo 1/ex034.py | prc3333/Exercicios--de-Phyton- | a4b54af45f6bb3a89a205b570e1cf1164e505e29 | [
"MIT"
] | null | null | null | salário = float(input('Qual é o salário do funcionario: '))
if salário <= 1250:
novo = salário + (salário * 15 / 100)
else:
novo = salário + (salário * 10 / 100)
print('Quem ganhava R${:.2f} passa a ganhar R${:.2f} agora'.format(salário, novo)) | 42 | 82 | 0.642857 |
ace4cbeccd5d7ddd3caf954229849ede87e17ea8 | 5,831 | py | Python | wsc_django/wsc_django/apps/config/migrations/0001_initial.py | hzh595395786/wsc_django | c0a4de1a4479fe83f36108c1fdd4d68d18348b8d | [
"MIT"
] | 2 | 2021-02-07T05:56:46.000Z | 2021-05-12T02:11:24.000Z | wsc_django/wsc_django/apps/config/migrations/0001_initial.py | hzh595395786/wsc_django | c0a4de1a4479fe83f36108c1fdd4d68d18348b8d | [
"MIT"
] | null | null | null | wsc_django/wsc_django/apps/config/migrations/0001_initial.py | hzh595395786/wsc_django | c0a4de1a4479fe83f36108c1fdd4d68d18348b8d | [
"MIT"
] | null | null | null | # Generated by Django 3.1.6 on 2021-06-06 12:54
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial (auto-generated) schema for the config app: message-notify
    switches, printer/receipt settings, share setup and misc. options.
    Every model carries auto ``create_at``/``update_at`` timestamps."""

    initial = True

    dependencies = [
    ]

    operations = [
        # Per-event WeChat ("_wx") / SMS ("_msg") notification toggles.
        # NOTE(review): db_table 'msgnotfiy' looks like a typo for
        # 'msgnotify' but is what the database actually uses — do not "fix".
        migrations.CreateModel(
            name='MsgNotify',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('order_confirm_wx', models.BooleanField(default=False, verbose_name='开始配送/等待自提-微信')),
                ('order_confirm_msg', models.BooleanField(default=False, verbose_name='开始配送/等待自提-短信')),
                ('order_finish_wx', models.BooleanField(default=False, verbose_name='订单完成-微信')),
                ('order_finish_msg', models.BooleanField(default=False, verbose_name='订单完成-短信')),
                ('order_refund_wx', models.BooleanField(default=False, verbose_name='订单退款-微信')),
                ('order_refund_msg', models.BooleanField(default=False, verbose_name='订单退款-短信')),
                ('group_success_wx', models.BooleanField(default=False, verbose_name='成团提醒-微信')),
                ('group_success_msg', models.BooleanField(default=False, verbose_name='成团提醒-短信')),
                ('group_failed_wx', models.BooleanField(default=False, verbose_name='拼团失败-微信')),
                ('group_failed_msg', models.BooleanField(default=False, verbose_name='拼团失败-短信')),
            ],
            options={
                'verbose_name': '消息通知',
                'verbose_name_plural': '消息通知',
                'db_table': 'msgnotfiy',
            },
        ),
        # Receipt printer configuration (brand, terminal code, key, etc.).
        migrations.CreateModel(
            name='Printer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('type', models.SmallIntegerField(default=1, verbose_name='打印机类型1:本地2:云, 预留')),
                ('brand', models.SmallIntegerField(verbose_name='打印机品牌 1:易联云, 2:飞印, 3:佛山喜讯, 4:365 S1, 5:365 S2, 6:森果')),
                ('code', models.CharField(default='', max_length=32, verbose_name='打印机终端号')),
                ('key', models.CharField(default='', max_length=32, verbose_name='打印机秘钥')),
                ('temp_id', models.SmallIntegerField(default=1, verbose_name='打印模板, 预留')),
                ('auto_print', models.SmallIntegerField(default=1, verbose_name='订单自动打印')),
                ('status', models.SmallIntegerField(default=1, verbose_name='打印机状态,预留')),
            ],
            options={
                'verbose_name': '打印机',
                'verbose_name_plural': '打印机',
                'db_table': 'printer',
            },
        ),
        # Printed-receipt layout: footer text/QR/image, barcode, copy count.
        migrations.CreateModel(
            name='Receipt',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('bottom_msg', models.CharField(default='', max_length=128, verbose_name='小票底部信息')),
                ('bottom_qrcode', models.CharField(default='', max_length=128, verbose_name='小票底部二维码')),
                ('bottom_image', models.CharField(default='', max_length=512, verbose_name='小票底部图片,预留')),
                ('brcode_active', models.SmallIntegerField(default=0, verbose_name='打印订单号条码')),
                ('copies', models.SmallIntegerField(default=1, verbose_name='小票打印份数')),
            ],
            options={
                'verbose_name': '小票',
                'verbose_name_plural': '小票',
                'db_table': 'receipt',
            },
        ),
        # Customizable share title/description for social sharing.
        migrations.CreateModel(
            name='ShareSetup',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('custom_title_name', models.CharField(default='', max_length=64, verbose_name='自定义分享标题名称')),
                ('custom_share_description', models.CharField(default='', max_length=64, verbose_name='自定义分享描述')),
            ],
            options={
                'verbose_name': '分享设置',
                'verbose_name_plural': '分享设置',
                'db_table': 'share_setup',
            },
        ),
        # Miscellaneous shop toggles (off-shelf products, voice alert, pay).
        migrations.CreateModel(
            name='SomeConfig',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_at', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_at', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('show_off_product', models.BooleanField(default=True, verbose_name='货品板块显示已下架货品')),
                ('new_order_voice', models.BooleanField(default=True, verbose_name='新订单语音提醒')),
                ('weixin_jsapi', models.BooleanField(default=False, verbose_name='是否开启微信支付')),
                ('on_delivery', models.BooleanField(default=True, verbose_name='是否开启货到付款')),
            ],
            options={
                'verbose_name': '一些杂乱的配置项',
                'verbose_name_plural': '一些杂乱的配置项',
                'db_table': 'some_config',
            },
        ),
    ]
| 53.990741 | 120 | 0.580518 |
ace4cc214b4ab63a4a8702b4938151e302f78c25 | 840 | py | Python | adv/s_ranzal.py | KingMikeXS/dl | c05f1c2e96aa7d13f6a5e92df05fb4e7b00bcebd | [
"Apache-2.0"
] | null | null | null | adv/s_ranzal.py | KingMikeXS/dl | c05f1c2e96aa7d13f6a5e92df05fb4e7b00bcebd | [
"Apache-2.0"
] | null | null | null | adv/s_ranzal.py | KingMikeXS/dl | c05f1c2e96aa7d13f6a5e92df05fb4e7b00bcebd | [
"Apache-2.0"
] | null | null | null | import adv.adv_test
from core.advbase import *
from slot.a import *
def module():
    """Return the adventurer class this module exports (test-runner hook)."""
    return Summer_Ranzal
class Summer_Ranzal(Adv):
    """Simulation config for the Summer Ranzal adventurer.  Attribute
    semantics (a1/a3 abilities, conf keys, skill procs) are defined by
    core.advbase.Adv; this framework uses ``this`` in place of ``self``."""
    a1 = ('lo',0.4)
    a3 = ('primed_def', 0.08)
    conf = {}
    # Weapon/dragon slot setup and the skill-priority action list.
    conf['slot.a'] = RR() + FRH()
    conf['acl'] = """
        `s1, x=5
        `s2, x=5
        `s3, x=5
    """
    # Fully resistant to the bog affliction.
    conf['afflict_res.bog'] = 100

    def init(this):
        this.a3_iscding = 0
        # Use the team-buff variant of s2 when simulating a full team.
        if this.condition('buff all team'):
            this.s2_proc = this.c_s2_proc

    def s1_proc(this, e):
        # Two damage instances with a bog application in between.
        this.dmg_make('s1',2.16)
        this.afflics.bog.on('s1', 100)
        this.dmg_make('s1',6.48)

    def c_s2_proc(this, e):
        # Team-wide variant (see init).
        Teambuff('s2',0.10,15).on()

    def s2_proc(this, e):
        Selfbuff('s2',0.10,15).on()
if __name__ == '__main__':
    # Run the shared adventurer test harness with default config.
    conf = {}
    adv.adv_test.test(module(), conf, verbose=-2)
| 21 | 49 | 0.533333 |
ace4cc8f8db3f5549d85d5d5c871c2a82589b5b8 | 5,007 | py | Python | cloudmesh_client/shell/console.py | cloudmesh/client | a5fc7dbaf2c51f1227cff346aedea4bf7f563fa9 | [
"Apache-2.0"
] | 3 | 2016-07-16T20:35:41.000Z | 2017-03-27T23:31:27.000Z | cloudmesh_client/shell/console.py | cloudmesh/client | a5fc7dbaf2c51f1227cff346aedea4bf7f563fa9 | [
"Apache-2.0"
] | 259 | 2015-06-18T19:19:14.000Z | 2021-09-23T23:22:30.000Z | cloudmesh_client/shell/console.py | cloudmesh/client | a5fc7dbaf2c51f1227cff346aedea4bf7f563fa9 | [
"Apache-2.0"
] | 19 | 2015-12-09T05:55:13.000Z | 2018-12-02T08:08:43.000Z | from __future__ import print_function
import traceback
import textwrap
from colorama import Fore, Back, Style
import colorama
colorama.init()
def indent(text, indent=2, width=128):
    """Wrap *text* to *width* columns, prefixing each line with *indent* spaces."""
    prefix = " " * indent
    wrapped = textwrap.wrap(
        text, width=width, initial_indent=prefix, subsequent_indent=prefix
    )
    return "\n".join(wrapped)
class Console(object):
    """
    A simple way to print in a console terminal in color. Instead of using
    simply the print statement you can use special methods to indicate
    warnings, errors, ok and regular messages.

    Example Usage::

        Console.warning("Warning")
        Console.error("Error")
        Console.info("Info")
        Console.msg("msg")
        Console.ok("Success")

    One can switch the color mode off with::

        Console.color = False
        Console.error("Error")

    The color will be switched on by default.
    """
    color = True
    debug = True

    # Named escape codes for the colored theme (colorama + raw ANSI).
    theme_color = {
        'HEADER': Fore.MAGENTA,
        'BLACK': Fore.BLACK,
        'CYAN': Fore.CYAN,
        'WHITE': Fore.WHITE,
        'BLUE': Fore.BLUE,
        'OKBLUE': Fore.BLUE,
        'OKGREEN': Fore.GREEN,
        'GREEN': Fore.GREEN,
        'FAIL': Fore.RED,
        'WARNING': Fore.MAGENTA,
        'RED': Fore.RED,
        'ENDC': '\033[0m',
        'BOLD': "\033[1m",
    }

    # Black-and-white theme: same keys, empty escape codes.
    theme_bw = {
        'HEADER': '',
        'BLACK': '',
        'CYAN': '',
        'WHITE': '',
        'BLUE': '',
        'OKBLUE': '',
        'OKGREEN': '',
        'GREEN': '',
        'FAIL': '',
        'WARNING': '',
        'RED': '',
        'ENDC': '',
        'BOLD': "",
    }

    theme = theme_color

    @classmethod
    def set_debug(cls, on=True):
        """Enable/disable traceback printing in :meth:`error`."""
        cls.debug = on

    @staticmethod
    def set_theme(color=True):
        """Select the colored (True) or black-and-white (False) theme."""
        if color:
            Console.theme = Console.theme_color
        else:
            Console.theme = Console.theme_bw
        Console.color = color

    @staticmethod
    def get(name):
        """Return the escape code for *name*, falling back to 'BLACK'."""
        if name in Console.theme:
            return Console.theme[name]
        else:
            return Console.theme['BLACK']

    # NOTE: a wrapping variant ``msg(message, width=90)`` used to be defined
    # here; it was immediately shadowed by the definition below and has been
    # removed as dead code.
    @staticmethod
    def msg(message):
        """Print a plain message (None prints as an empty line)."""
        message = message or ""
        print(message)

    @classmethod
    def error(cls, message, prefix=True, traceflag=True):
        """Print an error message, optionally followed by the current
        traceback (when *traceflag* and :attr:`debug` are set)."""
        message = message or ""
        if prefix:
            text = "ERROR: "
        else:
            text = ""
        if cls.color:
            cls.cprint('FAIL', text, str(message))
        else:
            # msg() prints itself; wrapping it in print() used to emit a
            # spurious "None" line (Python 2 -> 3 porting leftover).
            cls.msg(text + str(message))
        if traceflag and cls.debug:
            trace = traceback.format_exc().strip()
            # print() (not bare ``print``, a no-op in Python 3) frames the
            # traceback with blank lines as originally intended.
            print()
            print("\n".join(str(trace).splitlines()))
            print()

    @staticmethod
    def TODO(message, prefix=True, traceflag=True):
        """Print a TODO message, optionally with the current traceback."""
        message = message or ""
        if prefix:
            text = "TODO: "
        else:
            text = ""
        if Console.color:
            Console.cprint('FAIL', text, str(message))
        else:
            Console.msg(text + str(message))
        trace = traceback.format_exc().strip()
        # format_exc() returns "None" when no exception is being handled.
        if traceflag and trace != "None":
            print()
            print("\n".join(str(trace).splitlines()))
            print()

    @staticmethod
    def debug_msg(message):
        """Print a debug message."""
        message = message or ""
        if Console.color:
            Console.cprint('RED', 'DEBUG: ', message)
        else:
            Console.msg('DEBUG: ' + message)

    @staticmethod
    def info(message):
        """Print an informational message."""
        message = message or ""
        if Console.color:
            Console.cprint('OKBLUE', "INFO: ", message)
        else:
            Console.msg("INFO: " + message)

    @staticmethod
    def warning(message):
        """Print a warning message."""
        message = message or ""
        if Console.color:
            Console.cprint('WARNING', "WARNING: ", message)
        else:
            Console.msg("WARNING: " + message)

    @staticmethod
    def ok(message):
        """Print a success message."""
        message = message or ""
        if Console.color:
            Console.cprint('OKGREEN', "", message)
        else:
            Console.msg(message)

    @staticmethod
    def cprint(color, prefix, message):
        """Print *prefix* + *message* wrapped in theme color *color*."""
        message = message or ""
        prefix = prefix or ""
        print((Console.theme[color] +
               prefix +
               message +
               Console.theme['ENDC']))
#
# Example
#
if __name__ == "__main__":
    # Manual smoke test: exercise each message type in color, then again
    # with color disabled, and finally print raw colorama escapes.
    print(Console.color)
    print(Console.theme)
    Console.warning("Warning")
    Console.error("Error")
    Console.info("Info")
    Console.msg("msg")
    Console.ok("Ok")
    Console.color = False
    print(Console.color)
    Console.error("Error")
    print(Fore.RED + 'some red text')
    print(Back.GREEN + 'and with a green background')
    print(Style.DIM + 'and in dim text')
    print(Fore.RESET + Back.RESET + Style.RESET_ALL)
    print('back to normal now')
ace4cd11473aad721656358e94b66cce49583b1f | 7,162 | py | Python | src/com/dtmilano/android/plot.py | mowshon/AndroidViewClient | e52c1cc3e8b282fe5bec55d84771ab9707a463e6 | [
"Apache-2.0"
] | null | null | null | src/com/dtmilano/android/plot.py | mowshon/AndroidViewClient | e52c1cc3e8b282fe5bec55d84771ab9707a463e6 | [
"Apache-2.0"
] | null | null | null | src/com/dtmilano/android/plot.py | mowshon/AndroidViewClient | e52c1cc3e8b282fe5bec55d84771ab9707a463e6 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Copyright (C) 2012-2018 Diego Torres Milano
Created on mar 11, 2017
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@author: Diego Torres Milano
"""
import sys
import types
from math import ceil
import matplotlib.pyplot as plt
import mpl_toolkits.axisartist as AA
import numpy as np
from mpl_toolkits.axes_grid1 import host_subplot
from com.dtmilano.android.adb.dumpsys import Dumpsys
__version__ = '20.0.0b3'
DEBUG = True
NumberTypes = (int, int, float)
class Plot:
    """Accumulates successive :class:`Dumpsys` samples (or plain numbers)
    and renders them with matplotlib: multi-axis meminfo trends, or a
    gfxinfo framestats histogram."""

    def __init__(self):
        # Sample counter and its x-axis values.
        self.n = 0
        self.na = []
        # Plain numeric samples (used when append() receives numbers).
        self.va = []
        self.ava = {}
        ''' Associative values array '''
        self.aava = {}
        ''' (another) Associative values array '''

    def append(self, value):
        """Record one sample; *value* is either a number or a Dumpsys.
        Returns self so calls can be chained."""
        if DEBUG:
            print('append({})'.format(value), file=sys.stderr)
        self.n += 1
        self.na.append(self.n)
        if isinstance(value, NumberTypes):
            self.va.append(value)
        elif isinstance(value, Dumpsys):
            # Lazily create the per-key series on first Dumpsys sample.
            if not self.ava:
                self.__initAva()
            if not self.aava:
                self.__initAava()
            dumpsys = value
            self.ava[Dumpsys.TOTAL].append(dumpsys.get(Dumpsys.TOTAL))
            self.ava[Dumpsys.ACTIVITIES].append(dumpsys.get(Dumpsys.ACTIVITIES))
            self.ava[Dumpsys.VIEWS].append(dumpsys.get(Dumpsys.VIEWS))
            # self.ava[Dumpsys.VIEW_ROOT_IMPL].append(dumpsys.get(Dumpsys.VIEW_ROOT_IMPL))
            self.aava[Dumpsys.FRAMESTATS].append(dumpsys.get(Dumpsys.FRAMESTATS))
        return self

    def __initAva(self):
        # One time series per meminfo key.
        self.ava[Dumpsys.TOTAL] = []
        self.ava[Dumpsys.ACTIVITIES] = []
        self.ava[Dumpsys.VIEWS] = []
        # self.ava[Dumpsys.VIEW_ROOT_IMPL] = []

    def __initAava(self):
        self.aava[Dumpsys.FRAMESTATS] = []

    def plot(self, _type=Dumpsys.MEMINFO, filename=None):
        """Render the collected samples.

        :param _type: Dumpsys.MEMINFO (multi-axis trend) or
            Dumpsys.FRAMESTATS (frame-time histogram)
        :param filename: save the figure there instead of showing it
        :raises RuntimeError: MEMINFO plot requested with no values
        """
        title = "Dumpsys"
        if _type == Dumpsys.FRAMESTATS:
            subtitle = "gfxinfo " + Dumpsys.FRAMESTATS
        else:
            subtitle = _type
        if _type == Dumpsys.MEMINFO:
            if self.ava:
                if DEBUG:
                    print("plot:", file=sys.stderr)
                    for k in list(self.ava.keys()):
                        print(" {}: {}".format(k, self.ava[k]), file=sys.stderr)
                # TOTAL is the host axis; every other key gets its own
                # parasite y-axis stacked to the right.
                host = host_subplot(111, axes_class=AA.Axes)
                plt.subplots_adjust(right=0.75)
                par = {}
                for k in list(self.ava.keys()):
                    if k != Dumpsys.TOTAL:
                        par[k] = host.twinx()
                axis = 1
                for k in list(self.ava.keys()):
                    if k != Dumpsys.TOTAL and k != Dumpsys.ACTIVITIES:
                        # Offset each extra axis 60pt further right.
                        offset = axis * 60
                        axis += 1
                        new_fixed_axis = par[k].get_grid_helper().new_fixed_axis
                        par[k].axis["right"] = new_fixed_axis(loc="right",
                                                             axes=par[k],
                                                             offset=(offset, 0))
                        par[k].axis["right"].toggle(all=True)
                if DEBUG:
                    print("setting host x lim {} {}".format(np.amin(self.na), np.amax(self.na)), file=sys.stderr)
                # Pad both axes by one inter-sample division.
                minx = np.amin(self.na)
                maxx = np.amax(self.na)
                divx = abs(maxx - minx) / (len(self.na) * 1.0)
                host.set_xlim(minx - divx, maxx + divx)
                miny = np.amin(self.ava[Dumpsys.TOTAL])
                maxy = np.amax(self.ava[Dumpsys.TOTAL])
                divy = ceil(abs(maxy - miny) / (len(self.ava[Dumpsys.TOTAL]) * 1.0))
                if DEBUG:
                    print("setting host y lim {} {}".format(miny - divy, maxy + divy), file=sys.stderr)
                host.set_ylim(miny - divy, maxy + divy)
                host.set_xlabel('N')
                host.set_ylabel(Dumpsys.TOTAL)
                for k in list(self.ava.keys()):
                    if k != Dumpsys.TOTAL:
                        par[k].set_ylabel(k)
                plots = {}
                if DEBUG:
                    print(" host plot {} : {}".format(self.na, self.ava[Dumpsys.TOTAL]), file=sys.stderr)
                plots[Dumpsys.TOTAL], = host.plot(self.na, self.ava[Dumpsys.TOTAL], label=Dumpsys.TOTAL, linewidth=2)
                for k in list(self.ava.keys()):
                    if k != Dumpsys.TOTAL:
                        if DEBUG:
                            print(" {} plot {} : {}".format(k, self.na, self.ava[k]), file=sys.stderr)
                        plots[k], = par[k].plot(self.na, self.ava[k], label=k, linewidth=2)
                for k in list(self.ava.keys()):
                    if k != Dumpsys.TOTAL:
                        miny = np.amin(self.ava[k])
                        maxy = np.amax(self.ava[k])
                        divy = ceil(abs(maxy - miny) / (len(self.ava[k]) * 1.0))
                        if DEBUG:
                            print("setting {} y lim {}".format(k, (miny - divy, maxy + divy)), file=sys.stderr)
                        par[k].set_ylim(miny - divy, maxy + divy)
                host.legend()
                # host.axis["left"].label.set_color(plots[Dumpsys.TOTAL].get_color())
                # for k in self.ava.keys():
                #     if k != Dumpsys.TOTAL:
                #         par[k].axis["right"].label.set_color(plots[k].get_color())
            elif self.va:
                # Plain numeric samples: single simple line plot.
                plt.xlabel('N')
                plt.ylabel('V')
                plt.plot(self.na, self.va, label="A")
            else:
                raise RuntimeError("No values to plot")
        elif _type == Dumpsys.FRAMESTATS:
            if DEBUG:
                print(" plot: histogram {}".format(self.aava[Dumpsys.FRAMESTATS]), file=sys.stderr)
            n, bins, patches = plt.hist(self.aava[Dumpsys.FRAMESTATS])
            ymax = np.amax(n)
            # Vertical reference lines at the 60 fps and 30 fps frame
            # budgets (in milliseconds).
            x = []
            y = []
            for v in range(int(ceil(ymax)) + 1):
                x.append(1 / 60.0 * 10 ** 3)
                y.append(v)
            plt.plot(x, y, linewidth=2, color='c')
            x = []
            y = []
            for v in range(int(ceil(ymax)) + 1):
                x.append(1 / 30.0 * 10 ** 3)
                y.append(v)
            plt.plot(x, y, linewidth=2, color='r')
            plt.xlabel('ms')
            plt.ylabel('Frames')
        plt.title(title + ' ' + subtitle)
        plt.grid(True)
        plt.draw()
        if filename:
            plt.savefig(filename)
        else:
            plt.show()
| 37.89418 | 117 | 0.502653 |
ace4cd52ea659bf12e5e85f2c083919e9942062b | 510 | py | Python | hackerearth/Algorithms/Milly and Chocolates IV/test.py | ATrain951/01.python-com_Qproject | c164dd093954d006538020bdf2e59e716b24d67c | [
"MIT"
] | 4 | 2020-07-24T01:59:50.000Z | 2021-07-24T15:14:08.000Z | hackerearth/Algorithms/Milly and Chocolates IV/test.py | ATrain951/01.python-com_Qproject | c164dd093954d006538020bdf2e59e716b24d67c | [
"MIT"
] | null | null | null | hackerearth/Algorithms/Milly and Chocolates IV/test.py | ATrain951/01.python-com_Qproject | c164dd093954d006538020bdf2e59e716b24d67c | [
"MIT"
] | null | null | null | import io
import unittest
from contextlib import redirect_stdout
from unittest.mock import patch
class TestQ(unittest.TestCase):
    """Feed a canned stdin transcript to the solution and check its stdout."""

    @patch('builtins.input',
           side_effect=['1', '2 10 10', '1 2', '10', '4 2'])
    def test_case_0(self, input_mock=None):
        # Importing `solution` executes the script; capture everything it prints.
        captured = io.StringIO()
        with redirect_stdout(captured):
            import solution
        self.assertEqual(captured.getvalue(), '1 20\n')


if __name__ == '__main__':
    unittest.main()
| 21.25 | 56 | 0.607843 |
ace4cdcb4cd6e58115c64acbd51b41884e5c740f | 929 | py | Python | leveldb_to_recordio.py | rozim/ChessAtAGlance | c4ba60ffc08e609b0673513c4191bbb6e5b14366 | [
"Apache-2.0"
] | null | null | null | leveldb_to_recordio.py | rozim/ChessAtAGlance | c4ba60ffc08e609b0673513c4191bbb6e5b14366 | [
"Apache-2.0"
] | null | null | null | leveldb_to_recordio.py | rozim/ChessAtAGlance | c4ba60ffc08e609b0673513c4191bbb6e5b14366 | [
"Apache-2.0"
] | null | null | null | import struct
import tensorflow as tf
import leveldb
import time
import sys, os
from absl import app
from absl import flags
# Command-line flags (absl): source LevelDB path and destination TFRecord file.
# Both are declared optional here and enforced as required inside main().
FLAGS = flags.FLAGS
flags.DEFINE_string('fn_in', None, '')
flags.DEFINE_string('fn_out', None, '')
def main(argv):
  """Copy every value from a LevelDB of serialized tf.train.Examples into a
  ZLIB-compressed TFRecord file.

  Required flags: --fn_in (source LevelDB directory) and --fn_out
  (destination TFRecord path); they must differ.
  """
  flags.mark_flags_as_required(['fn_in', 'fn_out'])
  assert FLAGS.fn_in != FLAGS.fn_out
  n = 0
  mod = 64 * 1024  # progress-report interval; doubles after each report
  t1 = time.time()
  opts = tf.io.TFRecordOptions(
    compression_type='ZLIB',
    output_buffer_size=(4 * 1024 * 1024))
  with tf.io.TFRecordWriter(FLAGS.fn_out, opts) as rio:
    db = leveldb.LevelDB(FLAGS.fn_in)
    for ent in db.RangeIter():
      # Values are already serialized Examples; write the raw bytes directly
      # instead of the original parse-and-reserialize round trip.
      rio.write(bytes(ent[1]))
      n += 1
      if n % mod == 0:
        print(n, int(time.time() - t1))
        mod *= 2
  print()
  print('done', n, int(time.time() - t1))


if __name__ == '__main__':
  app.run(main)
| 23.820513 | 74 | 0.653391 |
ace4ce34a88357dbb7ffab28ba17f493415dd485 | 1,531 | py | Python | dynamicdns/plugins/rackspace.py | damianmoore/django-dynamic-dns | e84c5b827117b02481e1a09d70fc437b3031fc93 | [
"BSD-2-Clause"
] | 18 | 2015-01-12T22:25:55.000Z | 2022-03-02T11:49:56.000Z | dynamicdns/plugins/rackspace.py | damianmoore/django-dynamic-dns | e84c5b827117b02481e1a09d70fc437b3031fc93 | [
"BSD-2-Clause"
] | 1 | 2020-05-20T18:48:06.000Z | 2020-05-20T18:48:06.000Z | dynamicdns/plugins/rackspace.py | damianmoore/django-dynamic-dns | e84c5b827117b02481e1a09d70fc437b3031fc93 | [
"BSD-2-Clause"
] | 6 | 2016-10-27T03:29:48.000Z | 2022-02-18T20:00:23.000Z | import requests
from . import DynamicDnsPlugin
class Rackspace(DynamicDnsPlugin):
def update(self, ip):
fqdn = self.domain.split('.', 1)[1]
# Authenticate to get token and tenent IDs
data = {'auth': {'RAX-KSKEY:apiKeyCredentials': {'username': self.config['username'], 'apiKey': self.config['api_key']}}}
response = requests.post('https://identity.api.rackspacecloud.com/v2.0/tokens', json=data).json()
token_id = response['access']['token']['id']
tenant_id = response['access']['token']['tenant']['id']
# Get domain ID for fetching/updateing records of
headers = {'X-Auth-Token': token_id}
response = requests.get(f'https://dns.api.rackspacecloud.com/v1.0/{tenant_id}/domains?name={fqdn}', headers=headers).json()
domain_id = response['domains'][0]['id']
# Get record for the subdomain
response = requests.get(f'https://dns.api.rackspacecloud.com/v1.0/{tenant_id}/domains/{domain_id}/records?type=A&name={self.domain}', headers=headers).json()
record_id = response['records'][0]['id']
# Update existing record
record_data = {
'records': [
{
'name': self.domain,
'id': record_id,
'data': ip,
'ttl': 300
}
]
}
requests.put(f'https://dns.api.rackspacecloud.com/v1.0/{tenant_id}/domains/{domain_id}/records', headers=headers, json=record_data).json()
| 41.378378 | 165 | 0.587851 |
ace4ce3657ebba8551f4c36385bb1d619a464b09 | 1,120 | py | Python | graying_the_box/others/Score_trans.py | tesslerc/H-DRLN | 87c643e193002fce3e1865a2e962351eff6cbdea | [
"MIT"
] | 31 | 2017-02-03T15:11:19.000Z | 2021-05-20T15:58:34.000Z | graying_the_box/others/Score_trans.py | tesslerc/H-DRLN | 87c643e193002fce3e1865a2e962351eff6cbdea | [
"MIT"
] | 1 | 2019-12-10T07:11:53.000Z | 2019-12-10T12:25:00.000Z | graying_the_box/others/Score_trans.py | tesslerc/H-DRLN | 87c643e193002fce3e1865a2e962351eff6cbdea | [
"MIT"
] | 4 | 2017-03-25T07:19:59.000Z | 2019-05-26T02:16:49.000Z | import sys
sys.path.append('/home/tom/OpenBox/bhtsne/')
import numpy as np
import h5py
import matplotlib.image as mpimg
import numpy as np
import matplotlib.pyplot as plt
numframes = 13000
ind1=6323
ind2=1315
im_size = 84
print "loading states... "
Seaquest_state_file = h5py.File('/home/tom/OpenBox/tsne_res/seaquest/13k/screens.h5', 'r')
Seaquest_state_mat = Seaquest_state_file['data']
Seaquest_states = Seaquest_state_mat[:numframes]
Seaquest_states = np.reshape(np.transpose(Seaquest_states), (3,210,160,-1))
Seaquest_states=np.transpose(Seaquest_states,(3,1,2,0))
fig, axs = plt.subplots(nrows=1, ncols=3)
for ax in axs.flat:
ax.set_xticklabels([])
ax.set_yticklabels([])
for tic in ax.xaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
for tic in ax.yaxis.get_major_ticks():
tic.tick1On = tic.tick2On = False
axs.flat[0].imshow(Seaquest_states[ind1], interpolation='none')
axs.flat[2].imshow(Seaquest_states[ind1+1], interpolation='none')
Seaquest_img=mpimg.imread('/home/tom/Desktop/score/transition1.png')
axs.flat[1].imshow(Seaquest_img)
plt.show()
| 24.888889 | 90 | 0.738393 |
ace4cfd75668d59512f439a4542d223a79dfae30 | 1,803 | py | Python | models/multiplicative_lstm.py | ShobhitLamba/Sentiment-Analysis | 100ecd81d75287fd78fdc77b5802866c60b2330e | [
"MIT"
] | null | null | null | models/multiplicative_lstm.py | ShobhitLamba/Sentiment-Analysis | 100ecd81d75287fd78fdc77b5802866c60b2330e | [
"MIT"
] | null | null | null | models/multiplicative_lstm.py | ShobhitLamba/Sentiment-Analysis | 100ecd81d75287fd78fdc77b5802866c60b2330e | [
"MIT"
] | null | null | null | # Recurrent Neural Network with Multiplicative-LSTM running over imdb dataset
# Author: Shobhit Lamba
# e-mail: slamba4@uic.edu
# Importing the libraries
from keras.models import Sequential
from keras.layers import Embedding, Dense
from keras.preprocessing import sequence
from keras.datasets import imdb
from sklearn.metrics import precision_recall_fscore_support as score
from utils.multiplicative_LSTM import MultiplicativeLSTM
MAX_FEATURES = 20000
batch_size = 32
embedding_dims = 128
MAX_SEQUENCE_LENGTH = 80
(x_train, y_train), (x_test, y_test) = imdb.load_data(num_words = MAX_FEATURES)
x_train = sequence.pad_sequences(x_train, MAX_SEQUENCE_LENGTH)
x_test = sequence.pad_sequences(x_test, MAX_SEQUENCE_LENGTH)
# Building the network architecture
model = Sequential()
model.add(Embedding(MAX_FEATURES, embedding_dims))
model.add(MultiplicativeLSTM(128, dropout = 0.2, recurrent_dropout = 0.2))
model.add(Dense(1, activation = "sigmoid"))
# Compiling the network
model.compile(loss = "binary_crossentropy",
optimizer = "adam",
metrics = ["accuracy"])
model.summary()
# Training
model.fit(x_train, y_train,
batch_size = batch_size,
epochs = 10,
validation_data = (x_test, y_test))
# Evaluating results
predicted_result = model.predict_classes(x_test, batch_size = batch_size)
print("\n\n_________________________\nResult", y_test, '\n_________________________\n\n')
precision, recall, fscore, support = score(y_test, predicted_result)
count = 0
for i in range(len(y_test)):
if(y_test[i] == predicted_result[i]):
count+=1
print('accuracy: ', count/len(y_test))
print('precision: {}'.format(precision))
print('recall: {}'.format(recall))
print('fscore: {}'.format(fscore))
print('support: {}'.format(support))
| 30.559322 | 89 | 0.746534 |
ace4d010f6c40be998e6c09beb49a90fc65b8213 | 2,046 | py | Python | research/envs/empty.py | jhejna/research-lightning | 4c7391a4a69d1753089d8e43be19de3e6b3bfe01 | [
"MIT"
] | 2 | 2022-01-13T23:15:32.000Z | 2022-01-18T21:23:47.000Z | research/envs/empty.py | jhejna/research-lightning | 4c7391a4a69d1753089d8e43be19de3e6b3bfe01 | [
"MIT"
] | null | null | null | research/envs/empty.py | jhejna/research-lightning | 4c7391a4a69d1753089d8e43be19de3e6b3bfe01 | [
"MIT"
] | null | null | null | import gym
import numpy as np
def _get_space(low=None, high=None, shape=None, dtype=None):
    """Build a gym space from (possibly dict-valued) bounds, shape, and dtype.

    - If any argument is a dict, a gym.spaces.Dict is built by recursing over
      the union of all keys; a non-dict argument applies to every key, and a
      missing key falls back to None.
    - Else, if shape is None and high is an int (low must then be None),
      a Discrete space of size `high` is returned.
    - Otherwise a Box is returned, with unspecified bounds defaulting to
      +/-inf and dtype defaulting to np.float32.
    """
    all_vars = [low, high, shape, dtype]
    if any(isinstance(v, dict) for v in all_vars):
        all_keys = set()  # union of keys across all dict-valued arguments
        for v in all_vars:
            if isinstance(v, dict):
                all_keys.update(v.keys())
        # Construct one sub-space per key.
        spaces = {}
        for k in all_keys:
            lo = low.get(k, None) if isinstance(low, dict) else low
            hi = high.get(k, None) if isinstance(high, dict) else high
            sh = shape.get(k, None) if isinstance(shape, dict) else shape
            dt = dtype.get(k, None) if isinstance(dtype, dict) else dtype
            spaces[k] = _get_space(lo, hi, sh, dt)
        # Construct the gym dict space
        return gym.spaces.Dict(**spaces)
    if shape is None and isinstance(high, int):
        assert low is None, "Tried to specify a discrete space with both high and low."
        return gym.spaces.Discrete(high)
    # Otherwise assume it's a box.
    if low is None:
        low = -np.inf
    if high is None:
        high = np.inf
    if dtype is None:
        dtype = np.float32
    return gym.spaces.Box(low=low, high=high, shape=shape, dtype=dtype)
class Empty(gym.Env):
    '''
    An empty holder for defining supervised learning problems as gym
    environments: it only carries observation_space and action_space,
    built via _get_space from the given ranges, shapes, and dtypes
    (each of which may be a scalar, a tuple, or a dict — see _get_space).
    step() and reset() are intentionally unsupported.
    '''
    def __init__(self, observation_low=None, observation_high=None, observation_shape=None, observation_dtype=np.float32,
                 action_low=None, action_high=None, action_shape=None, action_dtype=np.float32):
        # Delegate all space construction to the module-level helper.
        self.observation_space = _get_space(observation_low, observation_high, observation_shape, observation_dtype)
        self.action_space = _get_space(action_low, action_high, action_shape, action_dtype)
    def step(self, action):
        # Not an interactive environment; always raises.
        raise NotImplementedError("Empty Env does not have step")
    def reset(self, **kwargs):
        # Not an interactive environment; always raises.
        raise NotImplementedError("Empty Env does not have reset")
| 38.603774 | 121 | 0.638319 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.