hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ace5103b33130a22c13a5a3fe6b9754a49db5f46 | 3,871 | py | Python | dataprep/eda/distribution/__init__.py | yuzhenmao/dataprep | 1dbcddbf084ed49f719fe01f61d3e5e1ec418715 | [
"MIT"
] | null | null | null | dataprep/eda/distribution/__init__.py | yuzhenmao/dataprep | 1dbcddbf084ed49f719fe01f61d3e5e1ec418715 | [
"MIT"
] | null | null | null | dataprep/eda/distribution/__init__.py | yuzhenmao/dataprep | 1dbcddbf084ed49f719fe01f61d3e5e1ec418715 | [
"MIT"
] | null | null | null | """
This module implements the plot(df) function.
"""
from typing import Any, Dict, List, Optional, Union
import dask.dataframe as dd
import pandas as pd
from ..configs import Config
from ..container import Container
from ..dtypes import DTypeDef
from ...progress_bar import ProgressBar
from .compute import compute
from .render import render
__all__ = ["plot", "compute", "render"]
def plot(
    df: Union[pd.DataFrame, dd.DataFrame],
    x: Optional[str] = None,
    y: Optional[str] = None,
    z: Optional[str] = None,
    *,
    config: Optional[Dict[str, Any]] = None,
    display: Optional[List[str]] = None,
    dtype: Optional[DTypeDef] = None,
    progress: bool = True,
) -> Container:
    """Generates plots for exploratory data analysis.
    If no columns are specified, the distribution of
    each column is plotted. A histogram is plotted if the
    column contains numerical values, a bar chart is plotted
    if the column contains categorical values, a line chart is
    plotted if the column is of type datetime.
    If one column (x) is specified, the
    distribution of x is plotted in various ways. If x
    contains categorical values, a bar chart and pie chart are
    plotted. If x contains numerical values, a histogram,
    kernel density estimate plot, box plot, and qq plot are plotted.
    If x contains datetime values, a line chart is plotted.
    If two columns (x and y) are specified, plots depicting
    the relationship between the variables will be displayed. If
    x and y contain numerical values, a scatter plot, hexbin
    plot, and binned box plot are plotted. If one of x and y
    contain categorical values and the other contains numerical values,
    a box plot and multiline histogram are plotted. If x and y
    contain categorical values, a nested bar chart, stacked bar chart, and
    heat map are plotted. If one of x and y contains datetime values
    and the other contains numerical values, a line chart and a box plot
    are shown. If one of x and y contains datetime values and the other
    contains categorical values, a multiline chart and a stacked box plot
    are shown.
    If x, y, and z are specified, they must be one each of type datetime,
    numerical, and categorical. A multiline chart containing an aggregate
    on the numerical column grouped by the categorical column over time is
    plotted.
    Parameters
    ----------
    df
        DataFrame from which visualizations are generated
    x: Optional[str], default None
        A valid column name from the dataframe
    y: Optional[str], default None
        A valid column name from the dataframe
    z: Optional[str], default None
        A valid column name from the dataframe
    config
        A dictionary for configuring the visualizations
        E.g. config={"hist.bins": 20}
    display
        A list containing the names of the visualizations to display
        E.g. display=["Histogram"]
    dtype: str or DType or dict of str or dict of DType, default None
        Specify Data Types for designated column or all columns.
        E.g. dtype = {"a": Continuous, "b": "Nominal"} or
        dtype = {"a": Continuous(), "b": "nominal"}
        or dtype = Continuous() or dtype = "Continuous" or dtype = Continuous().
    progress
        Enable the progress bar.
    Examples
    --------
    >>> import pandas as pd
    >>> from dataprep.eda import *
    >>> iris = pd.read_csv('https://raw.githubusercontent.com/mwaskom/seaborn-data/master/iris.csv')
    >>> plot(iris)
    >>> plot(iris, "petal_length")
    >>> plot(iris, "petal_width", "species")
    """
    # Merge the user-facing "display" list and "config" dict into one Config.
    cfg = Config.from_dict(display, config)
    # Wrap the (possibly dask-backed, lazy) computation in a progress bar;
    # `disable=not progress` turns the bar off when requested.
    with ProgressBar(minimum=1, disable=not progress):
        itmdt = compute(df, x, y, z, cfg=cfg, dtype=dtype)
        to_render = render(itmdt, cfg)
    return Container(to_render, itmdt.visual_type, cfg)
| 36.518868 | 100 | 0.687936 |
ace5110e4007755cafd633246024667c012ba690 | 2,296 | py | Python | vizdoomgym/envs/vizdoom_env_definitions.py | ArnaudFickinger/vizdoomgym | de001b4158d49d9eb1ae516346f05ad28b163961 | [
"MIT"
] | null | null | null | vizdoomgym/envs/vizdoom_env_definitions.py | ArnaudFickinger/vizdoomgym | de001b4158d49d9eb1ae516346f05ad28b163961 | [
"MIT"
] | null | null | null | vizdoomgym/envs/vizdoom_env_definitions.py | ArnaudFickinger/vizdoomgym | de001b4158d49d9eb1ae516346f05ad28b163961 | [
"MIT"
] | null | null | null | from vizdoomgym.envs.vizdoomenv import VizdoomEnv
class VizdoomBasic(VizdoomEnv):
    """ViZDoom environment wrapping scenario index 0."""

    def __init__(self, **kwargs):
        # Delegate everything to the shared base class for this scenario.
        super().__init__(0, **kwargs)
class VizdoomCorridor5(VizdoomEnv):
    """ViZDoom environment wrapping scenario index 1."""

    def __init__(self, no_reward, **kwargs):
        super().__init__(1, no_reward=no_reward, **kwargs)
class VizdoomCorridor1(VizdoomEnv):
    """ViZDoom environment wrapping scenario index 12."""

    def __init__(self, no_reward, **kwargs):
        super().__init__(12, no_reward=no_reward, **kwargs)
class VizdoomCorridor3(VizdoomEnv):
    """ViZDoom environment wrapping scenario index 13."""

    def __init__(self, no_reward, **kwargs):
        super().__init__(13, no_reward=no_reward, **kwargs)
class VizdoomCorridor7(VizdoomEnv):
    """ViZDoom environment wrapping scenario index 14."""

    def __init__(self, no_reward, **kwargs):
        super().__init__(14, no_reward=no_reward, **kwargs)
class VizdoomCorridorSparse5(VizdoomEnv):
    """ViZDoom environment wrapping scenario index 10."""

    def __init__(self, no_reward, **kwargs):
        super().__init__(10, no_reward=no_reward, **kwargs)
class VizdoomCorridorSparse1(VizdoomEnv):
    """ViZDoom environment wrapping scenario index 11."""

    def __init__(self, no_reward, **kwargs):
        super().__init__(11, no_reward=no_reward, **kwargs)
class VizdoomDeathmatch(VizdoomEnv):
    """ViZDoom environment wrapping scenario index 8."""

    def __init__(self, **kwargs):
        super().__init__(8, **kwargs)
class VizdoomDefendCenter(VizdoomEnv):
    """ViZDoom environment wrapping scenario index 2."""

    def __init__(self, **kwargs):
        super().__init__(2, **kwargs)
class VizdoomDefendLine(VizdoomEnv):
    """ViZDoom environment wrapping scenario index 3."""

    def __init__(self, **kwargs):
        super().__init__(3, **kwargs)
class VizdoomHealthGathering(VizdoomEnv):
    """ViZDoom environment wrapping scenario index 4."""

    def __init__(self, **kwargs):
        super().__init__(4, **kwargs)
class VizdoomHealthGatheringSupreme(VizdoomEnv):
    """ViZDoom environment wrapping scenario index 9."""

    def __init__(self, **kwargs):
        super().__init__(9, **kwargs)
class VizdoomMyWayHome(VizdoomEnv):
    """ViZDoom environment wrapping scenario index 5."""

    def __init__(self, **kwargs):
        super().__init__(5, **kwargs)
class VizdoomPredictPosition(VizdoomEnv):
    """ViZDoom environment wrapping scenario index 6."""

    def __init__(self, **kwargs):
        super().__init__(6, **kwargs)
class VizdoomTakeCover(VizdoomEnv):
    """ViZDoom environment wrapping scenario index 7."""

    def __init__(self, **kwargs):
        super().__init__(7, **kwargs)
| 32.8 | 87 | 0.726045 |
ace512b033d77a8c594df063273fbe1ffda1d20c | 10,880 | py | Python | forms_builder/forms/tests.py | kbe-ogawa/django-forms-builder | 6170f7c5d2f8ec7ac72f90e949fd8654f9614550 | [
"BSD-2-Clause"
] | 416 | 2015-01-07T18:06:00.000Z | 2022-03-18T02:04:57.000Z | forms_builder/forms/tests.py | kbe-ogawa/django-forms-builder | 6170f7c5d2f8ec7ac72f90e949fd8654f9614550 | [
"BSD-2-Clause"
] | 102 | 2015-01-06T13:57:22.000Z | 2022-03-02T08:50:08.000Z | forms_builder/forms/tests.py | kbe-ogawa/django-forms-builder | 6170f7c5d2f8ec7ac72f90e949fd8654f9614550 | [
"BSD-2-Clause"
] | 201 | 2015-01-05T15:25:23.000Z | 2022-03-29T21:57:42.000Z | from __future__ import unicode_literals
from django.conf import settings
from django.contrib.auth.models import User, AnonymousUser
from django.contrib.sites.models import Site
from django.db import IntegrityError
from django.http import HttpResponse, HttpResponseRedirect
from django.template import Context, RequestContext, Template
from django.test import TestCase
from forms_builder.forms.fields import NAMES, FILE, SELECT
from forms_builder.forms.forms import FormForForm
from forms_builder.forms.models import (Form, Field,
STATUS_DRAFT, STATUS_PUBLISHED)
from forms_builder.forms.settings import USE_SITES
from forms_builder.forms.signals import form_invalid, form_valid
class Tests(TestCase):
    """Integration tests for forms-builder: form rendering/submission,
    draft visibility, signals, template tags and field/widget behaviour."""

    def setUp(self):
        # Cache the current Site once; published/draft forms are attached
        # to it whenever the USE_SITES setting is enabled.
        self._site = Site.objects.get_current()

    def test_form_fields(self):
        """
        Simple 200 status check against rendering and posting to forms with
        both optional and required fields.
        """
        for required in (True, False):
            form = Form.objects.create(title="Test", status=STATUS_PUBLISHED)
            if USE_SITES:
                form.sites.add(self._site)
                form.save()
            # Create one visible field of every supported field type.
            for (field, _) in NAMES:
                form.fields.create(label=field, field_type=field,
                                   required=required, visible=True)
            response = self.client.get(form.get_absolute_url())
            self.assertEqual(response.status_code, 200)
            # Post a value for every visible field so required ones validate.
            fields = form.fields.visible()
            data = dict([(f.slug, "test") for f in fields])
            response = self.client.post(form.get_absolute_url(), data=data)
            self.assertEqual(response.status_code, 200)

    def test_draft_form(self):
        """
        Test that a form with draft status is only visible to staff.
        """
        settings.DEBUG = True  # Don't depend on having a 404 template.
        username = "test"
        password = "test"
        User.objects.create_superuser(username, "", password)
        self.client.logout()
        draft = Form.objects.create(title="Draft", status=STATUS_DRAFT)
        if USE_SITES:
            draft.sites.add(self._site)
            draft.save()
        # Anonymous user: draft form should 404.
        response = self.client.get(draft.get_absolute_url())
        self.assertEqual(response.status_code, 404)
        # Staff user: draft form is reachable.
        self.client.login(username=username, password=password)
        response = self.client.get(draft.get_absolute_url())
        self.assertEqual(response.status_code, 200)

    def test_form_signals(self):
        """
        Test that each of the signals are sent.
        """
        # Each handler removes its event name; an empty list at the end
        # proves both form_invalid and form_valid fired exactly once.
        events = ["valid", "invalid"]
        invalid = lambda **kwargs: events.remove("invalid")
        form_invalid.connect(invalid)
        valid = lambda **kwargs: events.remove("valid")
        form_valid.connect(valid)
        form = Form.objects.create(title="Signals", status=STATUS_PUBLISHED)
        if USE_SITES:
            form.sites.add(self._site)
            form.save()
        form.fields.create(label="field", field_type=NAMES[0][0],
                           required=True, visible=True)
        # Empty post -> form_invalid; valid post -> form_valid.
        self.client.post(form.get_absolute_url(), data={})
        data = {form.fields.visible()[0].slug: "test"}
        self.client.post(form.get_absolute_url(), data=data)
        self.assertEqual(len(events), 0)

    def test_tag(self):
        """
        Test that the different formats for the ``render_built_form``
        tag all work.
        """
        form = Form.objects.create(title="Tags", status=STATUS_PUBLISHED)
        # Build a minimal fake request object with just the attributes
        # RequestContext needs (META and user).
        request = type(str(""), (), {"META": {}, "user": AnonymousUser()})()
        context = RequestContext(request, {"form": form})
        template = "{%% load forms_builder_tags %%}{%% render_built_form %s %%}"
        formats = ("form", "form=form", "id=form.id", "slug=form.slug")
        for format in formats:
            t = Template(template % format).render(context)
            self.assertTrue(form.get_absolute_url(), t)

    def test_optional_filefield(self):
        # Saving a form entry with an empty, optional file field must not
        # fail at the database level.
        form = Form.objects.create(title="Test", status=STATUS_PUBLISHED)
        if USE_SITES:
            form.sites.add(self._site)
            form.save()
        form.fields.create(label="file field",
                           field_type=FILE,
                           required=False,
                           visible=True)
        fields = form.fields.visible()
        data = {'field_%s' % fields[0].id: ''}
        context = Context({})
        form_for_form = FormForForm(form, context, data=data)
        # Should not raise IntegrityError: forms_fieldentry.value
        # may not be NULL
        form_for_form.save()

    def test_field_validate_slug_names(self):
        # Two fields with the same label must receive distinct slugs.
        form = Form.objects.create(title="Test")
        field = Field(form=form,
                      label="First name", field_type=NAMES[0][0])
        field.save()
        self.assertEqual(field.slug, 'first_name')
        field_2 = Field(form=form,
                        label="First name", field_type=NAMES[0][0])
        try:
            field_2.save()
        except IntegrityError:
            self.fail("Slugs were not auto-unique")

    def test_field_validate_slug_length(self):
        # An over-long label must be truncated to the slug length limit.
        max_slug_length = 2000
        form = Form.objects.create(title="Test")
        field = Field(form=form,
                      label='x' * (max_slug_length + 1), field_type=NAMES[0][0])
        field.save()
        self.assertLessEqual(len(field.slug), max_slug_length)

    def test_field_default_ordering(self):
        # Fields must come back ordered by their explicit ``order`` value,
        # not by creation order.
        form = Form.objects.create(title="Test")
        form.fields.create(label="second field",
                           field_type=NAMES[0][0], order=2)
        f1 = form.fields.create(label="first field",
                                field_type=NAMES[0][0], order=1)
        self.assertEqual(form.fields.all()[0], f1)

    def test_form_errors(self):
        # Posting without the required field must surface a validation error.
        from future.builtins import str
        form = Form.objects.create(title="Test")
        if USE_SITES:
            form.sites.add(self._site)
            form.save()
        form.fields.create(label="field", field_type=NAMES[0][0],
                           required=True, visible=True)
        response = self.client.post(form.get_absolute_url(), {"foo": "bar"})
        self.assertTrue("This field is required" in str(response.content))

    def test_form_redirect(self):
        # A valid submission redirects to ``redirect_url``; an invalid one
        # re-renders the form instead of redirecting.
        redirect_url = 'http://example.com/foo'
        form = Form.objects.create(title='Test', redirect_url=redirect_url)
        if USE_SITES:
            form.sites.add(self._site)
            form.save()
        form.fields.create(label='field', field_type=NAMES[3][0],
                           required=True, visible=True)
        form_absolute_url = form.get_absolute_url()
        response = self.client.post(form_absolute_url, {'field': '0'})
        self.assertEqual(response["location"], redirect_url)
        response = self.client.post(form_absolute_url, {'field': 'bar'})
        self.assertFalse(isinstance(response, HttpResponseRedirect))

    def test_input_dropdown_not_required(self):
        # Optional select: an empty option is rendered and preselected.
        form = Form.objects.create(title="Test")
        form.fields.create(label="Foo", field_type=SELECT, required=False, choices="one, two, three")
        form_for_form = FormForForm(form, Context())
        self.assertContains(HttpResponse(form_for_form), """
            <select name="foo" class="choicefield" id="id_foo">
            <option value="" selected></option>
            <option value="one">one</option>
            <option value="two">two</option>
            <option value="three">three</option>
            </select>""", html=True)

    def test_input_dropdown_not_required_with_placeholder(self):
        # Optional select: placeholder text becomes the empty option's label.
        form = Form.objects.create(title="Test")
        form.fields.create(label="Foo", placeholder_text="choose item", field_type=SELECT,
                           required=False, choices="one, two, three")
        form_for_form = FormForForm(form, Context())
        self.assertContains(HttpResponse(form_for_form), """
            <select name="foo" class="choicefield" id="id_foo">
            <option value="" selected>choose item</option>
            <option value="one">one</option>
            <option value="two">two</option>
            <option value="three">three</option>
            </select>""", html=True)

    def test_input_dropdown_required(self):
        # Required select: the ``required`` attribute and class are rendered.
        form = Form.objects.create(title="Test")
        form.fields.create(label="Foo", field_type=SELECT, choices="one, two, three")
        form_for_form = FormForForm(form, Context())
        self.assertContains(HttpResponse(form_for_form), """
            <select name="foo" required class="choicefield required" id="id_foo">
            <option value="" selected></option>
            <option value="one">one</option>
            <option value="two">two</option>
            <option value="three">three</option>
            </select>""", html=True)

    def test_input_dropdown_required_with_placeholder(self):
        # Required select with placeholder label on the empty option.
        form = Form.objects.create(title="Test")
        form.fields.create(label="Foo", placeholder_text="choose item", field_type=SELECT,
                           choices="one, two, three")
        form_for_form = FormForForm(form, Context())
        self.assertContains(HttpResponse(form_for_form), """
            <select name="foo" required class="choicefield required" id="id_foo">
            <option value="" selected>choose item</option>
            <option value="one">one</option>
            <option value="two">two</option>
            <option value="three">three</option>
            </select>""", html=True)

    def test_input_dropdown_required_with_placeholder_and_default(self):
        # A default value wins over the placeholder: no empty option appears
        # and the default choice is preselected.
        form = Form.objects.create(title="Test")
        form.fields.create(label="Foo", placeholder_text="choose item", field_type=SELECT,
                           choices="one, two, three", default="two")
        form_for_form = FormForForm(form, Context())
        self.assertContains(HttpResponse(form_for_form), """
            <select name="foo" required class="choicefield required" id="id_foo">
            <option value="one">one</option>
            <option value="two" selected>two</option>
            <option value="three">three</option>
            </select>""", html=True)

    def test_input_dropdown_required_with_default(self):
        # A default value on a required select is preselected.
        form = Form.objects.create(title="Test")
        form.fields.create(label="Foo", field_type=SELECT, choices="one, two, three", default="two")
        form_for_form = FormForForm(form, Context())
        self.assertContains(HttpResponse(form_for_form), """
            <select name="foo" required class="choicefield required" id="id_foo">
            <option value="one">one</option>
            <option value="two" selected>two</option>
            <option value="three">three</option>
            </select>""", html=True)
ace513cc3f7356dcd14669d0dfc93cc7aa5ac80e | 48 | py | Python | xtool/__init__.py | xianpf/xtool | beb6c89eb1a19d386569ab5c428dbc991a3e014c | [
"MIT"
] | 3 | 2019-01-14T12:19:25.000Z | 2019-10-10T13:27:47.000Z | xtool/__init__.py | xianpf/xtool | beb6c89eb1a19d386569ab5c428dbc991a3e014c | [
"MIT"
] | null | null | null | xtool/__init__.py | xianpf/xtool | beb6c89eb1a19d386569ab5c428dbc991a3e014c | [
"MIT"
] | null | null | null | # Version
from xtool.version import __version__
| 16 | 37 | 0.833333 |
ace5151eea91e710ba316e8779ce177369c7083a | 3,140 | py | Python | WeChatSpider/utils/proxy_core.py | Times125/wechatSpider | 141a37efe6877962da78297132cc06cfcfa04d65 | [
"BSD-3-Clause"
] | 18 | 2018-09-09T11:55:02.000Z | 2019-11-09T08:53:09.000Z | WeChatSpider/utils/proxy_core.py | Times125/wechatSpider | 141a37efe6877962da78297132cc06cfcfa04d65 | [
"BSD-3-Clause"
] | 1 | 2019-07-22T01:06:51.000Z | 2019-11-16T13:10:48.000Z | WeChatSpider/utils/proxy_core.py | Times125/wechatSpider | 141a37efe6877962da78297132cc06cfcfa04d65 | [
"BSD-3-Clause"
] | 6 | 2018-10-16T01:30:59.000Z | 2020-05-08T09:56:48.000Z | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Author:_defined
@Time: 2018/12/16 21:53
@Description:
"""
import requests
import json
from WeChatSpider.logger import other_logger
__all__ = ['get_proxy', 'get_proxies', 'refresh_proxy', 'del_proxy']
def get_proxy():
    """Fetch a single https-validated proxy from the local proxy-pool service.

    Returns:
        str: a proxy address such as ``"http://1.2.3.4:8080"`` on success,
        or an empty string on any failure.
    """
    try:
        # Single ip that passed the https validator.
        resp = requests.get('http://10.0.12.1:5000/proxy/get/https')
        if resp.text:
            res = json.loads(resp.text)
            if res['status_code'] == 200:
                return res['proxy']
        return ''
    except (requests.RequestException, ValueError, KeyError) as e:
        # BUGFIX: the original caught the builtin TimeoutError, which
        # requests never raises; RequestException covers timeouts and
        # connection errors, ValueError covers malformed JSON.
        other_logger.error('get proxy failed,look {}'.format(e))
        return ''
def refresh_proxy():
    """Ask the local proxy-pool service to refresh its https pool.

    Returns:
        list: the refreshed pool of proxy addresses, or ``[]`` on failure.
    """
    try:
        # Trigger a refresh of the https-validated pool.
        resp = requests.get('http://10.0.12.1:5000/pool/refresh/https')
        if resp.text:
            res = json.loads(resp.text)
            if res['status_code'] == 200:
                return res['pool']
        return []
    except (requests.RequestException, ValueError, KeyError) as e:
        # BUGFIX: requests raises RequestException (not the builtin
        # TimeoutError) on timeouts; ValueError covers malformed JSON.
        other_logger.error('get proxy failed,look {}'.format(e))
        return []
def get_proxies(batch_size=10):
    """Fetch a batch of proxies from the Mogu proxy vendor API.

    Args:
        batch_size: number of proxy addresses to request.

    Returns:
        list: proxy URLs of the form ``"http://ip:port"``, or ``[]`` on
        any failure.
    """
    try:
        # NOTE(review): the appKey is a hard-coded credential checked into
        # source control; consider moving it into configuration.
        resp = requests.get('http://piping.mogumiao.com/proxy/api/get_ip_al?appKey=45cce39ad9a448e99cdbf0cf821a1fed&'
                            'count={}&expiryDate=0&format=1&newLine=2'.format(batch_size))
        if resp.text:
            res = json.loads(resp.text)
            if res['code'] == '0':
                return ['http://{}:{}'.format(item['ip'], item['port'])
                        for item in res['msg']]
            other_logger.error('代理ip获取异常吗,错误代码{}'.format(res['code']))
        return []
    except (requests.RequestException, ValueError, KeyError) as e:
        # BUGFIX: the original caught the builtin TimeoutError, which
        # requests never raises; RequestException covers timeouts and
        # connection errors, ValueError covers malformed JSON.
        other_logger.error('get proxies failed,look {}'.format(e))
        return []
def del_proxy(proxy):
    """Delete *proxy* from the local proxy-pool service's https pool.

    Args:
        proxy: the proxy address to remove.

    Returns:
        bool: True if the service acknowledged the deletion, else False.
    """
    try:
        resp = requests.get('http://10.0.12.1:5000/proxy/delete?usage=https&proxy={}'.format(proxy))
        if resp.text:
            res = json.loads(resp.text)
            if res['result'] == 'ok':
                return True
        return False
    except (requests.RequestException, ValueError, KeyError) as e:
        # BUGFIX: requests raises RequestException (not the builtin
        # TimeoutError) on timeouts; ValueError covers malformed JSON.
        other_logger.error('del proxy failed,look {}'.format(e))
        return False
| 30.192308 | 117 | 0.545541 |
ace5153659d1ea08150e8a449812324871dbb0c0 | 473 | py | Python | packages/python/plotly/plotly/validators/scatterternary/marker/colorbar/_tickprefix.py | mastermind88/plotly.py | efa70710df1af22958e1be080e105130042f1839 | [
"MIT"
] | null | null | null | packages/python/plotly/plotly/validators/scatterternary/marker/colorbar/_tickprefix.py | mastermind88/plotly.py | efa70710df1af22958e1be080e105130042f1839 | [
"MIT"
] | null | null | null | packages/python/plotly/plotly/validators/scatterternary/marker/colorbar/_tickprefix.py | mastermind88/plotly.py | efa70710df1af22958e1be080e105130042f1839 | [
"MIT"
] | null | null | null | import _plotly_utils.basevalidators
class TickprefixValidator(_plotly_utils.basevalidators.StringValidator):
    """String validator for the `scatterternary.marker.colorbar.tickprefix`
    property (auto-generated validator style)."""

    def __init__(
        self,
        plotly_name="tickprefix",
        parent_name="scatterternary.marker.colorbar",
        **kwargs,
    ):
        # Allow callers to override edit_type; default to "colorbars".
        edit_type = kwargs.pop("edit_type", "colorbars")
        super().__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            **kwargs,
        )
| 27.823529 | 72 | 0.638478 |
ace5172c1d75c6a1888e778c2392e12cc38407b4 | 282 | py | Python | music_bg/img_processors/load_img.py | music-bg/music_bg | 5b79eacb32506b6eda5861df4b5f71b611c5dfa3 | [
"MIT"
] | 3 | 2021-09-17T12:27:46.000Z | 2021-09-18T19:03:01.000Z | music_bg/img_processors/load_img.py | music-bg/music_bg | 5b79eacb32506b6eda5861df4b5f71b611c5dfa3 | [
"MIT"
] | null | null | null | music_bg/img_processors/load_img.py | music-bg/music_bg | 5b79eacb32506b6eda5861df4b5f71b611c5dfa3 | [
"MIT"
] | null | null | null | from PIL import Image
def load_img(_image: Image.Image, path: str) -> Image.Image:
    """
    Load image from disk.

    :param _image: input image (ignored).
    :param path: path to image to load.
    :returns: loaded image, converted to RGBA.
    """
    # BUGFIX: Image.open keeps the underlying file handle open; use it as a
    # context manager so the file is closed once the pixel data is loaded
    # (convert() forces a full load before the file is closed).
    with Image.open(path) as source:
        return source.convert("RGBA")
| 21.692308 | 60 | 0.641844 |
ace5173195ce27719fc18dd540b8950c30fee2ff | 7,216 | py | Python | accelbyte_py_sdk/api/platform/models/store_backup_info.py | AccelByte/accelbyte-python-sdk | dcd311fad111c59da828278975340fb92e0f26f7 | [
"MIT"
] | null | null | null | accelbyte_py_sdk/api/platform/models/store_backup_info.py | AccelByte/accelbyte-python-sdk | dcd311fad111c59da828278975340fb92e0f26f7 | [
"MIT"
] | 1 | 2021-10-13T03:46:58.000Z | 2021-10-13T03:46:58.000Z | accelbyte_py_sdk/api/platform/models/store_backup_info.py | AccelByte/accelbyte-python-sdk | dcd311fad111c59da828278975340fb92e0f26f7 | [
"MIT"
] | null | null | null | # Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template file: justice_py_sdk_codegen/__main__.py
# justice-platform-service (4.10.0)
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
from __future__ import annotations
from typing import Any, Dict, List, Optional, Tuple, Union
from ....core import Model
class StoreBackupInfo(Model):
    """Store backup info (StoreBackupInfo)

    Properties:
        auto_backup: (autoBackup) REQUIRED bool
        created_at: (createdAt) REQUIRED str
        id_: (id) REQUIRED str
        name: (name) REQUIRED str
        store_id: (storeId) REQUIRED str
        updated_at: (updatedAt) REQUIRED str
    """

    # region fields

    auto_backup: bool  # REQUIRED
    created_at: str  # REQUIRED
    id_: str  # REQUIRED
    name: str  # REQUIRED
    store_id: str  # REQUIRED
    updated_at: str  # REQUIRED

    # endregion fields

    # region with_x methods

    def with_auto_backup(self, value: bool) -> StoreBackupInfo:
        self.auto_backup = value
        return self

    def with_created_at(self, value: str) -> StoreBackupInfo:
        self.created_at = value
        return self

    def with_id(self, value: str) -> StoreBackupInfo:
        self.id_ = value
        return self

    def with_name(self, value: str) -> StoreBackupInfo:
        self.name = value
        return self

    def with_store_id(self, value: str) -> StoreBackupInfo:
        self.store_id = value
        return self

    def with_updated_at(self, value: str) -> StoreBackupInfo:
        self.updated_at = value
        return self

    # endregion with_x methods

    # region to methods

    def to_dict(self, include_empty: bool = False) -> dict:
        """Serialize to a plain dict using the wire (camelCase) key names."""
        result: dict = {}
        if hasattr(self, "auto_backup"):
            result["autoBackup"] = bool(self.auto_backup)
        elif include_empty:
            result["autoBackup"] = False
        if hasattr(self, "created_at"):
            result["createdAt"] = str(self.created_at)
        elif include_empty:
            result["createdAt"] = ""
        if hasattr(self, "id_"):
            result["id"] = str(self.id_)
        elif include_empty:
            result["id"] = ""
        if hasattr(self, "name"):
            result["name"] = str(self.name)
        elif include_empty:
            result["name"] = ""
        if hasattr(self, "store_id"):
            result["storeId"] = str(self.store_id)
        elif include_empty:
            result["storeId"] = ""
        if hasattr(self, "updated_at"):
            result["updatedAt"] = str(self.updated_at)
        elif include_empty:
            result["updatedAt"] = ""
        return result

    # endregion to methods

    # region static methods

    @classmethod
    def create(
        cls,
        auto_backup: bool,
        created_at: str,
        id_: str,
        name: str,
        store_id: str,
        updated_at: str,
    ) -> StoreBackupInfo:
        instance = cls()
        instance.auto_backup = auto_backup
        instance.created_at = created_at
        instance.id_ = id_
        instance.name = name
        instance.store_id = store_id
        instance.updated_at = updated_at
        return instance

    @classmethod
    def create_from_dict(cls, dict_: dict, include_empty: bool = False) -> StoreBackupInfo:
        """Deserialize from a plain dict with wire (camelCase) key names."""
        instance = cls()
        if not dict_:
            return instance
        if "autoBackup" in dict_ and dict_["autoBackup"] is not None:
            instance.auto_backup = bool(dict_["autoBackup"])
        elif include_empty:
            instance.auto_backup = False
        if "createdAt" in dict_ and dict_["createdAt"] is not None:
            instance.created_at = str(dict_["createdAt"])
        elif include_empty:
            instance.created_at = ""
        if "id" in dict_ and dict_["id"] is not None:
            instance.id_ = str(dict_["id"])
        elif include_empty:
            instance.id_ = ""
        if "name" in dict_ and dict_["name"] is not None:
            instance.name = str(dict_["name"])
        elif include_empty:
            instance.name = ""
        if "storeId" in dict_ and dict_["storeId"] is not None:
            instance.store_id = str(dict_["storeId"])
        elif include_empty:
            instance.store_id = ""
        if "updatedAt" in dict_ and dict_["updatedAt"] is not None:
            instance.updated_at = str(dict_["updatedAt"])
        elif include_empty:
            instance.updated_at = ""
        return instance

    @classmethod
    def create_many_from_dict(cls, dict_: dict, include_empty: bool = False) -> Dict[str, StoreBackupInfo]:
        # BUGFIX: iterating a dict yields keys only, so "for k, v in dict_"
        # raised ValueError on any non-empty input; iterate .items() instead.
        return {k: cls.create_from_dict(v, include_empty=include_empty) for k, v in dict_.items()} if dict_ else {}

    @classmethod
    def create_many_from_list(cls, list_: list, include_empty: bool = False) -> List[StoreBackupInfo]:
        return [cls.create_from_dict(i, include_empty=include_empty) for i in list_] if list_ else []

    @classmethod
    def create_from_any(cls, any_: any, include_empty: bool = False, many: bool = False) -> Union[StoreBackupInfo, List[StoreBackupInfo], Dict[Any, StoreBackupInfo]]:
        """Deserialize from a dict, a list of dicts, or a dict of dicts."""
        if many:
            if isinstance(any_, dict):
                return cls.create_many_from_dict(any_, include_empty=include_empty)
            elif isinstance(any_, list):
                return cls.create_many_from_list(any_, include_empty=include_empty)
            else:
                raise ValueError()
        else:
            return cls.create_from_dict(any_, include_empty=include_empty)

    @staticmethod
    def get_field_info() -> Dict[str, str]:
        # Maps wire (camelCase) names to Python attribute names.
        return {
            "autoBackup": "auto_backup",
            "createdAt": "created_at",
            "id": "id_",
            "name": "name",
            "storeId": "store_id",
            "updatedAt": "updated_at",
        }

    @staticmethod
    def get_required_map() -> Dict[str, bool]:
        return {
            "autoBackup": True,
            "createdAt": True,
            "id": True,
            "name": True,
            "storeId": True,
            "updatedAt": True,
        }

    # endregion static methods
ace5174cae41722274c3b709d909a12acb64b480 | 1,301 | py | Python | hbmqtt/mqtt/pubcomp.py | petnet-independence-project/hbmq | 72947007235040720cc2d1ecf51fa61ffa63c347 | [
"MIT"
] | null | null | null | hbmqtt/mqtt/pubcomp.py | petnet-independence-project/hbmq | 72947007235040720cc2d1ecf51fa61ffa63c347 | [
"MIT"
] | 2 | 2020-12-31T06:37:20.000Z | 2021-01-07T21:36:02.000Z | hbmqtt/mqtt/pubcomp.py | petnet-independence-project/hbmqtt | 72947007235040720cc2d1ecf51fa61ffa63c347 | [
"MIT"
] | null | null | null | # Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
from hbmqtt.mqtt.packet import (
MQTTPacket,
MQTTFixedHeader,
PUBCOMP,
PacketIdVariableHeader,
)
from hbmqtt.errors import HBMQTTException
class PubcompPacket(MQTTPacket):
    """MQTT PUBCOMP packet: the final acknowledgement of a QoS-2 publish.

    Consists of a fixed header with packet type PUBCOMP and a variable
    header carrying only the packet id; there is no payload.
    """

    VARIABLE_HEADER = PacketIdVariableHeader
    PAYLOAD = None

    @property
    def packet_id(self):
        # The packet id lives in the variable header.
        return self.variable_header.packet_id

    @packet_id.setter
    def packet_id(self, val: int):
        self.variable_header.packet_id = val

    def __init__(
        self,
        fixed: MQTTFixedHeader = None,
        variable_header: PacketIdVariableHeader = None,
    ):
        if fixed is None:
            header = MQTTFixedHeader(PUBCOMP, 0x00)
        else:
            # BUGFIX: compare by value, not identity — PUBCOMP is an int
            # constant, and "is not" only happens to work because CPython
            # caches small ints.
            if fixed.packet_type != PUBCOMP:
                raise HBMQTTException(
                    "Invalid fixed packet type %s for PubcompPacket init"
                    % fixed.packet_type
                )
            header = fixed
        super().__init__(header)
        self.variable_header = variable_header
        self.payload = None

    @classmethod
    def build(cls, packet_id: int):
        """Construct a PUBCOMP packet acknowledging *packet_id*."""
        v_header = PacketIdVariableHeader(packet_id)
        packet = PubcompPacket(variable_header=v_header)
        return packet
ace517fda0c3fe77202732ac877509a987d80d3d | 2,617 | py | Python | dash_daq/StopButton.py | yordiverbeeck/dash-daq | 7450d92f7c60d42a42f62dbaa0e5053f4432a3a1 | [
"MIT"
] | null | null | null | dash_daq/StopButton.py | yordiverbeeck/dash-daq | 7450d92f7c60d42a42f62dbaa0e5053f4432a3a1 | [
"MIT"
] | null | null | null | dash_daq/StopButton.py | yordiverbeeck/dash-daq | 7450d92f7c60d42a42f62dbaa0e5053f4432a3a1 | [
"MIT"
] | null | null | null | # AUTO GENERATED FILE - DO NOT EDIT
from dash.development.base_component import Component, _explicitize_args
class StopButton(Component):
    """A StopButton component.
A Stop button component

Keyword arguments:

- children (a list of or a singular dash component, string or number; optional):
    The children of the button.

- id (string; optional):
    The ID used to identify this compnent in Dash callbacks.

- buttonText (string; default 'Stop'):
    Text displayed in the button.

- className (string; optional):
    Class to apply to the root component element.

- disabled (boolean; optional):
    If True, button cannot be pressesd.

- label (dict; optional):
    Description to be displayed alongside the button. To control
    styling, pass an object with label and style properties.

    `label` is a string | dict with keys:

    - label (string; optional)

    - style (dict; optional)

- labelPosition (a value equal to: 'top', 'bottom'; default 'top'):
    Where the label is positioned.

- n_clicks (number; default 0):
    Number of times the button was clicked.

- size (number; default 92):
    The size (width) of the stop button in pixels.

- style (dict; optional):
    Style to apply to the root component element.

- theme (dict; optional):
    Theme configuration to be set by a ThemeProvider."""
    # NOTE: auto-generated Dash component wrapper; the boilerplate below
    # follows the standard dash component-generator pattern.
    @_explicitize_args
    def __init__(self, children=None, id=Component.UNDEFINED, size=Component.UNDEFINED, buttonText=Component.UNDEFINED, n_clicks=Component.UNDEFINED, disabled=Component.UNDEFINED, theme=Component.UNDEFINED, label=Component.UNDEFINED, labelPosition=Component.UNDEFINED, className=Component.UNDEFINED, style=Component.UNDEFINED, **kwargs):
        # Props Dash is allowed to serialize for this component.
        self._prop_names = ['children', 'id', 'buttonText', 'className', 'disabled', 'label', 'labelPosition', 'n_clicks', 'size', 'style', 'theme']
        self._type = 'StopButton'
        self._namespace = 'dash_daq'
        self._valid_wildcard_attributes = []
        self.available_properties = ['children', 'id', 'buttonText', 'className', 'disabled', 'label', 'labelPosition', 'n_clicks', 'size', 'style', 'theme']
        self.available_wildcard_properties = []
        # Only forward the arguments the caller explicitly passed
        # (tracked by the @_explicitize_args decorator).
        _explicit_args = kwargs.pop('_explicit_args')
        _locals = locals()
        _locals.update(kwargs)  # For wildcard attrs
        args = {k: _locals[k] for k in _explicit_args if k != 'children'}
        # This component has no strictly-required props, so the generated
        # required-argument check below iterates an empty list (no-op).
        for k in []:
            if k not in args:
                raise TypeError(
                    'Required argument `' + k + '` was not specified.')
        super(StopButton, self).__init__(children=children, **args)
ace51891d9e961dc6295cae9430aa962cafd117b | 1,652 | py | Python | venv/Lib/site-packages/StreamDeck_Client/backend/client.py | philliphqs/StreamDeck | 0edc20cc5dd35238c0a6ec8988c92934c3c613b0 | [
"MIT"
] | null | null | null | venv/Lib/site-packages/StreamDeck_Client/backend/client.py | philliphqs/StreamDeck | 0edc20cc5dd35238c0a6ec8988c92934c3c613b0 | [
"MIT"
] | 9 | 2021-08-14T01:27:36.000Z | 2021-08-24T18:10:18.000Z | venv/Lib/site-packages/StreamDeck_Client/backend/client.py | philliphqs/StreamDeck | 0edc20cc5dd35238c0a6ec8988c92934c3c613b0 | [
"MIT"
] | null | null | null | import requests
import json
# Load the backend address from the bundled client configuration at import
# time. assumes resources/client.json holds string "ip" and "port" keys —
# TODO confirm against the file shipped with the app.
with open('resources/client.json', 'r') as c:
    client = json.load(c)
# "host:port" string for the StreamDeck HTTP backend used by interact().
ip = client['ip'] + ':' + client['port']
def interact(path):
    """Fire a GET request at the StreamDeck backend for the given path.

    The response body is ignored; the request only triggers the remote
    action. A timeout keeps an unreachable server from hanging the
    client indefinitely.
    """
    requests.get('http://' + ip + path, timeout=10)
class start:
    """Application launchers: each method asks the backend to start
    (or focus) the corresponding program on the host machine."""

    @staticmethod
    def _launch(app):
        # Shared helper: all launcher endpoints live under /start/.
        interact('/start/' + app)

    @staticmethod
    def wt():
        start._launch('wt')

    @staticmethod
    def ds4windows():
        start._launch('ds4windows')

    @staticmethod
    def bluetooth_settings():
        start._launch('bluetooth_settings')

    @staticmethod
    def coding_folder():
        start._launch('coding_folder')

    @staticmethod
    def design_folder():
        start._launch('design_folder')

    @staticmethod
    def virtualbox():
        start._launch('virtualbox')

    @staticmethod
    def signal():
        start._launch('signal')

    @staticmethod
    def whatsapp():
        start._launch('whatsapp')

    @staticmethod
    def leagueoflegends():
        start._launch('leagueoflegends')

    @staticmethod
    def kodi():
        start._launch('kodi')

    @staticmethod
    def netflix():
        start._launch('netflix')

    @staticmethod
    def reddit():
        start._launch('reddit')

    @staticmethod
    def _3utools():
        start._launch('3utools')

    @staticmethod
    def calculator():
        start._launch('calculator')
class discord:
    """Discord control endpoints exposed by the StreamDeck backend."""

    @staticmethod
    def _call(endpoint):
        # Shared helper: all Discord actions live under /discord/.
        interact('/discord/' + endpoint)

    @staticmethod
    def mute_mic():
        discord._call('mute_mic')

    @staticmethod
    def mute_headset():
        discord._call('mute_headset')

    @staticmethod
    def overlay():
        # Note: the public name is "overlay" but the backend endpoint
        # is "show_overlay_chat".
        discord._call('show_overlay_chat')

    @staticmethod
    def screenshare():
        discord._call('screenshare')
ace518a6df7f589252351f1755a5a6cd2f6a7761 | 1,717 | py | Python | tglog/tglog.py | sdurivau25/logfunc | cce962742385c460e49e3c5b7cf43d358149ee25 | [
"MIT"
] | null | null | null | tglog/tglog.py | sdurivau25/logfunc | cce962742385c460e49e3c5b7cf43d358149ee25 | [
"MIT"
] | null | null | null | tglog/tglog.py | sdurivau25/logfunc | cce962742385c460e49e3c5b7cf43d358149ee25 | [
"MIT"
] | null | null | null | #coding:utf-8
import requests
import traceback
import datetime
class logger():
    """Append log records to ``log.txt`` and forward them to a Telegram chat.

    Logging failures are themselves logged (best-effort) and never raised
    to the caller.
    """

    def __init__(self, chat_id: str = None, bot_token: str = None):
        # The telegram chat_id you want to receive the logs on.
        self.chat_id = str(chat_id)
        # The token of the telegram bot which sends you the logs.
        self.bot_token = str(bot_token)
        # When False, debug() messages are dropped entirely.
        self.isdebugging = True

    def log_func(self, message, disable_notification=False):
        """Write *message* to log.txt and send it via the Telegram Bot API.

        Args:
            message: str. The already-formatted log record.
            disable_notification: bool. If True, Telegram delivers the
                message silently.
        """
        try:
            with open('log.txt', 'a') as f:
                stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                # Blank line after each entry separates records in the file.
                f.write('{}\n{}\n\n'.format(stamp, message))
            # Use params= so the message text is URL-encoded; building the
            # query string by concatenation broke on spaces, '&', '#', etc.
            requests.get(
                'https://api.telegram.org/bot' + self.bot_token + '/sendMessage',
                params={
                    'chat_id': self.chat_id,
                    'text': str(message),
                    'disable_notification': (
                        'true' if disable_notification else 'false'),
                },
                timeout=10,
            )
        except Exception:
            # Never let logging itself crash the caller; record the failure
            # locally instead (best-effort).
            try:
                message = "WARNING : Error in sending log by telegram, {}.\n".format(
                    traceback.format_exc()) + message
                with open('log.txt', 'a') as f:
                    f.write('{}\n'.format(message))
            except Exception:
                pass

    def debug(self, message):
        """Log a DEBUG record (silent on Telegram); no-op if isdebugging is False."""
        if not self.isdebugging:
            return None
        message = "- DEBUG -\n" + message
        self.log_func(message, disable_notification=True)

    def info(self, message):
        """Log an INFO record."""
        message = "- INFO -\n" + message
        self.log_func(message)

    def error(self, message):
        """Log an ERROR record."""
        message = "- ERROR -\n" + message
        self.log_func(message)
| 40.880952 | 201 | 0.592895 |
ace5198afc5a457dec5b919b03b22bdc81446db8 | 6,043 | py | Python | espnet2/asr/streaming_u2/encoder_layer.py | jinggaizi/test-2 | 155aeddac495ad05088c15f2a0a51a8ce55ed49c | [
"Apache-2.0"
] | null | null | null | espnet2/asr/streaming_u2/encoder_layer.py | jinggaizi/test-2 | 155aeddac495ad05088c15f2a0a51a8ce55ed49c | [
"Apache-2.0"
] | null | null | null | espnet2/asr/streaming_u2/encoder_layer.py | jinggaizi/test-2 | 155aeddac495ad05088c15f2a0a51a8ce55ed49c | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Johns Hopkins University (Shinji Watanabe)
# Northwestern Polytechnical University (Pengcheng Guo)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
"""Encoder self-attention layer definition."""
import torch
from torch import nn
from espnet.nets.pytorch_backend.transformer.layer_norm import LayerNorm
class EncoderLayer(nn.Module):
    """Encoder layer module.

    Args:
        size (int): Input dimension.
        self_attn (torch.nn.Module): Self-attention module instance.
            `MultiHeadedAttention` or `RelPositionMultiHeadedAttention` instance
            can be used as the argument.
        feed_forward (torch.nn.Module): Feed-forward module instance.
            `PositionwiseFeedForward`, `MultiLayeredConv1d`, or `Conv1dLinear` instance
            can be used as the argument.
        feed_forward_macaron (torch.nn.Module): Additional feed-forward module instance.
            `PositionwiseFeedForward`, `MultiLayeredConv1d`, or `Conv1dLinear` instance
            can be used as the argument.
        conv_module (torch.nn.Module): Convolution module instance.
            `ConvlutionModule` instance can be used as the argument.
        dropout_rate (float): Dropout rate.
        normalize_before (bool): Whether to use layer_norm before the first block.
        concat_after (bool): Whether to concat attention layer's input and output.
            if True, additional linear will be applied.
            i.e. x -> x + linear(concat(x, att(x)))
            if False, no additional linear will be applied. i.e. x -> x + att(x)

    """

    def __init__(
        self,
        size,
        self_attn,
        feed_forward,
        feed_forward_macaron,
        conv_module,
        dropout_rate,
        normalize_before=True,
        concat_after=False,
    ):
        """Construct an EncoderLayer object."""
        super(EncoderLayer, self).__init__()
        self.self_attn = self_attn
        self.feed_forward = feed_forward
        self.feed_forward_macaron = feed_forward_macaron
        self.conv_module = conv_module
        self.norm_ff = LayerNorm(size)  # for the FNN module
        self.norm_mha = LayerNorm(size)  # for the MHA module
        if feed_forward_macaron is not None:
            self.norm_ff_macaron = LayerNorm(size)
            # Macaron style wraps the attention in two half-weighted FFN
            # blocks, so each contribution is scaled by 0.5.
            self.ff_scale = 0.5
        else:
            self.ff_scale = 1.0
        if self.conv_module is not None:
            self.norm_conv = LayerNorm(size)  # for the CNN module
            self.norm_final = LayerNorm(size)  # for the final output of the block
        self.dropout = nn.Dropout(dropout_rate)
        self.size = size
        self.normalize_before = normalize_before
        self.concat_after = concat_after
        if self.concat_after:
            self.concat_linear = nn.Linear(size + size, size)

    def forward(
        self,
        x: torch.Tensor,
        mask: torch.Tensor,
        pos_emb: torch.Tensor,
        mask_pad=None,
        cache=None,
        cnn_cache=None,
        decoding_right_frames=0):
        """Compute encoded features.

        Args:
            x_input (Union[Tuple, torch.Tensor]): Input tensor w/ or w/o pos emb.
                - w/ pos emb: Tuple of tensors [(#batch, time, size), (1, time, size)].
                - w/o pos emb: Tensor (#batch, time, size).
            mask (torch.Tensor): Mask tensor for the input (#batch, time).
            cache (torch.Tensor): Cache tensor of the input (#batch, time - 1, size).
            mask_pad: padding mask forwarded to the convolution module —
                shape/semantics defined by conv_module; TODO confirm.
            cnn_cache: cached convolution state for streaming decoding,
                forwarded to conv_module; TODO confirm layout.
            decoding_right_frames (int): number of right-context frames used
                during streaming decoding (consumed by conv_module).

        Returns:
            torch.Tensor: Output tensor (#batch, time, size).
            torch.Tensor: Mask tensor (#batch, time).
            torch.Tensor: new convolution cache produced by conv_module
                (a dummy scalar tensor when there is no conv module).

        """
        # whether to use macaron style
        if self.feed_forward_macaron is not None:
            residual = x
            if self.normalize_before:
                x = self.norm_ff_macaron(x)
            x = residual + self.ff_scale * self.dropout(self.feed_forward_macaron(x))
            if not self.normalize_before:
                x = self.norm_ff_macaron(x)

        # multi-headed self-attention module
        residual = x
        if self.normalize_before:
            x = self.norm_mha(x)

        if cache is None:
            x_q = x
        else:
            assert cache.size(0) == x.size(0)
            assert cache.size(2) == self.size
            assert cache.size(1) < x.size(1)
            # Only the new (non-cached) trailing frames act as queries;
            # keys/values still attend over the full sequence.
            chunk = x.size(1) - cache.size(1)
            x_q = x[:, -chunk:, :]
            residual = residual[:, -chunk:, :]
            mask = mask[:, -chunk:, :]

        x_att = self.self_attn(x_q, x, x, mask, pos_emb)

        if self.concat_after:
            x_concat = torch.cat((x, x_att), dim=-1)
            x = residual + self.concat_linear(x_concat)
        else:
            x = residual + self.dropout(x_att)
        if not self.normalize_before:
            x = self.norm_mha(x)

        # convolution module
        # Placeholder cache so the return arity is stable even without a
        # convolution module.
        new_cnn_cache = torch.tensor([0.0], dtype=x.dtype, device=x.device)
        if self.conv_module is not None:
            residual = x
            if self.normalize_before:
                x = self.norm_conv(x)
            x, new_cnn_cache = self.conv_module(x, mask_pad, cnn_cache, decoding_right_frames)
            x = residual + self.dropout(x)
            if not self.normalize_before:
                x = self.norm_conv(x)

        # feed forward module
        residual = x
        if self.normalize_before:
            x = self.norm_ff(x)
        x = residual + self.ff_scale * self.dropout(self.feed_forward(x))
        if not self.normalize_before:
            x = self.norm_ff(x)

        if self.conv_module is not None:
            x = self.norm_final(x)

        if cache is not None:
            # Re-attach the cached prefix so callers always receive the full
            # sequence.
            x = torch.cat([cache, x], dim=1)

        return x, mask, new_cnn_cache
| 37.76875 | 95 | 0.570908 |
ace519c2f77713b9cf8cd62932fd40178d035af7 | 7,991 | py | Python | mayan/apps/converter/views.py | prezi/mayan-edms | e9bc10a056c3379b57115c6e83022f48c6298e1d | [
"Apache-2.0"
] | 4 | 2019-02-17T08:35:42.000Z | 2019-03-28T06:02:11.000Z | mayan/apps/converter/views.py | zhoubear/mayan-edms | e9bc10a056c3379b57115c6e83022f48c6298e1d | [
"Apache-2.0"
] | 1 | 2018-10-11T13:01:34.000Z | 2018-10-11T13:01:34.000Z | mayan/apps/converter/views.py | prezi/mayan-edms | e9bc10a056c3379b57115c6e83022f48c6298e1d | [
"Apache-2.0"
] | 3 | 2019-01-29T13:21:57.000Z | 2019-10-27T03:20:15.000Z | from __future__ import absolute_import, unicode_literals
import logging
from django.contrib.contenttypes.models import ContentType
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from acls.models import AccessControlList
from common.views import (
SingleObjectCreateView, SingleObjectDeleteView, SingleObjectEditView,
SingleObjectListView
)
from .icons import icon_transformation
from .links import link_transformation_create
from .models import Transformation
from .permissions import (
permission_transformation_create, permission_transformation_delete,
permission_transformation_edit, permission_transformation_view
)
logger = logging.getLogger(__name__)
class TransformationDeleteView(SingleObjectDeleteView):
    """Confirm and delete a single transformation attached to an object."""

    model = Transformation

    def dispatch(self, request, *args, **kwargs):
        # Resolve the transformation first so the ACL check can inspect the
        # object it is attached to.
        transformation = get_object_or_404(
            Transformation, pk=self.kwargs['pk']
        )
        self.transformation = transformation

        AccessControlList.objects.check_access(
            obj=transformation.content_object,
            permissions=permission_transformation_delete,
            user=request.user
        )

        return super(TransformationDeleteView, self).dispatch(
            request, *args, **kwargs
        )

    def get_post_action_redirect(self):
        content_type = self.transformation.content_type
        return reverse(
            'converter:transformation_list', args=(
                content_type.app_label, content_type.model,
                self.transformation.object_id
            )
        )

    def get_extra_context(self):
        transformation = self.transformation
        content_object = transformation.content_object
        return {
            'content_object': content_object,
            'navigation_object_list': ('content_object', 'transformation'),
            # Same URL as the post-action redirect: back to the list view.
            'previous': self.get_post_action_redirect(),
            'title': _(
                'Delete transformation "%(transformation)s" for: '
                '%(content_object)s?'
            ) % {
                'transformation': transformation,
                'content_object': content_object
            },
            'transformation': transformation,
        }
class TransformationCreateView(SingleObjectCreateView):
    """Create a new transformation for an arbitrary content object."""

    fields = ('name', 'arguments')

    def dispatch(self, request, *args, **kwargs):
        content_type = get_object_or_404(
            ContentType, app_label=self.kwargs['app_label'],
            model=self.kwargs['model']
        )

        try:
            self.content_object = content_type.get_object_for_this_type(
                pk=self.kwargs['object_id']
            )
        except content_type.model_class().DoesNotExist:
            raise Http404

        AccessControlList.objects.check_access(
            obj=self.content_object,
            permissions=permission_transformation_create,
            user=request.user
        )

        return super(TransformationCreateView, self).dispatch(
            request, *args, **kwargs
        )

    def form_valid(self, form):
        instance = form.save(commit=False)
        instance.content_object = self.content_object
        try:
            instance.full_clean()
            instance.save()
        except Exception as exception:
            # Model-level validation failed; treat the form as invalid.
            logger.debug('Invalid form, exception: %s', exception)
            return super(TransformationCreateView, self).form_invalid(form)

        return super(TransformationCreateView, self).form_valid(form)

    def get_extra_context(self):
        return {
            'content_object': self.content_object,
            'navigation_object_list': ('content_object',),
            'title': _(
                'Create new transformation for: %s'
            ) % self.content_object,
        }

    def get_post_action_redirect(self):
        url_args = (
            self.kwargs['app_label'], self.kwargs['model'],
            self.kwargs['object_id']
        )
        return reverse('converter:transformation_list', args=url_args)

    def get_queryset(self):
        return Transformation.objects.get_for_model(self.content_object)
class TransformationEditView(SingleObjectEditView):
    """Edit the name, arguments and order of an existing transformation."""

    fields = ('name', 'arguments', 'order')
    model = Transformation

    def dispatch(self, request, *args, **kwargs):
        transformation = get_object_or_404(
            Transformation, pk=self.kwargs['pk']
        )
        self.transformation = transformation

        AccessControlList.objects.check_access(
            obj=transformation.content_object,
            permissions=permission_transformation_edit,
            user=request.user
        )

        return super(TransformationEditView, self).dispatch(
            request, *args, **kwargs
        )

    def form_valid(self, form):
        instance = form.save(commit=False)
        try:
            instance.full_clean()
            instance.save()
        except Exception as exception:
            # Model-level validation failed; treat the form as invalid.
            logger.debug('Invalid form, exception: %s', exception)
            return super(TransformationEditView, self).form_invalid(form)

        return super(TransformationEditView, self).form_valid(form)

    def get_extra_context(self):
        transformation = self.transformation
        return {
            'content_object': transformation.content_object,
            'navigation_object_list': ('content_object', 'transformation'),
            'title': _(
                'Edit transformation "%(transformation)s" for: %(content_object)s'
            ) % {
                'transformation': transformation,
                'content_object': transformation.content_object
            },
            'transformation': transformation,
        }

    def get_post_action_redirect(self):
        content_type = self.transformation.content_type
        return reverse(
            'converter:transformation_list', args=(
                content_type.app_label, content_type.model,
                self.transformation.object_id
            )
        )
class TransformationListView(SingleObjectListView):
    """List every transformation attached to a given content object."""

    def dispatch(self, request, *args, **kwargs):
        content_type = get_object_or_404(
            ContentType, app_label=self.kwargs['app_label'],
            model=self.kwargs['model']
        )

        try:
            self.content_object = content_type.get_object_for_this_type(
                pk=self.kwargs['object_id']
            )
        except content_type.model_class().DoesNotExist:
            raise Http404

        AccessControlList.objects.check_access(
            obj=self.content_object,
            permissions=permission_transformation_view,
            user=request.user
        )

        return super(TransformationListView, self).dispatch(
            request, *args, **kwargs
        )

    def get_extra_context(self):
        content_object = self.content_object
        # "Create" link shown when the list is empty.
        create_link = link_transformation_create.resolve(
            context=RequestContext(
                self.request, {'content_object': content_object}
            )
        )
        return {
            'content_object': content_object,
            'hide_link': True,
            'hide_object': True,
            'navigation_object_list': ('content_object',),
            'no_results_icon': icon_transformation,
            'no_results_main_link': create_link,
            'no_results_text': _(
                'Transformations allow changing the visual appearance '
                'of documents without making permanent changes to the '
                'document file themselves.'
            ),
            'no_results_title': _('No transformations'),
            'title': _('Transformations for: %s') % content_object,
        }

    def get_object_list(self):
        return Transformation.objects.get_for_model(self.content_object)
| 34.296137 | 82 | 0.624453 |
ace51a5a50c1dec06c42aba2a767dd9cf5dc3e1c | 1,413 | py | Python | frontends/pytorch/test/acap_regression/test_jit_lenet_fwd.py | marbre/mlir-npcomp | 30adf9e6b0c1e94db38050a9e143f20a5a461d17 | [
"Apache-2.0"
] | null | null | null | frontends/pytorch/test/acap_regression/test_jit_lenet_fwd.py | marbre/mlir-npcomp | 30adf9e6b0c1e94db38050a9e143f20a5a461d17 | [
"Apache-2.0"
] | null | null | null | frontends/pytorch/test/acap_regression/test_jit_lenet_fwd.py | marbre/mlir-npcomp | 30adf9e6b0c1e94db38050a9e143f20a5a461d17 | [
"Apache-2.0"
] | null | null | null | # -*- Python -*-
# This file is licensed under a pytorch-style license
# See frontends/pytorch/LICENSE for license information.
from __future__ import print_function
import argparse
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
from torch.optim.lr_scheduler import StepLR
import npcomp.frontends.pytorch as torch_mlir
import npcomp.frontends.pytorch.test as test
# RUN: %PYTHON %s | FileCheck %s
class Net(nn.Module):
    """LeNet-style CNN for 28x28 single-channel (MNIST) inputs.

    Produces per-class log-probabilities over 10 classes.
    """

    def __init__(self):
        super(Net, self).__init__()
        self.conv1 = nn.Conv2d(1, 32, 3, 1)
        self.conv2 = nn.Conv2d(32, 64, 3, 1)
        self.maxpool2d = nn.MaxPool2d(2,2)
        #self.dropout1 = nn.Dropout2d(0.25)
        #self.dropout2 = nn.Dropout2d(0.5)
        # 64 channels * 12 * 12 spatial = 9216 features after conv/pool.
        self.fc1 = nn.Linear(9216, 128)
        self.fc2 = nn.Linear(128, 10)

    def forward(self, x):
        """Return log-softmax class scores of shape (batch, 10)."""
        x = self.conv1(x)
        x = F.relu(x)
        x = self.conv2(x)
        x = self.maxpool2d(x)
        #x = self.dropout1(x)
        # Flatten per sample. Using x.size(0) generalizes the original
        # hard-coded batch size of 4 so any batch size works.
        x = x.view(x.size(0), -1)
        x = self.fc1(x)
        x = F.relu(x)
        #x = self.dropout2(x)
        x = self.fc2(x)
        output = F.log_softmax(x, dim=1)
        return output
def main():
    # Run one forward pass through the test harness, which compares the
    # MLIR-compiled result against eager PyTorch and prints the verdict
    # consumed by the FileCheck directive below.
    model = Net()
    tensor = torch.randn((4, 1, 28, 28))
    # CHECK: PASS! fwd check
    fwd_path = test.check_fwd(model, tensor)

if __name__ == '__main__':
    main()
| 26.166667 | 56 | 0.620665 |
ace51abf655d5e7b85303ff381569c0ce1705641 | 16,851 | py | Python | core/domain/question_domain.py | steve7158/oppia | e2cae72fa5d3503c64d195f09d3460507697730c | [
"Apache-2.0"
] | null | null | null | core/domain/question_domain.py | steve7158/oppia | e2cae72fa5d3503c64d195f09d3460507697730c | [
"Apache-2.0"
] | 5 | 2018-06-09T02:05:45.000Z | 2018-09-20T13:53:42.000Z | core/domain/question_domain.py | steve7158/oppia | e2cae72fa5d3503c64d195f09d3460507697730c | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#
# Copyright 2017 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Domain objects relating to questions."""
from constants import constants
from core.domain import html_cleaner
from core.domain import interaction_registry
from core.domain import state_domain
from core.platform import models
import feconf
import utils
# Storage-layer model classes for questions, resolved through the platform
# registry so this domain module stays storage-agnostic.
(question_models,) = models.Registry.import_models([models.NAMES.question])

# Do not modify the values of these constants. This is to preserve backwards
# compatibility with previous change dicts.
QUESTION_PROPERTY_LANGUAGE_CODE = 'language_code'
QUESTION_PROPERTY_QUESTION_STATE_DATA = 'question_state_data'

# This takes additional 'property_name' and 'new_value' parameters and,
# optionally, 'old_value'.
CMD_UPDATE_QUESTION_PROPERTY = 'update_question_property'

CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION = 'create_new_fully_specified_question'

CMD_MIGRATE_STATE_SCHEMA_TO_LATEST_VERSION = (
    'migrate_state_schema_to_latest_version')

# The following commands are deprecated, as these functionalities will be
# handled by a QuestionSkillLink class in the future.
CMD_ADD_QUESTION_SKILL = 'add_question_skill'
CMD_REMOVE_QUESTION_SKILL = 'remove_question_skill'

CMD_CREATE_NEW = 'create_new'
class QuestionChange(object):
    """Domain object for changes made to question object."""

    QUESTION_PROPERTIES = (
        QUESTION_PROPERTY_QUESTION_STATE_DATA,
        QUESTION_PROPERTY_LANGUAGE_CODE)

    OPTIONAL_CMD_ATTRIBUTE_NAMES = [
        'property_name', 'new_value', 'old_value', 'question_dict',
        'skill_id', 'from_version', 'to_version'
    ]

    def __init__(self, change_dict):
        """Initialize a QuestionChange object from a dict.

        Args:
            change_dict: dict. Represents a command. It should have a 'cmd'
                key, and one or more other keys. The keys depend on what the
                value for 'cmd' is. The possible values for 'cmd' are listed
                below, together with the other keys in the dict:
                    - 'update question property' (with property_name, new_value
                    and old_value)
                    - 'create_new_fully_specified_question' (with question_dict,
                    skill_id)
                    - 'migrate_state_schema_to_latest_version' (with from_version
                    and to_version)

        Raises:
            Exception: The given change dict is not valid.
        """
        if 'cmd' not in change_dict:
            raise Exception('Invalid change_dict: %s' % change_dict)
        self.cmd = change_dict['cmd']

        try:
            if self.cmd == CMD_UPDATE_QUESTION_PROPERTY:
                # An unknown property is just as invalid as an unknown
                # command.
                if (change_dict['property_name'] not in
                        self.QUESTION_PROPERTIES):
                    raise Exception('Invalid change_dict: %s' % change_dict)
                self.property_name = change_dict['property_name']
                self.new_value = change_dict['new_value']
                self.old_value = change_dict['old_value']
            elif self.cmd == CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION:
                self.question_dict = change_dict['question_dict']
                # Note that change_dict['skill_id'] may be None if this
                # change is being done in the context of a suggestion.
                self.skill_id = change_dict['skill_id']
            elif self.cmd == CMD_MIGRATE_STATE_SCHEMA_TO_LATEST_VERSION:
                self.from_version = change_dict['from_version']
                self.to_version = change_dict['to_version']
            else:
                raise Exception('Invalid change_dict: %s' % change_dict)
        except KeyError:
            # Previously a change dict with a recognized 'cmd' but a missing
            # required key escaped as a bare KeyError; normalize it to the
            # same 'Invalid change_dict' error the other malformed cases
            # raise.
            raise Exception('Invalid change_dict: %s' % change_dict)

    def to_dict(self):
        """Returns a dict representing the QuestionChange domain object.

        Returns:
            A dict, mapping all fields of QuestionChange instance.
        """
        question_change_dict = {'cmd': self.cmd}
        for attribute_name in self.OPTIONAL_CMD_ATTRIBUTE_NAMES:
            if hasattr(self, attribute_name):
                question_change_dict[attribute_name] = getattr(
                    self, attribute_name)
        return question_change_dict
class Question(object):
    """Domain object for a question."""

    def __init__(
            self, question_id, question_state_data,
            question_state_data_schema_version, language_code, version,
            created_on=None, last_updated=None):
        """Constructs a Question domain object.

        Args:
            question_id: str. The unique ID of the question.
            question_state_data: State. An object representing the question
                state data.
            question_state_data_schema_version: int. The schema version of the
                question states (equivalent to the states schema version of
                explorations).
            language_code: str. The ISO 639-1 code for the language this
                question is written in.
            version: int. The version of the question.
            created_on: datetime.datetime. Date and time when the question was
                created.
            last_updated: datetime.datetime. Date and time when the
                question was last updated.
        """
        self.id = question_id
        self.question_state_data = question_state_data
        self.language_code = language_code
        self.question_state_data_schema_version = (
            question_state_data_schema_version)
        self.version = version
        self.created_on = created_on
        self.last_updated = last_updated

    def to_dict(self):
        """Returns a dict representing this Question domain object.

        Returns:
            dict. A dict representation of the Question instance.
        """
        return {
            'id': self.id,
            'question_state_data': self.question_state_data.to_dict(),
            'question_state_data_schema_version': (
                self.question_state_data_schema_version),
            'language_code': self.language_code,
            'version': self.version
        }

    @classmethod
    def create_default_question_state(cls):
        """Return a State domain object with default value for being used as
        question state data.

        Returns:
            State. The corresponding State domain object.
        """
        return state_domain.State.create_default_state(
            None, is_initial_state=True)

    @classmethod
    def update_state_from_model(
            cls, versioned_question_state, current_state_schema_version):
        """Converts the state object contained in the given
        versioned_question_state dict from current_state_schema_version to
        current_state_schema_version + 1.
        Note that the versioned_question_state being passed in is modified
        in-place.

        Args:
            versioned_question_state: dict. A dict with two keys:
                - state_schema_version: int. The state schema version for the
                    question.
                - state: The State domain object representing the question
                    state data.
            current_state_schema_version: int. The current state
                schema version.
        """
        versioned_question_state['state_schema_version'] = (
            current_state_schema_version + 1)

        # Migration functions follow the naming convention
        # _convert_state_v<N>_dict_to_v<N+1>_dict and are looked up
        # dynamically; a missing converter raises AttributeError here.
        conversion_fn = getattr(cls, '_convert_state_v%s_dict_to_v%s_dict' % (
            current_state_schema_version, current_state_schema_version + 1))
        versioned_question_state['state'] = conversion_fn(
            versioned_question_state['state'])

    def partial_validate(self):
        """Validates the Question domain object, but doesn't require the
        object to contain an ID and a version. To be used to validate the
        question before it is finalized.
        """
        # basestring: this module targets Python 2.
        if not isinstance(self.language_code, basestring):
            raise utils.ValidationError(
                'Expected language_code to be a string, received %s' %
                self.language_code)

        if not isinstance(self.question_state_data_schema_version, int):
            raise utils.ValidationError(
                'Expected schema version to be an integer, received %s' %
                self.question_state_data_schema_version)

        if not isinstance(self.question_state_data, state_domain.State):
            raise utils.ValidationError(
                'Expected question state data to be a State object, '
                'received %s' % self.question_state_data)

        if not utils.is_valid_language_code(self.language_code):
            raise utils.ValidationError(
                'Invalid language code: %s' % self.language_code)

        interaction_specs = interaction_registry.Registry.get_all_specs()
        at_least_one_correct_answer = False
        dest_is_specified = False
        interaction = self.question_state_data.interaction
        # A question must have at least one outcome marked correct, and —
        # unlike an exploration state — no outcome may route to another
        # state (questions are standalone).
        for answer_group in interaction.answer_groups:
            if answer_group.outcome.labelled_as_correct:
                at_least_one_correct_answer = True
            if answer_group.outcome.dest is not None:
                dest_is_specified = True

        if interaction.default_outcome.labelled_as_correct:
            at_least_one_correct_answer = True

        if interaction.default_outcome.dest is not None:
            dest_is_specified = True

        if not at_least_one_correct_answer:
            raise utils.ValidationError(
                'Expected at least one answer group to have a correct ' +
                'answer.'
            )

        if dest_is_specified:
            raise utils.ValidationError(
                'Expected all answer groups to have destination as None.'
            )

        if not interaction.hints:
            raise utils.ValidationError(
                'Expected the question to have at least one hint')

        if (
                (interaction.solution is None) and
                (interaction_specs[interaction.id]['can_have_solution'])):
            raise utils.ValidationError(
                'Expected the question to have a solution'
            )
        # Arguments are presumably (exp_param_specs_dict,
        # allow_null_interaction) — questions have no parameters and must
        # have an interaction. TODO confirm against State.validate.
        self.question_state_data.validate({}, False)

    def validate(self):
        """Validates the Question domain object before it is saved."""
        if not isinstance(self.id, basestring):
            raise utils.ValidationError(
                'Expected ID to be a string, received %s' % self.id)

        if not isinstance(self.version, int):
            raise utils.ValidationError(
                'Expected version to be an integer, received %s' %
                self.version)

        self.partial_validate()

    @classmethod
    def from_dict(cls, question_dict):
        """Returns a Question domain object from dict.

        Returns:
            Question. The corresponding Question domain object.
        """
        question = cls(
            question_dict['id'],
            state_domain.State.from_dict(question_dict['question_state_data']),
            question_dict['question_state_data_schema_version'],
            question_dict['language_code'], question_dict['version'])

        return question

    @classmethod
    def create_default_question(cls, question_id):
        """Returns a Question domain object with default values.

        Args:
            question_id: str. The unique ID of the question.

        Returns:
            Question. A Question domain object with default values.
        """
        default_question_state_data = cls.create_default_question_state()

        return cls(
            question_id, default_question_state_data,
            feconf.CURRENT_STATES_SCHEMA_VERSION,
            constants.DEFAULT_LANGUAGE_CODE, 0)

    def update_language_code(self, language_code):
        """Updates the language code of the question.

        Args:
            language_code: str. The ISO 639-1 code for the language this
                question is written in.
        """
        self.language_code = language_code

    def update_question_state_data(self, question_state_data_dict):
        """Updates the question data of the question.

        Args:
            question_state_data_dict: dict. A dict representing the question
                state data.
        """
        self.question_state_data = state_domain.State.from_dict(
            question_state_data_dict)
class QuestionSummary(object):
    """Domain object for Question Summary."""

    def __init__(
            self, creator_id, question_id, question_content,
            question_model_created_on=None, question_model_last_updated=None):
        """Constructs a Question Summary domain object.

        Args:
            creator_id: str. The user ID of the creator of the question.
            question_id: str. The ID of the question.
            question_content: str. The static HTML of the question shown to
                the learner.
            question_model_created_on: datetime.datetime. Date and time when
                the question model is created.
            question_model_last_updated: datetime.datetime. Date and time
                when the question model was last updated.
        """
        self.id = question_id
        self.creator_id = creator_id
        # Sanitize the HTML snippet before storing it.
        self.question_content = html_cleaner.clean(question_content)
        self.created_on = question_model_created_on
        self.last_updated = question_model_last_updated

    def to_dict(self):
        """Returns a dictionary representation of this domain object.

        Returns:
            dict. A dict representing this QuestionSummary object.
        """
        summary_dict = {
            'id': self.id,
            'creator_id': self.creator_id,
            'question_content': self.question_content,
        }
        # Timestamps are exposed to the frontend as milliseconds since epoch.
        summary_dict['last_updated_msec'] = utils.get_time_in_millisecs(
            self.last_updated)
        summary_dict['created_on_msec'] = utils.get_time_in_millisecs(
            self.created_on)
        return summary_dict
class QuestionSkillLink(object):
    """Domain object for Question Skill Link.

    Attributes:
        question_id: str. The ID of the question.
        skill_id: str. The ID of the skill to which the
            question is linked.
        skill_description: str. The description of the corresponding skill.
    """

    def __init__(
            self, question_id, skill_id, skill_description, skill_difficulty):
        """Constructs a Question Skill Link domain object.

        Args:
            question_id: str. The ID of the question.
            skill_id: str. The ID of the skill to which the question is linked.
            skill_description: str. The description of the corresponding skill.
            skill_difficulty: float. The difficulty between [0, 1] of the skill.
        """
        self.question_id = question_id
        self.skill_id = skill_id
        self.skill_description = skill_description
        self.skill_difficulty = skill_difficulty

    def to_dict(self):
        """Returns a dictionary representation of this domain object.

        Returns:
            dict. A dict representing this QuestionSkillLink object.
        """
        return dict(
            question_id=self.question_id,
            skill_id=self.skill_id,
            skill_description=self.skill_description,
            skill_difficulty=self.skill_difficulty,
        )
class QuestionRights(object):
    """Domain object for question rights."""

    def __init__(self, question_id, creator_id):
        """Constructs a QuestionRights domain object.

        Args:
            question_id: str. The id of the question.
            creator_id: str. The id of the user who has initially created
                the question.
        """
        self.id = question_id
        self.creator_id = creator_id

    def to_dict(self):
        """Returns a dict suitable for use by the frontend.

        Returns:
            dict. A dict representation of QuestionRights suitable for use
            by the frontend.
        """
        return {'question_id': self.id, 'creator_id': self.creator_id}

    def is_creator(self, user_id):
        """Checks whether given user is a creator of the question.

        Args:
            user_id: str or None. ID of the user.

        Returns:
            bool. Whether the user is creator of this question.
        """
        return bool(user_id == self.creator_id)
| 37.782511 | 80 | 0.645837 |
ace51bbda22cdcbce92c7bb2ab31f6cbe968e301 | 2,252 | py | Python | aliyun-python-sdk-alidns/aliyunsdkalidns/request/v20150109/UpdateGtmRecoveryPlanRequest.py | liumihust/aliyun-openapi-python-sdk | c7b5dd4befae4b9c59181654289f9272531207ef | [
"Apache-2.0"
] | null | null | null | aliyun-python-sdk-alidns/aliyunsdkalidns/request/v20150109/UpdateGtmRecoveryPlanRequest.py | liumihust/aliyun-openapi-python-sdk | c7b5dd4befae4b9c59181654289f9272531207ef | [
"Apache-2.0"
] | null | null | null | aliyun-python-sdk-alidns/aliyunsdkalidns/request/v20150109/UpdateGtmRecoveryPlanRequest.py | liumihust/aliyun-openapi-python-sdk | c7b5dd4befae4b9c59181654289f9272531207ef | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkalidns.endpoint import endpoint_data
class UpdateGtmRecoveryPlanRequest(RpcRequest):
    """Request object for the Alidns ``UpdateGtmRecoveryPlan`` API
    (product version 2015-01-09).

    Exposes get/set accessors for each query parameter of the call.
    """

    def __init__(self):
        RpcRequest.__init__(self, 'Alidns', '2015-01-09', 'UpdateGtmRecoveryPlan','alidns')
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_FaultAddrPool(self):
        params = self.get_query_params()
        return params.get('FaultAddrPool')

    def set_FaultAddrPool(self,FaultAddrPool):
        self.add_query_param('FaultAddrPool',FaultAddrPool)

    def get_Remark(self):
        params = self.get_query_params()
        return params.get('Remark')

    def set_Remark(self,Remark):
        self.add_query_param('Remark',Remark)

    def get_UserClientIp(self):
        params = self.get_query_params()
        return params.get('UserClientIp')

    def set_UserClientIp(self,UserClientIp):
        self.add_query_param('UserClientIp',UserClientIp)

    def get_Name(self):
        params = self.get_query_params()
        return params.get('Name')

    def set_Name(self,Name):
        self.add_query_param('Name',Name)

    def get_RecoveryPlanId(self):
        params = self.get_query_params()
        return params.get('RecoveryPlanId')

    def set_RecoveryPlanId(self,RecoveryPlanId):
        self.add_query_param('RecoveryPlanId',RecoveryPlanId)

    def get_Lang(self):
        params = self.get_query_params()
        return params.get('Lang')

    def set_Lang(self,Lang):
        self.add_query_param('Lang',Lang)
ace51be6b52e336ff502b232fd8509e8641605e3 | 31,836 | py | Python | numba/cuda/tests/cudapy/test_atomics.py | marceloFA/numba | 626b40eddc9a29a1e89bfb6b8706e4f390cfcc4a | [
"BSD-2-Clause"
] | null | null | null | numba/cuda/tests/cudapy/test_atomics.py | marceloFA/numba | 626b40eddc9a29a1e89bfb6b8706e4f390cfcc4a | [
"BSD-2-Clause"
] | null | null | null | numba/cuda/tests/cudapy/test_atomics.py | marceloFA/numba | 626b40eddc9a29a1e89bfb6b8706e4f390cfcc4a | [
"BSD-2-Clause"
] | null | null | null | import random
import numpy as np
from textwrap import dedent
from numba import cuda, uint32, uint64, float32, float64
from numba.cuda.testing import unittest, CUDATestCase
from numba.core import config
def cc_X_or_above(major, minor):
    """Return True if the current device's compute capability is at least
    ``(major, minor)``.

    Under the CUDA simulator every capability check passes.
    """
    if config.ENABLE_CUDASIM:
        return True
    device = cuda.current_context().device
    return device.compute_capability >= (major, minor)
def skip_unless_cc_32(fn):
    """Skip the decorated test unless compute capability >= 3.2."""
    decorator = unittest.skipUnless(cc_X_or_above(3, 2), "require cc >= 3.2")
    return decorator(fn)


def skip_unless_cc_50(fn):
    """Skip the decorated test unless compute capability >= 5.0."""
    decorator = unittest.skipUnless(cc_X_or_above(5, 0), "require cc >= 5.0")
    return decorator(fn)
# ---------------------------------------------------------------------------
# Device kernels exercising cuda.atomic.add.  Variants cover uint32 / float32
# / float64 operands, shared vs. global memory destinations, and 1D vs. 2D
# indexing (including a mixed (int, uint64) index tuple).
# ---------------------------------------------------------------------------
# Builds a histogram of ary[tid] % 32 in shared memory via atomic increments,
# then each thread copies its own bin count back out through ary.
def atomic_add(ary):
    tid = cuda.threadIdx.x
    sm = cuda.shared.array(32, uint32)
    sm[tid] = 0
    cuda.syncthreads()
    bin = ary[tid] % 32
    cuda.atomic.add(sm, bin, 1)
    cuda.syncthreads()
    ary[tid] = sm[tid]
# 2D shared-memory increment addressed with an (int, int) index tuple.
def atomic_add2(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    sm = cuda.shared.array((4, 8), uint32)
    sm[tx, ty] = ary[tx, ty]
    cuda.syncthreads()
    cuda.atomic.add(sm, (tx, ty), 1)
    cuda.syncthreads()
    ary[tx, ty] = sm[tx, ty]
# Same as atomic_add2 but with a mixed-width (int, uint64) index tuple.
def atomic_add3(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    sm = cuda.shared.array((4, 8), uint32)
    sm[tx, ty] = ary[tx, ty]
    cuda.syncthreads()
    cuda.atomic.add(sm, (tx, uint64(ty)), 1)
    cuda.syncthreads()
    ary[tx, ty] = sm[tx, ty]
# float32 shared-memory histogram; ary holds float-valued bin ids.
def atomic_add_float(ary):
    tid = cuda.threadIdx.x
    sm = cuda.shared.array(32, float32)
    sm[tid] = 0
    cuda.syncthreads()
    bin = int(ary[tid] % 32)
    cuda.atomic.add(sm, bin, 1.0)
    cuda.syncthreads()
    ary[tid] = sm[tid]
# float32, 2D shared memory, (int, int) index.
def atomic_add_float_2(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    sm = cuda.shared.array((4, 8), float32)
    sm[tx, ty] = ary[tx, ty]
    cuda.syncthreads()
    cuda.atomic.add(sm, (tx, ty), 1)
    cuda.syncthreads()
    ary[tx, ty] = sm[tx, ty]
# float32, 2D shared memory, (int, uint64) index.
def atomic_add_float_3(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    sm = cuda.shared.array((4, 8), float32)
    sm[tx, ty] = ary[tx, ty]
    cuda.syncthreads()
    cuda.atomic.add(sm, (tx, uint64(ty)), 1)
    cuda.syncthreads()
    ary[tx, ty] = sm[tx, ty]
# float64 histogram accumulated directly in global memory.
def atomic_add_double_global(idx, ary):
    tid = cuda.threadIdx.x
    bin = idx[tid] % 32
    cuda.atomic.add(ary, bin, 1.0)
# float64, 2D global memory, (int, int) index.
def atomic_add_double_global_2(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    cuda.atomic.add(ary, (tx, ty), 1)
# float64, 2D global memory, (int, uint64) index.
def atomic_add_double_global_3(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    cuda.atomic.add(ary, (tx, uint64(ty)), 1)
# float64 histogram accumulated in shared memory, then copied out.
def atomic_add_double(idx, ary):
    tid = cuda.threadIdx.x
    sm = cuda.shared.array(32, float64)
    sm[tid] = 0.0
    cuda.syncthreads()
    bin = idx[tid] % 32
    cuda.atomic.add(sm, bin, 1.0)
    cuda.syncthreads()
    ary[tid] = sm[tid]
# float64, 2D shared memory, (int, int) index.
def atomic_add_double_2(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    sm = cuda.shared.array((4, 8), float64)
    sm[tx, ty] = ary[tx, ty]
    cuda.syncthreads()
    cuda.atomic.add(sm, (tx, ty), 1)
    cuda.syncthreads()
    ary[tx, ty] = sm[tx, ty]
# float64, 2D shared memory, (int, uint64) index.
def atomic_add_double_3(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    sm = cuda.shared.array((4, 8), float64)
    sm[tx, ty] = ary[tx, ty]
    cuda.syncthreads()
    cuda.atomic.add(sm, (tx, uint64(ty)), 1)
    cuda.syncthreads()
    ary[tx, ty] = sm[tx, ty]
# ---------------------------------------------------------------------------
# Device kernels exercising cuda.atomic.sub — mirror images of the add
# kernels above (same dtype / memory-space / indexing variants).
# ---------------------------------------------------------------------------
# Negative histogram of ary[tid] % 32 built with atomic decrements in
# shared memory.
def atomic_sub(ary):
    tid = cuda.threadIdx.x
    sm = cuda.shared.array(32, uint32)
    sm[tid] = 0
    cuda.syncthreads()
    bin = ary[tid] % 32
    cuda.atomic.sub(sm, bin, 1)
    cuda.syncthreads()
    ary[tid] = sm[tid]
# 2D shared-memory decrement, (int, int) index.
def atomic_sub2(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    sm = cuda.shared.array((4, 8), uint32)
    sm[tx, ty] = ary[tx, ty]
    cuda.syncthreads()
    cuda.atomic.sub(sm, (tx, ty), 1)
    cuda.syncthreads()
    ary[tx, ty] = sm[tx, ty]
# 2D shared-memory decrement, (int, uint64) index.
def atomic_sub3(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    sm = cuda.shared.array((4, 8), uint32)
    sm[tx, ty] = ary[tx, ty]
    cuda.syncthreads()
    cuda.atomic.sub(sm, (tx, uint64(ty)), 1)
    cuda.syncthreads()
    ary[tx, ty] = sm[tx, ty]
# float32 negative histogram in shared memory.
def atomic_sub_float(ary):
    tid = cuda.threadIdx.x
    sm = cuda.shared.array(32, float32)
    sm[tid] = 0
    cuda.syncthreads()
    bin = int(ary[tid] % 32)
    cuda.atomic.sub(sm, bin, 1.0)
    cuda.syncthreads()
    ary[tid] = sm[tid]
# float32, 2D shared memory, (int, int) index.
def atomic_sub_float_2(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    sm = cuda.shared.array((4, 8), float32)
    sm[tx, ty] = ary[tx, ty]
    cuda.syncthreads()
    cuda.atomic.sub(sm, (tx, ty), 1)
    cuda.syncthreads()
    ary[tx, ty] = sm[tx, ty]
# float32, 2D shared memory, (int, uint64) index.
def atomic_sub_float_3(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    sm = cuda.shared.array((4, 8), float32)
    sm[tx, ty] = ary[tx, ty]
    cuda.syncthreads()
    cuda.atomic.sub(sm, (tx, uint64(ty)), 1)
    cuda.syncthreads()
    ary[tx, ty] = sm[tx, ty]
# float64 negative histogram in shared memory.
def atomic_sub_double(idx, ary):
    tid = cuda.threadIdx.x
    sm = cuda.shared.array(32, float64)
    sm[tid] = 0.0
    cuda.syncthreads()
    bin = idx[tid] % 32
    cuda.atomic.sub(sm, bin, 1.0)
    cuda.syncthreads()
    ary[tid] = sm[tid]
# float64, 2D shared memory, (int, int) index.
def atomic_sub_double_2(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    sm = cuda.shared.array((4, 8), float64)
    sm[tx, ty] = ary[tx, ty]
    cuda.syncthreads()
    cuda.atomic.sub(sm, (tx, ty), 1)
    cuda.syncthreads()
    ary[tx, ty] = sm[tx, ty]
# float64, 2D shared memory, (int, uint64) index.
def atomic_sub_double_3(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    sm = cuda.shared.array((4, 8), float64)
    sm[tx, ty] = ary[tx, ty]
    cuda.syncthreads()
    cuda.atomic.sub(sm, (tx, uint64(ty)), 1)
    cuda.syncthreads()
    ary[tx, ty] = sm[tx, ty]
# float64 negative histogram accumulated directly in global memory.
def atomic_sub_double_global(idx, ary):
    tid = cuda.threadIdx.x
    bin = idx[tid] % 32
    cuda.atomic.sub(ary, bin, 1.0)
# float64, 2D global memory, (int, int) index.
def atomic_sub_double_global_2(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    cuda.atomic.sub(ary, (tx, ty), 1)
# float64, 2D global memory, (int, uint64) index.
def atomic_sub_double_global_3(ary):
    tx = cuda.threadIdx.x
    ty = cuda.threadIdx.y
    cuda.atomic.sub(ary, (tx, uint64(ty)), 1)
# Generates four kernels applying *func* — the dotted name of an extreme
# atomic operation, e.g. 'cuda.atomic.max' — to element 0 of ``res``.  The
# kernel sources are built from a template and exec'd so the same shapes can
# be reused for max / min / nanmax / nanmin.
def gen_atomic_extreme_funcs(func):
    fns = dedent(
        """
        def atomic(res, ary):
            tx = cuda.threadIdx.x
            bx = cuda.blockIdx.x
            {func}(res, 0, ary[tx, bx])
        def atomic_double_normalizedindex(res, ary):
            tx = cuda.threadIdx.x
            bx = cuda.blockIdx.x
            {func}(res, 0, ary[tx, uint64(bx)])
        def atomic_double_oneindex(res, ary):
            tx = cuda.threadIdx.x
            {func}(res, 0, ary[tx])
        def atomic_double_shared(res, ary):
            tid = cuda.threadIdx.x
            smary = cuda.shared.array(32, float64)
            smary[tid] = ary[tid]
            smres = cuda.shared.array(1, float64)
            if tid == 0:
                smres[0] = res[0]
            cuda.syncthreads()
            {func}(smres, 0, smary[tid])
            cuda.syncthreads()
            if tid == 0:
                res[0] = smres[0]
        """).format(func=func)
    # exec the generated source; only the names used inside the template are
    # provided as globals.
    ld = {}
    exec(fns, {'cuda': cuda, 'float64': float64, 'uint64': uint64}, ld)
    return (ld['atomic'], ld['atomic_double_normalizedindex'],
            ld['atomic_double_oneindex'], ld['atomic_double_shared'])
# Instantiate the kernel quartet for each extreme operation under test.
(atomic_max, atomic_max_double_normalizedindex, atomic_max_double_oneindex,
 atomic_max_double_shared) = gen_atomic_extreme_funcs('cuda.atomic.max')
(atomic_min, atomic_min_double_normalizedindex, atomic_min_double_oneindex,
 atomic_min_double_shared) = gen_atomic_extreme_funcs('cuda.atomic.min')
(atomic_nanmax, atomic_nanmax_double_normalizedindex,
 atomic_nanmax_double_oneindex, atomic_nanmax_double_shared) = \
    gen_atomic_extreme_funcs('cuda.atomic.nanmax')
(atomic_nanmin, atomic_nanmin_double_normalizedindex,
 atomic_nanmin_double_oneindex, atomic_nanmin_double_shared) = \
    gen_atomic_extreme_funcs('cuda.atomic.nanmin')
# Each in-range thread CASes its own element: the slice res[gid:] makes index
# 0 of the view correspond to res[gid], which is swapped to ary[gid] if it
# currently holds -99.  The previously stored value is recorded in old[gid].
def atomic_compare_and_swap(res, old, ary):
    gid = cuda.grid(1)
    if gid < res.size:
        out = cuda.atomic.compare_and_swap(res[gid:], -99, ary[gid])
        old[gid] = out
class TestCudaAtomics(CUDATestCase):
    """Tests for the ``cuda.atomic`` operations.

    Covers add/sub on uint32/float32/float64 in shared and global memory,
    max/min/nanmax/nanmin across integer and float dtypes (including NaN
    handling), compare_and_swap, and the value returned by each operation.
    Expected results are computed on the host with NumPy.
    """
    def test_atomic_add(self):
        ary = np.random.randint(0, 32, size=32).astype(np.uint32)
        orig = ary.copy()
        cuda_atomic_add = cuda.jit('void(uint32[:])')(atomic_add)
        cuda_atomic_add[1, 32](ary)
        # Host-side reference histogram.
        gold = np.zeros(32, dtype=np.uint32)
        for i in range(orig.size):
            gold[orig[i]] += 1
        self.assertTrue(np.all(ary == gold))
    def test_atomic_add2(self):
        ary = np.random.randint(0, 32, size=32).astype(np.uint32).reshape(4, 8)
        orig = ary.copy()
        cuda_atomic_add2 = cuda.jit('void(uint32[:,:])')(atomic_add2)
        cuda_atomic_add2[1, (4, 8)](ary)
        self.assertTrue(np.all(ary == orig + 1))
    def test_atomic_add3(self):
        ary = np.random.randint(0, 32, size=32).astype(np.uint32).reshape(4, 8)
        orig = ary.copy()
        cuda_atomic_add3 = cuda.jit('void(uint32[:,:])')(atomic_add3)
        cuda_atomic_add3[1, (4, 8)](ary)
        self.assertTrue(np.all(ary == orig + 1))
    def test_atomic_add_float(self):
        ary = np.random.randint(0, 32, size=32).astype(np.float32)
        orig = ary.copy().astype(np.intp)
        cuda_atomic_add_float = cuda.jit('void(float32[:])')(atomic_add_float)
        cuda_atomic_add_float[1, 32](ary)
        gold = np.zeros(32, dtype=np.uint32)
        for i in range(orig.size):
            gold[orig[i]] += 1.0
        self.assertTrue(np.all(ary == gold))
    def test_atomic_add_float_2(self):
        ary = np.random.randint(0, 32, size=32).astype(np.float32).reshape(4, 8)
        orig = ary.copy()
        cuda_atomic_add2 = cuda.jit('void(float32[:,:])')(atomic_add_float_2)
        cuda_atomic_add2[1, (4, 8)](ary)
        self.assertTrue(np.all(ary == orig + 1))
    def test_atomic_add_float_3(self):
        ary = np.random.randint(0, 32, size=32).astype(np.float32).reshape(4, 8)
        orig = ary.copy()
        cuda_atomic_add3 = cuda.jit('void(float32[:,:])')(atomic_add_float_3)
        cuda_atomic_add3[1, (4, 8)](ary)
        self.assertTrue(np.all(ary == orig + 1))
    def assertCorrectFloat64Atomics(self, kernel, shared=True):
        """Asserts that the PTX for *kernel* uses the expected float64
        atomic instruction: a native atom.add.f64 on cc >= 6.0, or the
        CAS-loop fallback (atom.cas.b64) on older devices.
        """
        if config.ENABLE_CUDASIM:
            return
        asm = kernel.inspect_asm()
        if cc_X_or_above(6, 0):
            if shared:
                self.assertIn('atom.shared.add.f64', asm)
            else:
                self.assertIn('atom.add.f64', asm)
        else:
            if shared:
                self.assertIn('atom.shared.cas.b64', asm)
            else:
                self.assertIn('atom.cas.b64', asm)
    @skip_unless_cc_50
    def test_atomic_add_double(self):
        idx = np.random.randint(0, 32, size=32, dtype=np.int64)
        ary = np.zeros(32, np.float64)
        cuda_func = cuda.jit('void(int64[:], float64[:])')(atomic_add_double)
        cuda_func[1, 32](idx, ary)
        gold = np.zeros(32, dtype=np.uint32)
        for i in range(idx.size):
            gold[idx[i]] += 1.0
        np.testing.assert_equal(ary, gold)
        self.assertCorrectFloat64Atomics(cuda_func)
    def test_atomic_add_double_2(self):
        ary = np.random.randint(0, 32, size=32).astype(np.float64).reshape(4, 8)
        orig = ary.copy()
        cuda_func = cuda.jit('void(float64[:,:])')(atomic_add_double_2)
        cuda_func[1, (4, 8)](ary)
        np.testing.assert_equal(ary, orig + 1)
        self.assertCorrectFloat64Atomics(cuda_func)
    def test_atomic_add_double_3(self):
        ary = np.random.randint(0, 32, size=32).astype(np.float64).reshape(4, 8)
        orig = ary.copy()
        cuda_func = cuda.jit('void(float64[:,:])')(atomic_add_double_3)
        cuda_func[1, (4, 8)](ary)
        np.testing.assert_equal(ary, orig + 1)
        self.assertCorrectFloat64Atomics(cuda_func)
    @skip_unless_cc_50
    def test_atomic_add_double_global(self):
        idx = np.random.randint(0, 32, size=32, dtype=np.int64)
        ary = np.zeros(32, np.float64)
        cuda_func = cuda.jit('void(int64[:], float64[:])')(atomic_add_double_global)
        cuda_func[1, 32](idx, ary)
        gold = np.zeros(32, dtype=np.uint32)
        for i in range(idx.size):
            gold[idx[i]] += 1.0
        np.testing.assert_equal(ary, gold)
        self.assertCorrectFloat64Atomics(cuda_func, shared=False)
    def test_atomic_add_double_global_2(self):
        ary = np.random.randint(0, 32, size=32).astype(np.float64).reshape(4, 8)
        orig = ary.copy()
        cuda_func = cuda.jit('void(float64[:,:])')(atomic_add_double_global_2)
        cuda_func[1, (4, 8)](ary)
        np.testing.assert_equal(ary, orig + 1)
        self.assertCorrectFloat64Atomics(cuda_func, shared=False)
    def test_atomic_add_double_global_3(self):
        ary = np.random.randint(0, 32, size=32).astype(np.float64).reshape(4, 8)
        orig = ary.copy()
        cuda_func = cuda.jit('void(float64[:,:])')(atomic_add_double_global_3)
        cuda_func[1, (4, 8)](ary)
        np.testing.assert_equal(ary, orig + 1)
        self.assertCorrectFloat64Atomics(cuda_func, shared=False)
    def test_atomic_sub(self):
        ary = np.random.randint(0, 32, size=32).astype(np.uint32)
        orig = ary.copy()
        cuda_atomic_sub = cuda.jit('void(uint32[:])')(atomic_sub)
        cuda_atomic_sub[1, 32](ary)
        gold = np.zeros(32, dtype=np.uint32)
        for i in range(orig.size):
            gold[orig[i]] -= 1
        self.assertTrue(np.all(ary == gold))
    def test_atomic_sub2(self):
        ary = np.random.randint(0, 32, size=32).astype(np.uint32).reshape(4, 8)
        orig = ary.copy()
        cuda_atomic_sub2 = cuda.jit('void(uint32[:,:])')(atomic_sub2)
        cuda_atomic_sub2[1, (4, 8)](ary)
        self.assertTrue(np.all(ary == orig - 1))
    def test_atomic_sub3(self):
        ary = np.random.randint(0, 32, size=32).astype(np.uint32).reshape(4, 8)
        orig = ary.copy()
        cuda_atomic_sub3 = cuda.jit('void(uint32[:,:])')(atomic_sub3)
        cuda_atomic_sub3[1, (4, 8)](ary)
        self.assertTrue(np.all(ary == orig - 1))
    def test_atomic_sub_float(self):
        ary = np.random.randint(0, 32, size=32).astype(np.float32)
        orig = ary.copy().astype(np.intp)
        cuda_atomic_sub_float = cuda.jit('void(float32[:])')(atomic_sub_float)
        cuda_atomic_sub_float[1, 32](ary)
        gold = np.zeros(32, dtype=np.float32)
        for i in range(orig.size):
            gold[orig[i]] -= 1.0
        self.assertTrue(np.all(ary == gold))
    def test_atomic_sub_float_2(self):
        ary = np.random.randint(0, 32, size=32).astype(np.float32).reshape(4, 8)
        orig = ary.copy()
        cuda_atomic_sub2 = cuda.jit('void(float32[:,:])')(atomic_sub_float_2)
        cuda_atomic_sub2[1, (4, 8)](ary)
        self.assertTrue(np.all(ary == orig - 1))
    def test_atomic_sub_float_3(self):
        ary = np.random.randint(0, 32, size=32).astype(np.float32).reshape(4, 8)
        orig = ary.copy()
        cuda_atomic_sub3 = cuda.jit('void(float32[:,:])')(atomic_sub_float_3)
        cuda_atomic_sub3[1, (4, 8)](ary)
        self.assertTrue(np.all(ary == orig - 1))
    def test_atomic_sub_double(self):
        idx = np.random.randint(0, 32, size=32, dtype=np.int64)
        ary = np.zeros(32, np.float64)
        cuda_func = cuda.jit('void(int64[:], float64[:])')(atomic_sub_double)
        cuda_func[1, 32](idx, ary)
        gold = np.zeros(32, dtype=np.float64)
        for i in range(idx.size):
            gold[idx[i]] -= 1.0
        np.testing.assert_equal(ary, gold)
    def test_atomic_sub_double_2(self):
        ary = np.random.randint(0, 32, size=32).astype(np.float64).reshape(4, 8)
        orig = ary.copy()
        cuda_func = cuda.jit('void(float64[:,:])')(atomic_sub_double_2)
        cuda_func[1, (4, 8)](ary)
        np.testing.assert_equal(ary, orig - 1)
    def test_atomic_sub_double_3(self):
        ary = np.random.randint(0, 32, size=32).astype(np.float64).reshape(4, 8)
        orig = ary.copy()
        cuda_func = cuda.jit('void(float64[:,:])')(atomic_sub_double_3)
        cuda_func[1, (4, 8)](ary)
        np.testing.assert_equal(ary, orig - 1)
    def test_atomic_sub_double_global(self):
        idx = np.random.randint(0, 32, size=32, dtype=np.int64)
        ary = np.zeros(32, np.float64)
        cuda_func = cuda.jit('void(int64[:], float64[:])')(atomic_sub_double_global)
        cuda_func[1, 32](idx, ary)
        gold = np.zeros(32, dtype=np.float64)
        for i in range(idx.size):
            gold[idx[i]] -= 1.0
        np.testing.assert_equal(ary, gold)
    def test_atomic_sub_double_global_2(self):
        ary = np.random.randint(0, 32, size=32).astype(np.float64).reshape(4, 8)
        orig = ary.copy()
        cuda_func = cuda.jit('void(float64[:,:])')(atomic_sub_double_global_2)
        cuda_func[1, (4, 8)](ary)
        np.testing.assert_equal(ary, orig - 1)
    def test_atomic_sub_double_global_3(self):
        ary = np.random.randint(0, 32, size=32).astype(np.float64).reshape(4, 8)
        orig = ary.copy()
        cuda_func = cuda.jit('void(float64[:,:])')(atomic_sub_double_global_3)
        cuda_func[1, (4, 8)](ary)
        np.testing.assert_equal(ary, orig - 1)
    def check_atomic_max(self, dtype, lo, hi):
        """Runs the atomic_max kernel over random values in [lo, hi) and
        compares with np.max."""
        vals = np.random.randint(lo, hi, size=(32, 32)).astype(dtype)
        res = np.zeros(1, dtype=vals.dtype)
        cuda_func = cuda.jit(atomic_max)
        cuda_func[32, 32](res, vals)
        gold = np.max(vals)
        np.testing.assert_equal(res, gold)
    def test_atomic_max_int32(self):
        self.check_atomic_max(dtype=np.int32, lo=-65535, hi=65535)
    def test_atomic_max_uint32(self):
        self.check_atomic_max(dtype=np.uint32, lo=0, hi=65535)
    @skip_unless_cc_32
    def test_atomic_max_int64(self):
        self.check_atomic_max(dtype=np.int64, lo=-65535, hi=65535)
    @skip_unless_cc_32
    def test_atomic_max_uint64(self):
        self.check_atomic_max(dtype=np.uint64, lo=0, hi=65535)
    def test_atomic_max_float32(self):
        self.check_atomic_max(dtype=np.float32, lo=-65535, hi=65535)
    def test_atomic_max_double(self):
        self.check_atomic_max(dtype=np.float64, lo=-65535, hi=65535)
    def test_atomic_max_double_normalizedindex(self):
        vals = np.random.randint(0, 65535, size=(32, 32)).astype(np.float64)
        res = np.zeros(1, np.float64)
        cuda_func = cuda.jit('void(float64[:], float64[:,:])')(
            atomic_max_double_normalizedindex)
        cuda_func[32, 32](res, vals)
        gold = np.max(vals)
        np.testing.assert_equal(res, gold)
    def test_atomic_max_double_oneindex(self):
        vals = np.random.randint(0, 128, size=32).astype(np.float64)
        res = np.zeros(1, np.float64)
        cuda_func = cuda.jit('void(float64[:], float64[:])')(
            atomic_max_double_oneindex)
        cuda_func[1, 32](res, vals)
        gold = np.max(vals)
        np.testing.assert_equal(res, gold)
    def check_atomic_min(self, dtype, lo, hi):
        """Runs the atomic_min kernel over random values in [lo, hi) and
        compares with np.min; res starts at the maximum possible value."""
        vals = np.random.randint(lo, hi, size=(32, 32)).astype(dtype)
        res = np.array([65535], dtype=vals.dtype)
        cuda_func = cuda.jit(atomic_min)
        cuda_func[32, 32](res, vals)
        gold = np.min(vals)
        np.testing.assert_equal(res, gold)
    def test_atomic_min_int32(self):
        self.check_atomic_min(dtype=np.int32, lo=-65535, hi=65535)
    def test_atomic_min_uint32(self):
        self.check_atomic_min(dtype=np.uint32, lo=0, hi=65535)
    @skip_unless_cc_32
    def test_atomic_min_int64(self):
        self.check_atomic_min(dtype=np.int64, lo=-65535, hi=65535)
    @skip_unless_cc_32
    def test_atomic_min_uint64(self):
        self.check_atomic_min(dtype=np.uint64, lo=0, hi=65535)
    def test_atomic_min_float(self):
        self.check_atomic_min(dtype=np.float32, lo=-65535, hi=65535)
    def test_atomic_min_double(self):
        self.check_atomic_min(dtype=np.float64, lo=-65535, hi=65535)
    def test_atomic_min_double_normalizedindex(self):
        vals = np.random.randint(0, 65535, size=(32, 32)).astype(np.float64)
        res = np.ones(1, np.float64) * 65535
        cuda_func = cuda.jit('void(float64[:], float64[:,:])')(
            atomic_min_double_normalizedindex)
        cuda_func[32, 32](res, vals)
        gold = np.min(vals)
        np.testing.assert_equal(res, gold)
    def test_atomic_min_double_oneindex(self):
        vals = np.random.randint(0, 128, size=32).astype(np.float64)
        res = np.ones(1, np.float64) * 128
        cuda_func = cuda.jit('void(float64[:], float64[:])')(
            atomic_min_double_oneindex)
        cuda_func[1, 32](res, vals)
        gold = np.min(vals)
        np.testing.assert_equal(res, gold)
    # Taken together, _test_atomic_minmax_nan_location and
    # _test_atomic_minmax_nan_val check that NaNs are treated similarly to the
    # way they are in Python / NumPy - that is, {min,max}(a, b) == a if either
    # a or b is a NaN. For the atomics, this means that the max is taken as the
    # value stored in the memory location rather than the value supplied - i.e.
    # for:
    #
    #    cuda.atomic.{min,max}(ary, idx, val)
    #
    # the result will be ary[idx] for either of ary[idx] or val being NaN.
    def _test_atomic_minmax_nan_location(self, func):
        """Checks a NaN stored at the target location is never replaced."""
        cuda_func = cuda.jit('void(float64[:], float64[:,:])')(func)
        vals = np.random.randint(0, 128, size=(1,1)).astype(np.float64)
        res = np.zeros(1, np.float64) + np.nan
        cuda_func[1, 1](res, vals)
        np.testing.assert_equal(res, [np.nan])
    def _test_atomic_minmax_nan_val(self, func):
        """Checks a NaN operand never replaces the stored value."""
        cuda_func = cuda.jit('void(float64[:], float64[:,:])')(func)
        res = np.random.randint(0, 128, size=1).astype(np.float64)
        gold = res.copy()
        vals = np.zeros((1, 1), np.float64) + np.nan
        cuda_func[1, 1](res, vals)
        np.testing.assert_equal(res, gold)
    def test_atomic_min_nan_location(self):
        self._test_atomic_minmax_nan_location(atomic_min)
    def test_atomic_max_nan_location(self):
        self._test_atomic_minmax_nan_location(atomic_max)
    def test_atomic_min_nan_val(self):
        self._test_atomic_minmax_nan_val(atomic_min)
    def test_atomic_max_nan_val(self):
        self._test_atomic_minmax_nan_val(atomic_max)
    def test_atomic_max_double_shared(self):
        vals = np.random.randint(0, 32, size=32).astype(np.float64)
        res = np.zeros(1, np.float64)
        cuda_func = cuda.jit('void(float64[:], float64[:])')(atomic_max_double_shared)
        cuda_func[1, 32](res, vals)
        gold = np.max(vals)
        np.testing.assert_equal(res, gold)
    def test_atomic_min_double_shared(self):
        vals = np.random.randint(0, 32, size=32).astype(np.float64)
        res = np.ones(1, np.float64) * 32
        cuda_func = cuda.jit('void(float64[:], float64[:])')(atomic_min_double_shared)
        cuda_func[1, 32](res, vals)
        gold = np.min(vals)
        np.testing.assert_equal(res, gold)
    def test_atomic_compare_and_swap(self):
        # Half the slots hold the sentinel -99 (eligible for swap), half -1.
        n = 100
        res = [-99] * (n // 2) + [-1] * (n // 2)
        random.shuffle(res)
        res = np.asarray(res, dtype=np.int32)
        out = np.zeros_like(res)
        ary = np.random.randint(1, 10, size=res.size).astype(res.dtype)
        fill_mask = res == -99
        unfill_mask = res == -1
        expect_res = np.zeros_like(res)
        expect_res[fill_mask] = ary[fill_mask]
        expect_res[unfill_mask] = -1
        expect_out = np.zeros_like(out)
        expect_out[fill_mask] = res[fill_mask]
        expect_out[unfill_mask] = -1
        cuda_func = cuda.jit(atomic_compare_and_swap)
        cuda_func[10, 10](res, out, ary)
        np.testing.assert_array_equal(expect_res, res)
        np.testing.assert_array_equal(expect_out, out)
    # Tests that the atomic add, min, and max operations return the old value -
    # in the simulator, they did not (see Issue #5458). The max and min have
    # special handling for NaN values, so we explicitly test with a NaN in the
    # array being modified and the value provided.
    def _test_atomic_returns_old(self, kernel, initial):
        """Runs *kernel* on [initial, 0] and asserts x[1] received the
        pre-operation value of x[0]."""
        x = np.zeros(2, dtype=np.float32)
        x[0] = initial
        kernel[1, 1](x)
        if np.isnan(initial):
            self.assertTrue(np.isnan(x[1]))
        else:
            self.assertEqual(x[1], initial)
    def test_atomic_add_returns_old(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.add(x, 0, 1)
        self._test_atomic_returns_old(kernel, 10)
    def test_atomic_max_returns_no_replace(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.max(x, 0, 1)
        self._test_atomic_returns_old(kernel, 10)
    def test_atomic_max_returns_old_replace(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.max(x, 0, 10)
        self._test_atomic_returns_old(kernel, 1)
    def test_atomic_max_returns_old_nan_in_array(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.max(x, 0, 1)
        self._test_atomic_returns_old(kernel, np.nan)
    def test_atomic_max_returns_old_nan_val(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.max(x, 0, np.nan)
        self._test_atomic_returns_old(kernel, 10)
    def test_atomic_min_returns_old_no_replace(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.min(x, 0, 11)
        self._test_atomic_returns_old(kernel, 10)
    def test_atomic_min_returns_old_replace(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.min(x, 0, 10)
        self._test_atomic_returns_old(kernel, 11)
    def test_atomic_min_returns_old_nan_in_array(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.min(x, 0, 11)
        self._test_atomic_returns_old(kernel, np.nan)
    def test_atomic_min_returns_old_nan_val(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.min(x, 0, np.nan)
        self._test_atomic_returns_old(kernel, 11)
    # Tests for atomic nanmin/nanmax
    # nanmax tests
    def check_atomic_nanmax(self, dtype, lo, hi):
        """Runs atomic_nanmax over values with every other entry NaN and
        compares with np.nanmax."""
        vals = np.random.randint(lo, hi, size=(32, 32)).astype(dtype)
        vals[1::2] = np.nan
        res = np.zeros(1, dtype=vals.dtype)
        cuda_func = cuda.jit(atomic_nanmax)
        cuda_func[32, 32](res, vals)
        gold = np.nanmax(vals)
        np.testing.assert_equal(res, gold)
    def test_atomic_nanmax_int32(self):
        self.check_atomic_nanmax(dtype=np.int32, lo=-65535, hi=65535)
    def test_atomic_nanmax_uint32(self):
        self.check_atomic_nanmax(dtype=np.uint32, lo=0, hi=65535)
    @skip_unless_cc_32
    def test_atomic_nanmax_int64(self):
        self.check_atomic_nanmax(dtype=np.int64, lo=-65535, hi=65535)
    @skip_unless_cc_32
    def test_atomic_nanmax_uint64(self):
        self.check_atomic_nanmax(dtype=np.uint64, lo=0, hi=65535)
    def test_atomic_nanmax_float32(self):
        self.check_atomic_nanmax(dtype=np.float32, lo=-65535, hi=65535)
    def test_atomic_nanmax_double(self):
        self.check_atomic_nanmax(dtype=np.float64, lo=-65535, hi=65535)
    def test_atomic_nanmax_double_shared(self):
        vals = np.random.randint(0, 32, size=32).astype(np.float64)
        vals[1::2] = np.nan
        res = np.array([0], dtype=vals.dtype)
        cuda_func = cuda.jit('void(float64[:], float64[:])')(atomic_nanmax_double_shared)
        cuda_func[1, 32](res, vals)
        gold = np.nanmax(vals)
        np.testing.assert_equal(res, gold)
    def test_atomic_nanmax_double_oneindex(self):
        # Uses the plain max kernel: per the comment above, cuda.atomic.max
        # never replaces the stored value with a NaN operand, so with a
        # non-NaN initial res the result matches np.nanmax here.
        vals = np.random.randint(0, 128, size=32).astype(np.float64)
        vals[1::2] = np.nan
        res = np.zeros(1, np.float64)
        cuda_func = cuda.jit('void(float64[:], float64[:])')(
            atomic_max_double_oneindex)
        cuda_func[1, 32](res, vals)
        gold = np.nanmax(vals)
        np.testing.assert_equal(res, gold)
    # nanmin tests
    def check_atomic_nanmin(self, dtype, lo, hi):
        """Runs atomic_nanmin over values with every other entry NaN and
        compares with np.nanmin."""
        vals = np.random.randint(lo, hi, size=(32, 32)).astype(dtype)
        vals[1::2] = np.nan
        res = np.array([65535], dtype=vals.dtype)
        cuda_func = cuda.jit(atomic_nanmin)
        cuda_func[32, 32](res, vals)
        gold = np.nanmin(vals)
        np.testing.assert_equal(res, gold)
    def test_atomic_nanmin_int32(self):
        self.check_atomic_nanmin(dtype=np.int32, lo=-65535, hi=65535)
    def test_atomic_nanmin_uint32(self):
        self.check_atomic_nanmin(dtype=np.uint32, lo=0, hi=65535)
    @skip_unless_cc_32
    def test_atomic_nanmin_int64(self):
        self.check_atomic_nanmin(dtype=np.int64, lo=-65535, hi=65535)
    @skip_unless_cc_32
    def test_atomic_nanmin_uint64(self):
        self.check_atomic_nanmin(dtype=np.uint64, lo=0, hi=65535)
    def test_atomic_nanmin_float(self):
        self.check_atomic_nanmin(dtype=np.float32, lo=-65535, hi=65535)
    def test_atomic_nanmin_double(self):
        self.check_atomic_nanmin(dtype=np.float64, lo=-65535, hi=65535)
    def test_atomic_nanmin_double_shared(self):
        vals = np.random.randint(0, 32, size=32).astype(np.float64)
        vals[1::2] = np.nan
        res = np.array([32], dtype=vals.dtype)
        cuda_func = cuda.jit('void(float64[:], float64[:])')(atomic_nanmin_double_shared)
        cuda_func[1, 32](res, vals)
        gold = np.nanmin(vals)
        np.testing.assert_equal(res, gold)
    def test_atomic_nanmin_double_oneindex(self):
        # Uses the plain min kernel; see test_atomic_nanmax_double_oneindex.
        vals = np.random.randint(0, 128, size=32).astype(np.float64)
        vals[1::2] = np.nan
        res = np.array([128], np.float64)
        cuda_func = cuda.jit('void(float64[:], float64[:])')(
            atomic_min_double_oneindex)
        cuda_func[1, 32](res, vals)
        gold = np.nanmin(vals)
        np.testing.assert_equal(res, gold)
    # Returning old value tests
    def _test_atomic_nan_returns_old(self, kernel, initial):
        """Like _test_atomic_returns_old, but x[1] starts as NaN so the
        NaN-aware operations' return value can be distinguished."""
        x = np.zeros(2, dtype=np.float32)
        x[0] = initial
        x[1] = np.nan
        kernel[1, 1](x)
        if np.isnan(initial):
            self.assertFalse(np.isnan(x[0]))
            self.assertTrue(np.isnan(x[1]))
        else:
            self.assertEqual(x[1], initial)
    def test_atomic_nanmax_returns_old_no_replace(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.nanmax(x, 0, 1)
        self._test_atomic_nan_returns_old(kernel, 10)
    def test_atomic_nanmax_returns_old_replace(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.nanmax(x, 0, 10)
        self._test_atomic_nan_returns_old(kernel, 1)
    def test_atomic_nanmax_returns_old_nan_in_array(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.nanmax(x, 0, 1)
        self._test_atomic_nan_returns_old(kernel, np.nan)
    def test_atomic_nanmax_returns_old_nan_val(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.nanmax(x, 0, np.nan)
        self._test_atomic_nan_returns_old(kernel, 10)
    def test_atomic_nanmin_returns_old_no_replace(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.nanmin(x, 0, 11)
        self._test_atomic_nan_returns_old(kernel, 10)
    def test_atomic_nanmin_returns_old_replace(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.nanmin(x, 0, 10)
        self._test_atomic_nan_returns_old(kernel, 11)
    def test_atomic_nanmin_returns_old_nan_in_array(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.nanmin(x, 0, 11)
        self._test_atomic_nan_returns_old(kernel, np.nan)
    def test_atomic_nanmin_returns_old_nan_val(self):
        @cuda.jit
        def kernel(x):
            x[1] = cuda.atomic.nanmin(x, 0, np.nan)
        self._test_atomic_nan_returns_old(kernel, 11)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 32.652308 | 89 | 0.623539 |
ace51c614985a0943ef1a7a01206a594c5edc6e6 | 3,575 | py | Python | imap2gmail.py | plbrault/Imap2Gmail | 9dfd82aacae274d3eaa76c5611336510edd2ff53 | [
"MIT"
] | null | null | null | imap2gmail.py | plbrault/Imap2Gmail | 9dfd82aacae274d3eaa76c5611336510edd2ff53 | [
"MIT"
] | null | null | null | imap2gmail.py | plbrault/Imap2Gmail | 9dfd82aacae274d3eaa76c5611336510edd2ff53 | [
"MIT"
] | 1 | 2022-02-18T19:40:37.000Z | 2022-02-18T19:40:37.000Z | """
Copyright (c) Pier-Luc Brault
plbrault@gmail.com
This file is part of Imap2Gmail.
Please read LICENSE.txt for complete copyright and license notice.
"""
from imap_fetcher import ImapFetcher
import smtplib
from datetime import datetime
import settings
def log(text):
"""Print a string to system output with current date and time"""
print("[%0s] %1s" % (datetime.now().isoformat(), text))
###### MAIN ######
log("************************************")
log("A new execution begins")
# Settings file validation
log("Validating settings file")
valid_settings = True
if not hasattr(settings, 'IMAP_ACCOUNTS'):
log("IMAP_ACCOUNTS setting is missing")
valid_settings = False
pass
elif not hasattr(settings, 'MAIL_TO'):
log("MAIL_TO setting is missing")
valid_settings = False
pass
elif not hasattr(settings, 'SMTP_SERVER'):
log("SMTP_SERVER setting is missing")
valid_settings = False
pass
elif type(settings.IMAP_ACCOUNTS) != list:
log("IMAP_ACCOUNTS setting must be a list")
valid_settings = False
elif type(settings.MAIL_TO) != str:
log("MAIL_TO setting must be a string")
valid_settings = False
elif type(settings.SMTP_SERVER) != dict:
log("SMTP_SERVER setting must be a dictionary")
valid_settings = False
elif ("host" not in settings.SMTP_SERVER
or "port" not in settings.SMTP_SERVER
or type(settings.SMTP_SERVER["host"]) != str
or type(settings.SMTP_SERVER["port"]) != int):
log("SMTP_SERVER setting is not formatted correctly")
valid_settings = False
else:
for account in settings.IMAP_ACCOUNTS:
if ("host" not in account
or "port" not in account
or "login" not in account
or "password" not in account
or "ssl" not in account
or type(account["host"]) != str
or type(account["port"]) != int
or type(account["login"]) != str
or type(account["password"]) != str
or type(account["ssl"]) != bool):
log("IMAP_ACCOUNTS setting is not formatted correctly")
valid_settings = False
break
# If settings file is valid, proceed.
if valid_settings:
log("Settings file validation passed")
smtp = smtplib.SMTP(settings.SMTP_SERVER["host"], settings.SMTP_SERVER["port"])
for imap_account in settings.IMAP_ACCOUNTS:
fetcher = ImapFetcher()
log("Connecting to IMAP account %0s at host %1s" % (imap_account["login"], imap_account["host"]))
try:
fetcher.open_connection(imap_account['host'], imap_account['port'], imap_account['login'], imap_account['password'], ssl=imap_account['ssl'])
except Exception as e:
log("Connection failed : %1s" % (e))
continue
log("Fetching unread messages")
messages = fetcher.fetch_new_messages()
log("%0i message(s) fetched" % (len(messages)))
log("Closing connection")
fetcher.close_connection()
for message in messages:
log('Transferring message "%0s" from %1s' % (message["Subject"], message["from"]))
try:
smtp.sendmail(message["from"], settings.MAIL_TO, message.as_string())
except Exception as e:
log("Message sending failed: %0s" % (e))
smtp.quit()
log("Execution is completed")
# If settings file is invalid, terminate.
else:
log("Settings file validation failed. Terminate.")
| 33.411215 | 153 | 0.622098 |
ace51ce3c31be15a43c6198d26757c3adcaa378d | 4,829 | py | Python | alipay/aop/api/domain/AssetDeliveryAddress.py | snowxmas/alipay-sdk-python-all | 96870ced60facd96c5bce18d19371720cbda3317 | [
"Apache-2.0"
] | 213 | 2018-08-27T16:49:32.000Z | 2021-12-29T04:34:12.000Z | alipay/aop/api/domain/AssetDeliveryAddress.py | snowxmas/alipay-sdk-python-all | 96870ced60facd96c5bce18d19371720cbda3317 | [
"Apache-2.0"
] | 29 | 2018-09-29T06:43:00.000Z | 2021-09-02T03:27:32.000Z | alipay/aop/api/domain/AssetDeliveryAddress.py | snowxmas/alipay-sdk-python-all | 96870ced60facd96c5bce18d19371720cbda3317 | [
"Apache-2.0"
] | 59 | 2018-08-27T16:59:26.000Z | 2022-03-25T10:08:15.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AssetDeliveryAddress(object):
def __init__(self):
self._address = None
self._city = None
self._contact_name = None
self._contact_phone = None
self._district = None
self._province = None
self._warehouse_id = None
self._warehouse_name = None
self._zip_code = None
@property
def address(self):
return self._address
@address.setter
def address(self, value):
self._address = value
@property
def city(self):
return self._city
@city.setter
def city(self, value):
self._city = value
@property
def contact_name(self):
return self._contact_name
@contact_name.setter
def contact_name(self, value):
self._contact_name = value
@property
def contact_phone(self):
return self._contact_phone
@contact_phone.setter
def contact_phone(self, value):
self._contact_phone = value
@property
def district(self):
return self._district
@district.setter
def district(self, value):
self._district = value
@property
def province(self):
return self._province
@province.setter
def province(self, value):
self._province = value
@property
def warehouse_id(self):
return self._warehouse_id
@warehouse_id.setter
def warehouse_id(self, value):
self._warehouse_id = value
@property
def warehouse_name(self):
return self._warehouse_name
@warehouse_name.setter
def warehouse_name(self, value):
self._warehouse_name = value
@property
def zip_code(self):
return self._zip_code
@zip_code.setter
def zip_code(self, value):
self._zip_code = value
def to_alipay_dict(self):
params = dict()
if self.address:
if hasattr(self.address, 'to_alipay_dict'):
params['address'] = self.address.to_alipay_dict()
else:
params['address'] = self.address
if self.city:
if hasattr(self.city, 'to_alipay_dict'):
params['city'] = self.city.to_alipay_dict()
else:
params['city'] = self.city
if self.contact_name:
if hasattr(self.contact_name, 'to_alipay_dict'):
params['contact_name'] = self.contact_name.to_alipay_dict()
else:
params['contact_name'] = self.contact_name
if self.contact_phone:
if hasattr(self.contact_phone, 'to_alipay_dict'):
params['contact_phone'] = self.contact_phone.to_alipay_dict()
else:
params['contact_phone'] = self.contact_phone
if self.district:
if hasattr(self.district, 'to_alipay_dict'):
params['district'] = self.district.to_alipay_dict()
else:
params['district'] = self.district
if self.province:
if hasattr(self.province, 'to_alipay_dict'):
params['province'] = self.province.to_alipay_dict()
else:
params['province'] = self.province
if self.warehouse_id:
if hasattr(self.warehouse_id, 'to_alipay_dict'):
params['warehouse_id'] = self.warehouse_id.to_alipay_dict()
else:
params['warehouse_id'] = self.warehouse_id
if self.warehouse_name:
if hasattr(self.warehouse_name, 'to_alipay_dict'):
params['warehouse_name'] = self.warehouse_name.to_alipay_dict()
else:
params['warehouse_name'] = self.warehouse_name
if self.zip_code:
if hasattr(self.zip_code, 'to_alipay_dict'):
params['zip_code'] = self.zip_code.to_alipay_dict()
else:
params['zip_code'] = self.zip_code
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AssetDeliveryAddress()
if 'address' in d:
o.address = d['address']
if 'city' in d:
o.city = d['city']
if 'contact_name' in d:
o.contact_name = d['contact_name']
if 'contact_phone' in d:
o.contact_phone = d['contact_phone']
if 'district' in d:
o.district = d['district']
if 'province' in d:
o.province = d['province']
if 'warehouse_id' in d:
o.warehouse_id = d['warehouse_id']
if 'warehouse_name' in d:
o.warehouse_name = d['warehouse_name']
if 'zip_code' in d:
o.zip_code = d['zip_code']
return o
| 29.993789 | 79 | 0.580451 |
ace51dfa93f0d8d28bf04f9a64da5501c6a08111 | 2,978 | py | Python | game/flappy_bird_utils.py | chaotiaor/mybird | 729aa453aa8d4d35eaad76353e8e65fffd9cdace | [
"MIT"
] | null | null | null | game/flappy_bird_utils.py | chaotiaor/mybird | 729aa453aa8d4d35eaad76353e8e65fffd9cdace | [
"MIT"
] | null | null | null | game/flappy_bird_utils.py | chaotiaor/mybird | 729aa453aa8d4d35eaad76353e8e65fffd9cdace | [
"MIT"
] | null | null | null | import pygame
import sys
def load():
# path of player with different states
PLAYER_PATH = (
'assets/sprites/redbird-upflap.png',
'assets/sprites/redbird-midflap.png',
'assets/sprites/redbird-downflap.png'
)
# path of background
BACKGROUND_PATH = 'assets/sprites/background-black.png'
# path of pipe
PIPE_PATH = 'assets/sprites/pipe-green.png'
IMAGES, SOUNDS, HITMASKS = {}, {}, {}
# numbers sprites for score display
IMAGES['numbers'] = (
pygame.image.load('assets/sprites/0.png').convert_alpha(),
pygame.image.load('assets/sprites/1.png').convert_alpha(),
pygame.image.load('assets/sprites/2.png').convert_alpha(),
pygame.image.load('assets/sprites/3.png').convert_alpha(),
pygame.image.load('assets/sprites/4.png').convert_alpha(),
pygame.image.load('assets/sprites/5.png').convert_alpha(),
pygame.image.load('assets/sprites/6.png').convert_alpha(),
pygame.image.load('assets/sprites/7.png').convert_alpha(),
pygame.image.load('assets/sprites/8.png').convert_alpha(),
pygame.image.load('assets/sprites/9.png').convert_alpha()
)
# base (ground) sprite
IMAGES['base'] = pygame.image.load('assets/sprites/base.png').convert_alpha()
# sounds
if 'win' in sys.platform:
soundExt = '.wav'
else:
soundExt = '.ogg'
SOUNDS['die'] = pygame.mixer.Sound('assets/audio/die' + soundExt)
SOUNDS['hit'] = pygame.mixer.Sound('assets/audio/hit' + soundExt)
SOUNDS['point'] = pygame.mixer.Sound('assets/audio/point' + soundExt)
SOUNDS['swoosh'] = pygame.mixer.Sound('assets/audio/swoosh' + soundExt)
SOUNDS['wing'] = pygame.mixer.Sound('assets/audio/wing' + soundExt)
# select random background sprites
IMAGES['background'] = pygame.image.load(BACKGROUND_PATH).convert()
# select random player sprites
IMAGES['player'] = (
pygame.image.load(PLAYER_PATH[0]).convert_alpha(),
pygame.image.load(PLAYER_PATH[1]).convert_alpha(),
pygame.image.load(PLAYER_PATH[2]).convert_alpha(),
)
# select random pipe sprites
IMAGES['pipe'] = (
pygame.transform.rotate(
pygame.image.load(PIPE_PATH).convert_alpha(), 180),
pygame.image.load(PIPE_PATH).convert_alpha(),
)
# hismask for pipes
HITMASKS['pipe'] = (
getHitmask(IMAGES['pipe'][0]),
getHitmask(IMAGES['pipe'][1]),
)
# hitmask for player
HITMASKS['player'] = (
getHitmask(IMAGES['player'][0]),
getHitmask(IMAGES['player'][1]),
getHitmask(IMAGES['player'][2]),
)
return IMAGES, SOUNDS, HITMASKS
def getHitmask(image):
"""returns a hitmask using an image's alpha."""
mask = []
for x in range(image.get_width()):
mask.append([])
for y in range(image.get_height()):
mask[x].append(bool(image.get_at((x,y))[3]))
return mask
| 32.725275 | 81 | 0.627267 |
ace51edaf782624ecf85441b990a86e69999f8b2 | 394 | py | Python | a3.py | markjin123/dots-and-boxes | 634492a6bc17e55fed5497a70d8ea19b0f091a0c | [
"MIT"
] | null | null | null | a3.py | markjin123/dots-and-boxes | 634492a6bc17e55fed5497a70d8ea19b0f091a0c | [
"MIT"
] | null | null | null | a3.py | markjin123/dots-and-boxes | 634492a6bc17e55fed5497a70d8ea19b0f091a0c | [
"MIT"
] | null | null | null | import numpy as np
import math
def restrict(factor,variable):
newFactor = {}
for eachFactor in factor.keys():
if variable in eachFactor: #if the variable is found then we basically replace it from that line of the table(dict)
newFactorKey = eachFactor.replace(variable,"")
newFactor[newFactorKey] = factor[eachFactor]
#printing part
print("")
| 28.142857 | 123 | 0.677665 |
ace51f189a359d1402cbe660f4b4d94e5e6e72a5 | 15,512 | py | Python | PhysicsTools/NanoAOD/python/muons_cff.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | [
"Apache-2.0"
] | 6 | 2017-09-08T14:12:56.000Z | 2022-03-09T23:57:01.000Z | PhysicsTools/NanoAOD/python/muons_cff.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | [
"Apache-2.0"
] | 545 | 2017-09-19T17:10:19.000Z | 2022-03-07T16:55:27.000Z | PhysicsTools/NanoAOD/python/muons_cff.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | [
"Apache-2.0"
] | 14 | 2017-10-04T09:47:21.000Z | 2019-10-23T18:04:45.000Z | import FWCore.ParameterSet.Config as cms
from Configuration.Eras.Modifier_run2_muon_2016_cff import run2_muon_2016
from Configuration.Eras.Modifier_run2_miniAOD_80XLegacy_cff import run2_miniAOD_80XLegacy
from Configuration.Eras.Modifier_run2_nanoAOD_94X2016_cff import run2_nanoAOD_94X2016
from Configuration.Eras.Modifier_run2_nanoAOD_94XMiniAODv1_cff import run2_nanoAOD_94XMiniAODv1
from Configuration.Eras.Modifier_run2_nanoAOD_94XMiniAODv2_cff import run2_nanoAOD_94XMiniAODv2
from Configuration.Eras.Modifier_run2_nanoAOD_102Xv1_cff import run2_nanoAOD_102Xv1
from PhysicsTools.NanoAOD.common_cff import *
import PhysicsTools.PatAlgos.producersLayer1.muonProducer_cfi
# this below is used only in some eras
slimmedMuonsUpdated = cms.EDProducer("PATMuonUpdater",
src = cms.InputTag("slimmedMuons"),
vertices = cms.InputTag("offlineSlimmedPrimaryVertices"),
computeMiniIso = cms.bool(False),
fixDxySign = cms.bool(True),
pfCandsForMiniIso = cms.InputTag("packedPFCandidates"),
miniIsoParams = PhysicsTools.PatAlgos.producersLayer1.muonProducer_cfi.patMuons.miniIsoParams, # so they're in sync
recomputeMuonBasicSelectors = cms.bool(False),
)
run2_miniAOD_80XLegacy.toModify( slimmedMuonsUpdated, computeMiniIso = True, recomputeMuonBasicSelectors = True )
isoForMu = cms.EDProducer("MuonIsoValueMapProducer",
src = cms.InputTag("slimmedMuonsUpdated"),
relative = cms.bool(False),
rho_MiniIso = cms.InputTag("fixedGridRhoFastjetAll"),
EAFile_MiniIso = cms.FileInPath("PhysicsTools/NanoAOD/data/effAreaMuons_cone03_pfNeuHadronsAndPhotons_94X.txt"),
)
run2_miniAOD_80XLegacy.toModify(isoForMu, EAFile_MiniIso = "PhysicsTools/NanoAOD/data/effAreaMuons_cone03_pfNeuHadronsAndPhotons_80X.txt")
run2_nanoAOD_94X2016.toModify(isoForMu, EAFile_MiniIso = "PhysicsTools/NanoAOD/data/effAreaMuons_cone03_pfNeuHadronsAndPhotons_80X.txt")
ptRatioRelForMu = cms.EDProducer("MuonJetVarProducer",
srcJet = cms.InputTag("updatedJets"),
srcLep = cms.InputTag("slimmedMuonsUpdated"),
srcVtx = cms.InputTag("offlineSlimmedPrimaryVertices"),
)
slimmedMuonsWithUserData = cms.EDProducer("PATMuonUserDataEmbedder",
src = cms.InputTag("slimmedMuonsUpdated"),
userFloats = cms.PSet(
miniIsoChg = cms.InputTag("isoForMu:miniIsoChg"),
miniIsoAll = cms.InputTag("isoForMu:miniIsoAll"),
ptRatio = cms.InputTag("ptRatioRelForMu:ptRatio"),
ptRel = cms.InputTag("ptRatioRelForMu:ptRel"),
jetNDauChargedMVASel = cms.InputTag("ptRatioRelForMu:jetNDauChargedMVASel"),
),
userCands = cms.PSet(
jetForLepJetVar = cms.InputTag("ptRatioRelForMu:jetForLepJetVar") # warning: Ptr is null if no match is found
),
)
finalMuons = cms.EDFilter("PATMuonRefSelector",
src = cms.InputTag("slimmedMuonsWithUserData"),
cut = cms.string("pt > 3 && (passed('CutBasedIdLoose') || passed('SoftCutBasedId') || passed('SoftMvaId') || passed('CutBasedIdGlobalHighPt') || passed('CutBasedIdTrkHighPt'))")
)
finalLooseMuons = cms.EDFilter("PATMuonRefSelector", # for isotrack cleaning
src = cms.InputTag("slimmedMuonsWithUserData"),
cut = cms.string("pt > 3 && track.isNonnull && isLooseMuon")
)
muonMVATTH= cms.EDProducer("MuonBaseMVAValueMapProducer",
src = cms.InputTag("linkedObjects","muons"),
weightFile = cms.FileInPath("PhysicsTools/NanoAOD/data/mu_BDTG_2017.weights.xml"),
name = cms.string("muonMVATTH"),
isClassifier = cms.bool(True),
variablesOrder = cms.vstring(["LepGood_pt","LepGood_eta","LepGood_jetNDauChargedMVASel","LepGood_miniRelIsoCharged","LepGood_miniRelIsoNeutral","LepGood_jetPtRelv2","LepGood_jetDF","LepGood_jetPtRatio","LepGood_dxy","LepGood_sip3d","LepGood_dz","LepGood_segmentComp"]),
variables = cms.PSet(
LepGood_pt = cms.string("pt"),
LepGood_eta = cms.string("eta"),
LepGood_jetNDauChargedMVASel = cms.string("?userCand('jetForLepJetVar').isNonnull()?userFloat('jetNDauChargedMVASel'):0"),
LepGood_miniRelIsoCharged = cms.string("userFloat('miniIsoChg')/pt"),
LepGood_miniRelIsoNeutral = cms.string("(userFloat('miniIsoAll')-userFloat('miniIsoChg'))/pt"),
LepGood_jetPtRelv2 = cms.string("?userCand('jetForLepJetVar').isNonnull()?userFloat('ptRel'):0"),
LepGood_jetDF = cms.string("?userCand('jetForLepJetVar').isNonnull()?max(userCand('jetForLepJetVar').bDiscriminator('pfDeepFlavourJetTags:probbb')+userCand('jetForLepJetVar').bDiscriminator('pfDeepFlavourJetTags:probb')+userCand('jetForLepJetVar').bDiscriminator('pfDeepFlavourJetTags:problepb'),0.0):0.0"),
LepGood_jetPtRatio = cms.string("?userCand('jetForLepJetVar').isNonnull()?min(userFloat('ptRatio'),1.5):1.0/(1.0+(pfIsolationR04().sumChargedHadronPt + max(pfIsolationR04().sumNeutralHadronEt + pfIsolationR04().sumPhotonEt - pfIsolationR04().sumPUPt/2,0.0))/pt)"),
LepGood_dxy = cms.string("log(abs(dB('PV2D')))"),
LepGood_sip3d = cms.string("abs(dB('PV3D')/edB('PV3D'))"),
LepGood_dz = cms.string("log(abs(dB('PVDZ')))"),
LepGood_segmentComp = cms.string("segmentCompatibility"),
)
)
muonMVALowPt = muonMVATTH.clone(
weightFile = cms.FileInPath("PhysicsTools/NanoAOD/data/mu_BDTG_lowpt.weights.xml"),
name = cms.string("muonMVALowPt"),
)
run2_muon_2016.toModify(muonMVATTH,
weightFile = "PhysicsTools/NanoAOD/data/mu_BDTG_2016.weights.xml",
)
from MuonAnalysis.MuonAssociators.muonFSRProducer_cfi import muonFSRProducer
muonFSRphotons = muonFSRProducer.clone(
packedPFCandidates = cms.InputTag("packedPFCandidates"),
slimmedElectrons = cms.InputTag("slimmedElectrons"),
muons = cms.InputTag("linkedObjects","muons"),
)
from MuonAnalysis.MuonAssociators.muonFSRAssociator_cfi import muonFSRAssociator
muonFSRassociation = muonFSRAssociator.clone(
photons = cms.InputTag("muonFSRphotons"),
muons = cms.InputTag("linkedObjects","muons"),
)
fsrTable = cms.EDProducer("SimpleCandidateFlatTableProducer",
src = cms.InputTag("muonFSRphotons"),
cut = cms.string(""), #we should not filter on cross linked collections
name = cms.string("FsrPhoton"),
doc = cms.string("Final state radiation photons emitted by muons"),
singleton = cms.bool(False), # the number of entries is variable
extension = cms.bool(False), # this is the main table for the muons
variables = cms.PSet(P3Vars,
relIso03 = Var("userFloat('relIso03')",float,doc="relative isolation in a 0.3 cone without CHS"),
dROverEt2 = Var("userFloat('dROverEt2')",float,doc="deltaR to associated muon divided by photon et2"),
muonIdx = Var("?hasUserCand('associatedMuon')?userCand('associatedMuon').key():-1",int, doc="index of associated muon")
)
)
muonTable = cms.EDProducer("SimpleCandidateFlatTableProducer",
src = cms.InputTag("linkedObjects","muons"),
cut = cms.string(""), #we should not filter on cross linked collections
name = cms.string("Muon"),
doc = cms.string("slimmedMuons after basic selection (" + finalMuons.cut.value()+")"),
singleton = cms.bool(False), # the number of entries is variable
extension = cms.bool(False), # this is the main table for the muons
variables = cms.PSet(CandVars,
ptErr = Var("bestTrack().ptError()", float, doc = "ptError of the muon track", precision=6),
tunepRelPt = Var("tunePMuonBestTrack().pt/pt",float,doc="TuneP relative pt, tunePpt/pt",precision=6),
dz = Var("dB('PVDZ')",float,doc="dz (with sign) wrt first PV, in cm",precision=10),
dzErr = Var("abs(edB('PVDZ'))",float,doc="dz uncertainty, in cm",precision=6),
dxybs = Var("dB('BS2D')",float,doc="dxy (with sign) wrt the beam spot, in cm",precision=10),
dxy = Var("dB('PV2D')",float,doc="dxy (with sign) wrt first PV, in cm",precision=10),
dxyErr = Var("edB('PV2D')",float,doc="dxy uncertainty, in cm",precision=6),
ip3d = Var("abs(dB('PV3D'))",float,doc="3D impact parameter wrt first PV, in cm",precision=10),
sip3d = Var("abs(dB('PV3D')/edB('PV3D'))",float,doc="3D impact parameter significance wrt first PV",precision=10),
segmentComp = Var("segmentCompatibility()", float, doc = "muon segment compatibility", precision=14), # keep higher precision since people have cuts with 3 digits on this
nStations = Var("numberOfMatchedStations", int, doc = "number of matched stations with default arbitration (segment & track)"),
nTrackerLayers = Var("?track.isNonnull?innerTrack().hitPattern().trackerLayersWithMeasurement():0", int, doc = "number of layers in the tracker"),
highPurity = Var("?track.isNonnull?innerTrack().quality('highPurity'):0", bool, doc = "inner track is high purity"),
jetIdx = Var("?hasUserCand('jet')?userCand('jet').key():-1", int, doc="index of the associated jet (-1 if none)"),
tkRelIso = Var("isolationR03().sumPt/tunePMuonBestTrack().pt",float,doc="Tracker-based relative isolation dR=0.3 for highPt, trkIso/tunePpt",precision=6),
miniPFRelIso_chg = Var("userFloat('miniIsoChg')/pt",float,doc="mini PF relative isolation, charged component"),
miniPFRelIso_all = Var("userFloat('miniIsoAll')/pt",float,doc="mini PF relative isolation, total (with scaled rho*EA PU corrections)"),
pfRelIso03_chg = Var("pfIsolationR03().sumChargedHadronPt/pt",float,doc="PF relative isolation dR=0.3, charged component"),
pfRelIso03_all = Var("(pfIsolationR03().sumChargedHadronPt + max(pfIsolationR03().sumNeutralHadronEt + pfIsolationR03().sumPhotonEt - pfIsolationR03().sumPUPt/2,0.0))/pt",float,doc="PF relative isolation dR=0.3, total (deltaBeta corrections)"),
pfRelIso04_all = Var("(pfIsolationR04().sumChargedHadronPt + max(pfIsolationR04().sumNeutralHadronEt + pfIsolationR04().sumPhotonEt - pfIsolationR04().sumPUPt/2,0.0))/pt",float,doc="PF relative isolation dR=0.4, total (deltaBeta corrections)"),
jetRelIso = Var("?userCand('jetForLepJetVar').isNonnull()?(1./userFloat('ptRatio'))-1.:(pfIsolationR04().sumChargedHadronPt + max(pfIsolationR04().sumNeutralHadronEt + pfIsolationR04().sumPhotonEt - pfIsolationR04().sumPUPt/2,0.0))/pt",float,doc="Relative isolation in matched jet (1/ptRatio-1, pfRelIso04_all if no matched jet)",precision=8),
jetPtRelv2 = Var("?userCand('jetForLepJetVar').isNonnull()?userFloat('ptRel'):0",float,doc="Relative momentum of the lepton with respect to the closest jet after subtracting the lepton",precision=8),
tightCharge = Var("?(muonBestTrack().ptError()/muonBestTrack().pt() < 0.2)?2:0",int,doc="Tight charge criterion using pterr/pt of muonBestTrack (0:fail, 2:pass)"),
looseId = Var("passed('CutBasedIdLoose')",bool, doc="muon is loose muon"),
isPFcand = Var("isPFMuon",bool,doc="muon is PF candidate"),
isGlobal = Var("isGlobalMuon",bool,doc="muon is global muon"),
isTracker = Var("isTrackerMuon",bool,doc="muon is tracker muon"),
mediumId = Var("passed('CutBasedIdMedium')",bool,doc="cut-based ID, medium WP"),
mediumPromptId = Var("passed('CutBasedIdMediumPrompt')",bool,doc="cut-based ID, medium prompt WP"),
tightId = Var("passed('CutBasedIdTight')",bool,doc="cut-based ID, tight WP"),
softId = Var("passed('SoftCutBasedId')",bool,doc="soft cut-based ID"),
softMvaId = Var("passed('SoftMvaId')",bool,doc="soft MVA ID"),
softMva = Var("softMvaValue()",float,doc="soft MVA ID score",precision=6),
highPtId = Var("?passed('CutBasedIdGlobalHighPt')?2:passed('CutBasedIdTrkHighPt')","uint8",doc="high-pT cut-based ID (1 = tracker high pT, 2 = global high pT, which includes tracker high pT)"),
pfIsoId = Var("passed('PFIsoVeryLoose')+passed('PFIsoLoose')+passed('PFIsoMedium')+passed('PFIsoTight')+passed('PFIsoVeryTight')+passed('PFIsoVeryVeryTight')","uint8",doc="PFIso ID from miniAOD selector (1=PFIsoVeryLoose, 2=PFIsoLoose, 3=PFIsoMedium, 4=PFIsoTight, 5=PFIsoVeryTight, 6=PFIsoVeryVeryTight)"),
tkIsoId = Var("?passed('TkIsoTight')?2:passed('TkIsoLoose')","uint8",doc="TkIso ID (1=TkIsoLoose, 2=TkIsoTight)"),
mvaId = Var("passed('MvaLoose')+passed('MvaMedium')+passed('MvaTight')+passed('MvaVTight')+passed('MvaVVTight')","uint8",doc="Mva ID from miniAOD selector (1=MvaLoose, 2=MvaMedium, 3=MvaTight, 4=MvaVTight, 5=MvaVVTight)"),
mvaLowPtId = Var("passed('LowPtMvaLoose')+passed('LowPtMvaMedium')","uint8", doc="Low Pt Mva ID from miniAOD selector (1=LowPtMvaLoose, 2=LowPtMvaMedium)"),
miniIsoId = Var("passed('MiniIsoLoose')+passed('MiniIsoMedium')+passed('MiniIsoTight')+passed('MiniIsoVeryTight')","uint8",doc="MiniIso ID from miniAOD selector (1=MiniIsoLoose, 2=MiniIsoMedium, 3=MiniIsoTight, 4=MiniIsoVeryTight)"),
multiIsoId = Var("?passed('MultiIsoMedium')?2:passed('MultiIsoLoose')","uint8",doc="MultiIsoId from miniAOD selector (1=MultiIsoLoose, 2=MultiIsoMedium)"),
puppiIsoId = Var("passed('PuppiIsoLoose')+passed('PuppiIsoMedium')+passed('PuppiIsoTight')", "uint8", doc="PuppiIsoId from miniAOD selector (1=Loose, 2=Medium, 3=Tight)"),
triggerIdLoose = Var("passed('TriggerIdLoose')",bool,doc="TriggerIdLoose ID"),
inTimeMuon = Var("passed('InTimeMuon')",bool,doc="inTimeMuon ID"),
),
externalVariables = cms.PSet(
mvaTTH = ExtVar(cms.InputTag("muonMVATTH"),float, doc="TTH MVA lepton ID score",precision=14),
mvaLowPt = ExtVar(cms.InputTag("muonMVALowPt"),float, doc="Low pt muon ID score",precision=14),
fsrPhotonIdx = ExtVar(cms.InputTag("muonFSRassociation:fsrIndex"),int, doc="Index of the associated FSR photon"),
),
)
for modifier in run2_miniAOD_80XLegacy, run2_nanoAOD_94X2016, run2_nanoAOD_94XMiniAODv1, run2_nanoAOD_94XMiniAODv2:
modifier.toModify(muonTable.variables, puppiIsoId = None, softMva = None)
run2_nanoAOD_102Xv1.toModify(muonTable.variables, puppiIsoId = None)
muonsMCMatchForTable = cms.EDProducer("MCMatcher", # cut on deltaR, deltaPt/Pt; pick best by deltaR
src = muonTable.src, # final reco collection
matched = cms.InputTag("finalGenParticles"), # final mc-truth particle collection
mcPdgId = cms.vint32(13), # one or more PDG ID (13 = mu); absolute values (see below)
checkCharge = cms.bool(False), # True = require RECO and MC objects to have the same charge
mcStatus = cms.vint32(1), # PYTHIA status code (1 = stable, 2 = shower, 3 = hard scattering)
maxDeltaR = cms.double(0.3), # Minimum deltaR for the match
maxDPtRel = cms.double(0.5), # Minimum deltaPt/Pt for the match
resolveAmbiguities = cms.bool(True), # Forbid two RECO objects to match to the same GEN object
resolveByMatchQuality = cms.bool(True), # False = just match input in order; True = pick lowest deltaR pair first
)
muonMCTable = cms.EDProducer("CandMCMatchTableProducer",
src = muonTable.src,
mcMap = cms.InputTag("muonsMCMatchForTable"),
objName = muonTable.name,
objType = muonTable.name, #cms.string("Muon"),
branchName = cms.string("genPart"),
docString = cms.string("MC matching to status==1 muons"),
)
muonSequence = cms.Sequence(slimmedMuonsUpdated+isoForMu + ptRatioRelForMu + slimmedMuonsWithUserData + finalMuons + finalLooseMuons )
muonMC = cms.Sequence(muonsMCMatchForTable + muonMCTable)
muonTables = cms.Sequence(muonFSRphotons + muonFSRassociation + muonMVATTH + muonMVALowPt + muonTable + fsrTable)
| 73.516588 | 351 | 0.721957 |
ace51f555104095a7bec7643ce9a16b53efd09e1 | 451 | py | Python | QCS/Question/urls.py | udcymen/Question-Catalog-System | 89860cf73458fbbaa0edea37981abc41e4cb17d1 | [
"MIT"
] | null | null | null | QCS/Question/urls.py | udcymen/Question-Catalog-System | 89860cf73458fbbaa0edea37981abc41e4cb17d1 | [
"MIT"
] | 6 | 2020-02-12T02:42:58.000Z | 2021-06-02T00:36:25.000Z | QCS/Question/urls.py | udcymen/Question-Catalog-System | 89860cf73458fbbaa0edea37981abc41e4cb17d1 | [
"MIT"
] | null | null | null | from django.urls import path
from .views.QuestionIndex import quetion_index
from .views.QuestionCreate import quetion_create
from .views.QuestionDetails import question_detail
urlpatterns = [
path('', quetion_index, name='quetion_index'),
path('create', quetion_create, name='quetion_create'),
path('<int:question_ref>', question_detail, name='question_detail'),
path('<slug:question_ref>', question_detail, name='question_detail')
] | 37.583333 | 72 | 0.769401 |
ace520a875619c2387888bff36b7d4010135e8d4 | 414 | py | Python | docs/components_page/components/card/header_footer.py | glsdown/dash-bootstrap-components | 0ebea4f7de43975f6e3a2958359c4480ae1d4927 | [
"Apache-2.0"
] | 776 | 2019-02-07T19:36:59.000Z | 2022-03-31T05:53:04.000Z | docs/components_page/components/card/header_footer.py | glsdown/dash-bootstrap-components | 0ebea4f7de43975f6e3a2958359c4480ae1d4927 | [
"Apache-2.0"
] | 350 | 2019-02-05T10:42:19.000Z | 2022-03-31T19:23:35.000Z | docs/components_page/components/card/header_footer.py | glsdown/dash-bootstrap-components | 0ebea4f7de43975f6e3a2958359c4480ae1d4927 | [
"Apache-2.0"
] | 219 | 2019-02-10T13:46:25.000Z | 2022-03-23T17:03:39.000Z | import dash_bootstrap_components as dbc
from dash import html
card = dbc.Card(
[
dbc.CardHeader("This is the header"),
dbc.CardBody(
[
html.H4("Card title", className="card-title"),
html.P("This is some card text", className="card-text"),
]
),
dbc.CardFooter("This is the footer"),
],
style={"width": "18rem"},
)
| 24.352941 | 72 | 0.531401 |
ace5212802058e8030f663f8d487fc94b6738e9e | 10,247 | py | Python | uncoverml/scripts/learn_cli.py | GeoscienceAustralia/uncoverml | 672914377afa4ad1c069fcd4845bc45f80132e36 | [
"Apache-2.0"
] | 34 | 2017-03-14T23:59:58.000Z | 2022-03-03T18:04:25.000Z | uncoverml/scripts/learn_cli.py | GeoscienceAustralia/uncoverml | 672914377afa4ad1c069fcd4845bc45f80132e36 | [
"Apache-2.0"
] | 106 | 2017-03-22T00:26:10.000Z | 2022-03-12T00:19:08.000Z | uncoverml/scripts/learn_cli.py | GeoscienceAustralia/uncoverml | 672914377afa4ad1c069fcd4845bc45f80132e36 | [
"Apache-2.0"
] | 21 | 2017-05-04T04:02:39.000Z | 2022-02-04T00:55:18.000Z | """
Run the uncoverml pipeline for clustering, supervised learning and prediction.
.. program-output:: uncoverml --help
"""
from collections import namedtuple
import logging
import pickle
from os.path import isfile, splitext, exists
import os
import shutil
import warnings
import click
import numpy as np
import uncoverml as ls
import uncoverml.cluster
import uncoverml.config
import uncoverml.features
import uncoverml.geoio
import uncoverml.learn
import uncoverml.mllog
import uncoverml.mpiops
import uncoverml.predict
import uncoverml.validate
import uncoverml.targets
import uncoverml.models
import uncoverml.scripts
from uncoverml.transforms import StandardiseTransform
# Module-level logger for this CLI entry point.
_logger = logging.getLogger(__name__)
# Globally silence FutureWarning and DeprecationWarning output so long
# pipeline runs are not flooded with upgrade notices (presumably emitted by
# the numeric/ML dependencies -- confirm none are relevant to this package).
warnings.filterwarnings(action='ignore', category=FutureWarning)
warnings.filterwarnings(action='ignore', category=DeprecationWarning)
def main(config_file, partitions):
    """Train an uncoverml model from a YAML configuration.

    Loads (or unpickles) targets and covariates, optionally runs local
    cross-validation, fits the final model on all training data, exports
    it, and then performs any optional post-fit steps: permutation
    importance, out-of-sample validation, and temp-crop-file cleanup.

    Parameters
    ----------
    config_file : str
        Path to the uncoverml YAML configuration file.
    partitions : int
        Number of data partitions (subchunks) used when loading data.
    """
    conf = ls.config.Config(config_file, learning=True)
    train_data, holdout_data = _load_data(conf, partitions)
    targets = train_data.targets_all
    covariates = train_data.x_all

    # Optional cross-validation; scores are exported exactly once across
    # MPI processes to avoid duplicate output files.
    if conf.cross_validate:
        cv_results = ls.validate.local_crossval(covariates, targets, conf)
        if cv_results:
            ls.mpiops.run_once(cv_results.export_crossval, conf)

    # Fit the final model on the full training set and persist it.
    _logger.info("Learning full {} model".format(conf.algorithm))
    trained_model = ls.learn.local_learn_model(covariates, targets, conf)
    ls.mpiops.run_once(ls.geoio.export_model, trained_model, conf)

    if conf.permutation_importance:
        ls.mpiops.run_once(ls.validate.permutation_importance, trained_model,
                           covariates, targets, conf)

    # Out-of-sample validation only runs when hold-out data was actually
    # loaded (it is unavailable when training from pickled inputs).
    if conf.out_of_sample_validation and holdout_data is not None:
        # NOTE(review): unlike the crossval export above, this export is not
        # wrapped in mpiops.run_once -- confirm whether every rank is meant
        # to write scores here.
        oos_results = ls.validate.out_of_sample_validation(
            trained_model, holdout_data.targets_all, holdout_data.x_all, conf)
        if oos_results:
            oos_results.export_scores(conf)

    # Remove covariates that were cropped to the requested extents.
    if conf.extents:
        ls.mpiops.run_once(_clean_temp_cropfiles, conf)

    # Release the shared-memory arrays backing the loaded data.
    ls.geoio.deallocate_shared_training_data(train_data)
    if holdout_data is not None:
        ls.geoio.deallocate_shared_training_data(holdout_data)

    _logger.info("Finished! Total mem = {:.1f} GB".format(ls.scripts.total_gb()))
def _load_data(config, partitions):
    """Load training data (and optionally out-of-sample data) for learning.

    Parameters
    ----------
    config : uncoverml Config object, parsed from the YAML job file.
    partitions : int
        Number of data subchunks per node (stored on ``config.n_subchunks``).

    Returns
    -------
    tuple
        ``(training_data, oos_data)``: shared-memory handles produced by
        ``ls.geoio.create_shared_training_data``; ``oos_data`` is ``None``
        when out-of-sample validation is not performed.
    """
    if config.pk_load:
        # Fast path: rank 0 unpickles previously saved covariates/targets;
        # the other ranks carry None placeholders.
        if ls.mpiops.chunk_index == 0:
            x_all = pickle.load(open(config.pk_covariates, 'rb'))
            targets_all = pickle.load(open(config.pk_targets, 'rb'))
            if config.cubist or config.multicubist:
                config.algorithm_args['feature_type'] = \
                    pickle.load(open(config.pk_featurevec, 'rb'))
            _logger.warning("Using pickled targets and covariates. Make sure you have"
                            " not changed targets file and/or covariates.")
        else:
            x_all = None
            targets_all = None
            if config.cubist or config.multicubist:
                config.algorithm_args['feature_type'] = None
        if config.out_of_sample_validation:
            _logger.warning("Can't perform out-of-sample validation when loading from pickled data")
        oos_data = None
    else:
        if not config.tabular_prediction:
            bounds = ls.geoio.get_image_bounds(config)
            if config.extents:
                if config.extents_are_pixel_coordinates:
                    # Convert pixel-coordinate extents to CRS coordinates,
                    # defaulting any missing edge to the full image bounds.
                    pw, ph = ls.geoio.get_image_pixel_res(config)
                    xmin, ymin, xmax, ymax = config.extents
                    xmin = xmin * pw + bounds[0][0] if xmin is not None else bounds[0][0]
                    ymin = ymin * ph + bounds[1][0] if ymin is not None else bounds[1][0]
                    xmax = xmax * pw + bounds[0][0] if xmax is not None else bounds[0][1]
                    ymax = ymax * ph + bounds[1][0] if ymax is not None else bounds[1][1]
                    target_extents = xmin, ymin, xmax, ymax
                else:
                    target_extents = config.extents
                ls.geoio.crop_covariates(config)
            else:
                target_extents = bounds[0][0], bounds[1][0], bounds[0][1], bounds[1][1]
            covariate_crs = ls.geoio.get_image_crs(config)
        else:
            target_extents = None
            covariate_crs = None

        config.n_subchunks = partitions

        # Make the targets
        _logger.info("Intersecting targets as pickled train data was not "
                     "available")
        targets = ls.geoio.load_targets(shapefile=config.target_file,
                                        targetfield=config.target_property,
                                        covariate_crs=covariate_crs,
                                        extents=target_extents)
        _logger.info(f":mpi:Assigned {targets.observations.shape[0]} targets")

        if config.target_search:
            if ls.mpiops.chunk_index == 0:
                # Include targets and covariates from target search
                with open(config.targetsearch_result_data, 'rb') as f:
                    ts_t = pickle.load(f)
                pos, obs, fields = ts_t.positions, ts_t.observations, ts_t.fields
            else:
                pos, obs, fields = None, None, None
            ts_t = ls.geoio.distribute_targets(pos, obs, fields)
            targets = ls.targets.merge_targets(targets, ts_t)

        # TODO: refactor out-of-sample out of script module
        # If using out-of-sample validation, split off a percentage of data
        # before transformation so held-out rows never influence the stats.
        if config.out_of_sample_validation:
            # Bug fix: oos_targets was previously left unbound (NameError)
            # when neither 'percentage' nor 'shapefile' was configured.
            oos_targets = None
            if config.oos_percentage is not None:
                num_targets = int(np.around(len(targets.observations) * config.oos_percentage))
                inds = np.zeros(targets.observations.shape, dtype=bool)
                inds[:num_targets] = True
                np.random.shuffle(inds)
                oos_pos = targets.positions[inds]
                oos_obs = targets.observations[inds]
                oos_fields = {}
                for k, v in targets.fields.items():
                    oos_fields[k] = v[inds]
                oos_targets = ls.targets.Targets(oos_pos, oos_obs, oos_fields)
                # Remove the held-out rows from the training targets.
                targets.positions = targets.positions[~inds]
                targets.observations = targets.observations[~inds]
                for k, v in targets.fields.items():
                    targets.fields[k] = v[~inds]
            elif config.oos_shapefile is not None:
                oos_targets = ls.geoio.load_targets(shapefile=config.oos_shapefile,
                                                    targetfield=config.oos_property,
                                                    covariate_crs=covariate_crs,
                                                    extents=target_extents)
            else:
                _logger.info("Out-of-sample validation being skipped as no 'percentage' or "
                             "'shapefile' parameter was provided.")
            if oos_targets is not None:
                if config.tabular_prediction:
                    oos_feature_chunks, _ = ls.features.intersect_shapefile_features(
                        oos_targets, config.feature_sets, config.target_drop_values)
                else:
                    oos_feature_chunks = ls.geoio.image_feature_sets(oos_targets, config)

        if config.tabular_prediction:
            feature_chunks, _ = ls.features.intersect_shapefile_features(
                targets, config.feature_sets, config.target_drop_values)
        else:
            feature_chunks = ls.geoio.image_feature_sets(targets, config)

        transform_sets = [k.transform_set for k in config.feature_sets]

        if config.raw_covariates:
            _logger.info("Saving raw data before any processing")
            ls.features.save_intersected_features_and_targets(feature_chunks,
                                                              transform_sets, targets, config,
                                                              impute=False)

        if config.rank_features:
            _logger.info("Ranking features...")
            measures, features, scores = \
                ls.validate.local_rank_features(feature_chunks, transform_sets, targets, config)
            ls.mpiops.run_once(ls.geoio.export_feature_ranks, measures, features, scores, config)

        features, keep = ls.features.transform_features(feature_chunks,
                                                        transform_sets,
                                                        config.final_transform,
                                                        config)
        x_all = ls.features.gather_features(features[keep], node=0)
        targets_all = ls.targets.gather_targets(targets, keep, node=0)

        # Transform out-of-sample features after training data transform is
        # performed so we use the same statistics.
        if config.out_of_sample_validation and oos_targets is not None:
            oos_features, keep = ls.features.transform_features(oos_feature_chunks, transform_sets,
                                                                config.final_transform, config)
            oos_targets = ls.targets.gather_targets(oos_targets, keep, node=0)
            oos_features = ls.features.gather_features(oos_features[keep], node=0)
            oos_data = ls.geoio.create_shared_training_data(oos_targets, oos_features)
            if ls.mpiops.chunk_index == 0 and config.oos_percentage:
                _logger.info(f"{oos_targets.observations.shape[0]} targets withheld for "
                             f"out-of-sample validation. Saved to {config.oos_targets_file}")
                oos_targets.to_geodataframe().to_file(config.oos_targets_file)
        else:
            oos_data = None

        # Pickle data if requested.
        if ls.mpiops.chunk_index == 0:
            if config.pk_covariates and not os.path.exists(config.pk_covariates):
                pickle.dump(x_all, open(config.pk_covariates, 'wb'))
            if config.pk_targets and not os.path.exists(config.pk_targets):
                pickle.dump(targets_all, open(config.pk_targets, 'wb'))

    return ls.geoio.create_shared_training_data(targets_all, x_all), oos_data
def _clean_temp_cropfiles(config):
shutil.rmtree(config.tmpdir)
| 45.340708 | 141 | 0.619694 |
ace52132e566a00d329d457afe17b3062d2a1794 | 1,551 | py | Python | iatransfer/research/paper/stats.py | KamilPiechowiak/iatransfer | d7607662a2d2f7d1a16164c813e8721a0563552b | [
"Apache-2.0"
] | 4 | 2021-02-05T01:51:57.000Z | 2022-01-11T13:59:33.000Z | iatransfer/research/paper/stats.py | KamilPiechowiak/iatransfer | d7607662a2d2f7d1a16164c813e8721a0563552b | [
"Apache-2.0"
] | 3 | 2021-02-05T20:05:05.000Z | 2021-02-05T20:05:33.000Z | iatransfer/research/paper/stats.py | KamilPiechowiak/iatransfer | d7607662a2d2f7d1a16164c813e8721a0563552b | [
"Apache-2.0"
] | null | null | null | from typing import Dict, List
import argparse
from iatransfer.utils.file_utils import read_json
from iatransfer.research.paper.plots import draw_epochs_plot
from iatransfer.research.paper.table import create_table
from iatransfer.research.transfer.utils import get_transfer_method_name
PLOT = 'plot'
TABLE = 'table'
def plot(models: List[Dict], methods: List[Dict], path: str, **kwargs) -> None:
    """Render an epochs plot for every (model, transfer-method) pair.

    Extra keyword arguments (e.g. teacher_models) are accepted and ignored
    so this can share a call signature with create_table.
    """
    for model in models:
        for method in methods:
            method_name = get_transfer_method_name(method)
            draw_epochs_plot(model, method_name, path)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('mode', choices=[PLOT, TABLE])
parser.add_argument('-t', '--teacher-models',
help="Path to configuration of teacher models")
parser.add_argument('-s', '--student-models', required=True,
help="Path to configuration of student models for transfer")
parser.add_argument('-i', '--ia-methods', required=True,
help="Path to iatransfer methods configuration")
parser.add_argument('-p', '--path',
help="Path to data")
args = parser.parse_args()
if args.path is not None:
path = args.path
else:
path = "./stats"
if args.teacher_models is not None:
teachers = read_json(args.teacher_models)["models"]
else:
teachers = None
models = read_json(args.student_models)["models"]
methods = read_json(args.ia_methods)["methods"]
{TABLE: create_table, PLOT: plot}[args.mode](models, methods, path, teacher_models = teachers)
| 36.928571 | 98 | 0.691812 |
ace5217d2713921d2b03c1a956a0f23ed0bdbccb | 14,784 | py | Python | Lib/binhex.py | pxeger/cpython | 959580bd9ff8824590e8b24895bc2276f3f10b35 | [
"0BSD"
] | 6,989 | 2017-07-18T06:23:18.000Z | 2022-03-31T15:58:36.000Z | Lib/binhex.py | pxeger/cpython | 959580bd9ff8824590e8b24895bc2276f3f10b35 | [
"0BSD"
] | 1,978 | 2017-07-18T09:17:58.000Z | 2022-03-31T14:28:43.000Z | Lib/binhex.py | pxeger/cpython | 959580bd9ff8824590e8b24895bc2276f3f10b35 | [
"0BSD"
] | 1,228 | 2017-07-18T09:03:13.000Z | 2022-03-29T05:57:40.000Z | """Macintosh binhex compression/decompression.
easy interface:
binhex(inputfilename, outputfilename)
hexbin(inputfilename, outputfilename)
"""
#
# Jack Jansen, CWI, August 1995.
#
# The module is supposed to be as compatible as possible. Especially the
# easy interface should work "as expected" on any platform.
# XXXX Note: currently, textfiles appear in mac-form on all platforms.
# We seem to lack a simple character-translate in python.
# (we should probably use ISO-Latin-1 on all but the mac platform).
# XXXX The simple routines are too simple: they expect to hold the complete
# files in-core. Should be fixed.
# XXXX It would be nice to handle AppleDouble format on unix
# (for servers serving macs).
# XXXX I don't understand what happens when you get 0x90 times the same byte on
# input. The resulting code (xx 90 90) would appear to be interpreted as an
# escaped *value* of 0x90. All coders I've seen appear to ignore this nicety...
#
import binascii
import contextlib
import io
import os
import struct
import warnings
warnings.warn('the binhex module is deprecated', DeprecationWarning,
stacklevel=2)
__all__ = ["binhex","hexbin","Error"]
class Error(Exception):
    """Raised for binhex encoding/decoding failures."""
# States (what have we written)
_DID_HEADER = 0
_DID_DATA = 1
# Various constants
REASONABLY_LARGE = 32768 # Minimal amount we pass the rle-coder
LINELEN = 64
RUNCHAR = b"\x90"
#
# This code is no longer byte-order dependent
class FInfo:
    """Finder metadata for a Macintosh file: type/creator codes and flags."""
    def __init__(self):
        # '????' is the conventional "unknown" four-character code.
        self.Type = self.Creator = '????'
        self.Flags = 0
def getfileinfo(name):
    """Return (basename, FInfo, data-fork length, resource length) for *name*.

    The resource-fork length is always 0 on non-Mac platforms; any ':' in
    the basename is replaced so the name is BinHex-safe.
    """
    finfo = FInfo()
    with io.open(name, 'rb') as fp:
        # Heuristic: treat the file as TEXT when the first 512 bytes
        # contain no NUL byte.
        if 0 not in fp.read(512):
            finfo.Type = 'TEXT'
        dsize = fp.seek(0, 2)  # seek() returns the new absolute offset
    basename = os.path.split(name)[1]
    return basename.replace(':', '-', 1), finfo, dsize, 0
class openrsrc:
    """Resource-fork stand-in for platforms without resource forks.

    Behaves like an always-empty, write-discarding binary file.
    """
    def __init__(self, *args):
        pass
    def read(self, *args):
        # There is never any resource data to return.
        return b''
    def write(self, *args):
        # Writes are silently discarded.
        pass
    def close(self):
        pass
# DeprecationWarning is already emitted on "import binhex". There is no need
# to repeat the warning at each call to deprecated binascii functions.
@contextlib.contextmanager
def _ignore_deprecation_warning():
with warnings.catch_warnings():
warnings.filterwarnings('ignore', '', DeprecationWarning)
yield
class _Hqxcoderengine:
    """Write data to the coder in 3-byte chunks"""
    def __init__(self, ofp):
        # ofp: underlying binary output file object.
        self.ofp = ofp
        self.data = b''      # raw bytes not yet hqx-encoded (always < 3)
        self.hqxdata = b''   # encoded characters not yet written out
        # First output line is one char shorter: it follows the ':' marker.
        self.linelen = LINELEN - 1
    def write(self, data):
        """Buffer *data*; encode and flush all complete 3-byte groups."""
        self.data = self.data + data
        datalen = len(self.data)
        todo = (datalen // 3) * 3
        data = self.data[:todo]
        self.data = self.data[todo:]
        if not data:
            return
        with _ignore_deprecation_warning():
            self.hqxdata = self.hqxdata + binascii.b2a_hqx(data)
        self._flush(0)
    def _flush(self, force):
        # Emit as many full CR-terminated lines as possible; when *force*
        # is true also emit the trailing partial line plus the ':' EOF mark.
        first = 0
        while first <= len(self.hqxdata) - self.linelen:
            last = first + self.linelen
            self.ofp.write(self.hqxdata[first:last] + b'\r')
            self.linelen = LINELEN
            first = last
        self.hqxdata = self.hqxdata[first:]
        if force:
            self.ofp.write(self.hqxdata + b':\r')
    def close(self):
        """Encode any leftover bytes, flush everything and close the file."""
        if self.data:
            with _ignore_deprecation_warning():
                self.hqxdata = self.hqxdata + binascii.b2a_hqx(self.data)
        self._flush(1)
        self.ofp.close()
        del self.ofp
class _Rlecoderengine:
    """Write data to the RLE-coder in suitably large chunks"""
    def __init__(self, ofp):
        # ofp: downstream coder (normally a _Hqxcoderengine).
        self.ofp = ofp
        self.data = b''
    def write(self, data):
        """Buffer *data*; run-length encode and pass on once enough piles up."""
        self.data = self.data + data
        if len(self.data) < REASONABLY_LARGE:
            return
        with _ignore_deprecation_warning():
            rledata = binascii.rlecode_hqx(self.data)
        self.ofp.write(rledata)
        self.data = b''
    def close(self):
        """Encode and flush any remaining buffered bytes, then close."""
        if self.data:
            with _ignore_deprecation_warning():
                rledata = binascii.rlecode_hqx(self.data)
            self.ofp.write(rledata)
        self.ofp.close()
        del self.ofp
class BinHex:
    """Writer for a BinHex 4.0 stream: header, data fork, resource fork.

    Progresses through the states header -> data -> resource; each section
    is CRC-checked.  Writing out of order raises Error.
    """
    def __init__(self, name_finfo_dlen_rlen, ofp):
        # name_finfo_dlen_rlen: tuple as produced by getfileinfo().
        # ofp: output file object or filename.
        name, finfo, dlen, rlen = name_finfo_dlen_rlen
        close_on_error = False
        if isinstance(ofp, str):
            ofname = ofp
            ofp = io.open(ofname, 'wb')
            close_on_error = True
        try:
            ofp.write(b'(This file must be converted with BinHex 4.0)\r\r:')
            # Stack the two encoders: RLE first, then hqx line encoding.
            hqxer = _Hqxcoderengine(ofp)
            self.ofp = _Rlecoderengine(hqxer)
            self.crc = 0
            if finfo is None:
                finfo = FInfo()
            self.dlen = dlen
            self.rlen = rlen
            self._writeinfo(name, finfo)
            self.state = _DID_HEADER
        except:
            # Only close the file if we opened it ourselves.
            if close_on_error:
                ofp.close()
            raise
    def _writeinfo(self, name, finfo):
        """Emit the CRC-protected header: name, type/creator, flags, sizes."""
        nl = len(name)
        if nl > 63:
            raise Error('Filename too long')
        d = bytes([nl]) + name.encode("latin-1") + b'\0'
        tp, cr = finfo.Type, finfo.Creator
        if isinstance(tp, str):
            tp = tp.encode("latin-1")
        if isinstance(cr, str):
            cr = cr.encode("latin-1")
        d2 = tp + cr
        # Force all structs to be packed with big-endian
        d3 = struct.pack('>h', finfo.Flags)
        d4 = struct.pack('>ii', self.dlen, self.rlen)
        info = d + d2 + d3 + d4
        self._write(info)
        self._writecrc()
    def _write(self, data):
        # Every write updates the running CRC of the current section.
        self.crc = binascii.crc_hqx(data, self.crc)
        self.ofp.write(data)
    def _writecrc(self):
        """Emit the 16-bit CRC of the section just written and reset it."""
        # XXXX Should this be here??
        # self.crc = binascii.crc_hqx('\0\0', self.crc)
        if self.crc < 0:
            fmt = '>h'
        else:
            fmt = '>H'
        self.ofp.write(struct.pack(fmt, self.crc))
        self.crc = 0
    def write(self, data):
        """Write data-fork bytes; must match the declared dlen overall."""
        if self.state != _DID_HEADER:
            raise Error('Writing data at the wrong time')
        self.dlen = self.dlen - len(data)
        self._write(data)
    def close_data(self):
        """Finish the data fork (CRC it) and move to the data-done state."""
        if self.dlen != 0:
            raise Error('Incorrect data size, diff=%r' % (self.rlen,))
        self._writecrc()
        self.state = _DID_DATA
    def write_rsrc(self, data):
        """Write resource-fork bytes, implicitly closing the data fork."""
        if self.state < _DID_DATA:
            self.close_data()
        if self.state != _DID_DATA:
            raise Error('Writing resource data at the wrong time')
        self.rlen = self.rlen - len(data)
        self._write(data)
    def close(self):
        """Finalize the stream; safe to call more than once."""
        if self.state is None:
            return
        try:
            if self.state < _DID_DATA:
                self.close_data()
            if self.state != _DID_DATA:
                raise Error('Close at the wrong time')
            if self.rlen != 0:
                raise Error("Incorrect resource-datasize, diff=%r" % (self.rlen,))
            self._writecrc()
        finally:
            self.state = None
            ofp = self.ofp
            del self.ofp
            ofp.close()
def binhex(inp, out):
    """binhex(infilename, outfilename): create binhex-encoded copy of a file"""
    finfo = getfileinfo(inp)
    ofp = BinHex(finfo, out)
    # Copy the data fork in 128 KB chunks.
    with io.open(inp, 'rb') as ifp:
        # XXXX Do textfile translation on non-mac systems
        while True:
            d = ifp.read(128000)
            if not d: break
            ofp.write(d)
        ofp.close_data()
    # Copy the resource fork (always empty via the openrsrc stub here).
    ifp = openrsrc(inp, 'rb')
    while True:
        d = ifp.read(128000)
        if not d: break
        ofp.write_rsrc(d)
    ofp.close()
    ifp.close()
class _Hqxdecoderengine:
    """Read data via the decoder in 4-byte chunks"""
    def __init__(self, ifp):
        self.ifp = ifp
        self.eof = 0   # set once a2b_hqx sees the ':' terminator
    def read(self, totalwtd):
        """Read at least wtd bytes (or until EOF)"""
        decdata = b''
        wtd = totalwtd
        #
        # The loop here is convoluted, since we don't really know how
        # much to decode: there may be newlines in the incoming data.
        while wtd > 0:
            if self.eof: return decdata
            # 4 encoded characters yield 3 decoded bytes.
            wtd = ((wtd + 2) // 3) * 4
            data = self.ifp.read(wtd)
            #
            # Next problem: there may not be a complete number of
            # bytes in what we pass to a2b. Solve by yet another
            # loop.
            #
            while True:
                try:
                    with _ignore_deprecation_warning():
                        decdatacur, self.eof = binascii.a2b_hqx(data)
                    break
                except binascii.Incomplete:
                    pass
                # Extend one byte at a time until a2b_hqx can decode.
                newdata = self.ifp.read(1)
                if not newdata:
                    raise Error('Premature EOF on binhex file')
                data = data + newdata
            decdata = decdata + decdatacur
            wtd = totalwtd - len(decdata)
            if not decdata and not self.eof:
                raise Error('Premature EOF on binhex file')
        return decdata
    def close(self):
        self.ifp.close()
class _Rledecoderengine:
    """Read data via the RLE-coder"""
    def __init__(self, ifp):
        self.ifp = ifp
        self.pre_buffer = b''   # encoded bytes awaiting RLE decoding
        self.post_buffer = b''  # decoded bytes awaiting consumption
        self.eof = 0
    def read(self, wtd):
        """Return up to *wtd* decoded bytes, refilling buffers as needed."""
        if wtd > len(self.post_buffer):
            self._fill(wtd - len(self.post_buffer))
        rv = self.post_buffer[:wtd]
        self.post_buffer = self.post_buffer[wtd:]
        return rv
    def _fill(self, wtd):
        # Pull a little extra so RLE escape sequences can be kept intact.
        self.pre_buffer = self.pre_buffer + self.ifp.read(wtd + 4)
        if self.ifp.eof:
            with _ignore_deprecation_warning():
                self.post_buffer = self.post_buffer + \
                    binascii.rledecode_hqx(self.pre_buffer)
            self.pre_buffer = b''
            return
        #
        # Obfuscated code ahead. We have to take care that we don't
        # end up with an orphaned RUNCHAR later on. So, we keep a couple
        # of bytes in the buffer, depending on what the end of
        # the buffer looks like:
        # '\220\0\220' - Keep 3 bytes: repeated \220 (escaped as \220\0)
        # '?\220' - Keep 2 bytes: repeated something-else
        # '\220\0' - Escaped \220: Keep 2 bytes.
        # '?\220?' - Complete repeat sequence: decode all
        # otherwise: keep 1 byte.
        #
        mark = len(self.pre_buffer)
        if self.pre_buffer[-3:] == RUNCHAR + b'\0' + RUNCHAR:
            mark = mark - 3
        elif self.pre_buffer[-1:] == RUNCHAR:
            mark = mark - 2
        elif self.pre_buffer[-2:] == RUNCHAR + b'\0':
            mark = mark - 2
        elif self.pre_buffer[-2:-1] == RUNCHAR:
            pass # Decode all
        else:
            mark = mark - 1
        with _ignore_deprecation_warning():
            self.post_buffer = self.post_buffer + \
                binascii.rledecode_hqx(self.pre_buffer[:mark])
        self.pre_buffer = self.pre_buffer[mark:]
    def close(self):
        self.ifp.close()
class HexBin:
    """Reader for a BinHex 4.0 stream: header, data fork, resource fork.

    After construction, FName/FInfo/dlen/rlen describe the encoded file;
    read() yields the data fork and read_rsrc() the resource fork.  Each
    section's CRC is verified.
    """
    def __init__(self, ifp):
        # ifp: input file object or filename.
        if isinstance(ifp, str):
            ifp = io.open(ifp, 'rb')
        #
        # Find initial colon.
        #
        while True:
            ch = ifp.read(1)
            if not ch:
                raise Error("No binhex data found")
            # Cater for \r\n terminated lines (which show up as \n\r, hence
            # all lines start with \r)
            if ch == b'\r':
                continue
            if ch == b':':
                break
        # Stack the decoders in the reverse order of BinHex's encoders.
        hqxifp = _Hqxdecoderengine(ifp)
        self.ifp = _Rledecoderengine(hqxifp)
        self.crc = 0
        self._readheader()
    def _read(self, len):
        # Read *len* decoded bytes, updating the running section CRC.
        data = self.ifp.read(len)
        self.crc = binascii.crc_hqx(data, self.crc)
        return data
    def _checkcrc(self):
        """Compare our running CRC with the stored one; raise on mismatch."""
        filecrc = struct.unpack('>h', self.ifp.read(2))[0] & 0xffff
        #self.crc = binascii.crc_hqx('\0\0', self.crc)
        # XXXX Is this needed??
        self.crc = self.crc & 0xffff
        if filecrc != self.crc:
            raise Error('CRC error, computed %x, read %x'
                        % (self.crc, filecrc))
        self.crc = 0
    def _readheader(self):
        """Parse the header: filename, type/creator, flags and fork sizes."""
        len = self._read(1)
        fname = self._read(ord(len))
        rest = self._read(1 + 4 + 4 + 2 + 4 + 4)
        self._checkcrc()
        type = rest[1:5]
        creator = rest[5:9]
        flags = struct.unpack('>h', rest[9:11])[0]
        self.dlen = struct.unpack('>l', rest[11:15])[0]
        self.rlen = struct.unpack('>l', rest[15:19])[0]
        self.FName = fname
        self.FInfo = FInfo()
        self.FInfo.Creator = creator
        self.FInfo.Type = type
        self.FInfo.Flags = flags
        self.state = _DID_HEADER
    def read(self, *n):
        """Read up to n[0] (default: all remaining) data-fork bytes."""
        if self.state != _DID_HEADER:
            raise Error('Read data at wrong time')
        if n:
            n = n[0]
            n = min(n, self.dlen)
        else:
            n = self.dlen
        rv = b''
        while len(rv) < n:
            rv = rv + self._read(n-len(rv))
        self.dlen = self.dlen - n
        return rv
    def close_data(self):
        """Skip any unread data-fork bytes and verify the data CRC."""
        if self.state != _DID_HEADER:
            raise Error('close_data at wrong time')
        if self.dlen:
            dummy = self._read(self.dlen)
        self._checkcrc()
        self.state = _DID_DATA
    def read_rsrc(self, *n):
        """Read up to n[0] (default: all) resource-fork bytes."""
        if self.state == _DID_HEADER:
            self.close_data()
        if self.state != _DID_DATA:
            raise Error('Read resource data at wrong time')
        if n:
            n = n[0]
            n = min(n, self.rlen)
        else:
            n = self.rlen
        self.rlen = self.rlen - n
        return self._read(n)
    def close(self):
        """Drain any remaining resource data, check its CRC, close input."""
        if self.state is None:
            return
        try:
            if self.rlen:
                dummy = self.read_rsrc(self.rlen)
            self._checkcrc()
        finally:
            self.state = None
            self.ifp.close()
def hexbin(inp, out):
    """hexbin(infilename, outfilename) - Decode binhexed file"""
    ifp = HexBin(inp)
    finfo = ifp.FInfo
    # Fall back to the filename stored in the binhex header.
    if not out:
        out = ifp.FName
    # Write the data fork in 128 KB chunks.
    with io.open(out, 'wb') as ofp:
        # XXXX Do translation on non-mac systems
        while True:
            d = ifp.read(128000)
            if not d: break
            ofp.write(d)
    ifp.close_data()
    # Resource fork: only written when present (openrsrc discards it here).
    d = ifp.read_rsrc(128000)
    if d:
        ofp = openrsrc(out, 'wb')
        ofp.write(d)
        while True:
            d = ifp.read_rsrc(128000)
            if not d: break
            ofp.write(d)
        ofp.close()
    ifp.close()
| 29.39165 | 82 | 0.547348 |
ace5224addda1c5333720f5f1b5939b828732955 | 618 | py | Python | client/verta/verta/_swagger/_public/modeldb/model/ModeldbGetDatasetByIdResponse.py | stefan-petrov-toptal/modeldb | a8a9b9da6ed964c91351230b2f0d2703c75794de | [
"Apache-2.0"
] | 835 | 2017-02-08T20:14:24.000Z | 2020-03-12T17:37:49.000Z | client/verta/verta/_swagger/_public/modeldb/model/ModeldbGetDatasetByIdResponse.py | stefan-petrov-toptal/modeldb | a8a9b9da6ed964c91351230b2f0d2703c75794de | [
"Apache-2.0"
] | 651 | 2019-04-18T12:55:07.000Z | 2022-03-31T23:45:09.000Z | client/verta/verta/_swagger/_public/modeldb/model/ModeldbGetDatasetByIdResponse.py | stefan-petrov-toptal/modeldb | a8a9b9da6ed964c91351230b2f0d2703c75794de | [
"Apache-2.0"
] | 170 | 2017-02-13T14:49:22.000Z | 2020-02-19T17:59:12.000Z | # THIS FILE IS AUTO-GENERATED. DO NOT EDIT
from verta._swagger.base_type import BaseType
class ModeldbGetDatasetByIdResponse(BaseType):
  """Auto-generated swagger response type wrapping a single dataset."""
  def __init__(self, dataset=None):
    required = {
      "dataset": False,
    }
    self.dataset = dataset
    # NOTE(review): self[k] assumes BaseType implements __getitem__ —
    # confirm against verta._swagger.base_type.
    for k, v in required.items():
      if self[k] is None and v:
        raise ValueError('attribute {} is required'.format(k))
  @staticmethod
  def from_json(d):
    """Build an instance from a decoded JSON dict, parsing the nested dataset."""
    from .ModeldbDataset import ModeldbDataset
    tmp = d.get('dataset', None)
    if tmp is not None:
      d['dataset'] = ModeldbDataset.from_json(tmp)
    return ModeldbGetDatasetByIdResponse(**d)
ace522acc744a7bbb5df3ce2eb0904569dc7e61a | 594 | py | Python | factura/forms.py | ScastrillonE/sistema_compra_facturacion | 1d6bf725920fc5c2c7edd472bdaac74e6b870cd7 | [
"MIT"
] | null | null | null | factura/forms.py | ScastrillonE/sistema_compra_facturacion | 1d6bf725920fc5c2c7edd472bdaac74e6b870cd7 | [
"MIT"
] | 8 | 2021-03-19T02:38:40.000Z | 2022-01-13T02:38:04.000Z | factura/forms.py | ScastrillonE/sistema_compra_facturacion | 1d6bf725920fc5c2c7edd472bdaac74e6b870cd7 | [
"MIT"
] | null | null | null | from django import forms
from .models import Cliente
class ClienteForm(forms.ModelForm):
    """Model form for Cliente, rendering every field with Bootstrap styling."""
    class Meta:
        model = Cliente
        fields = ['nombre', 'apellidos', 'tipo',
                  'celular', 'estado']
        # Redundant with the explicit `fields` whitelist, but kept to make
        # the exclusion intent visible.
        exclude = ['user_modification', 'modificado', 'user', 'creado']
        # Bug fix: Django reads `widgets` (plural); the original `widget`
        # attribute was silently ignored.  'descripcion' is not currently
        # in `fields`, so this only takes effect once that field is added.
        widgets = {'descripcion': forms.TextInput()}

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Apply the Bootstrap class to every rendered field's widget.
        for field in self.fields:
            self.fields[field].widget.attrs.update({
                'class': 'form-control'
            })
| 33 | 69 | 0.552189 |
ace52317e9a3959fbd20bd6fac8cad0cfc37f592 | 2,425 | py | Python | Train_1103.py | bian0505/Pad_Me | c05b899b85a99d982948741e9da10e0a72d054d8 | [
"MIT"
] | null | null | null | Train_1103.py | bian0505/Pad_Me | c05b899b85a99d982948741e9da10e0a72d054d8 | [
"MIT"
] | null | null | null | Train_1103.py | bian0505/Pad_Me | c05b899b85a99d982948741e9da10e0a72d054d8 | [
"MIT"
] | null | null | null | import torch,time
import torch.nn as nn
#import torch.nn.functional as F
import torch.optim as optim
import torch.utils.data.dataloader as DataLoader
#from torchvision import datasets, transforms
#
import numpy as np
#import matplotlib.pyplot as plt
from YOLO_data_dataset import DealDataset
from modelclass import Region_Mask
torch.__version__
BATCH_SIZE=30 # a batch of 256 needs roughly 2 GB of GPU memory; NOTE: unused, the DataLoader below hard-codes batch_size=1
EPOCHS=3000 # total number of training epochs
DEVICE = torch.device("cuda")
dataset=DealDataset()
dataloader = DataLoader.DataLoader(dataset,batch_size= 1, shuffle = True, num_workers= 0)
def train(model, device, train_loader, optimizer, epoch):
    """Run one training epoch and return the final batch's loss value.

    Uses summed (not mean) MSE, so the reported loss scales with the
    number of elements per batch.  Logs progress every 10 batches.
    """
    model.train()
    # Create the criterion once instead of re-instantiating it per batch.
    criterion = nn.MSELoss(reduction='sum')
    for batch_idx, (data, target) in enumerate(train_loader):
        data, target = data.to(device), target.float().to(device)
        optimizer.zero_grad()
        output = model(data)
        loss = criterion(output, target)
        loss.backward()
        optimizer.step()
        if (batch_idx + 1) % 10 == 0:
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                epoch, batch_idx * len(data), len(train_loader.dataset),
                100. * batch_idx / len(train_loader), loss.item()))
    return loss.item()
# Track (epoch, loss) pairs for later inspection.
loss_array = np.zeros([EPOCHS, 2])
try:
    # Resume from a previously saved checkpoint when available.
    model_cpu = torch.load("model_Large561.bian")
    model = model_cpu.to(DEVICE)
except Exception:
    model = Region_Mask().to(DEVICE)
    print("model recreated")
# Bug fix: the loop previously ran range(EPOCHS + 1), so the final
# iteration indexed row EPOCHS of loss_array (shape (EPOCHS, 2)) and
# raised IndexError.
for epoch in range(EPOCHS):
    # Learning rate decays from ~0.011 towards 0.001 on a 300-epoch scale.
    LearnRate = 0.001 + 0.01 * (10 ** (-epoch / 300))
    optimizer = optim.SGD(model.parameters(), lr=LearnRate, momentum=0.9)
    loss = train(model, DEVICE, dataloader, optimizer, epoch)
    loss_array[epoch, 0] = epoch
    loss_array[epoch, 1] = loss
    if (epoch + 1) % 50 == 0:
        # Periodic checkpoint, stamped with day/hour/minute.
        t = time.localtime()
        filedate = "day" + str(t.tm_mday) + "hour" + str(t.tm_hour) + "min" + str(t.tm_min)
        model_filename = "./model_ep" + str(epoch) + "_" + filedate + ".bian"
        csv_filename = "./loss_log.csv"
        torch.save(model, model_filename)
        np.savetxt(csv_filename, loss_array, delimiter=',')
np.savetxt("./loss_Last.csv", loss_array, delimiter=',')
torch.save(model, "./model_Last.bian")
ace523258094acd9beb8839ccdcbdd5300ad0084 | 2,389 | py | Python | python/fedml/simulation/mpi_p2p_mp/split_nn/server.py | NCLPhD/FedML | ffa15262ee963b9c856f34f0b2202f4dfeb3a76b | [
"Apache-2.0"
] | null | null | null | python/fedml/simulation/mpi_p2p_mp/split_nn/server.py | NCLPhD/FedML | ffa15262ee963b9c856f34f0b2202f4dfeb3a76b | [
"Apache-2.0"
] | null | null | null | python/fedml/simulation/mpi_p2p_mp/split_nn/server.py | NCLPhD/FedML | ffa15262ee963b9c856f34f0b2202f4dfeb3a76b | [
"Apache-2.0"
] | null | null | null | import logging
import torch.nn as nn
import torch.optim as optim
class SplitNN_server:
    """Server half of SplitNN: holds the top model segment, completes the
    forward/backward pass for client-sent activations, and tracks
    per-phase accuracy/loss statistics."""
    def __init__(self, args):
        # args is a dict: "comm" (communicator), "model" (top model
        # segment) and "max_rank" (number of client ranks).
        self.comm = args["comm"]
        self.model = args["model"]
        self.MAX_RANK = args["max_rank"]
        self.init_params()
    def init_params(self):
        """Initialise optimizer, loss criterion and bookkeeping state."""
        self.epoch = 0
        self.log_step = 50
        self.active_node = 1
        self.train_mode()
        self.optimizer = optim.SGD(
            self.model.parameters(), lr=0.1, momentum=0.9, weight_decay=5e-4
        )
        self.criterion = nn.CrossEntropyLoss()
    def reset_local_params(self):
        # Per-phase counters, reset whenever the phase flips.
        self.total = 0
        self.correct = 0
        self.val_loss = 0
        self.step = 0
        self.batch_idx = 0
    def train_mode(self):
        """Switch model and counters to the training phase."""
        self.model.train()
        self.phase = "train"
        self.reset_local_params()
    def eval_mode(self):
        """Switch model and counters to the validation phase."""
        self.model.eval()
        self.phase = "validation"
        self.reset_local_params()
    def forward_pass(self, acts, labels):
        """Run the top model on client activations and compute the loss.

        The activations are retained so backward_pass can return their
        gradient to the client."""
        self.acts = acts
        self.optimizer.zero_grad()
        self.acts.retain_grad()
        logits = self.model(acts)
        _, predictions = logits.max(1)
        self.loss = self.criterion(logits, labels)
        self.total += labels.size(0)
        self.correct += predictions.eq(labels).sum().item()
        if self.step % self.log_step == 0 and self.phase == "train":
            acc = self.correct / self.total
            logging.info(
                "phase={} acc={} loss={} epoch={} and step={}".format(
                    "train", acc, self.loss.item(), self.epoch, self.step
                )
            )
        if self.phase == "validation":
            self.val_loss += self.loss.item()
        self.step += 1
    def backward_pass(self):
        """Backpropagate and update the server model; return the gradient
        of the client activations so the client can continue backward."""
        self.loss.backward()
        self.optimizer.step()
        return self.acts.grad
    def validation_over(self):
        """Finish a validation round: log metrics, advance the epoch and
        hand the active role to the next client rank (round-robin)."""
        # not precise estimation of validation loss
        self.val_loss /= self.step
        acc = self.correct / self.total
        logging.info(
            "phase={} acc={} loss={} epoch={} and step={}".format(
                self.phase, acc, self.val_loss, self.epoch, self.step
            )
        )
        self.epoch += 1
        self.active_node = (self.active_node % self.MAX_RANK) + 1
        self.train_mode()
        logging.info("current active client is {}".format(self.active_node))
ace524566290bd6d30b77a0d1250551c5879e814 | 3,078 | py | Python | src_taxonomy/extract_user_score_for_top_topics.py | sanja7s/SR_Twitter | 2eb499c9aa25ba6e9860cd77eac6832890d2c126 | [
"MIT"
] | null | null | null | src_taxonomy/extract_user_score_for_top_topics.py | sanja7s/SR_Twitter | 2eb499c9aa25ba6e9860cd77eac6832890d2c126 | [
"MIT"
] | null | null | null | src_taxonomy/extract_user_score_for_top_topics.py | sanja7s/SR_Twitter | 2eb499c9aa25ba6e9860cd77eac6832890d2c126 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
'''
extract top taxons for each user:
movies
music
sex
humor
school
'''
import codecs
from collections import defaultdict, OrderedDict
import json
import glob, os
f_in = "tweets_taxonomy_clean.JSON"
f_in_user_ids = "user_IDs.dat"
IN_DIR = "../../../DATA/taxonomy_stats/"
OUT_DIR = "user_taxons/"
f_out_topics = "user_score_for_top_topics.tab"
##################################################
# read in a map for the twitter username --> id
##################################################
def read_user_IDs():
    """Return a dict mapping Twitter username -> numeric user id string.

    Parsed from the two-column file *f_in_user_ids* (id, username);
    missing usernames map to '' via the defaultdict.
    """
    user_ids = defaultdict(str)
    with codecs.open(f_in_user_ids, 'r', encoding='utf8') as f:
        for line in f:
            parts = line.split()
            # Column 0 is the numeric id, column 1 the username.
            user_ids[parts[1]] = parts[0]
    return user_ids
###############################################################################
"""
go through taxon file and extract users scores for top topics
movies
music
sex
humor
school
"""
###############################################################################
def extract_usr_topics_score():
    """Aggregate each user's taxonomy scores for the top topics
    (music, movies, sex, humor, school) and write them to a TSV file.
    """
    os.chdir(IN_DIR)
    # user_id -> {topic -> summed score}
    res = defaultdict(int)
    # holds all the user ids
    user_ids = read_user_IDs()
    output_file = codecs.open(OUT_DIR + f_out_topics, 'w', encoding='utf8')
    cnt = 0
    with codecs.open(f_in, 'r', encoding='utf8') as input_file:
        for line7s in input_file:
            try:
                line = json.loads(line7s)
                taxonomy_all = line["taxonomy"]
                user_name = line["_id"]
                user_id = user_ids[user_name]
                taxonomy = taxonomy_all["taxonomy"]
                # Raises KeyError for the ~10% of users whose taxonomy was
                # not successfully downloaded; they are skipped below.
                docSentiment = taxonomy_all["docSentiment"]
                res[user_id] = defaultdict(int)
                # Accumulate the score of every confident taxon per topic.
                for el in taxonomy:
                    # Bug fix: this used to read "except: KeyError" -- a bare
                    # except followed by a no-op expression, which silently
                    # swallowed every exception type.
                    try:
                        if el["confident"] == "no":
                            continue
                    except KeyError:
                        pass
                    taxonomy_tree = el["label"].split("/")
                    taxonomy_tree.pop(0)  # drop the empty leading element
                    score = float(el["score"])
                    if 'music' in taxonomy_tree:
                        res[user_id]['music'] += score
                    elif 'movies' in taxonomy_tree:
                        res[user_id]['movies'] += score
                    elif 'sex' in taxonomy_tree:
                        res[user_id]['sex'] += score
                    elif 'humor' in taxonomy_tree:
                        res[user_id]['humor'] += score
                    elif 'school' in taxonomy_tree:
                        res[user_id]['school'] += score
                output_file.write(str(user_id) + '\t' + str(res[user_id]['music']) + \
                    '\t' + str(res[user_id]['movies']) + \
                    '\t' + str(res[user_id]['sex']) + \
                    '\t' + str(res[user_id]['humor']) + \
                    '\t' + str(res[user_id]['school']) + '\n')
                cnt += 1
            except KeyError:
                # Users with incomplete taxonomy data are skipped silently.
                continue
    print("Topics saved for %d users " % (cnt))
###############################################################################
extract_usr_topics_score() | 28.238532 | 79 | 0.584795 |
ace5266424731059eb2b20c053f5c8d78764fc8b | 3,402 | py | Python | backend_app/routers.py | deephealthproject/backend | 4bf6899c1308cbd42231ff9e29fe68b3ccb881e5 | [
"MIT"
] | 2 | 2021-09-21T19:04:26.000Z | 2021-12-31T05:21:16.000Z | backend_app/routers.py | deephealthproject/backend | 4bf6899c1308cbd42231ff9e29fe68b3ccb881e5 | [
"MIT"
] | null | null | null | backend_app/routers.py | deephealthproject/backend | 4bf6899c1308cbd42231ff9e29fe68b3ccb881e5 | [
"MIT"
] | 2 | 2020-03-20T14:05:48.000Z | 2020-06-16T16:15:47.000Z | # coding:utf-8
from collections import OrderedDict
from django.urls import NoReverseMatch
from rest_framework import response, reverse, routers
class HybridRouter(routers.DefaultRouter):
    """DRF router that registers plain APIView urls alongside ViewSets and
    lists both in the generated API root view."""
    # From http://stackoverflow.com/a/23321478/1459749
    # and https://bitbucket.org/hub9/django-hybrid-router
    # and https://stackoverflow.com/a/46163870
    def __init__(self, *args, **kwargs):
        super(HybridRouter, self).__init__(*args, **kwargs)
        self._api_view_urls = {}
        # Accept urls with or without a trailing slash.
        self.trailing_slash = '/?'
    def add_api_view(self, name, url):
        """Register a non-viewset url pattern under *name*."""
        self._api_view_urls[name] = url
    def remove_api_view(self, name):
        del self._api_view_urls[name]
    @property
    def api_view_urls(self):
        # Shallow copy so callers cannot mutate internal state.
        ret = {}
        ret.update(self._api_view_urls)
        return ret
    def get_urls(self):
        """Return the viewset urls plus the extra APIView urls."""
        urls = super(HybridRouter, self).get_urls()
        for api_view_key in self._api_view_urls.keys():
            urls.append(self._api_view_urls[api_view_key])
        return urls
    def get_api_root_view(self, api_urls=None):
        """Build the API root view listing viewset routes and APIView urls."""
        # Copy the following block from Default Router
        api_root_dict = OrderedDict()
        list_name = self.routes[0].name
        for prefix, viewset, basename in self.registry:
            api_root_dict[prefix] = list_name.format(basename=basename)
        api_view_urls = self._api_view_urls
        class DeephealthBackend(routers.APIRootView):
            """
            The structure of the backend can be viewed [here](https://drawsql.app/aimagelab/diagrams/api).
            """
            _ignore_model_permissions = True
            schema = None  # exclude from schema
            api_root_dict = None
            def get(self, request, *args, **kwargs):
                ret = OrderedDict()
                namespace = request.resolver_match.namespace
                for key, url_name in api_root_dict.items():
                    if namespace:
                        url_name = namespace + ':' + url_name
                    try:
                        ret[key] = reverse.reverse(
                            url_name,
                            args=args,
                            kwargs=kwargs,
                            request=request,
                            format=kwargs.get('format', None)
                        )
                    except NoReverseMatch:
                        # Skip routes that cannot be resolved here.
                        continue
                # In addition to what had been added, now add the APIView urls
                for api_view_key in api_view_urls.keys():
                    regex = api_view_urls[api_view_key].pattern.regex
                    if regex.groups == 0:
                        ret[api_view_key] = reverse.reverse(
                            api_view_urls[api_view_key].name,
                            args=args,
                            kwargs=kwargs,
                            request=request,
                            format=kwargs.get('format', None)
                        )
                    else:
                        # Parametrised urls cannot be reversed without args;
                        # expose the raw pattern instead.
                        ret[api_view_key] = "WITH PARAMS: " + regex.pattern
                return response.Response(ret)
        return DeephealthBackend.as_view()
    def register_router(self, another_router):
        """Merge another router's registry (and APIView urls) into this one."""
        self.registry.extend(another_router.registry)
        if hasattr(another_router, "_api_view_urls"):
            self._api_view_urls.update(another_router._api_view_urls)
| 37.8 | 106 | 0.556143 |
ace52675ac9ffa78d5a3f530a94757d21d97c9ba | 13,348 | py | Python | cluster-support-bot.py | TheDiemer/cluster-support-bot | 73fa3345b645bd430e003f07b067cf2dd5f265bc | [
"Apache-2.0"
] | null | null | null | cluster-support-bot.py | TheDiemer/cluster-support-bot | 73fa3345b645bd430e003f07b067cf2dd5f265bc | [
"Apache-2.0"
] | null | null | null | cluster-support-bot.py | TheDiemer/cluster-support-bot | 73fa3345b645bd430e003f07b067cf2dd5f265bc | [
"Apache-2.0"
] | null | null | null | import argparse
import logging
import os
import re
import time
import prometheus_client
import slack
import hydra
import telemetry
mention_counter = prometheus_client.Counter('cluster_support_mentions',
'Number of times a cluster is mentioned where the cluster-support bot is listening', ['_id'])
comment_counter = prometheus_client.Counter('cluster_support_comments',
'Number of times a cluster has been commented via the cluster-support bot', ['_id'])
# Eventually we'll likely switch to some sort of wsgi app but for now any path
# requested will return our metrics. We'll configure /metrics to be scrapped
# so we can leave room for some sort of landing page in the future.
prometheus_client.start_http_server(8080)
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
bot_mention = '<@{}> '.format(os.environ['BOT_ID'])
uuid_re = re.compile('.*([a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}).*', re.I)
recent_events = set() # cache recent event timestamps
hydra_client = hydra.Client(username=os.environ['HYDRA_USER'], password=os.environ['HYDRA_PASSWORD'])
dashboard_bases = [base for base in os.environ['DASHBOARDS'].split(' ') if base]
class HelpRequest(ValueError):
    """Control-flow exception used to escape ErrorRaisingArgumentParser.print_help."""
class ErrorRaisingArgumentParser(argparse.ArgumentParser):
    """ArgumentParser that raises instead of printing/exiting the process.

    argparse's default behaviour is to write to stdout/stderr and call
    sys.exit(); inside a long-running bot the failure must instead surface
    as an exception so the caller can reply in the Slack thread.
    """

    def error(self, message):
        # Parse failures become ValueError({'message': ...}).
        raise ValueError(dict(message=message))

    def exit(self, status=0, message=None):
        # Block sys.exit(); report the would-be exit as a ValueError.
        raise ValueError(dict(status=status, message=message))

    def print_help(self, file=None):
        # --help requests are tunnelled out via HelpRequest.
        raise HelpRequest(dict(parser=self))
@slack.RTMClient.run_on(event='message')
def handle_message(**payload):
    """Slack RTM entry point: delegate to _handle_message, never raise.

    Registered for every 'message' event.  Any exception is swallowed and
    logged at debug level so one bad event cannot kill the RTM callback.
    """
    try:
        _handle_message(payload=payload)
    except Exception as e:
        # Broad catch is deliberate: the bot must keep servicing events.
        logger.debug('uncaught Exception in handle_message: {}'.format(e))
def _handle_message(payload):
    """Parse and dispatch one Slack message aimed at the bot.

    Ignores message subtypes, counts cluster-ID mentions, de-duplicates by
    event timestamp, parses '<@bot> <command> ...' with the module-level
    parser and runs the matching handler.  Logs the Slack API response.
    """
    global recent_events
    data = payload.get('data')
    if not data:
        return
    if data.get('subtype') is not None:
        return # https://api.slack.com/events/message#message_subtypes
    text = data.get('text')
    if not text:
        return
    handle_uuid_mention(text)
    if not text.startswith(bot_mention):
        return
    logger.debug('handle_message: {}'.format(payload))
    timestamp = float(data.get('ts', 0))
    if timestamp in recent_events: # high-resolution timestamps should have few false-negatives
        # bug fix: this used to format the undefined name 'message' (NameError)
        logger.info('ignoring duplicate message: {}'.format(data))
        return
    recent_events.add(timestamp) # add after check without a lock should be a small race window
    cutoff = time.time() - 60*60 # keep events for an hour
    recent_events = {timestamp for timestamp in recent_events if timestamp > cutoff}
    user_arg_line, body = (text.strip()+'\n').split('\n', 1)
    user_args = user_arg_line.split()[1:] # split and drop the '<@{bot-id}>' prefix
    try:
        args = parser.parse_args(user_args)
    except HelpRequest as error:
        # handle_help/handle_parse_args_error already post the reply and
        # return the Slack response; the old code mistakenly treated that
        # response as a handler and tried to call it.
        response = handle_help(payload=payload, subparser=error.args[0]['parser'])
    except ValueError as error:
        response = handle_parse_args_error(payload=payload, error=error)
    else:
        handler = args.func
        if not handler:
            logger.info('no handler found for {!r}'.format(user_args))
            return
        response = handler(payload=payload, args=args, body=body)
    if not response:
        return
    if response.get('ok'):
        logger.debug(response)
    else:
        logger.error(response)
def handle_uuid_mention(text):
    """Bump the per-cluster mention metric when *text* contains a UUID.

    uuid_re captures a single version-4 UUID from the message, so at most
    one mention per message is counted.
    """
    match = uuid_re.match(text)
    if match:
        uuid = match.groups()[0]
        logger.debug('{} mention'.format(uuid))
        mention_counter.labels(uuid).inc()
def handle_parse_args_error(payload, error):
    """Report an argparse ValueError back to the Slack thread it came from.

    *error* is the ValueError raised by ErrorRaisingArgumentParser; its
    single argument is a dict whose 'message' key holds the user-facing
    text.  Returns the Slack API response, or None when the error does not
    have the expected shape.
    """
    data = payload['data']
    web_client = payload['web_client']
    channel = data['channel']
    thread = data.get('thread_ts', data['ts'])
    if len(error.args) != 1:
        logger.error('unrecognized parse_args error: {}'.format(error))
        return
    message = error.args[0].get('message')
    if not message:
        logger.error('parse_args error had no message: {}'.format(error))
        return
    return web_client.chat_postMessage(channel=channel, thread_ts=thread, text=message)
def handle_help(payload, args=None, body=None, subparser=None):
    """Post usage text for *subparser* (default: the top-level parser) in-thread."""
    data = payload['data']
    target = subparser or parser
    return payload['web_client'].chat_postMessage(
        channel=data['channel'],
        thread_ts=data.get('thread_ts', data['ts']),
        text=target.format_help(),
    )
def _block_from_text(line):
return {
"type": "section",
"text": {
"type": "mrkdwn",
"text": line
}
}
def _summary_to_text(summary):
if not summary:
return "No summary"
lines = []
for line in summary:
lines.extend([
line['subject'],
line['body'],
])
return "\n".join(lines)
def handle_summary(payload, args=None, body=None):
    """Post the short cluster summary (info lines only) to the Slack thread."""
    data = payload['data']
    web_client = payload['web_client']
    channel = data['channel']
    thread = data.get('thread_ts', data['ts'])
    cluster = args.cluster
    try:
        info, _, _ = get_summary(cluster=cluster)
    except ValueError as error:
        # lookup failures are reported in-thread instead of raising
        return web_client.chat_postMessage(
            channel=channel,
            thread_ts=thread,
            text='{} {}'.format(cluster, error))
    blocks = [_block_from_text(line) for line in info]
    return web_client.chat_postMessage(channel=channel, thread_ts=thread, blocks=blocks)
def handle_detail(payload, args=None, body=None):
    """Post the full cluster report: info lines, summary note, related notes."""
    data = payload['data']
    web_client = payload['web_client']
    channel = data['channel']
    thread = data.get('thread_ts', data['ts'])
    cluster = args.cluster
    try:
        info, summary, notes = get_summary(cluster=cluster)
    except ValueError as error:
        # lookup failures are reported in-thread instead of raising
        return web_client.chat_postMessage(
            channel=channel,
            thread_ts=thread,
            text='{} {}'.format(cluster, error))
    blocks = [_block_from_text(line) for line in info]
    blocks.extend(_block_from_text(line) for line in summary)
    if notes:
        blocks.append(_block_from_text(_summary_to_text(notes)))
    return web_client.chat_postMessage(channel=channel, thread_ts=thread, blocks=blocks)
def get_notes(cluster, ebs_account):
    """Fetch the account's Hydra notes and pick out the ones for *cluster*.

    Returns (summary, related_notes): *summary* is the first non-retired
    note whose subject starts with 'Summary (cluster <id>): ' (or None),
    and *related_notes* are other non-retired notes that merely mention
    the cluster ID in their subject.
    """
    notes = hydra_client.get_account_notes(account=ebs_account)
    summary = None
    subject_prefix = 'Summary (cluster {}): '.format(cluster)
    related_notes = []
    for note in notes:
        if note.get('isRetired'):
            continue
        if not note['subject'].startswith(subject_prefix):
            if cluster in note['subject']:
                related_notes.append(note)
            continue
        summary = note
        # NOTE(review): stopping here skips any related notes that appear
        # after the summary note in the listing -- confirm that is intended.
        break
    return summary, related_notes
def get_entitlements_summary(ebs_account):
    """Summarize a customer account's support entitlements as one line.

    Returns e.g. 'OpenShift: Premium. Other: Standard', with 'None' for
    either bucket when no matching entitlements exist, or a warning string
    when the account has no entitlements at all.
    """
    entitlements = hydra_client.get_entitlements(account=ebs_account)
    if not entitlements:
        return 'None. Customer Experience and Engagement (CEE) will not be able to open support cases.'

    def support_levels(predicate):
        # De-duplicated, sorted, comma-joined supportLevel values for the
        # entitlements matching *predicate*; 'None' when nothing matches.
        levels = sorted({e['supportLevel'] for e in entitlements if predicate(e)})
        return ', '.join(levels) or 'None'

    openshift_entitlements = support_levels(lambda e: 'OpenShift' in e['name'])
    other_entitlements = support_levels(lambda e: 'OpenShift' not in e['name'])
    return 'OpenShift: {}. Other: {}'.format(openshift_entitlements, other_entitlements)
def get_summary(cluster):
    """Build the summary lines shown in Slack for *cluster*.

    Returns a 3-tuple:
      lines            -- cluster info: owning account, managed/support
                          status, dashboard links and open support cases
                          whose comments mention the cluster ID
      existing_summary -- [subject, body] of the current summary note, or []
      related_notes    -- other notes mentioning the cluster in the subject
    NOTE(review): callers catch ValueError from this path -- presumably the
    telemetry/Hydra lookups raise it; confirm against those modules.
    """
    subscription = telemetry.subscription(cluster=cluster)
    ebs_account = telemetry.ebs_account(subscription=subscription)
    summary, related_notes = get_notes(cluster=cluster, ebs_account=ebs_account)
    lines = ['Cluster {}'.format(cluster)]
    lines.extend([
        'Created by Red Hat Customer Portal Account ID {}'.format(ebs_account),
        'Managed: {}'.format(subscription.get('managed', 'Unknown')),
        'Support: {}'.format(subscription.get('support', 'None')),
    ])
    # entitlements are only worth listing when the subscription itself
    # carries no support level
    if not subscription.get('support'):
        lines.append('Entitlements: {}'.format(get_entitlements_summary(ebs_account=ebs_account)))
    lines.extend('Dashboard: {}{}'.format(dashboard_base, cluster) for dashboard_base in dashboard_bases)
    # a case is relevant when any of its comments mentions the cluster ID;
    # this costs one extra Hydra call per open case
    cases = [
        case
        for case in hydra_client.get_open_cases(account=ebs_account)
        if cluster in str(hydra_client.get_case_comments(case=case['caseNumber']))
    ]
    lines.extend('Case {caseNumber} ({createdDate}, {caseOwner[name]}): {subject}'.format(**case) for case in cases)
    existing_summary = []
    if summary:
        existing_summary.extend([
            summary['subject'],
            summary['body'],
        ])
    return lines, existing_summary, related_notes
def handle_set_summary(payload, args=None, body=None):
    """Create or replace the per-cluster summary note on the customer account.

    The first line of *body* becomes the summary subject, the rest the
    summary body (a bot-attribution footer is appended).  Replies in-thread
    with a confirmation or with the error text on lookup failure.
    """
    web_client = payload['web_client']
    channel = payload['data']['channel']
    thread = payload['data'].get('thread_ts', payload['data']['ts'])
    cluster = args.cluster
    try:
        subject, body = body.split('\n', 1)
    except ValueError: # subject with no body
        subject, body = body, ''
    body = (body.strip() + '\n\nThis summary was created by the cluster-support bot. Workflow docs in https://github.com/openshift/cluster-support-bot/').strip()
    subject_prefix = 'Summary (cluster {}): '.format(cluster)
    try:
        ebs_account = telemetry.ebs_account(subscription=telemetry.subscription(cluster=cluster))
        summary, _ = get_notes(cluster=cluster, ebs_account=ebs_account)
        # the replacement note is posted before the old one is deleted --
        # presumably so a failed post cannot lose the existing summary
        hydra_client.post_account_note(
            account=ebs_account,
            subject='{}{}'.format(subject_prefix, subject),
            body=body,
        )
        if summary:
            hydra_client.delete_account_note(account=ebs_account, noteID=summary['id'])
        comment_counter.labels(cluster).inc()
    except ValueError as error:
        return web_client.chat_postMessage(
            channel=channel,
            thread_ts=thread,
            text='{} {}'.format(cluster, error))
    return web_client.chat_postMessage(channel=channel, thread_ts=thread, text='set {} summary to:\n{}\n{}'.format(cluster, subject, body))
def handle_comment(payload, args=None, body=None):
    """Attach a free-form note to the cluster's customer account in Hydra.

    The first line of *body* becomes the note subject, the rest its body.
    Replies in-thread with a confirmation, or with the error text when the
    telemetry/Hydra lookup fails.
    """
    data = payload['data']
    web_client = payload['web_client']
    channel = data['channel']
    thread = data.get('thread_ts', data['ts'])
    cluster = args.cluster
    if '\n' in body:
        subject, body = body.split('\n', 1)
    else: # subject with no body
        subject, body = body, ''
    try:
        subscription = telemetry.subscription(cluster=cluster)
        ebs_account = telemetry.ebs_account(subscription=subscription)
        hydra_client.post_account_note(
            account=ebs_account,
            subject='cluster {}: {}'.format(cluster, subject),
            body=body,
        )
        comment_counter.labels(cluster).inc()
    except ValueError as error:
        return web_client.chat_postMessage(
            channel=channel,
            thread_ts=thread,
            text='{} {}'.format(cluster, error))
    return web_client.chat_postMessage(channel=channel, thread_ts=thread, text='added comment on {}:\n{}\n{}'.format(cluster, subject, body))
# Top-level command grammar: '<@bot> <subcommand> ...'.
# ErrorRaisingArgumentParser surfaces parse failures and --help as
# exceptions, which _handle_message converts into Slack replies.
parser = ErrorRaisingArgumentParser(
    prog='Cluster support bot',
    description='I help you collaborate on per-cluster support issues ( https://github.com/openshift/cluster-support-bot/ ).',
    formatter_class=argparse.RawDescriptionHelpFormatter,
)
subparsers = parser.add_subparsers()
# each subcommand stores its handler callable in the parsed args as 'func'
help_parser = subparsers.add_parser('help', help='Show this help.')
help_parser.set_defaults(func=handle_help)
summary_parser = subparsers.add_parser('summary', help='Summarize a cluster by ID.')
summary_parser.add_argument('cluster', metavar='ID', help='The cluster ID.')
summary_parser.set_defaults(func=handle_summary)
set_summary_parser = subparsers.add_parser('set-summary', help='Set (or edit) the cluster summary. The line following the set-summary command will be used in the summary subject, and subsequent lines will be used in the summary body.')
set_summary_parser.add_argument('cluster', metavar='ID', help='The cluster ID.')
set_summary_parser.set_defaults(func=handle_set_summary)
detail_parser = subparsers.add_parser('detail', help='Upload a file to Slack with the cluster summary and all comments.')
detail_parser.add_argument('cluster', metavar='ID', help='The cluster ID.')
detail_parser.set_defaults(func=handle_detail)
comment_parser = subparsers.add_parser('comment', help='Add a comment on a cluster by ID. The line following the comment command will be used in the summary subject, and subsequent lines will be used in the summary body.')
comment_parser.add_argument('cluster', metavar='ID', help='The cluster ID.')
comment_parser.set_defaults(func=handle_comment)
# start the RTM socket
rtm_client = slack.RTMClient(token=os.environ['SLACK_BOT_TOKEN'])
logger.info("bot starting...")
rtm_client.start() # hand control to the Slack RTM event loop
| 38.137143 | 236 | 0.674258 |
ace52718523e0531a0e2283d1bb6122108312310 | 1,212 | py | Python | msc/jmvspn.py | rtagirov/python_scr_pc_imperial | 423204964ddbc9c117bd2b3bb4397ee98b89a56d | [
"MIT"
] | null | null | null | msc/jmvspn.py | rtagirov/python_scr_pc_imperial | 423204964ddbc9c117bd2b3bb4397ee98b89a56d | [
"MIT"
] | null | null | null | msc/jmvspn.py | rtagirov/python_scr_pc_imperial | 423204964ddbc9c117bd2b3bb4397ee98b89a56d | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
import os
import importlib
import paths; importlib.reload(paths)
import nessy; importlib.reload(nessy)
import pltaux; importlib.reload(pltaux)
import sysaux; importlib.reload(sysaux)

# Compare NESSY level populations from two runs (lte vs lte_jobmax) and
# plot the relative difference [%] for every level plus the electron density.
plt.close('all')
sysaux.clean_dir(paths.figdir + 'jmvspn', mode = 'noverbose')
dir1 = paths.it0h + 'popcomp/lte/'
dir2 = paths.it0h + 'popcomp/lte_jobmax/'
lev, rne1, popnum1 = nessy.read_popnum(dir1)
fig, ax1 = plt.subplots(nrows = 1, ncols = 1, figsize = (12, 10))
fig.tight_layout() # bug fix: 'fig.tight_layout' without parentheses was a no-op attribute access
ax1.set_ylim(-0.0001, 0.0001)
j = 0
for l in lev:
    print('Plotting ', l)
    popnum2 = np.loadtxt(dir2 + paths.lev + l, skiprows = 2, usecols = 3)
#    fig, ax1 = plt.subplots(nrows = 1, ncols = 1, figsize = (12, 10))
#    fig.tight_layout
#    fig.suptitle(l, y = 1.001)
    ax1.plot((popnum2 - popnum1[j, :]) * 100 / popnum2) # relative difference in %
#    ax1.set_ylim(-0.0001, 0.0001)
#    pltaux.savepdf('jmvspn/' + l)
    j = j + 1
rne2 = np.loadtxt(dir2 + paths.lev + 'ELECTR', skiprows = 2, usecols = 3)
ax1.plot((rne2 - rne1) * 100 / rne2, color = 'k') # electron density difference in black
pltaux.savepdf('jmvspn')
#os.chdir(paths.figdir + 'jmvspn/')
#os.system('pdftk * output joined.pdf')
#os.chdir(paths.pydir)
| 19.238095 | 73 | 0.655941 |
ace5283ef0464fda13ad383ce64b9a39657b49f2 | 18,081 | py | Python | tests.py | miquelcampos/Qt.py | bc56d3cafbd6d1caf97492eeeeaefadd5ea9cd76 | [
"MIT"
] | 2 | 2019-09-24T17:09:02.000Z | 2020-09-18T04:58:00.000Z | tests.py | miquelcampos/Qt.py | bc56d3cafbd6d1caf97492eeeeaefadd5ea9cd76 | [
"MIT"
] | null | null | null | tests.py | miquelcampos/Qt.py | bc56d3cafbd6d1caf97492eeeeaefadd5ea9cd76 | [
"MIT"
] | 1 | 2019-09-24T17:09:46.000Z | 2019-09-24T17:09:46.000Z | """Tests that run once"""
import io
import os
import sys
import imp
import shutil
import tempfile
import subprocess
import contextlib
# Third-party dependency
import six
from nose.tools import (
assert_raises,
)
PYTHON = sys.version_info[0] # e.g. 2 or 3
try:
long
except NameError:
# Python 3 compatibility
long = int
@contextlib.contextmanager
def captured_output():
    """Temporarily swap sys.stdout/sys.stderr for StringIO buffers.

    Yields the (stdout, stderr) buffer pair; the real streams are restored
    on exit even when the body raises.
    """
    originals = sys.stdout, sys.stderr
    buffers = six.StringIO(), six.StringIO()
    try:
        sys.stdout, sys.stderr = buffers
        yield buffers
    finally:
        sys.stdout, sys.stderr = originals
self = sys.modules[__name__]
def setup():
    """Module-wide initialisation

    This function runs once, followed by teardown() below once
    all tests have completed.
    """
    # Write a small Qt Designer .ui fixture (a QWidget with two line
    # edits, a label and a textChanged->setText connection) into a
    # fresh temporary directory for the loadUi tests.
    self.tempdir = tempfile.mkdtemp()
    self.ui_qwidget = os.path.join(self.tempdir, "qwidget.ui")
    with io.open(self.ui_qwidget, "w", encoding="utf-8") as f:
        f.write(u"""\
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
 <class>Form</class>
 <widget class="QWidget" name="Form">
  <property name="geometry">
   <rect>
    <x>0</x>
    <y>0</y>
    <width>507</width>
    <height>394</height>
   </rect>
  </property>
  <property name="windowTitle">
   <string>Form</string>
  </property>
  <layout class="QGridLayout" name="gridLayout">
   <item row="0" column="0">
    <widget class="QLineEdit" name="lineEdit"/>
   </item>
   <item row="1" column="0">
    <widget class="QLabel" name="label">
     <property name="text">
      <string>TextLabel</string>
     </property>
    </widget>
   </item>
   <item row="2" column="0">
    <widget class="QLineEdit" name="lineEdit_2"/>
   </item>
  </layout>
 </widget>
 <resources/>
 <connections>
  <connection>
   <sender>lineEdit</sender>
   <signal>textChanged(QString)</signal>
   <receiver>label</receiver>
   <slot>setText(QString)</slot>
   <hints>
    <hint type="sourcelabel">
     <x>228</x>
     <y>23</y>
    </hint>
    <hint type="destinationlabel">
     <x>37</x>
     <y>197</y>
    </hint>
   </hints>
  </connection>
 </connections>
</ui>
""")
def teardown():
    """Module-wide cleanup: delete the temporary .ui directory from setup()."""
    shutil.rmtree(self.tempdir)
def binding(binding):
    """True when the suite is currently forced to run under *binding*.

    Tests wrapped in ``if binding("X"):`` execute only on the run where
    QT_PREFERRED_BINDING selects that binding; unwrapped tests run once
    per binding.
    """
    preferred = os.getenv("QT_PREFERRED_BINDING")
    return preferred == binding
def test_environment():
    """Tests require all bindings to be installed (except PySide on py3.5+)"""
    # imp.find_module raises ImportError when the binding is absent,
    # failing this test without actually importing (and thus locking in)
    # any binding.
    if sys.version_info <= (3, 4):
        # PySide is not available for Python > 3.4
        imp.find_module("PySide")
    imp.find_module("PySide2")
    imp.find_module("PyQt4")
    imp.find_module("PyQt5")
def test_load_ui_returntype():
"""load_ui returns an instance of QObject"""
import sys
from Qt import QtWidgets, QtCore, QtCompat
app = QtWidgets.QApplication(sys.argv)
obj = QtCompat.loadUi(self.ui_qwidget)
assert isinstance(obj, QtCore.QObject)
app.exit()
def test_load_ui_baseinstance():
"""Tests to see if the baseinstance loading loads widgets on properly"""
import sys
from Qt import QtWidgets, QtCompat
app = QtWidgets.QApplication(sys.argv)
win = QtWidgets.QWidget()
QtCompat.loadUi(self.ui_qwidget, win)
assert hasattr(win, 'lineEdit'), "loadUi could not load instance to win"
app.exit()
def test_load_ui_signals():
"""Tests to see if the baseinstance loading loads widgets on properly"""
import sys
from Qt import QtWidgets, QtCompat
app = QtWidgets.QApplication(sys.argv)
win = QtWidgets.QWidget()
QtCompat.loadUi(self.ui_qwidget, win)
win.lineEdit.setText('Hello')
assert str(win.label.text()) == 'Hello', "lineEdit signal did not fire"
app.exit()
def test_load_ui_invalidpath():
"""Tests to see if loadUi successfully fails on invalid paths"""
import sys
from Qt import QtWidgets, QtCompat
app = QtWidgets.QApplication(sys.argv)
assert_raises(IOError, QtCompat.loadUi, 'made/up/path')
app.exit()
def test_load_ui_invalidxml():
"""Tests to see if loadUi successfully fails on invalid ui files"""
import sys
invalid_xml = os.path.join(self.tempdir, "invalid.ui")
with io.open(invalid_xml, "w", encoding="utf-8") as f:
f.write(u"""
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0" garbage
</ui>
""")
from xml.etree import ElementTree
from Qt import QtWidgets, QtCompat
app = QtWidgets.QApplication(sys.argv)
assert_raises(ElementTree.ParseError, QtCompat.loadUi, invalid_xml)
app.exit()
def test_load_ui_overwrite_fails():
"""PyQt4/5 loadUi functiion will fail if the widget has a preexisting
layout. This tests that our custom implementation for PySide does the same
"""
import sys
from Qt import QtWidgets, QtCompat
app = QtWidgets.QApplication(sys.argv)
win = QtWidgets.QWidget()
layout = QtWidgets.QVBoxLayout(win)
win.lineEdit = QtWidgets.QPushButton('Test')
layout.addWidget(win.lineEdit)
assert_raises(RuntimeError, QtCompat.loadUi, self.ui_qwidget, win)
app.exit()
def test_preferred_none():
"""Preferring None shouldn't import anything"""
os.environ["QT_PREFERRED_BINDING"] = "None"
import Qt
assert Qt.__name__ == "Qt", Qt
def test_vendoring():
"""Qt.py may be bundled along with another library/project
Create toy project
from project.vendor import Qt # Absolute
from .vendor import Qt # Relative
project/
vendor/
__init__.py
__init__.py
"""
project = os.path.join(self.tempdir, "myproject")
vendor = os.path.join(project, "vendor")
os.makedirs(vendor)
# Make packages out of folders
with open(os.path.join(project, "__init__.py"), "w") as f:
f.write("from .vendor.Qt import QtWidgets")
with open(os.path.join(vendor, "__init__.py"), "w") as f:
f.write("\n")
# Copy real Qt.py into myproject
shutil.copy(os.path.join(os.path.dirname(__file__), "Qt.py"),
os.path.join(vendor, "Qt.py"))
print("Testing relative import..")
assert subprocess.call(
[sys.executable, "-c", "import myproject"],
cwd=self.tempdir,
stdout=subprocess.PIPE, # With nose process isolation, buffer can
stderr=subprocess.STDOUT, # easily get full and throw an error.
) == 0
print("Testing absolute import..")
assert subprocess.call(
[sys.executable, "-c", "from myproject.vendor.Qt import QtWidgets"],
cwd=self.tempdir,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
) == 0
print("Testing direct import..")
assert subprocess.call(
[sys.executable, "-c", "import myproject.vendor.Qt"],
cwd=self.tempdir,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
) == 0
def test_convert_simple():
"""python -m Qt --convert works in general"""
before = """\
from PySide2 import QtCore, QtGui, QtWidgets
class Ui_uic(object):
def setupUi(self, uic):
self.retranslateUi(uic)
def retranslateUi(self, uic):
self.pushButton_2.setText(
QtWidgets.QApplication.translate("uic", "NOT Ok", None, -1))
""".split("\n")
after = """\
from Qt import QtCompat, QtCore, QtGui, QtWidgets
class Ui_uic(object):
def setupUi(self, uic):
self.retranslateUi(uic)
def retranslateUi(self, uic):
self.pushButton_2.setText(
QtCompat.translate("uic", "NOT Ok", None, -1))
""".split("\n")
from Qt import QtCompat
assert QtCompat._convert(before) == after, after
def test_convert_idempotency():
"""Converting a converted file produces an identical file"""
before = """\
from PySide2 import QtCore, QtGui, QtWidgets
class Ui_uic(object):
def setupUi(self, uic):
self.retranslateUi(uic)
def retranslateUi(self, uic):
self.pushButton_2.setText(
QtWidgets.QApplication.translate("uic", "NOT Ok", None, -1))
"""
after = """\
from Qt import QtCompat, QtCore, QtGui, QtWidgets
class Ui_uic(object):
def setupUi(self, uic):
self.retranslateUi(uic)
def retranslateUi(self, uic):
self.pushButton_2.setText(
QtCompat.translate("uic", "NOT Ok", None, -1))
"""
fname = os.path.join(self.tempdir, "idempotency.py")
with open(fname, "w") as f:
f.write(before)
from Qt import QtCompat
os.chdir(self.tempdir)
QtCompat._cli(args=["--convert", "idempotency.py"])
with open(fname) as f:
assert f.read() == after
QtCompat._cli(args=["--convert", "idempotency.py"])
with open(fname) as f:
assert f.read() == after
def test_convert_backup():
"""Converting produces a backup"""
fname = os.path.join(self.tempdir, "idempotency.py")
with open(fname, "w") as f:
f.write("")
from Qt import QtCompat
os.chdir(self.tempdir)
QtCompat._cli(args=["--convert", "idempotency.py"])
assert os.path.exists(
os.path.join(self.tempdir, "%s_backup%s" % os.path.splitext(fname))
)
def test_import_from_qtwidgets():
"""Fix #133, `from Qt.QtWidgets import XXX` works"""
from Qt.QtWidgets import QPushButton
assert QPushButton.__name__ == "QPushButton", QPushButton
def test_i158_qtcore_direct_import():
"""import Qt.QtCore works on all bindings
This addresses issue #158
"""
import Qt.QtCore
assert hasattr(Qt.QtCore, "Signal")
def test_translate_arguments():
"""Arguments of QtCompat.translate are correct
QtCompat.translate is a shim over the PySide, PyQt4 and PyQt5
equivalent with an interface like the one found in PySide2.
Reference: https://doc.qt.io/qt-5/qcoreapplication.html#translate
"""
import Qt
# This will run on each binding
result = Qt.QtCompat.translate("CustomDialog", # context
"Status", # sourceText
None, # disambiguation
-1) # n
assert result == u'Status', result
def test_binding_and_qt_version():
"""Qt's __binding_version__ and __qt_version__ populated"""
import Qt
assert Qt.__binding_version__ != "0.0.0", ("Binding version was not "
"populated")
assert Qt.__qt_version__ != "0.0.0", ("Qt version was not populated")
def test_binding_states():
"""Tests to see if the Qt binding enum states are set properly"""
import Qt
assert Qt.IsPySide == binding("PySide")
assert Qt.IsPySide2 == binding("PySide2")
assert Qt.IsPyQt5 == binding("PyQt5")
assert Qt.IsPyQt4 == binding("PyQt4")
def test_qtcompat_base_class():
"""Tests to ensure the QtCompat namespace object works as expected"""
import sys
import Qt
from Qt import QtWidgets
from Qt import QtCompat
app = QtWidgets.QApplication(sys.argv)
# suppress `local variable 'app' is assigned to but never used`
app
header = QtWidgets.QHeaderView(Qt.QtCore.Qt.Horizontal)
# Spot check compatibility functions
QtCompat.QHeaderView.setSectionsMovable(header, False)
assert QtCompat.QHeaderView.sectionsMovable(header) is False
QtCompat.QHeaderView.setSectionsMovable(header, True)
assert QtCompat.QHeaderView.sectionsMovable(header) is True
def test_cli():
"""Qt.py is available from the command-line"""
env = os.environ.copy()
env.pop("QT_VERBOSE") # Do not include debug messages
popen = subprocess.Popen(
[sys.executable, "Qt.py", "--help"],
stdout=subprocess.PIPE,
env=env
)
out, err = popen.communicate()
assert out.startswith(b"usage: Qt.py"), "\n%s" % out
def test_membership():
"""All members of Qt.py exist in all bindings"""
import Qt
missing = list()
for module, members in Qt._common_members.items():
missing.extend(
member for member in members
if not hasattr(getattr(Qt, module), member)
)
binding = Qt.__binding__
assert not missing, (
"Some members did not exist in {binding}\n{missing}".format(
**locals())
)
if sys.version_info <= (3, 4):
# PySide is not available for Python > 3.4
# Shiboken(1) doesn't support Python 3.5
# https://github.com/PySide/shiboken-setup/issues/3
def test_wrapInstance():
""".wrapInstance and .getCppPointer is identical across all bindings"""
from Qt import QtCompat, QtWidgets
app = QtWidgets.QApplication(sys.argv)
try:
button = QtWidgets.QPushButton("Hello world")
button.setObjectName("MySpecialButton")
pointer = QtCompat.getCppPointer(button)
widget = QtCompat.wrapInstance(long(pointer),
QtWidgets.QWidget)
assert isinstance(widget, QtWidgets.QWidget), widget
assert widget.objectName() == button.objectName()
# IMPORTANT: this differs across sip and shiboken.
if binding("PySide") or binding("PySide2"):
assert widget != button
else:
assert widget == button
finally:
app.exit()
def test_implicit_wrapInstance():
""".wrapInstance doesn't need the `base` argument"""
from Qt import QtCompat, QtWidgets
app = QtWidgets.QApplication(sys.argv)
try:
button = QtWidgets.QPushButton("Hello world")
button.setObjectName("MySpecialButton")
pointer = QtCompat.getCppPointer(button)
widget = QtCompat.wrapInstance(long(pointer))
assert isinstance(widget, QtWidgets.QWidget), widget
assert widget.objectName() == button.objectName()
if binding("PySide") or binding("PySide2"):
assert widget != button
else:
assert widget == button
finally:
app.exit()
if binding("PyQt4"):
def test_preferred_pyqt4():
"""QT_PREFERRED_BINDING = PyQt4 properly forces the binding"""
import Qt
assert Qt.__binding__ == "PyQt4", (
"PyQt4 should have been picked, "
"instead got %s" % Qt.__binding__)
def test_sip_api_qtpy():
"""Preferred binding PyQt4 should have sip version 2"""
__import__("Qt") # Bypass linter warning
import sip
assert sip.getapi("QString") == 2, (
"PyQt4 API version should be 2, "
"instead is %s" % sip.getapi("QString"))
if PYTHON == 2:
def test_sip_api_already_set():
"""Raise ImportError with sip was set to 1 with no hint, default"""
__import__("PyQt4.QtCore") # Bypass linter warning
import sip
sip.setapi("QString", 1)
assert_raises(ImportError, __import__, "Qt")
# A sip API hint of any kind bypasses ImportError
# on account of it being merely a hint.
def test_sip_api_1_1():
"""sip=1, hint=1 == OK"""
import sip
sip.setapi("QString", 1)
os.environ["QT_SIP_API_HINT"] = "1"
__import__("Qt") # Bypass linter warning
def test_sip_api_2_1():
"""sip=2, hint=1 == WARNING"""
import sip
sip.setapi("QString", 2)
os.environ["QT_SIP_API_HINT"] = "1"
with captured_output() as out:
__import__("Qt") # Bypass linter warning
stdout, stderr = out
assert stderr.getvalue().startswith("Warning:")
def test_sip_api_1_2():
"""sip=1, hint=2 == WARNING"""
import sip
sip.setapi("QString", 1)
os.environ["QT_SIP_API_HINT"] = "2"
with captured_output() as out:
__import__("Qt") # Bypass linter warning
stdout, stderr = out
assert stderr.getvalue().startswith("Warning:")
def test_sip_api_2_2():
"""sip=2, hint=2 == OK"""
import sip
sip.setapi("QString", 2)
os.environ["QT_SIP_API_HINT"] = "2"
__import__("Qt") # Bypass linter warning
if binding("PyQt5"):
def test_preferred_pyqt5():
"""QT_PREFERRED_BINDING = PyQt5 properly forces the binding"""
import Qt
assert Qt.__binding__ == "PyQt5", (
"PyQt5 should have been picked, "
"instead got %s" % Qt.__binding__)
if binding("PySide"):
def test_preferred_pyside():
"""QT_PREFERRED_BINDING = PySide properly forces the binding"""
import Qt
assert Qt.__binding__ == "PySide", (
"PySide should have been picked, "
"instead got %s" % Qt.__binding__)
if binding("PySide2"):
def test_preferred_pyside2():
"""QT_PREFERRED_BINDING = PySide2 properly forces the binding"""
import Qt
assert Qt.__binding__ == "PySide2", (
"PySide2 should have been picked, "
"instead got %s" % Qt.__binding__)
def test_coexistence():
"""Qt.py may be use alongside the actual binding"""
from Qt import QtCore
import PySide2.QtGui
# Qt remaps QStringListModel
assert QtCore.QStringListModel
# But does not delete the original
assert PySide2.QtGui.QStringListModel
if binding("PyQt4") or binding("PyQt5"):
def test_multiple_preferred():
"""QT_PREFERRED_BINDING = more than one binding excludes others"""
# PySide is the more desirable binding
os.environ["QT_PREFERRED_BINDING"] = os.pathsep.join(
["PyQt4", "PyQt5"])
import Qt
assert Qt.__binding__ == "PyQt4", (
"PyQt4 should have been picked, "
"instead got %s" % Qt.__binding__)
| 28.119751 | 79 | 0.622587 |
ace5287312df479313c8e4bbb7d79d65fa06f3ab | 544 | py | Python | zerver/migrations/0083_index_mentioned_user_messages.py | DD2480-group7-2020/zulip | 9a1e18bcf383c38c35da168563a7345768c6d784 | [
"Apache-2.0"
] | 1 | 2020-03-17T14:58:50.000Z | 2020-03-17T14:58:50.000Z | zerver/migrations/0083_index_mentioned_user_messages.py | DD2480-group7-2020/zulip | 9a1e18bcf383c38c35da168563a7345768c6d784 | [
"Apache-2.0"
] | 2 | 2020-09-07T22:32:24.000Z | 2021-05-08T18:17:53.000Z | zerver/migrations/0083_index_mentioned_user_messages.py | DD2480-group7-2020/zulip | 9a1e18bcf383c38c35da168563a7345768c6d784 | [
"Apache-2.0"
] | 1 | 2020-07-16T06:00:10.000Z | 2020-07-16T06:00:10.000Z | # -*- coding: utf-8 -*-
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('zerver', '0082_index_starred_user_messages'),
]
operations = [
migrations.RunSQL(
'''
CREATE INDEX IF NOT EXISTS zerver_usermessage_mentioned_message_id
ON zerver_usermessage (user_profile_id, message_id)
WHERE (flags & 8) != 0;
''',
reverse_sql='DROP INDEX zerver_usermessage_mentioned_message_id;'
),
]
| 25.904762 | 78 | 0.595588 |
ace529206b4e666a4892ab4ad1aeb45094db34dd | 7,704 | py | Python | tests/test_dlist.py | ujdhesa/youtube-dl-gui | ccf8d715517c5d19e4e45add4c0f5eae6f55bb29 | [
"Unlicense"
] | null | null | null | tests/test_dlist.py | ujdhesa/youtube-dl-gui | ccf8d715517c5d19e4e45add4c0f5eae6f55bb29 | [
"Unlicense"
] | null | null | null | tests/test_dlist.py | ujdhesa/youtube-dl-gui | ccf8d715517c5d19e4e45add4c0f5eae6f55bb29 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""Contains test cases for the DownloadList object."""
import sys
import os.path
import unittest
from unittest import mock
PATH = os.path.realpath(os.path.abspath(__file__))
sys.path.insert(0, os.path.dirname(os.path.dirname(PATH)))
try:
from youtube_dl_gui.downloadmanager import DownloadList, synchronized
except ImportError as error:
print(error)
sys.exit(1)
class TestInit(unittest.TestCase):

    """Exercise construction of the DownloadList."""

    def test_init(self):
        # Two fake download items with known, sequential ids.
        ditems = [mock.Mock(object_id=idx) for idx in range(2)]
        dlist = DownloadList(ditems)
        self.assertEqual(dlist._items_list, [0, 1])
        self.assertEqual(dlist._items_dict, {0: ditems[0], 1: ditems[1]})

    def test_init_empty(self):
        # With no items given, both internal containers start out empty.
        dlist = DownloadList()
        self.assertEqual(dlist._items_dict, {})
        self.assertEqual(dlist._items_list, [])

    def test_init_invalid_args(self):
        # Anything that is not a list must be rejected by the constructor.
        for bad_arg in ({}, (), False):
            self.assertRaises(AssertionError, DownloadList, bad_arg)
class TestInsert(unittest.TestCase):

    """Exercise DownloadList.insert."""

    def test_insert(self):
        dlist = DownloadList()
        ditem = mock.Mock(object_id=0)
        dlist.insert(ditem)
        # The id is appended to the ordering list and mapped to the item.
        self.assertEqual(dlist._items_dict, {0: ditem})
        self.assertEqual(dlist._items_list, [0])
class TestRemove(unittest.TestCase):

    """Test case for the DownloadList remove method."""

    def setUp(self):
        # Three fake download items with ids 0..2, all in the default stage.
        self.mocks = [mock.Mock(object_id=0), mock.Mock(object_id=1), mock.Mock(object_id=2)]
        self.dlist = DownloadList(self.mocks)

    def test_remove(self):
        # Removing a non-active item drops it from both internal containers.
        self.assertTrue(self.dlist.remove(1))
        self.assertEqual(self.dlist._items_list, [0, 2])
        self.assertEqual(self.dlist._items_dict, {0: self.mocks[0], 2: self.mocks[2]})

    def test_remove_not_exist(self):
        # Unknown ids propagate the underlying dict KeyError.
        self.assertRaises(KeyError, self.dlist.remove, 3)

    def test_remove_active(self):
        # Items in the "Active" stage refuse removal; the list is unchanged.
        self.mocks[1].stage = "Active"
        self.assertFalse(self.dlist.remove(1))
        self.assertEqual(self.dlist._items_list, [0, 1, 2])
        self.assertEqual(self.dlist._items_dict, {0: self.mocks[0], 1: self.mocks[1], 2: self.mocks[2]})
class TestFetchNext(unittest.TestCase):

    """Test case for the DownloadList fetch_next method."""

    def test_fetch_next(self):
        items_count = 3
        mocks = [mock.Mock(object_id=i, stage="Queued") for i in range(items_count)]
        dlist = DownloadList(mocks)

        # Queued items are handed out in insertion order.
        for i in range(items_count):
            self.assertEqual(dlist.fetch_next(), mocks[i])
            mocks[i].stage = "Active"

        # Nothing is returned while every item is active...
        self.assertIsNone(dlist.fetch_next())

        # ...nor once every item has completed.
        for i in range(items_count):
            mocks[i].stage = "Completed"
        self.assertIsNone(dlist.fetch_next())

        # A re-queued item becomes fetchable again.
        mocks[1].stage = "Queued"  # Re-queue item
        self.assertEqual(dlist.fetch_next(), mocks[1])

    def test_fetch_next_empty_list(self):
        dlist = DownloadList()
        self.assertIsNone(dlist.fetch_next())
class TestMoveUp(unittest.TestCase):

    """Exercise DownloadList.move_up."""

    def setUp(self):
        ditems = []
        for object_id in range(3):
            ditems.append(mock.Mock(object_id=object_id, stage="Queued"))
        self.dlist = DownloadList(ditems)

    def test_move_up(self):
        # Item 1 swaps places with the item above it.
        moved = self.dlist.move_up(1)
        self.assertTrue(moved)
        self.assertEqual(self.dlist._items_list, [1, 0, 2])

    def test_move_up_already_on_top(self):
        # The first item cannot move any higher; order is unchanged.
        moved = self.dlist.move_up(0)
        self.assertFalse(moved)
        self.assertEqual(self.dlist._items_list, [0, 1, 2])

    def test_move_up_not_exist(self):
        with self.assertRaises(ValueError):
            self.dlist.move_up(666)
class TestMoveDown(unittest.TestCase):

    """Exercise DownloadList.move_down."""

    def setUp(self):
        ditems = []
        for object_id in range(3):
            ditems.append(mock.Mock(object_id=object_id, stage="Queued"))
        self.dlist = DownloadList(ditems)

    def test_move_down(self):
        # Item 1 swaps places with the item below it.
        moved = self.dlist.move_down(1)
        self.assertTrue(moved)
        self.assertEqual(self.dlist._items_list, [0, 2, 1])

    def test_move_down_already_on_bottom(self):
        # The last item cannot move any lower; order is unchanged.
        moved = self.dlist.move_down(2)
        self.assertFalse(moved)
        self.assertEqual(self.dlist._items_list, [0, 1, 2])

    def test_move_down_not_exist(self):
        with self.assertRaises(ValueError):
            self.dlist.move_down(666)
class TestGetItem(unittest.TestCase):

    """Exercise DownloadList.get_item."""

    def test_get_item(self):
        ditems = [mock.Mock(object_id=object_id) for object_id in range(3)]
        dlist = DownloadList(ditems)
        for object_id in (0, 2):
            self.assertEqual(dlist.get_item(object_id), ditems[object_id])

    def test_get_item_not_exist(self):
        # Looking up an unknown id propagates the dict KeyError.
        with self.assertRaises(KeyError):
            DownloadList().get_item(0)
class TestGetLength(unittest.TestCase):

    """Exercise DownloadList.__len__."""

    def test_get_length(self):
        dlist = DownloadList([mock.Mock(), mock.Mock()])
        self.assertEqual(len(dlist), 2)

    def test_get_length_empty_list(self):
        # A freshly constructed list reports zero items.
        self.assertEqual(len(DownloadList()), 0)
class TestHasItem(unittest.TestCase):

    """Test case for the DownloadList has_item method."""

    def setUp(self):
        # A single item with a distinctive id.
        mock_ditem = mock.Mock(object_id=1337)
        self.dlist = DownloadList([mock_ditem])

    def test_has_item_true(self):
        # The stored id is reported as present.
        self.assertTrue(self.dlist.has_item(1337))

    def test_has_item_false(self):
        # Any other id is reported as absent.
        self.assertFalse(self.dlist.has_item(1000))
class TestGetItems(unittest.TestCase):

    """Exercise DownloadList.get_items."""

    def test_get_items(self):
        ditems = [mock.Mock() for _ in range(3)]
        # All stored items come back, in order.
        self.assertEqual(DownloadList(ditems).get_items(), ditems)

    def test_get_items_empty_list(self):
        self.assertEqual(DownloadList().get_items(), [])
class TestClear(unittest.TestCase):

    """Exercise DownloadList.clear."""

    def test_clear(self):
        ditems = [mock.Mock(), mock.Mock(), mock.Mock()]
        dlist = DownloadList(ditems)
        # Sanity-check the population before clearing.
        self.assertEqual(len(dlist), 3)
        dlist.clear()
        self.assertEqual(len(dlist), 0)
class TestChangeStage(unittest.TestCase):

    """Test case for the DownloadList change_stage method."""

    def setUp(self):
        self.mocks = [mock.Mock(object_id=i, stage="Queued") for i in range(3)]
        self.dlist = DownloadList(self.mocks)

    def test_change_stage(self):
        # The stage is written through to the stored item.
        self.dlist.change_stage(0, "Active")
        self.assertEqual(self.mocks[0].stage, "Active")

    def test_change_stage_id_not_exist(self):
        # Unknown ids propagate the underlying dict KeyError.
        self.assertRaises(KeyError, self.dlist.change_stage, 3, "Active")
class TestIndex(unittest.TestCase):

    """Exercise DownloadList.index."""

    def setUp(self):
        self.ditems = [mock.Mock(object_id=object_id) for object_id in range(3)]
        self.dlist = DownloadList(self.ditems)

    def test_index(self):
        # A known id maps to its position in the ordering list.
        self.assertEqual(self.dlist.index(2), 2)

    def test_index_not_exist(self):
        # An unknown id reports -1 instead of raising.
        self.assertEqual(self.dlist.index(3), -1)
class TestSynchronizeDecorator(unittest.TestCase):

    """Test case for the synchronized decorator factory."""

    def test_synchronize(self):
        mock_func = mock.Mock()
        mock_lock = mock.Mock()

        # Wrap the function exactly as '@synchronized(lock)' would.
        decorated_func = synchronized(mock_lock)(mock_func)

        # The wrapper forwards args/kwargs and returns the wrapped result...
        self.assertEqual(decorated_func(1, a=2), mock_func.return_value)
        mock_func.assert_called_once_with(1, a=2)
        # ...while acquiring and releasing the lock around the call.
        mock_lock.acquire.assert_called_once()
        mock_lock.release.assert_called_once()
def main():
    """Run all test cases defined in this module."""
    unittest.main()
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    main()
| 27.913043 | 104 | 0.666277 |
ace52ae228368ce6fe5d98146805054d0be5a77b | 2,368 | py | Python | allennlp/data/tokenizers/sentence_splitter.py | chardmeier/allennlp | d59974248a242508248abde31b2bf00a6aa3efed | [
"Apache-2.0"
] | 1 | 2020-06-09T16:36:46.000Z | 2020-06-09T16:36:46.000Z | allennlp/data/tokenizers/sentence_splitter.py | chardmeier/allennlp | d59974248a242508248abde31b2bf00a6aa3efed | [
"Apache-2.0"
] | null | null | null | allennlp/data/tokenizers/sentence_splitter.py | chardmeier/allennlp | d59974248a242508248abde31b2bf00a6aa3efed | [
"Apache-2.0"
] | null | null | null | from typing import List
from overrides import overrides
from allennlp.common import Registrable
from allennlp.common.util import get_spacy_model
class SentenceSplitter(Registrable):
    """
    A ``SentenceSplitter`` splits strings into sentences.
    """
    # Used by Registrable lookup when no explicit implementation is requested.
    default_implementation = 'spacy'

    def split_sentences(self, text: str) -> List[str]:
        """
        Splits ``text`` into a list of sentence strings.
        """
        raise NotImplementedError

    def batch_split_sentences(self, texts: List[str]) -> List[List[str]]:
        """
        This method lets you take advantage of spacy's batch processing.
        Default implementation is to just iterate over the texts and call ``split_sentences``.
        """
        return [self.split_sentences(text) for text in texts]
@SentenceSplitter.register('spacy')
class SpacySentenceSplitter(SentenceSplitter):
    """
    A ``SentenceSplitter`` that uses spaCy's built-in sentence boundary detection.

    Spacy's default sentence splitter uses a dependency parse to detect sentence boundaries, so
    it is slow, but accurate.

    Another option is to use rule-based sentence boundary detection. It's fast and has a small memory footprint,
    since it uses punctuation to detect sentence boundaries. This can be activated with the `rule_based` flag.

    By default, ``SpacySentenceSplitter`` calls the default spacy boundary detector.
    """
    def __init__(self,
                 language: str = 'en_core_web_sm',
                 rule_based: bool = False) -> None:
        # we need spacy's dependency parser if we're not using rule-based sentence boundary detection.
        self.spacy = get_spacy_model(language, parse=not rule_based, ner=False, pos_tags=False)
        if rule_based:
            # we use `sentencizer`, a built-in spacy module for rule-based sentence boundary detection.
            # Only add it if it is not already part of the pipeline.
            if not self.spacy.has_pipe('sentencizer'):
                sbd = self.spacy.create_pipe('sentencizer')
                self.spacy.add_pipe(sbd)

    @overrides
    def split_sentences(self, text: str) -> List[str]:
        # NOTE(review): `Span.string` is the spaCy 2.x accessor (sentence text plus
        # trailing whitespace); stripped here to yield clean sentence strings.
        return [sent.string.strip() for sent in self.spacy(text).sents]

    @overrides
    def batch_split_sentences(self, texts: List[str]) -> List[List[str]]:
        # spacy.pipe streams documents through the pipeline in batches.
        return [[sentence.string.strip() for sentence in doc.sents] for doc in self.spacy.pipe(texts)]
ace52bf016d57d0ab51a79e4b7322395fd381107 | 5,135 | py | Python | midca/tests/test_all_examples.py | COLAB2/midca | 18d6b13e3d6b0d980cd3453196e82fad7302e79b | [
"MIT"
] | 12 | 2018-01-23T01:31:33.000Z | 2022-02-03T04:47:10.000Z | midca/tests/test_all_examples.py | COLAB2/midca | 18d6b13e3d6b0d980cd3453196e82fad7302e79b | [
"MIT"
] | 32 | 2017-11-02T20:58:03.000Z | 2021-04-15T18:59:27.000Z | midca/tests/test_all_examples.py | COLAB2/midca | 18d6b13e3d6b0d980cd3453196e82fad7302e79b | [
"MIT"
] | 5 | 2017-12-01T17:28:01.000Z | 2020-03-18T14:43:32.000Z | '''
Created on Oct 18, 2016
@author: Dustin
This is a script that will run any script found in examples/ .
Essentially, this tests whether or not initialization of MIDCA
succeeded.
'''
import glob, os
import subprocess
import time
import sys
EXAMPLES_DIRECTORY = 'examples/'
NUM_PROCESSES = 1 # number of python processes to run in parallel
FILES_TO_IGNORE = ['__init__','predicateworld',
'homography','baxter',
'ObjectDetector', 'cogsci_demo_ma',
"Calibrate", "nbeacons_aaai17_agent3"] # will ignore any file containing one of these
# WARNING: if the run delay is ever too short (i.e. it takes longer than the delay for midca to execute
# the skip command, this whole script will deadlock. Therefore the CUSTOM_RUN_DELAYS should be used for
# any script that needs more time
SKIP_COMMAND = 'skip 100'
DEFAULT_RUN_DELAY = 8
CUSTOM_RUN_DELAYS = {'nbeacons_aaai17_agent3':60}
i = 0
os.chdir(EXAMPLES_DIRECTORY)
script_files = glob.glob("*.py")
# go back to top level MIDCA dir
os.chdir('../')
for script_file in script_files:
# ignore certain files
ignore_file = False
for ign in FILES_TO_IGNORE:
if ign in script_file:
ignore_file = True
found_exception = False
if not ignore_file:
i += 1
script_name = '{:.<60}'.format("examples/"+script_file)
sys.stdout.write(script_name)
#print "|=|=|=|=|=|=| "+str(script_file)+" |=|=|=|=|=|=|"
script = subprocess.Popen(['python', '-u', EXAMPLES_DIRECTORY+script_file],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
bufsize=-1)
time.sleep(2) # give MIDCA enough time to load all modules
# Read ten lines of stderr, if any exceptions or errors, failed!
NUM_STDERR_LINES_TO_READ = 10
i = 0
while script.poll():
output_line = script.stderr.readline()
if b"Error" in output_line or b"Traceback" in output_line or b"Exception" in output_line:
found_exception = True
break
if i >= NUM_STDERR_LINES_TO_READ:
i+=1
break
#sys.stdout.write("finished checking errors")
#print output_line
#while len(output_line) > 0:
# print output_line
# print "post while loop"
# all_output = script.stdout.read()
# for line in all_output:
# if 'Exception' in line or 'Traceback' in line:
# found_exception = True
# print line
#print "found_exception is "+str(found_exception)
if found_exception:
sys.stdout.write('{:.>60}'.format('[FAILED during initialization]\n'))
script.kill()
else:
sys.stdout.write('{:.<10}'.format('init'))
sys.stdout.write('{:.<5}'.format('run'))
#sys.stdout.write("........init....run")
script.stdin.write(b'skip 100 \n')
DELAY = DEFAULT_RUN_DELAY
for k,v in list(CUSTOM_RUN_DELAYS.items()):
if k in script_name:
DELAY = v
for i in range(DELAY):
time.sleep(1)
#sys.stdout.write(".")
i = 0
while script.poll():
output_line = script.stderr.readline()
if "Error" in output_line or "Traceback" in output_line or "Exception" in output_line:
found_exception = True
break
if i >= NUM_STDERR_LINES_TO_READ:
i+=1
break
# all_output = script.stderr.readlines()
# for line in all_output:
# if 'Exception' in line or 'Traceback' in line:
# found_exception = True
# print line
#stdout_value, stderr_value = script.communicate('skip 100')
#err_output = repr(stderr_value).replace('\\r\\n','\n').replace('\'','').strip()
#if len(err_output) > 0:
# pass
#print ".........ran 100 cycles",
if not found_exception:
stdout_value, stderr_value = script.communicate(b'q') # to quit MIDCA
#script_name = str(i)+". "+script_file+" Exceptions:\n"
#underline = "-"*len(script_name)
#err_output = repr(stderr_value).replace('\\r\\n','\n').replace('\'','').strip()
#if err_output == 'Next MIDCA command:':
# err_output = ''
if found_exception:
sys.stdout.write('{:.>44}\n'.format('[FAILED while running]'))
#sys.stdout.write("....[FAILED while running]\n")
else:
sys.stdout.write('{:.>44}\n'.format('[SUCCEEDED]'))
#sys.stdout.write("....[SUCCEEDED]\n")
| 36.942446 | 104 | 0.536709 |
ace52c24875f674dc2fc9ea56282584323fdd28e | 23,670 | py | Python | test/functional/wallet_basic.py | chipstar/xpchain | 012b84ad16cb1e5b4d843f4e214f76e630e3c540 | [
"MIT"
] | null | null | null | test/functional/wallet_basic.py | chipstar/xpchain | 012b84ad16cb1e5b4d843f4e214f76e630e3c540 | [
"MIT"
] | null | null | null | test/functional/wallet_basic.py | chipstar/xpchain | 012b84ad16cb1e5b4d843f4e214f76e630e3c540 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet."""
from decimal import Decimal
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_array_result,
assert_equal,
assert_fee_amount,
assert_raises_rpc_error,
connect_nodes_bi,
sync_blocks,
sync_mempools,
wait_until,
)
class WalletTest(BitcoinTestFramework):
    def set_test_params(self):
        # Four nodes, starting from a fresh (empty) regtest chain.
        self.num_nodes = 4
        self.setup_clean_chain = True
    def setup_network(self):
        """Start only the first three nodes, connected in a triangle.

        Node 3 is deliberately left stopped; run_test starts it later to
        exercise transaction rebroadcast to a newly connected peer.
        """
        self.add_nodes(4)
        self.start_node(0)
        self.start_node(1)
        self.start_node(2)
        connect_nodes_bi(self.nodes, 0, 1)
        connect_nodes_bi(self.nodes, 1, 2)
        connect_nodes_bi(self.nodes, 0, 2)
        self.sync_all([self.nodes[0:3]])
    def check_fee_amount(self, curr_balance, balance_with_fee, fee_per_byte, tx_size):
        """Return curr_balance after asserting the fee was in range"""
        # The fee actually paid is the balance delta; assert_fee_amount
        # checks it against the rate.  fee_per_byte is per-byte while the
        # helper expects a per-kB rate, hence the * 1000.
        fee = balance_with_fee - curr_balance
        assert_fee_amount(fee, tx_size, fee_per_byte * 1000)
        return curr_balance
    def get_vsize(self, txn):
        # Virtual size of a raw transaction hex, as reported by node 0's decoder.
        return self.nodes[0].decoderawtransaction(txn)['vsize']
def run_test(self):
# Check that there's no UTXO on none of the nodes
assert_equal(len(self.nodes[0].listunspent()), 0)
assert_equal(len(self.nodes[1].listunspent()), 0)
assert_equal(len(self.nodes[2].listunspent()), 0)
self.log.info("Mining blocks...")
self.nodes[0].generate(1)
walletinfo = self.nodes[0].getwalletinfo()
assert_equal(walletinfo['immature_balance'], 1100000000)
assert_equal(walletinfo['balance'], 0)
self.sync_all([self.nodes[0:3]])
self.nodes[1].generate(101)
self.sync_all([self.nodes[0:3]])
assert_equal(self.nodes[0].getbalance(), 1100000000)
assert_equal(self.nodes[1].getbalance(), 1100000000)
assert_equal(self.nodes[2].getbalance(), 0)
# Check getbalance with different arguments
assert_equal(self.nodes[0].getbalance("*"), 1100000000)
assert_equal(self.nodes[0].getbalance("*", 1), 1100000000)
assert_equal(self.nodes[0].getbalance("*", 1, True), 1100000000)
assert_equal(self.nodes[0].getbalance(minconf=1), 1100000000)
# first argument of getbalance must be excluded or set to "*"
assert_raises_rpc_error(-32, "dummy first argument must be excluded or set to \"*\"", self.nodes[0].getbalance, "")
# Check that only first and second nodes have UTXOs
utxos = self.nodes[0].listunspent()
assert_equal(len(utxos), 1)
assert_equal(len(self.nodes[1].listunspent()), 1)
assert_equal(len(self.nodes[2].listunspent()), 0)
self.log.info("test gettxout")
confirmed_txid, confirmed_index = utxos[0]["txid"], utxos[0]["vout"]
# First, outputs that are unspent both in the chain and in the
# mempool should appear with or without include_mempool
txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=False)
assert_equal(txout['value'], 1100000000)
txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=True)
assert_equal(txout['value'], 1100000000)
# Send 21 BTC from 0 to 2 using sendtoaddress call.
# Locked memory should use at least 32 bytes to sign each transaction
self.log.info("test getmemoryinfo")
memory_before = self.nodes[0].getmemoryinfo()
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
mempool_txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
memory_after = self.nodes[0].getmemoryinfo()
assert(memory_before['locked']['used'] + 64 <= memory_after['locked']['used'])
self.log.info("test gettxout (second part)")
# utxo spent in mempool should be visible if you exclude mempool
# but invisible if you include mempool
txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, False)
assert_equal(txout['value'], 1100000000)
txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, True)
assert txout is None
# new utxo from mempool should be invisible if you exclude mempool
# but visible if you include mempool
txout = self.nodes[0].gettxout(mempool_txid, 0, False)
assert txout is None
txout1 = self.nodes[0].gettxout(mempool_txid, 0, True)
txout2 = self.nodes[0].gettxout(mempool_txid, 1, True)
# note the mempool tx will have randomly assigned indices
# but 10 will go to node2 and the rest will go to node0
balance = self.nodes[0].getbalance()
assert_equal(set([txout1['value'], txout2['value']]), set([10, balance]))
walletinfo = self.nodes[0].getwalletinfo()
assert_equal(walletinfo['immature_balance'], 0)
# Have node0 mine a block, thus it will collect its own fee.
self.nodes[0].generate(1)
self.sync_all([self.nodes[0:3]])
# Exercise locking of unspent outputs
unspent_0 = self.nodes[2].listunspent()[0]
unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]}
assert_raises_rpc_error(-8, "Invalid parameter, expected locked output", self.nodes[2].lockunspent, True, [unspent_0])
self.nodes[2].lockunspent(False, [unspent_0])
assert_raises_rpc_error(-8, "Invalid parameter, output already locked", self.nodes[2].lockunspent, False, [unspent_0])
assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20)
assert_equal([unspent_0], self.nodes[2].listlockunspent())
self.nodes[2].lockunspent(True, [unspent_0])
assert_equal(len(self.nodes[2].listlockunspent()), 0)
assert_raises_rpc_error(-8, "Invalid parameter, unknown transaction",
self.nodes[2].lockunspent, False,
[{"txid": "0000000000000000000000000000000000", "vout": 0}])
assert_raises_rpc_error(-8, "Invalid parameter, vout index out of bounds",
self.nodes[2].lockunspent, False,
[{"txid": unspent_0["txid"], "vout": 999}])
# An output should be unlocked when spent
unspent_0 = self.nodes[1].listunspent()[0]
self.nodes[1].lockunspent(False, [unspent_0])
tx = self.nodes[1].createrawtransaction([unspent_0], { self.nodes[1].getnewaddress() : 1 })
tx = self.nodes[1].fundrawtransaction(tx)['hex']
tx = self.nodes[1].signrawtransactionwithwallet(tx)["hex"]
self.nodes[1].sendrawtransaction(tx)
assert_equal(len(self.nodes[1].listlockunspent()), 0)
# Have node1 generate 100 blocks (so node0 can recover the fee)
self.nodes[1].generate(100)
self.sync_all([self.nodes[0:3]])
# node0 should end up with 100 btc in block rewards plus fees, but
# minus the 21 plus fees sent to node2
assert_equal(self.nodes[0].getbalance(), 2200000000 - 21)
assert_equal(self.nodes[2].getbalance(), 21)
# Node0 should have two unspent outputs.
# Create a couple of transactions to send them to node2, submit them through
# node1, and make sure both node0 and node2 pick them up properly:
node0utxos = self.nodes[0].listunspent(1)
assert_equal(len(node0utxos), 2)
# create both transactions
txns_to_send = []
for utxo in node0utxos:
inputs = []
outputs = {}
inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]})
outputs[self.nodes[2].getnewaddress()] = utxo["amount"] - 3
raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
txns_to_send.append(self.nodes[0].signrawtransactionwithwallet(raw_tx))
# Have node 1 (miner) send the transactions
self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True)
self.nodes[1].sendrawtransaction(txns_to_send[1]["hex"], True)
# Have node1 mine a block to confirm transactions:
self.nodes[1].generate(1)
self.sync_all([self.nodes[0:3]])
assert_equal(self.nodes[0].getbalance(), 0)
assert_equal(self.nodes[2].getbalance(), 2199999994)
# Verify that a spent output cannot be locked anymore
spent_0 = {"txid": node0utxos[0]["txid"], "vout": node0utxos[0]["vout"]}
assert_raises_rpc_error(-8, "Invalid parameter, expected unspent output", self.nodes[0].lockunspent, False, [spent_0])
# Send 10 XPC normal
address = self.nodes[0].getnewaddress("test")
fee_per_byte = Decimal('10') / 1000
self.nodes[2].settxfee(fee_per_byte * 1000)
txid = self.nodes[2].sendtoaddress(address, 100000, "", "", False)
self.nodes[2].generate(1)
self.sync_all([self.nodes[0:3]])
node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal('2199899994'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid)))
assert_equal(self.nodes[0].getbalance(), Decimal('100000'))
# Send 10 XPC with subtract fee from amount
txid = self.nodes[2].sendtoaddress(address, 100000, "", "", True)
self.nodes[2].generate(1)
self.sync_all([self.nodes[0:3]])
node_2_bal -= Decimal('100000')
assert_equal(self.nodes[2].getbalance(), node_2_bal)
node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), Decimal('200000'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid)))
# Sendmany 10 XPC
txid = self.nodes[2].sendmany('', {address: 100000}, 0, "", [])
self.nodes[2].generate(1)
self.sync_all([self.nodes[0:3]])
node_0_bal += Decimal('100000')
node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('100000'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid)))
assert_equal(self.nodes[0].getbalance(), node_0_bal)
# Sendmany 10 XPC with subtract fee from amount
txid = self.nodes[2].sendmany('', {address: 100000}, 0, "", [address])
self.nodes[2].generate(1)
self.sync_all([self.nodes[0:3]])
node_2_bal -= Decimal('100000')
assert_equal(self.nodes[2].getbalance(), node_2_bal)
node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('100000'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid)))
# Test ResendWalletTransactions:
# Create a couple of transactions, then start up a fourth
# node (nodes[3]) and ask nodes[0] to rebroadcast.
# EXPECT: nodes[3] should have those transactions in its mempool.
txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
txid2 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
sync_mempools(self.nodes[0:2])
self.start_node(3)
connect_nodes_bi(self.nodes, 0, 3)
sync_blocks(self.nodes)
relayed = self.nodes[0].resendwallettransactions()
assert_equal(set(relayed), {txid1, txid2})
sync_mempools(self.nodes)
assert(txid1 in self.nodes[3].getrawmempool())
# Exercise balance rpcs
assert_equal(self.nodes[0].getwalletinfo()["unconfirmed_balance"], 1)
assert_equal(self.nodes[0].getunconfirmedbalance(), 1)
# check if we can list zero value tx as available coins
# 1. create raw_tx
# 2. hex-changed one output to 0.0
# 3. sign and send
# 4. check if recipient (node0) can list the zero value tx
usp = self.nodes[1].listunspent(query_options={'minimumAmount': '1099999980'})[0]
inputs = [{"txid": usp['txid'], "vout": usp['vout']}]
outputs = {self.nodes[1].getnewaddress(): 1099999980, self.nodes[0].getnewaddress(): 11.11}
raw_tx = self.nodes[1].createrawtransaction(inputs, outputs).replace("fcb10100", "00000000") # replace 11.11 with 0.0 (int32)
signed_raw_tx = self.nodes[1].signrawtransactionwithwallet(raw_tx)
decoded_raw_tx = self.nodes[1].decoderawtransaction(signed_raw_tx['hex'])
zero_value_txid = decoded_raw_tx['txid']
self.nodes[1].sendrawtransaction(signed_raw_tx['hex'])
self.sync_all()
self.nodes[1].generate(1) # mine a block
self.sync_all()
unspent_txs = self.nodes[0].listunspent() # zero value tx must be in listunspents output
found = False
for uTx in unspent_txs:
if uTx['txid'] == zero_value_txid:
found = True
assert_equal(uTx['amount'], Decimal('0'))
assert(found)
# do some -walletbroadcast tests
self.stop_nodes()
self.start_node(0, ["-walletbroadcast=0"])
self.start_node(1, ["-walletbroadcast=0"])
self.start_node(2, ["-walletbroadcast=0"])
connect_nodes_bi(self.nodes, 0, 1)
connect_nodes_bi(self.nodes, 1, 2)
connect_nodes_bi(self.nodes, 0, 2)
self.sync_all([self.nodes[0:3]])
txid_not_broadcast = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 20000)
tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast)
self.nodes[1].generate(1) # mine a block, tx should not be in there
self.sync_all([self.nodes[0:3]])
assert_equal(self.nodes[2].getbalance(), node_2_bal) # should not be changed because tx was not broadcasted
# now broadcast from another node, mine a block, sync, and check the balance
self.nodes[1].sendrawtransaction(tx_obj_not_broadcast['hex'])
self.nodes[1].generate(1)
self.sync_all([self.nodes[0:3]])
node_2_bal += 20000
tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast)
assert_equal(self.nodes[2].getbalance(), node_2_bal)
# create another tx
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 20000)
# restart the nodes with -walletbroadcast=1
self.stop_nodes()
self.start_node(0)
self.start_node(1)
self.start_node(2)
connect_nodes_bi(self.nodes, 0, 1)
connect_nodes_bi(self.nodes, 1, 2)
connect_nodes_bi(self.nodes, 0, 2)
sync_blocks(self.nodes[0:3])
self.nodes[0].generate(1)
sync_blocks(self.nodes[0:3])
node_2_bal += 20000
# tx should be added to balance because after restarting the nodes tx should be broadcast
assert_equal(self.nodes[2].getbalance(), node_2_bal)
# send a tx with value in a string (PR#6380 +)
txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "2")
tx_obj = self.nodes[0].gettransaction(txid)
assert_equal(tx_obj['amount'], Decimal('-2'))
txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "0.1")
tx_obj = self.nodes[0].gettransaction(txid)
assert_equal(tx_obj['amount'], Decimal('-0.1'))
# check if JSON parser can handle scientific notation in strings
txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1e+4")
tx_obj = self.nodes[0].gettransaction(txid)
assert_equal(tx_obj['amount'], Decimal('-10000.0000'))
# This will raise an exception because the amount type is wrong
assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].sendtoaddress, self.nodes[2].getnewaddress(), "1f-4")
# This will raise an exception since generate does not accept a string
assert_raises_rpc_error(-1, "not an integer", self.nodes[0].generate, "2")
# Import address and private key to check correct behavior of spendable unspents
# 1. Send some coins to generate new UTXO
address_to_import = self.nodes[2].getnewaddress()
txid = self.nodes[0].sendtoaddress(address_to_import, 1)
self.nodes[0].generate(1)
self.sync_all([self.nodes[0:3]])
# 2. Import address from node2 to node1
self.nodes[1].importaddress(address_to_import)
# 3. Validate that the imported address is watch-only on node1
assert(self.nodes[1].getaddressinfo(address_to_import)["iswatchonly"])
# 4. Check that the unspents after import are not spendable
assert_array_result(self.nodes[1].listunspent(),
{"address": address_to_import},
{"spendable": False})
# 5. Import private key of the previously imported address on node1
priv_key = self.nodes[2].dumpprivkey(address_to_import)
self.nodes[1].importprivkey(priv_key)
# 6. Check that the unspents are now spendable on node1
assert_array_result(self.nodes[1].listunspent(),
{"address": address_to_import},
{"spendable": True})
# Mine a block from node0 to an address from node1
coinbase_addr = self.nodes[1].getnewaddress()
block_hash = self.nodes[0].generatetoaddress(1, coinbase_addr)[0]
coinbase_txid = self.nodes[0].getblock(block_hash)['tx'][0]
self.sync_all([self.nodes[0:3]])
# Check that the txid and balance is found by node1
self.nodes[1].gettransaction(coinbase_txid)
# check if wallet or blockchain maintenance changes the balance
self.sync_all([self.nodes[0:3]])
blocks = self.nodes[0].generate(2)
self.sync_all([self.nodes[0:3]])
balance_nodes = [self.nodes[i].getbalance() for i in range(3)]
block_count = self.nodes[0].getblockcount()
# Check modes:
# - True: unicode escaped as \u....
# - False: unicode directly as UTF-8
for mode in [True, False]:
self.nodes[0].rpc.ensure_ascii = mode
# unicode check: Basic Multilingual Plane, Supplementary Plane respectively
for label in [u'рыба', u'𝅘𝅥𝅯']:
addr = self.nodes[0].getnewaddress()
self.nodes[0].setlabel(addr, label)
assert_equal(self.nodes[0].getaddressinfo(addr)['label'], label)
assert(label in self.nodes[0].listlabels())
self.nodes[0].rpc.ensure_ascii = True # restore to default
# maintenance tests
maintenance = [
'-rescan',
'-reindex',
'-zapwallettxes=1',
'-zapwallettxes=2',
# disabled until issue is fixed: https://github.com/bitcoin/bitcoin/issues/7463
# '-salvagewallet',
]
chainlimit = 6
for m in maintenance:
self.log.info("check " + m)
self.stop_nodes()
# set lower ancestor limit for later
self.start_node(0, [m, "-limitancestorcount=" + str(chainlimit)])
self.start_node(1, [m, "-limitancestorcount=" + str(chainlimit)])
self.start_node(2, [m, "-limitancestorcount=" + str(chainlimit)])
if m == '-reindex':
# reindex will leave rpc warm up "early"; Wait for it to finish
wait_until(lambda: [block_count] * 3 == [self.nodes[i].getblockcount() for i in range(3)])
assert_equal(balance_nodes, [self.nodes[i].getbalance() for i in range(3)])
# Exercise listsinceblock with the last two blocks
coinbase_tx_1 = self.nodes[0].listsinceblock(blocks[0])
assert_equal(coinbase_tx_1["lastblock"], blocks[1])
assert_equal(len(coinbase_tx_1["transactions"]), 1)
assert_equal(coinbase_tx_1["transactions"][0]["blockhash"], blocks[1])
assert_equal(len(self.nodes[0].listsinceblock(blocks[1])["transactions"]), 0)
# ==Check that wallet prefers to use coins that don't exceed mempool limits =====
# Get all non-zero utxos together
chain_addrs = [self.nodes[0].getnewaddress(), self.nodes[0].getnewaddress()]
singletxid = self.nodes[0].sendtoaddress(chain_addrs[0], self.nodes[0].getbalance(), "", "", True)
self.nodes[0].generate(1)
node0_balance = self.nodes[0].getbalance()
# Split into two chains
rawtx = self.nodes[0].createrawtransaction([{"txid": singletxid, "vout": 0}], {chain_addrs[0]: node0_balance / 2 - Decimal('0.01'), chain_addrs[1]: node0_balance / 2 - Decimal('0.01')})
signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
singletxid = self.nodes[0].sendrawtransaction(signedtx["hex"])
self.nodes[0].generate(1)
# Make a long chain of unconfirmed payments without hitting mempool limit
# Each tx we make leaves only one output of change on a chain 1 longer
# Since the amount to send is always much less than the outputs, we only ever need one output
# So we should be able to generate exactly chainlimit txs for each original output
sending_addr = self.nodes[1].getnewaddress()
txid_list = []
for i in range(chainlimit * 2):
txid_list.append(self.nodes[0].sendtoaddress(sending_addr, Decimal('1')))
assert_equal(self.nodes[0].getmempoolinfo()['size'], chainlimit * 2)
assert_equal(len(txid_list), chainlimit * 2)
# Without walletrejectlongchains, we will still generate a txid
# The tx will be stored in the wallet but not accepted to the mempool
extra_txid = self.nodes[0].sendtoaddress(sending_addr, Decimal('1'))
assert(extra_txid not in self.nodes[0].getrawmempool())
assert(extra_txid in [tx["txid"] for tx in self.nodes[0].listtransactions()])
self.nodes[0].abandontransaction(extra_txid)
total_txs = len(self.nodes[0].listtransactions("*", 99999))
# Try with walletrejectlongchains
# Double chain limit but require combining inputs, so we pass SelectCoinsMinConf
self.stop_node(0)
self.start_node(0, extra_args=["-walletrejectlongchains", "-limitancestorcount=" + str(2 * chainlimit)])
# wait for loadmempool
timeout = 10
while (timeout > 0 and len(self.nodes[0].getrawmempool()) < chainlimit * 2):
time.sleep(0.5)
timeout -= 0.5
assert_equal(len(self.nodes[0].getrawmempool()), chainlimit * 2)
node0_balance = self.nodes[0].getbalance()
# With walletrejectlongchains we will not create the tx and store it in our wallet.
assert_raises_rpc_error(-4, "Transaction has too long of a mempool chain", self.nodes[0].sendtoaddress, sending_addr, node0_balance - Decimal('10'))
# Verify nothing new in wallet
assert_equal(total_txs, len(self.nodes[0].listtransactions("*", 99999)))
# Test getaddressinfo. Note that these addresses are taken from disablewallet.py
assert_raises_rpc_error(-5, "Invalid address", self.nodes[0].getaddressinfo, "CYv1XWNSZhmC1LyQVNJx1gEozTiXitCPfX")
address_info = self.nodes[0].getaddressinfo("xemq4hZL3nFmqgD48kwooRWWxU8JkC6NDn")
assert_equal(address_info['address'], "xemq4hZL3nFmqgD48kwooRWWxU8JkC6NDn")
assert_equal(address_info["scriptPubKey"], "76a9144e3854046c7bd1594ac904e4793b6a45b36dea0988ac")
assert not address_info["ismine"]
assert not address_info["iswatchonly"]
assert not address_info["isscript"]
# Run the wallet functional test when executed directly.
if __name__ == '__main__':
    WalletTest().main()
| 48.904959 | 193 | 0.647613 |
ace52ce451849794150f58d2c11e642b9fab6684 | 13,820 | py | Python | savu/plugins/ptychography/base_ptycho.py | malte-storm/Savu | 16291e8a22464c50c511af01fbc648860c1236e6 | [
"Apache-2.0"
] | 1 | 2021-04-18T09:30:54.000Z | 2021-04-18T09:30:54.000Z | savu/plugins/ptychography/base_ptycho.py | malte-storm/Savu | 16291e8a22464c50c511af01fbc648860c1236e6 | [
"Apache-2.0"
] | 1 | 2019-07-30T12:31:51.000Z | 2019-07-30T12:31:51.000Z | savu/plugins/ptychography/base_ptycho.py | malte-storm/Savu | 16291e8a22464c50c511af01fbc648860c1236e6 | [
"Apache-2.0"
] | 1 | 2021-05-20T16:31:29.000Z | 2021-05-20T16:31:29.000Z | # Copyright 2014 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module:: base_ptycho
:platform: Unix
:synopsis: A base class for all ptychographic analysis methods
.. moduleauthor:: Aaron Parsons <scientificsoftware@diamond.ac.uk>
"""
from savu.plugins.plugin import Plugin
from savu.plugins.driver.cpu_plugin import CpuPlugin
import logging
import numpy as np
class BasePtycho(Plugin, CpuPlugin):  # also make one for gpu
    """
    A base plugin for doing ptychography. Other ptychography plugins should \
    inherit from this.

    :param in_datasets: A list of the dataset(s) to process. Default: [].
    :param out_datasets: A list of the dataset(s) to \
        process. Default: ['probe', 'object_transmission', 'positions'].
    """

    def __init__(self, name):
        super(BasePtycho, self).__init__(name)

    def setup(self):
        """Create and configure the probe, object and positions outputs.

        Reads the scan positions from the input dataset's metadata, builds
        axis labels for the three output datasets, creates the datasets
        with the appropriate shapes and data-access patterns, and declares
        the plugin's input/output frame requirements.
        """
        self.exp.log(self.name + " Setting up the ptycho")
        in_dataset, out_dataset = self.get_datasets()
        in_meta_data = in_dataset[0].meta_data
        logging.debug('getting the positions...')
        # Grab the scan positions from the metadata and bind them.
        self.positions = in_meta_data.get('xy')

        # Set up the axis labels for the output datasets.
        position_labels, probe_labels, object_labels, self.sh = \
            self.setup_axis_labels(in_dataset)

        # Now create the datasets and work out the patterns.
        ### PROBE ###
        probe = out_dataset[0]
        self.set_size_probe(in_dataset[0].get_shape()[-2:])
        logging.debug("##### PROBE #####")
        probe.create_dataset(axis_labels=probe_labels,
                             shape=self.get_size_probe())
        self.probe_pattern_setup(probe_labels, probe)

        ### OBJECT ###
        self.set_size_object(in_dataset[0], self.get_positions(),
                             self.get_pixel_size())
        object_trans = out_dataset[1]
        object_shape = self.sh + self.get_size_object()
        logging.debug("##### OBJECT #####")
        object_trans.create_dataset(axis_labels=object_labels,
                                    shape=object_shape)
        self.object_pattern_setup(object_labels, object_trans)

        ### POSITIONS ###
        logging.debug('##### POSITIONS #####')
        positions = out_dataset[2]
        positions_shape = self.sh + self.get_positions().shape[-2:]
        logging.debug('positions shape is:%s', str(positions_shape))
        positions.create_dataset(axis_labels=position_labels,
                                 shape=positions_shape)
        rest_pos = range(len(position_labels))
        pos_md = \
            {'core_dims': tuple(set(rest_pos) - set([0])), 'slice_dims': (0,)}
        positions.add_pattern("CHANNEL", **pos_md)

        # Declare the input/output shapes and frame counts in one go.
        in_pData, out_pData = self.get_plugin_datasets()
        in_pData[0].plugin_data_setup(self.get_plugin_pattern(),
                                      self.get_max_frames())
        out_pData[0].plugin_data_setup("PROJECTION",
                                       self.get_num_probe_modes())
        out_pData[1].plugin_data_setup("PROJECTION",
                                       self.get_num_object_modes())
        out_pData[2].plugin_data_setup("CHANNEL", self.get_max_frames())
        self.exp.log(self.name + " End")

    # The methods below influence the set-up and can be over-ridden
    # depending on which software package we are using.

    def get_plugin_pattern(self):
        """Pattern to work in: a ptycho scan is considered a 4D_SCAN."""
        return "4D_SCAN"

    def nInput_datasets(self):
        """This plugin takes a single input dataset."""
        return 1

    def nOutput_datasets(self):
        """Three outputs: probe, object transmission and positions."""
        return 3

    def get_num_probe_modes(self):
        """Number of probe modes; override for multi-modal probes."""
        return 1

    def get_num_object_modes(self):
        """Number of object modes; override for multi-modal objects."""
        return 1

    def get_positions(self):
        """Return the scan positions bound in setup()."""
        return self.positions

    def get_pixel_size(self):
        # Default pixel size (presumably metres) — override in subclasses.
        # TODO(review): confirm units against callers.
        return 30e-9

    def set_size_object(self, dataset, positions, pobj=33e-9):
        """Compute and store the object shape tuple.

        The field of view is derived from the extent of the scan
        positions, padded by the probe size, with one trailing mode axis.
        NOTE(review): the 'dataset' and 'positions' arguments are unused;
        self.get_positions() is used instead.
        """
        x, y = self.get_positions()[0], self.get_positions()[1]
        probe_size = self.get_size_probe()
        x_fov = np.max(x) - np.min(x)
        y_fov = np.max(y) - np.min(y)
        xsize = int(x_fov // pobj) + probe_size[0]
        ysize = int(y_fov // pobj) + probe_size[1]
        self.obj_shape = xsize, ysize, self.get_num_object_modes()

    def get_size_object(self):
        """Return the object shape tuple set by set_size_object()."""
        return self.obj_shape

    def set_size_probe(self, val):
        """Store the probe shape: (1,) + detector shape + (n_modes,)."""
        self.probe_size = (1,) + val + (self.get_num_probe_modes(),)

    def get_size_probe(self):
        """Return the probe shape tuple."""
        return self.probe_size

    def get_max_frames(self):
        """Frames processed per call; 'single' means one at a time."""
        return 'single'

    def get_output_axis_units(self):
        """Units appended to the output x/y axis labels."""
        return 'nm'

    def probe_pattern_setup(self, probe_labels, probe):
        """Add PROJECTION/TIMESERIES/SPECTRUM patterns to the probe.

        probe_patterns: PROJECTION, TIMESERIES (for each projection),
        SPECTRUM (for each energy).
        """
        probe_dims = len(probe_labels)  # number of dims from the labels
        rest_probe = range(probe_dims)  # all the dimensions we have
        self.set_projection_pattern(probe, rest_probe)
        self.set_probe_rotation_patterns(probe, rest_probe)
        self.set_probe_energy_patterns(probe, rest_probe)

    def object_pattern_setup(self, object_labels, object_trans):
        """Add PROJECTION/SINOGRAM/SPECTRUM patterns to the object."""
        obj_dims = len(object_labels)  # number of dims from the labels
        rest_obj = range(obj_dims)  # all the dimensions we have
        self.set_projection_pattern(object_trans, rest_obj)
        self.set_object_rotation_patterns(object_trans, rest_obj)
        self.set_object_energy_patterns(object_trans, rest_obj)

    def setup_axis_labels(self, in_dataset):
        """Build axis-label lists for the three output datasets.

        Removes the scan/detector labels ('xy', 'detectorX', 'detectorY')
        from the input labels, keeps any extra scan axes (e.g. rotation,
        energy) and appends the new probe/object/position axes.

        Returns (position_labels, probe_labels, object_labels, sh) where
        sh is the shape contributed by the retained scan axes.
        """
        PATTERN_LABELS = ['xy', 'detectorX', 'detectorY']
        in_labels = in_dataset[0].data_info.get('axis_labels')  # list of dicts
        # list(d.keys())[0] works on both Python 2 and 3 (d.keys()[0] is
        # Python-2 only).
        existing_labels = [list(d.keys())[0] for d in in_labels]
        logging.debug('The existing labels are:%s, we will remove:%s'
                      % (existing_labels, PATTERN_LABELS))
        core_labels_raw = [l for l in existing_labels
                           if l not in PATTERN_LABELS]
        # Add the units back in for the labels we are keeping.
        core_labels = [l + '.' + in_labels[0][l] for l in core_labels_raw]
        trans_units = self.get_output_axis_units()
        probe_labels = list(core_labels)  # take a copy
        probe_labels.extend(['mode_idx.number', 'x.' + trans_units,
                             'y.' + trans_units])
        logging.debug('the labels for the probe are:%s' % str(probe_labels))
        object_labels = list(core_labels)
        object_labels.extend(['mode_idx.number', 'x.' + trans_units,
                              'y.' + trans_units])
        logging.debug('the labels for the object are:%s' % str(object_labels))
        position_labels = list(core_labels)
        position_labels.extend(['xy.m', 'idx'])
        logging.debug('the labels for the positions are:%s'
                      % str(position_labels))
        # We also need this part of the shape of the data.
        md = in_dataset[0].meta_data
        sh = tuple([len(md.get(l)) for l in core_labels_raw])
        return position_labels, probe_labels, object_labels, sh

    def set_probe_rotation_patterns(self, probe, rest_probe):
        """Add a TIMESERIES pattern over the rotation axis, if present."""
        try:
            rot_axis = probe.get_data_dimension_by_axis_label(
                'rotation_angle', contains=True)
        except Exception as e:
            # logging.warn is a deprecated alias of logging.warning.
            logging.warning(str(e) + 'we were looking for "rotation_angle"')
            logging.debug('This is not a tomography, so no time series '
                          'for the probe')
        else:
            probe_ts = {'core_dims': (rot_axis,),
                        'slice_dims': tuple(set(rest_probe) - set([rot_axis]))}
            # So we can FT the wiggles etc...
            probe.add_pattern("TIMESERIES", **probe_ts)

    def set_probe_energy_patterns(self, probe, rest_probe):
        """Add SPECTRUM and TIMESERIES patterns over the energy axis."""
        try:
            energy_axis = probe.get_data_dimension_by_axis_label(
                'energy', contains=True)
        except Exception as e:
            logging.warning(str(e) + 'we were looking for "energy"')
            logging.debug('This is not spectro-microscopy, so no '
                          'spectrum/timeseries for the probe')
        else:
            # BUG FIX: was tuple(energy_axis), which raises TypeError for
            # an int axis index; a one-element tuple is intended (compare
            # (rot_axis,) in set_probe_rotation_patterns).
            probe_spec = {'core_dims': (energy_axis,),
                          'slice_dims': tuple(set(rest_probe) -
                                              set([energy_axis]))}
            probe.add_pattern("SPECTRUM", **probe_spec)
            probe.add_pattern("TIMESERIES", **probe_spec)
            logging.debug('This is probably spectro-microscopy so I have '
                          'added a SPECTRUM pattern to the probe')
            logging.debug('I have also added a TIMESERIES pattern on the '
                          'same axis, but be careful with what this means!')

    def set_projection_pattern(self, probe, rest_probe):
        """Add a PROJECTION pattern over two of the trailing axes."""
        # Hard coded since the trailing axes were appended just above.
        # NOTE(review): with labels ending (mode_idx, x, y), indices
        # (-3, -2) select (mode_idx, x); confirm (-2, -1) was not intended.
        probe_proj_core = tuple([rest_probe[idx] for idx in (-3, -2)])
        probe_slice = tuple(set(rest_probe) - set(probe_proj_core))
        probe_proj = {'core_dims': probe_proj_core, 'slice_dims': probe_slice}
        probe.add_pattern("PROJECTION", **probe_proj)
        logging.debug('have added a PROJECTION pattern')

    def set_object_energy_patterns(self, object_trans, rest_obj):
        """Add a SPECTRUM pattern over the object's energy axis, if any."""
        try:
            energy_axis = object_trans.get_data_dimension_by_axis_label(
                'energy', contains=True)
        except Exception as e:
            logging.warning(str(e) + 'we were looking for "energy"')
            logging.debug('This is not spectro-microscopy, so no spectrum '
                          'for the object')
        else:
            # BUG FIX: (energy_axis,) instead of tuple(energy_axis) — see
            # set_probe_energy_patterns.
            obj_spec = {'core_dims': (energy_axis,),
                        'slice_dims': tuple(set(rest_obj) -
                                            set([energy_axis]))}
            object_trans.add_pattern("SPECTRUM", **obj_spec)
            logging.debug('This is probably spectro-microscopy so I have '
                          'added a SPECTRUM pattern to the object')

    def set_object_rotation_patterns(self, object_trans, rest_obj):
        """Add a SINOGRAM pattern over (rotation, x), if rotation exists."""
        try:
            rot_axis = object_trans.get_data_dimension_by_axis_label(
                'rotation_angle', contains=True)
        except Exception as e:
            logging.warning(str(e) + 'we were looking for "rotation_angle"')
            logging.debug('This is not a tomography, so no sinograms for '
                          'the object transmission')
        else:
            x_axis = object_trans.get_data_dimension_by_axis_label(
                'x', contains=True)
            obj_sino = {'core_dims': (rot_axis, x_axis),
                        'slice_dims': tuple(set(rest_obj) -
                                            set((rot_axis, x_axis)))}
            object_trans.add_pattern("SINOGRAM", **obj_sino)
            logging.debug('This is a tomography so I have added a SINOGRAM '
                          'pattern to the object transmission')
| 48.15331 | 208 | 0.656946 |
ace52d54312ee683758849f38324c7e6f95b4d61 | 449 | py | Python | image_basics/view_rgb_channels.py | adrianB3/cv_practice | 615e3f94f985e882bf9c21ab087d056c869571ee | [
"MIT"
] | null | null | null | image_basics/view_rgb_channels.py | adrianB3/cv_practice | 615e3f94f985e882bf9c21ab087d056c869571ee | [
"MIT"
] | null | null | null | image_basics/view_rgb_channels.py | adrianB3/cv_practice | 615e3f94f985e882bf9c21ab087d056c869571ee | [
"MIT"
import matplotlib.pyplot as plt
import matplotlib.image as mpimg

# Load the image and show the original for reference.
image = mpimg.imread('images/wa_state_highway.jpg')
plt.imshow(image)

# Isolate the red, green and blue channels.
r, g, b = image[:, :, 0], image[:, :, 1], image[:, :, 2]

# Display each channel side by side as a grayscale map.
f, axes = plt.subplots(1, 3, figsize=(20, 10))
for ax, channel, title in zip(
        axes, (r, g, b), ('R channel', 'G channel', 'B channel')):
    ax.set_title(title)
    ax.imshow(channel, cmap='gray')

plt.show()
ace52ea5d7a27770cbb0b1e7d55b29e151d6b375 | 743 | py | Python | hard-gists/4995154/snippet.py | jjhenkel/dockerizeme | eaa4fe5366f6b9adf74399eab01c712cacaeb279 | [
"Apache-2.0"
] | 21 | 2019-07-08T08:26:45.000Z | 2022-01-24T23:53:25.000Z | hard-gists/4995154/snippet.py | jjhenkel/dockerizeme | eaa4fe5366f6b9adf74399eab01c712cacaeb279 | [
"Apache-2.0"
] | 5 | 2019-06-15T14:47:47.000Z | 2022-02-26T05:02:56.000Z | hard-gists/4995154/snippet.py | jjhenkel/dockerizeme | eaa4fe5366f6b9adf74399eab01c712cacaeb279 | [
"Apache-2.0"
] | 17 | 2019-05-16T03:50:34.000Z | 2021-01-14T14:35:12.000Z | from django.contrib.auth.models import User
from tastypie.authentication import ApiKeyAuthentication
class EmailApiKeyAuthentication(ApiKeyAuthentication):
    """Same as the base class, but looks the user up by email address."""

    def is_authenticated(self, request, **kwargs):
        """Validate the 'username' (an email) / 'api_key' pair on request."""
        def _param(name):
            # Accept credentials from either the query string or form body.
            return request.GET.get(name) or request.POST.get(name)

        email, api_key = _param('username'), _param('api_key')
        if not (email and api_key):
            return self._unauthorized()
        try:
            user = User.objects.get(email=email)
        except (User.DoesNotExist, User.MultipleObjectsReturned):
            # Unknown email, or ambiguous (duplicate) email: reject.
            return self._unauthorized()
        request.user = user
        return self.get_key(user, api_key)
ace52efa539514d24240a1ea9c33cccabb6300d6 | 25,000 | py | Python | glance/common/format_inspector.py | cloudscale-ch/glance | 1344c45772c1a77107f621632642cf1488e431db | [
"Apache-2.0"
] | null | null | null | glance/common/format_inspector.py | cloudscale-ch/glance | 1344c45772c1a77107f621632642cf1488e431db | [
"Apache-2.0"
] | null | null | null | glance/common/format_inspector.py | cloudscale-ch/glance | 1344c45772c1a77107f621632642cf1488e431db | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Red Hat, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This is a python implementation of virtual disk format inspection routines
gathered from various public specification documents, as well as qemu disk
driver code. It attempts to store and parse the minimum amount of data
required, and in a streaming-friendly manner to collect metadata about
complex-format images.
"""
import struct
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
class CaptureRegion(object):
    """A byte range of a file whose contents we want to accumulate.

    The region is described by a byte offset into the file and a length.
    A read loop feeds each chunk it reads to capture(); the region picks
    out and stores whatever part of the chunk falls inside it, coping
    with chunks that only partially (or not at all) overlap the region.

    :param offset: Byte offset into the file starting the region
    :param length: The length of the region
    """
    def __init__(self, offset, length):
        self.offset = offset
        self.length = length
        self.data = b''

    @property
    def complete(self):
        """True once all of the region's bytes have been stored."""
        return len(self.data) == self.length

    def capture(self, chunk, current_position):
        """Process a chunk of data.

        Call this for every chunk in the read loop, at least until
        complete returns True.

        :param chunk: A chunk of bytes in the file
        :param current_position: The file position *after* the presented
                                 chunk has been consumed by the read loop.
        """
        read_start = current_position - len(chunk)
        region_starts_in_chunk = (
            read_start <= self.offset <= current_position)
        chunk_starts_in_region = (
            self.offset <= read_start <= self.offset + self.length)
        if region_starts_in_chunk or chunk_starts_in_region:
            # Skip any bytes in the chunk that precede the region start,
            # then trim the accumulated data to the region length.
            lead_gap = max(0, self.offset - read_start)
            self.data = (self.data + chunk[lead_gap:])[:self.length]
class ImageFormatError(Exception):
    """An unrecoverable image format error that aborts the process.

    Raised by inspectors when the stream is structurally invalid for the
    expected format (bad signature, unsupported version, out-of-range
    counts) and by FileInspector.new_region() on a duplicate region name.
    """
    pass
class TraceDisabled(object):
    """A logger-like stand-in that silently discards all trace output."""

    def debug(self, *args, **kwargs):
        # Intentionally a no-op: used in place of a real logger when
        # verbose tracing has not been requested.
        return None

    # All severities behave identically: swallow everything.
    info = debug
    warning = debug
    error = debug
class FileInspector(object):
    """A stream-based disk image inspector.

    This base class works on raw images and is subclassed for more
    complex types. It is to be presented with the file to be examined
    one chunk at a time, during read processing and will only store
    as much data as necessary to determine required attributes of
    the file.
    """
    def __init__(self, tracing=False):
        # Running count of bytes presented so far (i.e. current file
        # position after the most recent chunk).
        self._total_count = 0

        # NOTE(danms): The logging in here is extremely verbose for a reason,
        # but should never really be enabled at that level at runtime. To
        # retain all that work and assist in future debug, we have a separate
        # debug flag that can be passed from a manual tool to turn it on.
        if tracing:
            self._log = logging.getLogger(str(self))
        else:
            self._log = TraceDisabled()
        # name -> CaptureRegion for areas of the file we want to keep.
        self._capture_regions = {}

    def _capture(self, chunk, only=None):
        # Offer the chunk to every (or only the named) incomplete regions.
        for name, region in self._capture_regions.items():
            if only and name not in only:
                continue
            if not region.complete:
                region.capture(chunk, self._total_count)

    def eat_chunk(self, chunk):
        """Call this to present chunks of the file to the inspector."""
        pre_regions = set(self._capture_regions.keys())

        # Increment our position-in-file counter
        self._total_count += len(chunk)

        # Run through the regions we know of to see if they want this
        # data
        self._capture(chunk)

        # Let the format do some post-read processing of the stream
        self.post_process()

        # Check to see if the post-read processing added new regions
        # which may require the current chunk.
        new_regions = set(self._capture_regions.keys()) - pre_regions
        if new_regions:
            self._capture(chunk, only=new_regions)

    def post_process(self):
        """Post-read hook to process what has been read so far.

        This will be called after each chunk is read and potentially captured
        by the defined regions. If any regions are defined by this call,
        those regions will be presented with the current chunk in case it
        is within one of the new regions.
        """
        pass

    def region(self, name):
        """Get a CaptureRegion by name."""
        return self._capture_regions[name]

    def new_region(self, name, region):
        """Add a new CaptureRegion by name."""
        if self.has_region(name):
            # This is a bug, we tried to add the same region twice
            raise ImageFormatError('Inspector re-added region %s' % name)
        self._capture_regions[name] = region

    def has_region(self, name):
        """Returns True if named region has been defined."""
        return name in self._capture_regions

    @property
    def format_match(self):
        """Returns True if the file appears to be the expected format."""
        return True

    @property
    def virtual_size(self):
        """Returns the virtual size of the disk image, or zero if unknown."""
        # For a raw image the virtual size is just the byte count seen.
        return self._total_count

    @property
    def actual_size(self):
        """Returns the total size of the file, usually smaller than
        virtual_size.
        """
        return self._total_count

    def __str__(self):
        """The string name of this file format."""
        return 'raw'

    @property
    def context_info(self):
        """Return info on amount of data held in memory for auditing.

        This is a dict of region:sizeinbytes items that the inspector
        uses to examine the file.
        """
        return {name: len(region.data) for name, region in
                self._capture_regions.items()}
# The qcow2 format consists of a big-endian 72-byte header, of which
# only a small portion has information we care about:
#
# Dec Hex Name
# 0 0x00 Magic 4-bytes 'QFI\xfb'
# 4 0x04 Version (uint32_t, should always be 2 for modern files)
# . . .
# 24 0x18 Size in bytes (unint64_t)
#
# https://people.gnome.org/~markmc/qcow-image-format.html
class QcowInspector(FileInspector):
    """QEMU QCOW2 Format

    This should only require about 32 bytes of the beginning of the file
    to determine the virtual size.
    """
    def __init__(self, *a, **k):
        super(QcowInspector, self).__init__(*a, **k)
        self.new_region('header', CaptureRegion(0, 512))

    def _qcow_header_data(self):
        # Big-endian header fields: magic, version, backing-file offset,
        # backing-file size, cluster_bits, size-in-bytes. Only magic and
        # size are used here; the version is parsed but not validated.
        magic, version, bf_offset, bf_sz, cluster_bits, size = (
            struct.unpack('>4sIQIIQ', self.region('header').data[:32]))
        return magic, size

    @property
    def virtual_size(self):
        if not self.region('header').complete:
            return 0
        if not self.format_match:
            return 0
        magic, size = self._qcow_header_data()
        return size

    @property
    def format_match(self):
        if not self.region('header').complete:
            return False
        # 'QFI\xfb' is the qcow family magic number.
        magic, size = self._qcow_header_data()
        return magic == b'QFI\xFB'

    def __str__(self):
        return 'qcow2'
# The VHD (or VPC as QEMU calls it) format consists of a big-endian
# 512-byte "footer" at the beginning fo the file with various
# information, most of which does not matter to us:
#
# Dec Hex Name
# 0 0x00 Magic string (8-bytes, always 'conectix')
# 40 0x28 Disk size (uint64_t)
#
# https://github.com/qemu/qemu/blob/master/block/vpc.c
class VHDInspector(FileInspector):
    """Connectix/MS VPC VHD Format

    This should only require about 512 bytes of the beginning of the file
    to determine the virtual size.
    """
    def __init__(self, *a, **k):
        super(VHDInspector, self).__init__(*a, **k)
        self.new_region('header', CaptureRegion(0, 512))

    @property
    def format_match(self):
        # The VHD "footer" (stored at the front here) begins 'conectix'.
        return self.region('header').data.startswith(b'conectix')

    @property
    def virtual_size(self):
        if not self.region('header').complete:
            return 0

        if not self.format_match:
            return 0

        # Disk size is a big-endian uint64 at offset 40 of the footer.
        return struct.unpack('>Q', self.region('header').data[40:48])[0]

    def __str__(self):
        return 'vhd'
# The VHDX format consists of a complex dynamic little-endian
# structure with multiple regions of metadata and data, linked by
# offsets with in the file (and within regions), identified by MSFT
# GUID strings. The header is a 320KiB structure, only a few pieces of
# which we actually need to capture and interpret:
#
# Dec Hex Name
# 0 0x00000 Identity (Technically 9-bytes, padded to 64KiB, the first
# 8 bytes of which are 'vhdxfile')
# 196608 0x30000 The Region table (64KiB of a 32-byte header, followed
# by up to 2047 36-byte region table entry structures)
#
# The region table header includes two items we need to read and parse,
# which are:
#
# 196608 0x30000 4-byte signature ('regi')
# 196616 0x30008 Entry count (uint32-t)
#
# The region table entries follow the region table header immediately
# and are identified by a 16-byte GUID, and provide an offset of the
# start of that region. We care about the "metadata region", identified
# by the METAREGION class variable. The region table entry is (offsets
# from the beginning of the entry, since it could be in multiple places):
#
# 0 0x00000 16-byte MSFT GUID
# 16 0x00010 Offset of the actual metadata region (uint64_t)
#
# When we find the METAREGION table entry, we need to grab that offset
# and start examining the region structure at that point. That
# consists of a metadata table of structures, which point to places in
# the data in an unstructured space that follows. The header is
# (offsets relative to the region start):
#
# 0 0x00000 8-byte signature ('metadata')
# . . .
# 16 0x00010 2-byte entry count (up to 2047 entries max)
#
# This header is followed by the specified number of metadata entry
# structures, identified by GUID:
#
# 0 0x00000 16-byte MSFT GUID
# 16 0x00010 4-byte offset (uint32_t, relative to the beginning of
# the metadata region)
#
# We need to find the "Virtual Disk Size" metadata item, identified by
# the GUID in the VIRTUAL_DISK_SIZE class variable, grab the offset,
# add it to the offset of the metadata region, and examine that 8-byte
# chunk of data that follows.
#
# The "Virtual Disk Size" is a naked uint64_t which contains the size
# of the virtual disk, and is our ultimate target here.
#
# https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-vhdx/83e061f8-f6e2-4de1-91bd-5d518a43d477
class VHDXInspector(FileInspector):
    """MS VHDX Format

    This requires some complex parsing of the stream. The first 256KiB
    of the image is stored to get the header and region information,
    and then we capture the first metadata region to read those
    records, find the location of the virtual size data and parse
    it. This needs to store the metadata table entries up until the
    VDS record, which may consist of up to 2047 32-byte entries at
    max. Finally, it must store a chunk of data at the offset of the
    actual VDS uint64.
    """
    # MSFT GUIDs identifying the metadata region table entry and the
    # "Virtual Disk Size" metadata item within it.
    METAREGION = '8B7CA206-4790-4B9A-B8FE-575F050F886E'
    VIRTUAL_DISK_SIZE = '2FA54224-CD1B-4876-B211-5DBED83BF4B8'

    def __init__(self, *a, **k):
        super(VHDXInspector, self).__init__(*a, **k)
        self.new_region('ident', CaptureRegion(0, 32))
        # The region table lives at 0x30000 (192KiB) and is 64KiB long.
        self.new_region('header', CaptureRegion(192 * 1024, 64 * 1024))

    def post_process(self):
        # After reading a chunk, we may have the following conditions:
        #
        # 1. We may have just completed the header region, and if so,
        #    we need to immediately read and calculate the location of
        #    the metadata region, as it may be starting in the same
        #    read we just did.
        # 2. We may have just completed the metadata region, and if so,
        #    we need to immediately calculate the location of the
        #    "virtual disk size" record, as it may be starting in the
        #    same read we just did.
        if self.region('header').complete and not self.has_region('metadata'):
            region = self._find_meta_region()
            if region:
                self.new_region('metadata', region)
        elif self.has_region('metadata') and not self.has_region('vds'):
            region = self._find_meta_entry(self.VIRTUAL_DISK_SIZE)
            if region:
                self.new_region('vds', region)

    @property
    def format_match(self):
        return self.region('ident').data.startswith(b'vhdxfile')

    @staticmethod
    def _guid(buf):
        """Format a MSFT GUID from the 16-byte input buffer."""
        guid_format = '<IHHBBBBBBBB'
        return '%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X' % (
            struct.unpack(guid_format, buf))

    def _find_meta_region(self):
        # The region table entries start after a 16-byte table header
        region_entry_first = 16

        # Parse the region table header to find the number of regions
        regi, cksum, count, reserved = struct.unpack(
            '<IIII', self.region('header').data[:16])
        # 0x69676572 is 'regi' little-endian.
        if regi != 0x69676572:
            raise ImageFormatError('Region signature not found at %x' % (
                self.region('header').offset))

        if count >= 2048:
            raise ImageFormatError('Region count is %i (limit 2047)' % count)

        # Process the regions until we find the metadata one; grab the
        # offset and return
        self._log.debug('Region entry first is %x', region_entry_first)
        self._log.debug('Region entries %i', count)
        meta_offset = 0
        for i in range(0, count):
            entry_start = region_entry_first + (i * 32)
            entry_end = entry_start + 32
            entry = self.region('header').data[entry_start:entry_end]
            self._log.debug('Entry offset is %x', entry_start)

            # GUID is the first 16 bytes
            guid = self._guid(entry[:16])
            if guid == self.METAREGION:
                # This entry is the metadata region entry
                meta_offset, meta_len, meta_req = struct.unpack(
                    '<QII', entry[16:])
                self._log.debug('Meta entry %i specifies offset: %x',
                                i, meta_offset)
                # NOTE(danms): The meta_len in the region descriptor is the
                # entire size of the metadata table and data. This can be
                # very large, so we should only capture the size required
                # for the maximum length of the table, which is one 32-byte
                # table header, plus up to 2047 32-byte entries.
                meta_len = 2048 * 32
                return CaptureRegion(meta_offset, meta_len)

        self._log.warning('Did not find metadata region')
        return None

    def _find_meta_entry(self, desired_guid):
        meta_buffer = self.region('metadata').data
        if len(meta_buffer) < 32:
            # Not enough data yet for full header
            return None

        # Make sure we found the metadata region by checking the signature
        sig, reserved, count = struct.unpack('<8sHH', meta_buffer[:12])
        if sig != b'metadata':
            raise ImageFormatError(
                'Invalid signature for metadata region: %r' % sig)

        entries_size = 32 + (count * 32)
        if len(meta_buffer) < entries_size:
            # Not enough data yet for all metadata entries. This is not
            # strictly necessary as we could process whatever we have until
            # we find the V-D-S one, but there are only 2047 32-byte
            # entries max (~64k).
            return None

        if count >= 2048:
            raise ImageFormatError(
                'Metadata item count is %i (limit 2047)' % count)

        for i in range(0, count):
            entry_offset = 32 + (i * 32)
            guid = self._guid(meta_buffer[entry_offset:entry_offset + 16])
            if guid == desired_guid:
                # Found the item we are looking for by id.
                # Stop our region from capturing
                item_offset, item_length, _reserved = struct.unpack(
                    '<III',
                    meta_buffer[entry_offset + 16:entry_offset + 28])
                self.region('metadata').length = len(meta_buffer)
                self._log.debug('Found entry at offset %x', item_offset)
                # Metadata item offset is from the beginning of the metadata
                # region, not the file.
                return CaptureRegion(
                    self.region('metadata').offset + item_offset,
                    item_length)

        self._log.warning('Did not find guid %s', desired_guid)
        return None

    @property
    def virtual_size(self):
        # Until we have found the offset and have enough metadata buffered
        # to read it, return "unknown"
        if not self.has_region('vds') or not self.region('vds').complete:
            return 0

        # The VDS item is a naked little-endian uint64.
        size, = struct.unpack('<Q', self.region('vds').data)
        return size

    def __str__(self):
        return 'vhdx'
# The VMDK format comes in a large number of variations, but the
# single-file 'monolithicSparse' version 4 one is mostly what we care
# about. It contains a 512-byte little-endian header, followed by a
# variable-length "descriptor" region of text. The header looks like:
#
# Dec Hex Name
# 0 0x00 4-byte magic string 'KDMV'
# 4 0x04 Version (uint32_t)
# 8 0x08 Flags (uint32_t, unused by us)
# 16 0x10 Number of 512 byte sectors in the disk (uint64_t)
# 24 0x18 Granularity (uint64_t, unused by us)
# 32 0x20 Descriptor offset in 512-byte sectors (uint64_t)
# 40 0x28 Descriptor size in 512-byte sectors (uint64_t)
#
# After we have the header, we need to find the descriptor region,
# which starts at the sector identified in the "descriptor offset"
# field, and is "descriptor size" 512-byte sectors long. Once we have
# that region, we need to parse it as text, looking for the
# createType=XXX line that specifies the mechanism by which the data
# extents are stored in this file. We only support the
# "monolithicSparse" format, so we just need to confirm that this file
# contains that specifier.
#
# https://www.vmware.com/app/vmdk/?src=vmdk
class VMDKInspector(FileInspector):
"""vmware VMDK format (monolithicSparse variant only)
This needs to store the 512 byte header and the descriptor region
which should be just after that. The descriptor region is some
variable number of 512 byte sectors, but is just text defining the
layout of the disk.
"""
def __init__(self, *a, **k):
super(VMDKInspector, self).__init__(*a, **k)
self.new_region('header', CaptureRegion(0, 512))
def post_process(self):
# If we have just completed the header region, we need to calculate
# the location and length of the descriptor, which should immediately
# follow and may have been partially-read in this read.
if not self.region('header').complete:
return
sig, ver, _flags, _sectors, _grain, desc_sec, desc_num = struct.unpack(
'<4sIIQQQQ', self.region('header').data[:44])
if sig != b'KDMV':
raise ImageFormatError('Signature KDMV not found: %r' % sig)
return
if ver not in (1, 2, 3):
raise ImageFormatError('Unsupported format version %i' % ver)
return
if not self.has_region('descriptor'):
self.new_region('descriptor', CaptureRegion(
desc_sec * 512, desc_num * 512))
@property
def format_match(self):
return self.region('header').data.startswith(b'KDMV')
@property
def virtual_size(self):
if not self.has_region('descriptor'):
# Not enough data yet
return 0
descriptor_rgn = self.region('descriptor')
if not descriptor_rgn.complete:
# Not enough data yet
return 0
descriptor = descriptor_rgn.data
type_idx = descriptor.index(b'createType="') + len(b'createType="')
type_end = descriptor.find(b'"', type_idx)
# Make sure we don't grab and log a huge chunk of data in a
# maliciously-formatted descriptor region
if type_end - type_idx < 64:
vmdktype = descriptor[type_idx:type_end]
else:
vmdktype = b'formatnotfound'
if vmdktype != b'monolithicSparse':
raise ImageFormatError('Unsupported VMDK format %s' % vmdktype)
return 0
# If we have the descriptor, we definitely have the header
_sig, _ver, _flags, sectors, _grain, _desc_sec, _desc_num = (
struct.unpack('<IIIQQQQ', self.region('header').data[:44]))
return sectors * 512
    def __str__(self):
        # Short format name used in logs and error messages.
        return 'vmdk'
# The VirtualBox VDI format consists of a 512-byte little-endian
# header, some of which we care about:
#
# Dec Hex Name
# 64 0x40 4-byte Magic (0xbeda107f)
# . . .
# 368 0x170 Size in bytes (uint64_t)
#
# https://github.com/qemu/qemu/blob/master/block/vdi.c
class VDIInspector(FileInspector):
    """VirtualBox VDI disk image probe.

    Only the first 512 bytes of the image are captured. The magic number is
    read from offset 0x40 and the virtual size (in bytes) from offset 0x170,
    both little-endian, per the layout described in the comment above.
    """
    def __init__(self, *args, **kwargs):
        super(VDIInspector, self).__init__(*args, **kwargs)
        self.new_region('header', CaptureRegion(0, 512))

    @property
    def format_match(self):
        header = self.region('header')
        if not header.complete:
            return False
        (magic,) = struct.unpack('<I', header.data[0x40:0x44])
        return magic == 0xbeda107f

    @property
    def virtual_size(self):
        header = self.region('header')
        if not header.complete:
            return 0
        if not self.format_match:
            return 0
        (nbytes,) = struct.unpack('<Q', header.data[0x170:0x178])
        return nbytes

    def __str__(self):
        return 'vdi'
class InfoWrapper(object):
    """A file-like object that wraps another and updates a format inspector.

    This passes chunks to the format inspector while reading. If the
    inspector fails, it logs the error and stops calling it, but continues
    proxying data from the source to its user.
    """
    def __init__(self, source, fmt):
        """
        :param source: The wrapped data source (an iterator of chunks and/or
            an object with a ``read(size)`` method).
        :param fmt: A format inspector exposing ``eat_chunk(chunk)``.
        """
        self._source = source
        self._format = fmt
        self._error = False

    def __iter__(self):
        return self

    def _process_chunk(self, chunk):
        # Feed the inspector until its first failure, then stop feeding it
        # entirely; inspection is best-effort and must never interrupt the
        # data stream.
        if not self._error:
            try:
                self._format.eat_chunk(chunk)
            except Exception as e:
                # Absolutely do not allow the format inspector to break
                # our streaming of the image. If we failed, just stop
                # trying, log and keep going.
                LOG.error('Format inspector failed, aborting: %s', e)
                self._error = True

    def __next__(self):
        # A no-op "except StopIteration: raise" used to wrap this call; it
        # has been removed — StopIteration propagates naturally and ends
        # iteration.
        chunk = next(self._source)
        self._process_chunk(chunk)
        return chunk

    def read(self, size):
        chunk = self._source.read(size)
        self._process_chunk(chunk)
        return chunk

    def close(self):
        # The source may be a plain iterator without close(); only delegate
        # when the method exists.
        if hasattr(self._source, 'close'):
            self._source.close()
def get_inspector(format_name):
    """Returns a FormatInspector class based on the given name.

    :param format_name: The name of the disk_format (raw, qcow2, etc).
    :returns: A FormatInspector or None if unsupported.
    """
    supported = {
        'qcow2': QcowInspector,
        'raw': FileInspector,
        'vdi': VDIInspector,
        'vhd': VHDInspector,
        'vhdx': VHDXInspector,
        'vmdk': VMDKInspector,
    }
    return supported.get(format_name)
| 36.603221 | 107 | 0.63716 |
ace52f3325234778eff8b50d3808426a51d3cb30 | 6,578 | py | Python | bazar/south_migrations/0002_auto__add_field_entity_fax.py | emencia/emencia-django-bazar | a0cf56c00988c84c2288c21fa2a08364fc5033aa | [
"MIT"
] | null | null | null | bazar/south_migrations/0002_auto__add_field_entity_fax.py | emencia/emencia-django-bazar | a0cf56c00988c84c2288c21fa2a08364fc5033aa | [
"MIT"
] | 11 | 2015-05-06T14:50:14.000Z | 2017-12-16T23:46:17.000Z | bazar/south_migrations/0002_auto__add_field_entity_fax.py | emencia/emencia-django-bazar | a0cf56c00988c84c2288c21fa2a08364fc5033aa | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    # Auto-generated South schema migration: adds the optional ``Entity.fax``
    # CharField. The ``models`` dict below is a frozen snapshot of the ORM
    # state at generation time and should not be edited by hand.
    def forwards(self, orm):
        # Adding field 'Entity.fax'
        db.add_column(u'bazar_entity', 'fax',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=15, blank=True),
                      keep_default=False)
    def backwards(self, orm):
        # Deleting field 'Entity.fax'
        db.delete_column(u'bazar_entity', 'fax')
    # Frozen ORM definitions (generated) used by South to build the fake ORM
    # passed to forwards()/backwards().
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'bazar.entity': {
            'Meta': {'object_name': 'Entity'},
            'adress': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'fax': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'kind': ('django.db.models.fields.CharField', [], {'default': "'customer'", 'max_length': '40'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'phone': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'}),
            'town': ('django.db.models.fields.CharField', [], {'max_length': '75', 'blank': 'True'}),
            'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'})
        },
        u'bazar.note': {
            'Meta': {'object_name': 'Note'},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
            'content': ('django.db.models.fields.TextField', [], {}),
            'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'entity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bazar.Entity']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '150'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'taggit.tag': {
            'Meta': {'object_name': 'Tag'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
        },
        u'taggit.taggeditem': {
            'Meta': {'object_name': 'TaggedItem'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
        }
    }
complete_apps = ['bazar'] | 67.814433 | 187 | 0.557616 |
ace52f49cd18a866be8692a829a1134ad430eba5 | 818 | py | Python | src/utsc/core/_vendor/bluecat_libraries/address_manager/api/rest/provisional/rest_dict.py | utsc-networking/utsc-tools | d5bc10cf825f1be46999d5a42da62cc0df456f0c | [
"MIT"
] | null | null | null | src/utsc/core/_vendor/bluecat_libraries/address_manager/api/rest/provisional/rest_dict.py | utsc-networking/utsc-tools | d5bc10cf825f1be46999d5a42da62cc0df456f0c | [
"MIT"
] | null | null | null | src/utsc/core/_vendor/bluecat_libraries/address_manager/api/rest/provisional/rest_dict.py | utsc-networking/utsc-tools | d5bc10cf825f1be46999d5a42da62cc0df456f0c | [
"MIT"
] | null | null | null | # Copyright 2020 BlueCat Networks (USA) Inc. and its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
"""
Wrappers for standard dictionary REST results to make them mimic `suds`
dictionary results.
"""
from copy import deepcopy
class RESTDict(dict):
    """
    Wrapper for standard dictionary REST results to make them mimic `suds`
    dictionary results.

    Keys are additionally exposed as attributes: ``d.foo`` reads and
    ``d.foo = x`` writes the ``'foo'`` mapping key.
    """
    def __getattr__(self, item):
        # NOTE: raises KeyError (not AttributeError) for a missing key, which
        # mirrors the suds result objects this class emulates but means
        # hasattr()/getattr()-with-default do not behave conventionally.
        return self[item]
    def __getstate__(self):
        # Returning None (bare "pass") keeps pickle/copy from walking
        # attribute state through __getattr__ above (which would raise
        # KeyError); it also makes __deepcopy__ below safe.
        pass
    def __setattr__(self, key, value):
        # Attribute assignment is redirected into the mapping itself, so
        # instances never accumulate real instance attributes.
        self[key] = value
    def __deepcopy__(self, memo):
        # Deep-copying through a super() proxy deliberately bypasses this
        # __deepcopy__ (avoiding infinite recursion): deepcopy falls back to
        # __reduce_ex__, which copies the dict contents, and the result is
        # re-wrapped in the concrete class. NOTE(review): `memo` is not
        # threaded through, so shared sub-objects are not deduplicated —
        # presumably acceptable for plain JSON-ish payloads; confirm.
        cls = self.__class__
        res = deepcopy(super(RESTDict, self))  # pylint: disable=super-with-arguments; legacy code
        return cls(res)
| 27.266667 | 98 | 0.680929 |
ace52fc2b5476927313b17938bfa540553fac7d5 | 21,780 | py | Python | conformity/fields/structures.py | sanketsaurav/conformity | 22a88b4db493378b21ea736abe50f2b93cf5ff99 | [
"Apache-2.0"
] | null | null | null | conformity/fields/structures.py | sanketsaurav/conformity | 22a88b4db493378b21ea736abe50f2b93cf5ff99 | [
"Apache-2.0"
] | null | null | null | conformity/fields/structures.py | sanketsaurav/conformity | 22a88b4db493378b21ea736abe50f2b93cf5ff99 | [
"Apache-2.0"
] | null | null | null | from __future__ import (
absolute_import,
unicode_literals,
)
import abc
from collections import OrderedDict
import sys
from typing import (
AbstractSet,
Any as AnyType,
Callable,
Container,
Dict,
FrozenSet,
Generic,
Hashable as HashableType,
List as ListType,
Mapping,
Optional,
Sequence as SequenceType,
Sized,
Tuple as TupleType,
Type,
TypeVar,
Union,
cast,
)
import attr
import six
from conformity.error import (
ERROR_CODE_MISSING,
ERROR_CODE_UNKNOWN,
Error,
update_error_pointer,
)
from conformity.fields.basic import (
Anything,
Base,
Hashable,
Introspection,
)
from conformity.utils import (
attr_is_instance,
attr_is_int,
attr_is_iterable,
attr_is_optional,
attr_is_string,
strip_none,
)
# Type variable for the collection type an AdditionalCollectionValidator checks.
VT = TypeVar('VT', bound=Container)
if sys.version_info < (3, 7):
    # We can't just decorate AdditionalCollectionValidator with
    # @six.add_metaclass on Python < 3.7: that raises
    #   TypeError: Cannot inherit from plain Generic
    # because GenericMeta is already the metaclass of Generic there.
    # Conveniently, GenericMeta extends ABCMeta on those versions, so
    # abstract methods are enforced anyway and the decorator can be a no-op:
    _ACVT = TypeVar('_ACVT')
    def _acv_decorator(_metaclass):  # type: (Type) -> Callable[[Type[_ACVT]], Type[_ACVT]]
        # No-op decorator factory: ignores the requested metaclass and
        # returns the class unchanged.
        def wrapper(cls):  # type: (Type[_ACVT]) -> Type[_ACVT]
            return cls
        return wrapper
else:
    # On Python 3.7+ Generic no longer forces a metaclass, so ABCMeta must be
    # applied explicitly or @abc.abstractmethod would not be enforced.
    _acv_decorator = six.add_metaclass
@_acv_decorator(abc.ABCMeta)
class AdditionalCollectionValidator(Generic[VT]):
    """
    Conformity fields validating collections can have an additional custom validator that can perform extra checks
    across the entire collection, such as ensuring that values that need to refer to other values in the same
    collection properly match. This is especially helpful to be able to avoid duplicating the existing collection
    validation in Conformity's structure fields.

    Subclass this (parameterized with the collection type being validated) and pass an
    instance as the ``additional_validator`` argument of a structure field.
    """
    @abc.abstractmethod
    def errors(self, value):  # type: (VT) -> ListType[Error]
        """
        Called after the collection has otherwise passed validation, and not called if the collection has not passed
        its normal validation.

        :param value: The value to be validated.

        :return: A list of errors encountered with this value.
        """
@attr.s
class _BaseSequenceOrSet(Base):
    """
    Base for Conformity fields that validate a sized, iterable collection: each item must pass
    validation with the Conformity field passed to the `contents` argument, and the collection's
    size can optionally be bounded with the `max_length` and `min_length` arguments. Subclasses
    set `valid_types`, `introspect_type`, and `type_error` to select the concrete collection kind.
    """
    # Field applied to every element of the collection.
    contents = attr.ib()
    # Inclusive size bounds; None means unbounded on that side.
    max_length = attr.ib(default=None, validator=attr_is_optional(attr_is_int()))  # type: Optional[int]
    min_length = attr.ib(default=None, validator=attr_is_optional(attr_is_int()))  # type: Optional[int]
    description = attr.ib(default=None, validator=attr_is_optional(attr_is_string()))  # type: Optional[six.text_type]
    # Optional whole-collection validator, run only after all other checks pass.
    additional_validator = attr.ib(
        default=None,
        validator=attr_is_optional(attr_is_instance(AdditionalCollectionValidator)),
    )  # type: Optional[AdditionalCollectionValidator[AnyType]]
    valid_types = None  # type: Union[Type[Sized], TupleType[Type[Sized], ...]]
    type_noun = None  # deprecated, will be removed in Conformity 2.0
    introspect_type = None  # type: six.text_type
    type_error = None  # type: six.text_type
    def __attrs_post_init__(self):  # type: () -> None
        if self.min_length is not None and self.max_length is not None and self.min_length > self.max_length:
            # Bug fix: this message previously said "in UnicodeString", a copy-paste from the
            # string field; report the actual field class instead.
            raise ValueError(
                'min_length cannot be greater than max_length in {}'.format(self.__class__.__name__),
            )
    def errors(self, value):  # type: (AnyType) -> ListType[Error]
        """
        Validate `value`, returning a (possibly empty) list of errors.
        """
        if not isinstance(value, self.valid_types):
            return [Error(self.type_error)]
        result = []
        # NOTE(review): these messages say "List" even for Sequence/Set subclasses; kept as-is
        # because callers may match on the exact text.
        if self.max_length is not None and len(value) > self.max_length:
            result.append(
                Error('List is longer than {}'.format(self.max_length)),
            )
        elif self.min_length is not None and len(value) < self.min_length:
            result.append(
                Error('List is shorter than {}'.format(self.min_length)),
            )
        for lazy_pointer, element in self._enumerate(value):
            result.extend(
                update_error_pointer(error, lazy_pointer.get())
                for error in (self.contents.errors(element) or [])
            )
        if not result and self.additional_validator:
            return self.additional_validator.errors(value)
        return result
    @classmethod
    def _enumerate(cls, values):
        # We use a lazy pointer here so that we don't evaluate the pointer for every item that
        # doesn't generate an error; it is only evaluated per erroring item. This is critical
        # in sets, where the pointer is the value converted to a string instead of an index.
        return ((cls.LazyPointer(i, value), value) for i, value in enumerate(values))
    def introspect(self):  # type: () -> Introspection
        return strip_none({
            'type': self.introspect_type,
            'contents': self.contents.introspect(),
            'max_length': self.max_length,
            'min_length': self.min_length,
            'description': self.description,
            'additional_validation': (
                self.additional_validator.__class__.__name__ if self.additional_validator else None
            ),
        })
    class LazyPointer(object):
        # Default pointer: the element's index. The Set subclass overrides this
        # with the stringified value.
        def __init__(self, index, _):
            self.get = lambda: index
@attr.s
class List(_BaseSequenceOrSet):
    """
    Conformity field that ensures that the value is a ``list`` whose items all pass
    validation with the Conformity field passed to the `contents` argument, optionally
    bounding its size with `max_length`/`min_length`.
    """
    additional_validator = attr.ib(
        default=None,
        validator=attr_is_optional(attr_is_instance(AdditionalCollectionValidator)),
    )  # type: Optional[AdditionalCollectionValidator[list]]
    valid_types = list
    introspect_type = 'list'
    type_error = 'Not a list'
@attr.s
class Sequence(_BaseSequenceOrSet):
    """
    Conformity field that ensures that the value is any ``Sequence`` (list, tuple, etc.)
    whose items all pass validation with the Conformity field passed to the `contents`
    argument, optionally bounding its size with `max_length`/`min_length`.
    """
    additional_validator = attr.ib(
        default=None,
        validator=attr_is_optional(attr_is_instance(AdditionalCollectionValidator)),
    )  # type: Optional[AdditionalCollectionValidator[SequenceType]]
    valid_types = SequenceType
    introspect_type = 'sequence'
    type_error = 'Not a sequence'
@attr.s
class Set(_BaseSequenceOrSet):
    """
    Conformity field that ensures that the value is an abstract set of items that all pass validation with the
    Conformity field passed to the `contents` argument and optionally establishes boundaries for that list with the
    `max_length` and `min_length` arguments.
    """
    additional_validator = attr.ib(
        default=None,
        validator=attr_is_optional(attr_is_instance(AdditionalCollectionValidator)),
    )  # type: Optional[AdditionalCollectionValidator[AbstractSet]]
    valid_types = AbstractSet
    introspect_type = 'set'
    type_error = 'Not a set or frozenset'
    class LazyPointer(object):
        # Sets have no indices, so the error pointer is the stringified
        # offending value, wrapped in brackets.
        def __init__(self, _, value):
            self.get = lambda: '[{}]'.format(str(value))
@attr.s
class Dictionary(Base):
    """
    Conformity field that ensures that the value is a dictionary with a specific set of keys and value that validate
    with the Conformity fields associated with those keys (`contents`). Keys are required unless they are listed in
    the `optional_keys` argument. No extra keys are allowed unless the `allow_extra_keys` argument is set to `True`.
    If the `contents` argument is an instance of `OrderedDict`, the field introspection will include a `display_order`
    list of keys matching the order they exist in the `OrderedDict`, and errors will be reported in the order the keys
    exist in the `OrderedDict`. Order will be maintained for any calls to `extend` as long as those calls also use
    `OrderedDict`. Ordering behavior is undefined otherwise. This field does NOT enforce that the value it validates
    presents keys in the same order. `OrderedDict` is used strictly for documentation and error-object-ordering
    purposes only.
    """
    introspect_type = 'dictionary'
    # Makes MyPy allow optional_keys to have this type
    _optional_keys_default = frozenset()  # type: Union[TupleType[HashableType, ...], FrozenSet[HashableType]]
    contents = attr.ib(
        default=None,
        validator=attr_is_optional(attr_is_instance(dict)),
    )  # type: Mapping[HashableType, Base]
    optional_keys = attr.ib(
        default=_optional_keys_default,
        validator=attr_is_iterable(attr_is_instance(object)),
    )  # type: Union[TupleType[HashableType, ...], FrozenSet[HashableType]]
    allow_extra_keys = attr.ib(default=None)  # type: bool
    description = attr.ib(default=None, validator=attr_is_optional(attr_is_string()))  # type: Optional[six.text_type]
    additional_validator = attr.ib(
        default=None,
        validator=attr_is_optional(attr_is_instance(AdditionalCollectionValidator)),
    )  # type: Optional[AdditionalCollectionValidator[Mapping[HashableType, AnyType]]]
    def __attrs_post_init__(self):  # type: () -> None
        # Each argument may alternatively be hard-coded on a subclass; the checks below let the
        # subclass-level value stand in whenever the constructor argument was left at its default.
        if self.contents is None and getattr(self.__class__, 'contents', None) is not None:
            # If no contents were provided but a subclass has hard-coded contents, use those
            self.contents = self.__class__.contents
        if self.contents is None:
            # If there are still no contents, raise an error
            raise ValueError("'contents' is a required argument")
        if not isinstance(self.contents, dict):
            raise TypeError("'contents' must be a dict")
        if (
            self.optional_keys is self._optional_keys_default and
            getattr(self.__class__, 'optional_keys', None) is not None
        ):
            # If the optional_keys argument was defaulted (not specified) but a subclass has it hard-coded, use that
            self.optional_keys = self.__class__.optional_keys
        if not isinstance(self.optional_keys, frozenset):
            self.optional_keys = frozenset(self.optional_keys)
        if self.allow_extra_keys is None and getattr(self.__class__, 'allow_extra_keys', None) is not None:
            # If the allow_extra_keys argument was not specified but a subclass has it hard-coded, use that value
            self.allow_extra_keys = self.__class__.allow_extra_keys
        if self.allow_extra_keys is None:
            # If no value is found, default to False
            self.allow_extra_keys = False
        if not isinstance(self.allow_extra_keys, bool):
            raise TypeError("'allow_extra_keys' must be a boolean")
        if self.description is None and getattr(self.__class__, 'description', None):
            # If the description was not specified but a subclass has it hard-coded, use that value
            self.description = self.__class__.description
        if self.description is not None and not isinstance(self.description, six.text_type):
            raise TypeError("'description' must be a unicode string")
    def errors(self, value):  # type: (AnyType) -> ListType[Error]
        if not isinstance(value, dict):
            return [Error('Not a dict')]
        result = []
        for key, field in self.contents.items():
            # Check key is present
            if key not in value:
                if key not in self.optional_keys:
                    result.append(
                        Error('Missing key: {}'.format(key), code=ERROR_CODE_MISSING, pointer=six.text_type(key)),
                    )
            else:
                # Check key type
                result.extend(
                    update_error_pointer(error, key)
                    for error in (field.errors(value[key]) or [])
                )
        # Check for extra keys
        extra_keys = set(value.keys()) - set(self.contents.keys())
        if extra_keys and not self.allow_extra_keys:
            result.append(
                Error(
                    'Extra keys present: {}'.format(', '.join(six.text_type(key) for key in sorted(extra_keys))),
                    code=ERROR_CODE_UNKNOWN,
                ),
            )
        # The whole-collection validator only runs on otherwise-valid values.
        if not result and self.additional_validator:
            return self.additional_validator.errors(value)
        return result
    def extend(
        self,
        contents=None,  # type: Optional[Mapping[HashableType, Base]]
        optional_keys=None,  # type: Optional[Union[TupleType[HashableType, ...], FrozenSet[HashableType]]]
        allow_extra_keys=None,  # type: Optional[bool]
        description=None,  # type: Optional[six.text_type]
        replace_optional_keys=False,  # type: bool
        additional_validator=None,  # type: Optional[AdditionalCollectionValidator[Mapping[HashableType, AnyType]]]
    ):
        # type: (...) -> Dictionary
        """
        This method allows you to create a new `Dictionary` that extends the current `Dictionary` with additional
        contents and/or optional keys, and/or replaces the `allow_extra_keys` and/or `description` attributes.

        :param contents: More contents, if any, to extend the current contents
        :param optional_keys: More optional keys, if any, to extend the current optional keys
        :param allow_extra_keys: If non-`None`, this overrides the current `allow_extra_keys` attribute
        :param description: If non-`None`, this overrides the current `description` attribute
        :param replace_optional_keys: If `True`, then the `optional_keys` argument will completely replace, instead of
                                      extend, the current optional keys
        :param additional_validator: If non-`None`, this overrides the current `additional_validator` attribute

        :return: A new `Dictionary` extended from the current `Dictionary` based on the supplied arguments
        """
        optional_keys = frozenset(optional_keys or ())
        return Dictionary(
            # Rebuild with the *current* contents' concrete dict type so an OrderedDict's
            # display ordering survives the merge; later (new) keys win on conflict.
            contents=cast(Type[Union[Dict, OrderedDict]], type(self.contents))(
                (k, v) for d in (self.contents, contents) for k, v in six.iteritems(d)
            ) if contents else self.contents,
            optional_keys=optional_keys if replace_optional_keys else frozenset(self.optional_keys) | optional_keys,
            allow_extra_keys=self.allow_extra_keys if allow_extra_keys is None else allow_extra_keys,
            description=self.description if description is None else description,
            additional_validator=self.additional_validator if additional_validator is None else additional_validator,
        )
    def introspect(self):  # type: () -> Introspection
        display_order = None  # type: Optional[ListType[AnyType]]
        if isinstance(self.contents, OrderedDict):
            display_order = list(self.contents.keys())
        return strip_none({
            'type': self.introspect_type,
            'contents': {
                key: value.introspect()
                for key, value in self.contents.items()
            },
            'optional_keys': sorted(self.optional_keys),
            'allow_extra_keys': self.allow_extra_keys,
            'description': self.description,
            'display_order': display_order,
            'additional_validation': (
                self.additional_validator.__class__.__name__ if self.additional_validator else None
            ),
        })
@attr.s
class SchemalessDictionary(Base):
    """
    Conformity field that ensures that the value is a dictionary of any keys and values, but optionally enforcing that
    the keys pass the Conformity validation specified with the `key_type` argument and/or that the values pass the
    Conformity validation specified with the `value_type` argument. Size of the dictionary can also be constrained with
    the optional `max_length` and `min_length` arguments.
    """
    introspect_type = 'schemaless_dictionary'
    # Makes MyPy allow key_type and value_type have type Base
    _default_key_type = attr.Factory(Hashable)  # type: Base
    _default_value_type = attr.Factory(Anything)  # type: Base
    key_type = attr.ib(default=_default_key_type, validator=attr_is_instance(Base))  # type: Base
    value_type = attr.ib(default=_default_value_type, validator=attr_is_instance(Base))  # type: Base
    max_length = attr.ib(default=None, validator=attr_is_optional(attr_is_int()))  # type: Optional[int]
    min_length = attr.ib(default=None, validator=attr_is_optional(attr_is_int()))  # type: Optional[int]
    description = attr.ib(default=None, validator=attr_is_optional(attr_is_string()))  # type: Optional[six.text_type]
    additional_validator = attr.ib(
        default=None,
        validator=attr_is_optional(attr_is_instance(AdditionalCollectionValidator)),
    )  # type: Optional[AdditionalCollectionValidator[Mapping[HashableType, AnyType]]]
    def __attrs_post_init__(self):  # type: () -> None
        if self.min_length is not None and self.max_length is not None and self.min_length > self.max_length:
            # Bug fix: the message used to say "in UnicodeString", a copy-paste from the string
            # field; name the actual field class instead.
            raise ValueError(
                'min_length cannot be greater than max_length in {}'.format(self.__class__.__name__),
            )
    def errors(self, value):  # type: (AnyType) -> ListType[Error]
        """Validate `value`, returning a (possibly empty) list of errors."""
        if not isinstance(value, dict):
            return [Error('Not a dict')]
        result = []
        if self.max_length is not None and len(value) > self.max_length:
            result.append(Error('Dict contains more than {} value(s)'.format(self.max_length)))
        elif self.min_length is not None and len(value) < self.min_length:
            result.append(Error('Dict contains fewer than {} value(s)'.format(self.min_length)))
        # Every key and every value is checked independently; errors from both
        # checks are pointed at the key.
        for key, item in value.items():
            result.extend(
                update_error_pointer(error, key)
                for error in (self.key_type.errors(key) or [])
            )
            result.extend(
                update_error_pointer(error, key)
                for error in (self.value_type.errors(item) or [])
            )
        if not result and self.additional_validator:
            return self.additional_validator.errors(value)
        return result
    def introspect(self):  # type: () -> Introspection
        result = {
            'type': self.introspect_type,
            'max_length': self.max_length,
            'min_length': self.min_length,
            'description': self.description,
            'additional_validation': (
                self.additional_validator.__class__.__name__ if self.additional_validator else None
            ),
        }  # type: Introspection
        # We avoid using isinstance() here as that would also match subclass instances
        if not self.key_type.__class__ == Hashable:
            result['key_type'] = self.key_type.introspect()
        if not self.value_type.__class__ == Anything:
            result['value_type'] = self.value_type.introspect()
        return strip_none(result)
class Tuple(Base):
    """
    Conformity field that validates a tuple of fixed arity: the value must be a tuple with
    exactly as many elements as there are positional arguments to this field, and each element
    must validate against the Conformity field supplied in the corresponding position.
    """
    introspect_type = 'tuple'
    def __init__(self, *contents, **kwargs):  # type: (*Base, **AnyType) -> None
        # attrs cannot model "all positional args plus keyword options", so this is by hand.
        self.contents = contents
        for position, field in enumerate(contents):
            if not isinstance(field, Base):
                raise TypeError(
                    'Argument {} must be a Conformity field instance, is actually: {!r}'.format(position, field),
                )
        # Keyword options are popped manually because Python 2 forbids keyword
        # arguments after *args in a signature.
        self.description = kwargs.pop(str('description'), None)  # type: Optional[six.text_type]
        if self.description and not isinstance(self.description, six.text_type):
            raise TypeError("'description' must be a unicode string")
        self.additional_validator = kwargs.pop(
            'additional_validator',
            None,
        )  # type: Optional[AdditionalCollectionValidator[TupleType[AnyType, ...]]]
        if self.additional_validator and not isinstance(self.additional_validator, AdditionalCollectionValidator):
            raise TypeError("'additional_validator' must be an AdditionalCollectionValidator")
        if kwargs:
            raise TypeError('Unknown keyword arguments: {}'.format(', '.join(kwargs.keys())))
    def errors(self, value):  # type: (AnyType) -> ListType[Error]
        if not isinstance(value, tuple):
            return [Error('Not a tuple')]
        found = []
        if len(value) != len(self.contents):
            found.append(
                Error('Number of elements {} does not match expected {}'.format(len(value), len(self.contents)))
            )
        # zip() stops at the shorter side; any arity mismatch was already
        # reported above.
        for position, (expected, actual) in enumerate(zip(self.contents, value)):
            found.extend(
                update_error_pointer(error, position)
                for error in (expected.errors(actual) or [])
            )
        if not found and self.additional_validator:
            return self.additional_validator.errors(value)
        return found
    def introspect(self):  # type: () -> Introspection
        return strip_none({
            'type': self.introspect_type,
            'contents': [field.introspect() for field in self.contents],
            'description': self.description,
            'additional_validation': (
                self.additional_validator.__class__.__name__ if self.additional_validator else None
            ),
        })
| 43.043478 | 119 | 0.663682 |
ace530c758f406b6164096f1079f2615bff50980 | 5,460 | py | Python | sdk/servicebus/azure-mgmt-servicebus/azure/mgmt/servicebus/v2018_01_01_preview/operations/_regions_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 2,728 | 2015-01-09T10:19:32.000Z | 2022-03-31T14:50:33.000Z | sdk/servicebus/azure-mgmt-servicebus/azure/mgmt/servicebus/v2018_01_01_preview/operations/_regions_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 17,773 | 2015-01-05T15:57:17.000Z | 2022-03-31T23:50:25.000Z | sdk/servicebus/azure-mgmt-servicebus/azure/mgmt/servicebus/v2018_01_01_preview/operations/_regions_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 1,916 | 2015-01-19T05:05:41.000Z | 2022-03-31T19:36:44.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class RegionsOperations(object):
"""RegionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.servicebus.v2018_01_01_preview.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
    models = _models  # Alias so callers can reach the generated model classes via the group.
    def __init__(self, client, config, serializer, deserializer):
        # Wire up the shared pipeline client, serializers, and configuration
        # supplied by the generated service client that owns this group.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
def list_by_sku(
self,
sku, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.PremiumMessagingRegionsListResult"]
"""Gets the available Regions for a given sku.
:param sku: The sku type.
:type sku: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PremiumMessagingRegionsListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.servicebus.v2018_01_01_preview.models.PremiumMessagingRegionsListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PremiumMessagingRegionsListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-01-01-preview"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_sku.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'sku': self._serialize.url("sku", sku, 'str', max_length=50, min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('PremiumMessagingRegionsListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_sku.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.ServiceBus/sku/{sku}/regions'} # type: ignore
| 45.882353 | 134 | 0.657875 |
ace530d4353e2780676e8d9855ce982d3d541f24 | 16,424 | py | Python | examples/ca/cts_lattice_grain.py | cctrunz/landlab | 4e4ef12f4bae82bc5194f1dcc9af8ff1a7c20939 | [
"MIT"
] | null | null | null | examples/ca/cts_lattice_grain.py | cctrunz/landlab | 4e4ef12f4bae82bc5194f1dcc9af8ff1a7c20939 | [
"MIT"
] | 1 | 2016-03-16T02:34:08.000Z | 2016-04-20T19:31:30.000Z | examples/ca/cts_lattice_grain.py | cctrunz/landlab | 4e4ef12f4bae82bc5194f1dcc9af8ff1a7c20939 | [
"MIT"
] | null | null | null | #!/usr/env/python
"""
cts_lattice_gas_with_gravity_and_friction.py:
continuous-time stochastic version of a lattice-gas cellular
automaton model.
GT Oct 2014
"""
from __future__ import print_function
import random
import time
from landlab import HexModelGrid
from landlab.ca.celllab_cts import CAPlotter, Transition
from landlab.ca.oriented_hex_cts import OrientedHexCTS
_DEBUG = False
def setup_transition_list(g=1.0, f=0.0):
    """
    Creates and returns a list of Transition() objects to represent state
    transitions for simple granular mechanics model.
    Parameters
    ----------
    g : float, optional
        Rate used for the gravity-driven transitions (default 1.0).
    f : float, optional
        Rate used for the frictional-stop transitions (default 0.0,
        i.e. friction disabled).
    Returns
    -------
    xn_list : list of Transition objects
        List of objects that encode information about the link-state transitions.
    """
    xn_list = []
    # Node-state key (see ns_dict in main()): 0 empty, 1-6 the six motion
    # directions, 7 at rest, 8 wall.  Each Transition maps a (tail, head,
    # orientation) link state to a new one at a given rate.
    # Transitions for particle movement into an empty cell
    xn_list.append(Transition((1, 0, 0), (0, 1, 0), 1., "motion"))
    xn_list.append(Transition((2, 0, 1), (0, 2, 1), 1., "motion"))
    xn_list.append(Transition((3, 0, 2), (0, 3, 2), 1., "motion"))
    xn_list.append(Transition((0, 4, 0), (4, 0, 0), 1., "motion"))
    xn_list.append(Transition((0, 5, 1), (5, 0, 1), 1., "motion"))
    xn_list.append(Transition((0, 6, 2), (6, 0, 2), 1., "motion"))
    # Transitions for wall impact
    xn_list.append(Transition((1, 8, 0), (4, 8, 0), 1.0, "wall rebound"))
    xn_list.append(Transition((2, 8, 1), (5, 8, 1), 1.0, "wall rebound"))
    xn_list.append(Transition((3, 8, 2), (6, 8, 2), 1.0, "wall rebound"))
    xn_list.append(Transition((8, 4, 0), (8, 1, 0), 1.0, "wall rebound"))
    xn_list.append(Transition((8, 5, 1), (8, 2, 1), 1.0, "wall rebound"))
    xn_list.append(Transition((8, 6, 2), (8, 3, 2), 1.0, "wall rebound"))
    # Transitions for wall impact frictional stop
    xn_list.append(Transition((1, 8, 0), (7, 8, 0), f, "wall stop"))
    xn_list.append(Transition((2, 8, 1), (7, 8, 1), f, "wall stop"))
    xn_list.append(Transition((3, 8, 2), (7, 8, 2), f, "wall stop"))
    xn_list.append(Transition((8, 4, 0), (8, 7, 0), f, "wall stop"))
    xn_list.append(Transition((8, 5, 1), (8, 7, 1), f, "wall stop"))
    xn_list.append(Transition((8, 6, 2), (8, 7, 2), f, "wall stop"))
    # Transitions for head-on collision
    xn_list.append(Transition((1, 4, 0), (3, 6, 0), 0.5, "head-on collision"))
    xn_list.append(Transition((1, 4, 0), (5, 2, 0), 0.5, "head-on collision"))
    xn_list.append(Transition((2, 5, 1), (4, 1, 1), 0.5, "head-on collision"))
    xn_list.append(Transition((2, 5, 1), (6, 3, 1), 0.5, "head-on collision"))
    xn_list.append(Transition((3, 6, 2), (1, 4, 2), 0.5, "head-on collision"))
    xn_list.append(Transition((3, 6, 2), (5, 2, 2), 0.5, "head-on collision"))
    xn_list.append(Transition((1, 4, 0), (7, 7, 0), f, "head-on collision"))
    xn_list.append(Transition((2, 5, 1), (7, 7, 1), f, "head-on collision"))
    xn_list.append(Transition((3, 6, 2), (7, 7, 2), f, "head-on collision"))
    # Transitions for glancing collision
    xn_list.append(Transition((1, 3, 0), (3, 1, 0), 1.0, "glancing collision"))
    xn_list.append(Transition((1, 5, 0), (5, 1, 0), 1.0, "glancing collision"))
    xn_list.append(Transition((2, 4, 0), (4, 2, 0), 1.0, "glancing collision"))
    xn_list.append(Transition((6, 4, 0), (4, 6, 0), 1.0, "glancing collision"))
    xn_list.append(Transition((2, 4, 1), (4, 2, 1), 1.0, "glancing collision"))
    xn_list.append(Transition((2, 6, 1), (6, 2, 1), 1.0, "glancing collision"))
    xn_list.append(Transition((1, 5, 1), (5, 1, 1), 1.0, "glancing collision"))
    xn_list.append(Transition((3, 5, 1), (5, 3, 1), 1.0, "glancing collision"))
    xn_list.append(Transition((3, 1, 2), (1, 3, 2), 1.0, "glancing collision"))
    xn_list.append(Transition((3, 5, 2), (5, 3, 2), 1.0, "glancing collision"))
    xn_list.append(Transition((2, 6, 2), (6, 2, 2), 1.0, "glancing collision"))
    xn_list.append(Transition((4, 6, 2), (6, 4, 2), 1.0, "glancing collision"))
    # Transitions for oblique-from-behind collisions
    xn_list.append(Transition((1, 2, 0), (2, 1, 0), 1.0, "oblique"))
    xn_list.append(Transition((1, 6, 0), (6, 1, 0), 1.0, "oblique"))
    xn_list.append(Transition((3, 4, 0), (4, 3, 0), 1.0, "oblique"))
    xn_list.append(Transition((5, 4, 0), (4, 5, 0), 1.0, "oblique"))
    xn_list.append(Transition((2, 1, 1), (1, 2, 1), 1.0, "oblique"))
    xn_list.append(Transition((2, 3, 1), (3, 2, 1), 1.0, "oblique"))
    xn_list.append(Transition((4, 5, 1), (5, 4, 1), 1.0, "oblique"))
    xn_list.append(Transition((6, 5, 1), (5, 6, 1), 1.0, "oblique"))
    xn_list.append(Transition((3, 2, 2), (2, 3, 2), 1.0, "oblique"))
    xn_list.append(Transition((3, 4, 2), (4, 3, 2), 1.0, "oblique"))
    xn_list.append(Transition((1, 6, 2), (6, 1, 2), 1.0, "oblique"))
    xn_list.append(Transition((5, 6, 2), (6, 5, 2), 1.0, "oblique"))
    xn_list.append(Transition((1, 2, 0), (7, 7, 0), f, "oblique"))
    xn_list.append(Transition((1, 6, 0), (7, 7, 0), f, "oblique"))
    xn_list.append(Transition((3, 4, 0), (7, 7, 0), f, "oblique"))
    xn_list.append(Transition((5, 4, 0), (7, 7, 0), f, "oblique"))
    xn_list.append(Transition((2, 1, 1), (7, 7, 1), f, "oblique"))
    xn_list.append(Transition((2, 3, 1), (7, 7, 1), f, "oblique"))
    xn_list.append(Transition((4, 5, 1), (7, 7, 1), f, "oblique"))
    xn_list.append(Transition((6, 5, 1), (7, 7, 1), f, "oblique"))
    xn_list.append(Transition((3, 2, 2), (7, 7, 2), f, "oblique"))
    xn_list.append(Transition((3, 4, 2), (7, 7, 2), f, "oblique"))
    xn_list.append(Transition((1, 6, 2), (7, 7, 2), f, "oblique"))
    xn_list.append(Transition((5, 6, 2), (7, 7, 2), f, "oblique"))
    # Transitions for direct-from-behind collisions
    xn_list.append(Transition((1, 1, 0), (2, 6, 0), 0.5, "behind"))
    xn_list.append(Transition((1, 1, 0), (6, 2, 0), 0.5, "behind"))
    xn_list.append(Transition((4, 4, 0), (3, 5, 0), 0.5, "behind"))
    xn_list.append(Transition((4, 4, 0), (5, 3, 0), 0.5, "behind"))
    xn_list.append(Transition((2, 2, 1), (1, 3, 1), 0.5, "behind"))
    xn_list.append(Transition((2, 2, 1), (3, 1, 1), 0.5, "behind"))
    xn_list.append(Transition((5, 5, 1), (4, 6, 1), 0.5, "behind"))
    xn_list.append(Transition((5, 5, 1), (6, 4, 1), 0.5, "behind"))
    xn_list.append(Transition((3, 3, 2), (2, 4, 2), 0.5, "behind"))
    xn_list.append(Transition((3, 3, 2), (4, 2, 2), 0.5, "behind"))
    xn_list.append(Transition((6, 6, 2), (1, 5, 2), 0.5, "behind"))
    xn_list.append(Transition((6, 6, 2), (5, 1, 2), 0.5, "behind"))
    xn_list.append(Transition((1, 1, 0), (7, 1, 0), f, "behind"))
    xn_list.append(Transition((4, 4, 0), (4, 7, 0), f, "behind"))
    xn_list.append(Transition((2, 2, 1), (7, 2, 1), f, "behind"))
    xn_list.append(Transition((5, 5, 1), (5, 7, 1), f, "behind"))
    xn_list.append(Transition((3, 3, 2), (7, 3, 2), f, "behind"))
    xn_list.append(Transition((6, 6, 2), (6, 7, 2), f, "behind"))
    # Transitions for collision with stationary (resting) particle
    xn_list.append(Transition((1, 7, 0), (7, 2, 0), 0.5, "rest"))
    xn_list.append(Transition((1, 7, 0), (7, 6, 0), 0.5, "rest"))
    xn_list.append(Transition((7, 4, 0), (3, 7, 0), 0.5, "rest"))
    xn_list.append(Transition((7, 4, 0), (5, 7, 0), 0.5, "rest"))
    xn_list.append(Transition((2, 7, 1), (7, 1, 1), 0.5, "rest"))
    xn_list.append(Transition((2, 7, 1), (7, 3, 1), 0.5, "rest"))
    xn_list.append(Transition((7, 5, 1), (4, 7, 1), 0.5, "rest"))
    xn_list.append(Transition((7, 5, 1), (6, 7, 1), 0.5, "rest"))
    xn_list.append(Transition((3, 7, 2), (7, 2, 2), 0.5, "rest"))
    xn_list.append(Transition((3, 7, 2), (7, 4, 2), 0.5, "rest"))
    xn_list.append(Transition((7, 6, 2), (1, 7, 2), 0.5, "rest"))
    xn_list.append(Transition((7, 6, 2), (5, 7, 2), 0.5, "rest"))
    xn_list.append(Transition((1, 7, 0), (7, 7, 0), f, "rest"))
    xn_list.append(Transition((7, 4, 0), (7, 7, 0), f, "rest"))
    xn_list.append(Transition((2, 7, 1), (7, 7, 1), f, "rest"))
    xn_list.append(Transition((7, 5, 1), (7, 7, 1), f, "rest"))
    xn_list.append(Transition((3, 7, 2), (7, 7, 2), f, "rest"))
    xn_list.append(Transition((7, 6, 2), (7, 7, 2), f, "rest"))
    # Gravity rules
    xn_list.append(Transition((1, 0, 0), (7, 0, 0), g, "up to rest"))
    xn_list.append(Transition((1, 1, 0), (7, 1, 0), g, "up to rest"))
    xn_list.append(Transition((1, 2, 0), (7, 2, 0), g, "up to rest"))
    xn_list.append(Transition((1, 3, 0), (7, 3, 0), g, "up to rest"))
    xn_list.append(Transition((1, 4, 0), (7, 4, 0), g, "up to rest"))
    xn_list.append(Transition((1, 5, 0), (7, 5, 0), g, "up to rest"))
    xn_list.append(Transition((1, 6, 0), (7, 6, 0), g, "up to rest"))
    xn_list.append(Transition((1, 7, 0), (7, 7, 0), g, "up to rest"))
    xn_list.append(Transition((0, 1, 0), (0, 7, 0), g, "up to rest"))
    xn_list.append(Transition((1, 1, 0), (1, 7, 0), g, "up to rest"))
    xn_list.append(Transition((2, 1, 0), (2, 7, 0), g, "up to rest"))
    xn_list.append(Transition((3, 1, 0), (3, 7, 0), g, "up to rest"))
    xn_list.append(Transition((4, 1, 0), (4, 7, 0), g, "up to rest"))
    xn_list.append(Transition((5, 1, 0), (5, 7, 0), g, "up to rest"))
    xn_list.append(Transition((6, 1, 0), (6, 7, 0), g, "up to rest"))
    xn_list.append(Transition((7, 1, 0), (7, 7, 0), g, "up to rest"))
    # xn_list.append( Transition((7,0,0), (4,0,0), g, 'rest to down') )
    # xn_list.append( Transition((7,1,0), (4,1,0), g, 'rest to down') )
    # xn_list.append( Transition((7,2,0), (4,2,0), g, 'rest to down') )
    # xn_list.append( Transition((7,3,0), (4,3,0), g, 'rest to down') )
    # xn_list.append( Transition((7,4,0), (4,4,0), g, 'rest to down') )
    # xn_list.append( Transition((7,5,0), (4,5,0), g, 'rest to down') )
    # xn_list.append( Transition((7,6,0), (4,6,0), g, 'rest to down') )
    # xn_list.append( Transition((7,7,0), (4,7,0), g, 'rest to down') )
    xn_list.append(Transition((0, 7, 0), (0, 4, 0), g, "rest to down"))
    # xn_list.append( Transition((1,7,0), (1,4,0), g, 'rest to down') )
    # xn_list.append( Transition((2,7,0), (2,4,0), g, 'rest to down') )
    # xn_list.append( Transition((3,7,0), (3,4,0), g, 'rest to down') )
    # xn_list.append( Transition((4,7,0), (4,4,0), g, 'rest to down') )
    # xn_list.append( Transition((5,7,0), (5,4,0), g, 'rest to down') )
    # xn_list.append( Transition((6,7,0), (6,4,0), g, 'rest to down') )
    # xn_list.append( Transition((7,7,0), (7,4,0), g, 'rest to down') )
    xn_list.append(Transition((7, 0, 2), (3, 0, 2), g, "rest to right-down"))
    xn_list.append(Transition((0, 7, 1), (0, 5, 1), g, "rest to left-down"))
    xn_list.append(Transition((2, 0, 1), (3, 0, 1), g, "right up to right down"))
    xn_list.append(Transition((2, 1, 1), (3, 1, 1), g, "right up to right down"))
    xn_list.append(Transition((2, 2, 1), (3, 2, 1), g, "right up to right down"))
    xn_list.append(Transition((2, 3, 1), (3, 3, 1), g, "right up to right down"))
    xn_list.append(Transition((2, 4, 1), (3, 4, 1), g, "right up to right down"))
    xn_list.append(Transition((2, 5, 1), (3, 5, 1), g, "right up to right down"))
    xn_list.append(Transition((2, 6, 1), (3, 6, 1), g, "right up to right down"))
    xn_list.append(Transition((2, 7, 1), (3, 7, 1), g, "right up to right down"))
    xn_list.append(Transition((0, 2, 1), (0, 3, 1), g, "right up to right down"))
    xn_list.append(Transition((1, 2, 1), (1, 3, 1), g, "right up to right down"))
    xn_list.append(Transition((2, 2, 1), (2, 3, 1), g, "right up to right down"))
    xn_list.append(Transition((3, 2, 1), (3, 3, 1), g, "right up to right down"))
    xn_list.append(Transition((4, 2, 1), (4, 3, 1), g, "right up to right down"))
    xn_list.append(Transition((5, 2, 1), (5, 3, 1), g, "right up to right down"))
    xn_list.append(Transition((6, 2, 1), (6, 3, 1), g, "right up to right down"))
    xn_list.append(Transition((7, 2, 1), (7, 3, 1), g, "right up to right down"))
    xn_list.append(Transition((6, 0, 2), (5, 0, 2), g, "left up to left down"))
    xn_list.append(Transition((6, 1, 2), (5, 1, 2), g, "left up to left down"))
    xn_list.append(Transition((6, 2, 2), (5, 2, 2), g, "left up to left down"))
    xn_list.append(Transition((6, 3, 2), (5, 3, 2), g, "left up to left down"))
    xn_list.append(Transition((6, 4, 2), (5, 4, 2), g, "left up to left down"))
    xn_list.append(Transition((6, 5, 2), (5, 5, 2), g, "left up to left down"))
    xn_list.append(Transition((6, 6, 2), (5, 6, 2), g, "left up to left down"))
    xn_list.append(Transition((6, 7, 2), (5, 7, 2), g, "left up to left down"))
    xn_list.append(Transition((0, 6, 2), (0, 5, 2), g, "left up to left down"))
    xn_list.append(Transition((1, 6, 2), (1, 5, 2), g, "left up to left down"))
    xn_list.append(Transition((2, 6, 2), (2, 5, 2), g, "left up to left down"))
    xn_list.append(Transition((3, 6, 2), (3, 5, 2), g, "left up to left down"))
    xn_list.append(Transition((4, 6, 2), (4, 5, 2), g, "left up to left down"))
    xn_list.append(Transition((5, 6, 2), (5, 5, 2), g, "left up to left down"))
    xn_list.append(Transition((6, 6, 2), (6, 5, 2), g, "left up to left down"))
    xn_list.append(Transition((7, 6, 2), (7, 5, 2), g, "left up to left down"))
    if _DEBUG:
        print()
        print("setup_transition_list(): list has", len(xn_list), "transitions:")
        for t in xn_list:
            print(
                " From state",
                t.from_state,
                "to state",
                t.to_state,
                "at rate",
                t.rate,
                "called",
                t.name,
            )
    return xn_list
def main():
    """Run the lattice-grain CA demo: build a hex grid, seed it with randomly
    oriented particles, then advance the CA in plot-interval steps, plotting
    each step, until run_duration is reached."""
    # INITIALIZE
    # User-defined parameters
    nr = 41
    nc = 61
    g = 0.8
    f = 1.0
    plot_interval = 1.0
    run_duration = 200.0
    report_interval = 5.0 # report interval, in real-time seconds
    p_init = 0.4 # probability that a cell is occupied at start
    plot_every_transition = False
    # Remember the clock time, and calculate when we next want to report
    # progress.
    current_real_time = time.time()
    next_report = current_real_time + report_interval
    # Create a grid
    hmg = HexModelGrid(nr, nc, 1.0, orientation="vertical", reorient_links=True)
    # Close the grid boundaries
    # hmg.set_closed_nodes(hmg.open_boundary_nodes)
    # Set up the states and pair transitions.
    # Transition data here represent particles moving on a lattice: one state
    # per direction (for 6 directions), plus an empty state, a stationary
    # state, and a wall state.
    ns_dict = {
        0: "empty",
        1: "moving up",
        2: "moving right and up",
        3: "moving right and down",
        4: "moving down",
        5: "moving left and down",
        6: "moving left and up",
        7: "rest",
        8: "wall",
    }
    xn_list = setup_transition_list(g, f)
    # Create data and initialize values.
    node_state_grid = hmg.add_zeros("node", "node_state_grid")
    # Make the grid boundary all wall particles
    node_state_grid[hmg.boundary_nodes] = 8
    # Seed the grid interior with randomly oriented particles
    for i in hmg.core_nodes:
        if random.random() < p_init:
            node_state_grid[i] = random.randint(1, 7)
    # Create the CA model
    ca = OrientedHexCTS(hmg, ns_dict, xn_list, node_state_grid)
    # Create a CAPlotter object for handling screen display
    ca_plotter = CAPlotter(ca)
    # Plot the initial grid
    ca_plotter.update_plot()
    # RUN
    current_time = 0.0
    while current_time < run_duration:
        # Once in a while, print out simulation and real time to let the user
        # know that the sim is running ok
        current_real_time = time.time()
        if current_real_time >= next_report:
            print(
                "Current sim time",
                current_time,
                "(",
                100 * current_time / run_duration,
                "%)",
            )
            next_report = current_real_time + report_interval
        # Run the model forward in time until the next output step
        ca.run(
            current_time + plot_interval,
            ca.node_state,
            plot_each_transition=plot_every_transition,
            plotter=ca_plotter,
        )
        current_time += plot_interval
        # Plot the current grid
        ca_plotter.update_plot()
    # FINALIZE
    # Plot
    ca_plotter.finalize()
if __name__ == "__main__":
main()
| 48.164223 | 81 | 0.577935 |
ace53161a5eeae40b9dd9ccd050e6a490ce4e10a | 833 | py | Python | sls-dbt/handler.py | anwartarique/dbt-fargate | 3601cbfc789656bc38fff5cdedf597b6613d3376 | [
"Apache-2.0"
] | 8 | 2019-10-07T12:58:26.000Z | 2019-11-14T04:51:18.000Z | sls-dbt/handler.py | anwartarique/dbt-fargate | 3601cbfc789656bc38fff5cdedf597b6613d3376 | [
"Apache-2.0"
] | null | null | null | sls-dbt/handler.py | anwartarique/dbt-fargate | 3601cbfc789656bc38fff5cdedf597b6613d3376 | [
"Apache-2.0"
] | 1 | 2019-10-07T13:27:04.000Z | 2019-10-07T13:27:04.000Z | import boto3
import os
FARGATE_VPC = os.environ['FARGATE_VPC']
FARGATE_SN1 = os.environ['FARGATE_SN1']
FARGATE_SN2 = os.environ['FARGATE_SN2']
FARGATE_SG = os.environ['FARGATE_SG']
FARGATE_CLUSTER = os.environ['FARGATE_CLUSTER']
def hello(event, context):
    """Lambda handler that launches one dbt task on ECS Fargate.

    The standard Lambda ``event`` and ``context`` arguments are accepted but
    not inspected.  Returns the stringified ECS ``run_task`` response.
    """
    ecs = boto3.client('ecs')
    # Network placement comes from the environment-configured VPC subnets.
    network_configuration = {
        'awsvpcConfiguration': {
            'subnets': [FARGATE_SN1, FARGATE_SN2],
            'assignPublicIp': 'ENABLED',
        }
    }
    response = ecs.run_task(
        cluster=FARGATE_CLUSTER,
        launchType='FARGATE',
        taskDefinition='dbt-fargate:3', # replace with your task definition name and revision
        count=1,
        platformVersion='LATEST',
        networkConfiguration=network_configuration,
    )
    print(response)
    return str(response)
| 26.03125 | 93 | 0.595438 |
ace53260f179f27eab94cc79a052ee3d21fc67b1 | 415 | py | Python | ocean_lib/common/agreements/consumable.py | abhik-99/ocean.py | dce5ad09e3c652083afc27b87a825bbb07ae0550 | [
"Apache-2.0"
] | null | null | null | ocean_lib/common/agreements/consumable.py | abhik-99/ocean.py | dce5ad09e3c652083afc27b87a825bbb07ae0550 | [
"Apache-2.0"
] | null | null | null | ocean_lib/common/agreements/consumable.py | abhik-99/ocean.py | dce5ad09e3c652083afc27b87a825bbb07ae0550 | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2021 Ocean Protocol Foundation
# SPDX-License-Identifier: Apache-2.0
#
class ConsumableCodes:
    """Integer status codes describing the outcome of an asset consumability check."""
    OK = 0
    ASSET_DISABLED = 1
    CONNECTIVITY_FAIL = 2
    CREDENTIAL_NOT_IN_ALLOW_LIST = 3
    CREDENTIAL_IN_DENY_LIST = 4
class MalformedCredential(Exception):
    """Raised for a credential that is not well formed."""
    pass
class AssetNotConsumable(Exception):
    """Raised when an asset fails a consumability check.

    :param consumable_code: numeric code explaining why the asset is not
        consumable (exposed as the ``consumable_code`` attribute).
    """
    def __init__(self, consumable_code):
        # Forward the code to Exception.__init__ so str(exc) and exc.args
        # carry useful information; the original left them empty.
        super(AssetNotConsumable, self).__init__(consumable_code)
        self.consumable_code = consumable_code
| 18.863636 | 46 | 0.739759 |
ace5328468140f9febc402f64b6d6e2135ffa2e4 | 54,328 | py | Python | webStorm-APICloud/python_tools/Lib/test/test_itertools.py | zzr925028429/androidyianyan | 8967fdba92473e8e65ee222515dfc54cdae5bb0b | [
"MIT"
] | null | null | null | webStorm-APICloud/python_tools/Lib/test/test_itertools.py | zzr925028429/androidyianyan | 8967fdba92473e8e65ee222515dfc54cdae5bb0b | [
"MIT"
] | null | null | null | webStorm-APICloud/python_tools/Lib/test/test_itertools.py | zzr925028429/androidyianyan | 8967fdba92473e8e65ee222515dfc54cdae5bb0b | [
"MIT"
] | null | null | null | import unittest
from test import test_support
from itertools import *
from weakref import proxy
import sys
import operator
import random
maxsize = test_support.MAX_Py_ssize_t
minsize = -maxsize-1
def onearg(x):
    """Test function of one argument: returns its input doubled."""
    doubled = 2 * x
    return doubled
def errfunc(*args):
    """Test helper that unconditionally raises ValueError; arguments are ignored."""
    raise ValueError
def gen3():
    """Non-restartable source sequence yielding 0, 1, 2."""
    value = 0
    while value < 3:
        yield value
        value += 1
def isEven(x):
    """Test predicate: True when x is evenly divisible by two."""
    remainder = x % 2
    return remainder == 0
def isOdd(x):
    """Test predicate: True when x leaves remainder one modulo two."""
    remainder = x % 2
    return remainder == 1
class StopNow:
    'Class emulating an empty iterable.'
    def __iter__(self):
        # The instance is its own iterator.
        return self
    def next(self):
        # Python 2 iterator protocol: exhausted immediately.
        raise StopIteration
def take(n, seq):
    """Return the first n items of seq as a list (partially consumes an
    infinite or long iterable)."""
    head = islice(seq, n)
    return list(head)
def prod(iterable):
    """Return the product of all items; the empty product is 1."""
    result = 1
    for factor in iterable:
        result = result * factor
    return result
def fact(n):
    'Factorial'
    # Iterative product of 2..n; empty range gives 1 for n <= 1,
    # matching prod(range(1, n+1)).
    result = 1
    for k in range(2, n + 1):
        result *= k
    return result
class TestBasicOps(unittest.TestCase):
    def test_chain(self):
        # Verify chain() against a pure-python reference implementation.
        def chain2(*iterables):
            'Pure python version in the docs'
            for it in iterables:
                for element in it:
                    yield element
        for c in (chain, chain2):
            self.assertEqual(list(c('abc', 'def')), list('abcdef'))
            self.assertEqual(list(c('abc')), list('abc'))
            self.assertEqual(list(c('')), [])
            self.assertEqual(take(4, c('abc', 'def')), list('abcd'))
            self.assertRaises(TypeError, list,c(2, 3))
    def test_chain_from_iterable(self):
        # chain.from_iterable takes a single iterable of iterables.
        self.assertEqual(list(chain.from_iterable(['abc', 'def'])), list('abcdef'))
        self.assertEqual(list(chain.from_iterable(['abc'])), list('abc'))
        self.assertEqual(list(chain.from_iterable([''])), [])
        self.assertEqual(take(4, chain.from_iterable(['abc', 'def'])), list('abcd'))
        self.assertRaises(TypeError, list, chain.from_iterable([2, 3]))
    def test_combinations(self):
        # Argument-validation cases first, then cross-check the C
        # implementation against two pure-python references.
        self.assertRaises(TypeError, combinations, 'abc') # missing r argument
        self.assertRaises(TypeError, combinations, 'abc', 2, 1) # too many arguments
        self.assertRaises(TypeError, combinations, None) # pool is not iterable
        self.assertRaises(ValueError, combinations, 'abc', -2) # r is negative
        self.assertRaises(ValueError, combinations, 'abc', 32) # r is too big
        self.assertEqual(list(combinations(range(4), 3)),
                         [(0,1,2), (0,1,3), (0,2,3), (1,2,3)])
        def combinations1(iterable, r):
            'Pure python version shown in the docs'
            pool = tuple(iterable)
            n = len(pool)
            indices = range(r)
            yield tuple(pool[i] for i in indices)
            while 1:
                for i in reversed(range(r)):
                    if indices[i] != i + n - r:
                        break
                else:
                    return
                indices[i] += 1
                for j in range(i+1, r):
                    indices[j] = indices[j-1] + 1
                yield tuple(pool[i] for i in indices)
        def combinations2(iterable, r):
            'Pure python version shown in the docs'
            pool = tuple(iterable)
            n = len(pool)
            for indices in permutations(range(n), r):
                if sorted(indices) == list(indices):
                    yield tuple(pool[i] for i in indices)
        for n in range(7):
            values = [5*x-12 for x in range(n)]
            for r in range(n+1):
                result = list(combinations(values, r))
                self.assertEqual(len(result), fact(n) / fact(r) / fact(n-r)) # right number of combs
                self.assertEqual(len(result), len(set(result))) # no repeats
                self.assertEqual(result, sorted(result)) # lexicographic order
                for c in result:
                    self.assertEqual(len(c), r) # r-length combinations
                    self.assertEqual(len(set(c)), r) # no duplicate elements
                    self.assertEqual(list(c), sorted(c)) # keep original ordering
                    self.assert_(all(e in values for e in c)) # elements taken from input iterable
                    self.assertEqual(list(c),
                                     [e for e in values if e in c]) # comb is a subsequence of the input iterable
                self.assertEqual(result, list(combinations1(values, r))) # matches first pure python version
                self.assertEqual(result, list(combinations2(values, r))) # matches first pure python version
        # Test implementation detail: tuple re-use
        self.assertEqual(len(set(map(id, combinations('abcde', 3)))), 1)
        self.assertNotEqual(len(set(map(id, list(combinations('abcde', 3))))), 1)
    def test_permutations(self):
        # Argument-validation cases first, then cross-check the C
        # implementation against two pure-python references.
        self.assertRaises(TypeError, permutations) # too few arguments
        self.assertRaises(TypeError, permutations, 'abc', 2, 1) # too many arguments
        self.assertRaises(TypeError, permutations, None) # pool is not iterable
        self.assertRaises(ValueError, permutations, 'abc', -2) # r is negative
        self.assertRaises(ValueError, permutations, 'abc', 32) # r is too big
        self.assertRaises(TypeError, permutations, 'abc', 's') # r is not an int or None
        self.assertEqual(list(permutations(range(3), 2)),
                         [(0,1), (0,2), (1,0), (1,2), (2,0), (2,1)])
        def permutations1(iterable, r=None):
            'Pure python version shown in the docs'
            pool = tuple(iterable)
            n = len(pool)
            r = n if r is None else r
            indices = range(n)
            cycles = range(n, n-r, -1)
            yield tuple(pool[i] for i in indices[:r])
            while n:
                for i in reversed(range(r)):
                    cycles[i] -= 1
                    if cycles[i] == 0:
                        indices[i:] = indices[i+1:] + indices[i:i+1]
                        cycles[i] = n - i
                    else:
                        j = cycles[i]
                        indices[i], indices[-j] = indices[-j], indices[i]
                        yield tuple(pool[i] for i in indices[:r])
                        break
                else:
                    return
        def permutations2(iterable, r=None):
            'Pure python version shown in the docs'
            pool = tuple(iterable)
            n = len(pool)
            r = n if r is None else r
            for indices in product(range(n), repeat=r):
                if len(set(indices)) == r:
                    yield tuple(pool[i] for i in indices)
        for n in range(7):
            values = [5*x-12 for x in range(n)]
            for r in range(n+1):
                result = list(permutations(values, r))
                self.assertEqual(len(result), fact(n) / fact(n-r)) # right number of perms
                self.assertEqual(len(result), len(set(result))) # no repeats
                self.assertEqual(result, sorted(result)) # lexicographic order
                for p in result:
                    self.assertEqual(len(p), r) # r-length permutations
                    self.assertEqual(len(set(p)), r) # no duplicate elements
                    self.assert_(all(e in values for e in p)) # elements taken from input iterable
                self.assertEqual(result, list(permutations1(values, r))) # matches first pure python version
                self.assertEqual(result, list(permutations2(values, r))) # matches first pure python version
                if r == n:
                    self.assertEqual(result, list(permutations(values, None))) # test r as None
                    self.assertEqual(result, list(permutations(values))) # test default r
        # Test implementation detail: tuple re-use
        self.assertEqual(len(set(map(id, permutations('abcde', 3)))), 1)
        self.assertNotEqual(len(set(map(id, list(permutations('abcde', 3))))), 1)
    def test_count(self):
        # Python 2 semantics throughout: zip() returns a list, count objects
        # advance via .next(), and sys.maxint bounds exercise the int/long
        # boundary in count's repr.
        self.assertEqual(zip('abc',count()), [('a', 0), ('b', 1), ('c', 2)])
        self.assertEqual(zip('abc',count(3)), [('a', 3), ('b', 4), ('c', 5)])
        self.assertEqual(take(2, zip('abc',count(3))), [('a', 3), ('b', 4)])
        self.assertEqual(take(2, zip('abc',count(-1))), [('a', -1), ('b', 0)])
        self.assertEqual(take(2, zip('abc',count(-3))), [('a', -3), ('b', -2)])
        self.assertRaises(TypeError, count, 2, 3)
        self.assertRaises(TypeError, count, 'a')
        self.assertEqual(list(islice(count(maxsize-5), 10)), range(maxsize-5, maxsize+5))
        self.assertEqual(list(islice(count(-maxsize-5), 10)), range(-maxsize-5, -maxsize+5))
        c = count(3)
        self.assertEqual(repr(c), 'count(3)')
        c.next()
        self.assertEqual(repr(c), 'count(4)')
        c = count(-9)
        self.assertEqual(repr(c), 'count(-9)')
        c.next()
        self.assertEqual(c.next(), -8)
        for i in (-sys.maxint-5, -sys.maxint+5 ,-10, -1, 0, 10, sys.maxint-5, sys.maxint+5):
            # Test repr (ignoring the L in longs)
            r1 = repr(count(i)).replace('L', '')
            r2 = 'count(%r)'.__mod__(i).replace('L', '')
            self.assertEqual(r1, r2)
    def test_cycle(self):
        # cycle() repeats a finite iterable forever; empty input yields nothing.
        self.assertEqual(take(10, cycle('abc')), list('abcabcabca'))
        self.assertEqual(list(cycle('')), [])
        self.assertRaises(TypeError, cycle)
        self.assertRaises(TypeError, cycle, 5)
        self.assertEqual(list(islice(cycle(gen3()),10)), [0,1,2,0,1,2,0,1,2,0])
    def test_groupby(self):
        # Check whether it accepts arguments correctly
        self.assertEqual([], list(groupby([])))
        self.assertEqual([], list(groupby([], key=id)))
        self.assertRaises(TypeError, list, groupby('abc', []))
        self.assertRaises(TypeError, groupby, None)
        self.assertRaises(TypeError, groupby, 'abc', lambda x:x, 10)
        # Check normal input
        s = [(0, 10, 20), (0, 11,21), (0,12,21), (1,13,21), (1,14,22),
             (2,15,22), (3,16,23), (3,17,23)]
        dup = []
        for k, g in groupby(s, lambda r:r[0]):
            for elem in g:
                self.assertEqual(k, elem[0])
                dup.append(elem)
        self.assertEqual(s, dup)
        # Check nested case
        dup = []
        for k, g in groupby(s, lambda r:r[0]):
            for ik, ig in groupby(g, lambda r:r[2]):
                for elem in ig:
                    self.assertEqual(k, elem[0])
                    self.assertEqual(ik, elem[2])
                    dup.append(elem)
        self.assertEqual(s, dup)
        # Check case where inner iterator is not used
        keys = [k for k, g in groupby(s, lambda r:r[0])]
        expectedkeys = set([r[0] for r in s])
        self.assertEqual(set(keys), expectedkeys)
        self.assertEqual(len(keys), len(expectedkeys))
        # Exercise pipes and filters style
        s = 'abracadabra'
        # sort s | uniq
        r = [k for k, g in groupby(sorted(s))]
        self.assertEqual(r, ['a', 'b', 'c', 'd', 'r'])
        # sort s | uniq -d
        r = [k for k, g in groupby(sorted(s)) if list(islice(g,1,2))]
        self.assertEqual(r, ['a', 'b', 'r'])
        # sort s | uniq -c
        r = [(len(list(g)), k) for k, g in groupby(sorted(s))]
        self.assertEqual(r, [(5, 'a'), (2, 'b'), (1, 'c'), (1, 'd'), (2, 'r')])
        # sort s | uniq -c | sort -rn | head -3
        r = sorted([(len(list(g)) , k) for k, g in groupby(sorted(s))], reverse=True)[:3]
        self.assertEqual(r, [(5, 'a'), (2, 'r'), (2, 'b')])
        # iter.next failure
        class ExpectedError(Exception):
            pass
        def delayed_raise(n=0):
            for i in range(n):
                yield 'yo'
            raise ExpectedError
        def gulp(iterable, keyp=None, func=list):
            return [func(g) for k, g in groupby(iterable, keyp)]
        # iter.next failure on outer object
        self.assertRaises(ExpectedError, gulp, delayed_raise(0))
        # iter.next failure on inner object
        self.assertRaises(ExpectedError, gulp, delayed_raise(1))
        # __cmp__ failure
        class DummyCmp:
            def __cmp__(self, dst):
                raise ExpectedError
        s = [DummyCmp(), DummyCmp(), None]
        # __cmp__ failure on outer object
        self.assertRaises(ExpectedError, gulp, s, func=id)
        # __cmp__ failure on inner object
        self.assertRaises(ExpectedError, gulp, s)
        # keyfunc failure
        def keyfunc(obj):
            if keyfunc.skip > 0:
                keyfunc.skip -= 1
                return obj
            else:
                raise ExpectedError
        # keyfunc failure on outer object
        keyfunc.skip = 0
        self.assertRaises(ExpectedError, gulp, [None], keyfunc)
        keyfunc.skip = 1
        self.assertRaises(ExpectedError, gulp, [None, None], keyfunc)
    def test_ifilter(self):
        # ifilter keeps items where the predicate (or truth test, for None)
        # is true; also exercises argument-count and type errors.
        self.assertEqual(list(ifilter(isEven, range(6))), [0,2,4])
        self.assertEqual(list(ifilter(None, [0,1,0,2,0])), [1,2])
        self.assertEqual(list(ifilter(bool, [0,1,0,2,0])), [1,2])
        self.assertEqual(take(4, ifilter(isEven, count())), [0,2,4,6])
        self.assertRaises(TypeError, ifilter)
        self.assertRaises(TypeError, ifilter, lambda x:x)
        self.assertRaises(TypeError, ifilter, lambda x:x, range(6), 7)
        self.assertRaises(TypeError, ifilter, isEven, 3)
        self.assertRaises(TypeError, ifilter(range(6), range(6)).next)
    def test_ifilterfalse(self):
        # ifilterfalse keeps items where the predicate (or truth test, for
        # None) is false; mirrors the test_ifilter cases.
        self.assertEqual(list(ifilterfalse(isEven, range(6))), [1,3,5])
        self.assertEqual(list(ifilterfalse(None, [0,1,0,2,0])), [0,0,0])
        self.assertEqual(list(ifilterfalse(bool, [0,1,0,2,0])), [0,0,0])
        self.assertEqual(take(4, ifilterfalse(isEven, count())), [1,3,5,7])
        self.assertRaises(TypeError, ifilterfalse)
        self.assertRaises(TypeError, ifilterfalse, lambda x:x)
        self.assertRaises(TypeError, ifilterfalse, lambda x:x, range(6), 7)
        self.assertRaises(TypeError, ifilterfalse, isEven, 3)
        self.assertRaises(TypeError, ifilterfalse(range(6), range(6)).next)
def test_izip(self):
ans = [(x,y) for x, y in izip('abc',count())]
self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)])
self.assertEqual(list(izip('abc', range(6))), zip('abc', range(6)))
self.assertEqual(list(izip('abcdef', range(3))), zip('abcdef', range(3)))
self.assertEqual(take(3,izip('abcdef', count())), zip('abcdef', range(3)))
self.assertEqual(list(izip('abcdef')), zip('abcdef'))
self.assertEqual(list(izip()), zip())
self.assertRaises(TypeError, izip, 3)
self.assertRaises(TypeError, izip, range(3), 3)
# Check tuple re-use (implementation detail)
self.assertEqual([tuple(list(pair)) for pair in izip('abc', 'def')],
zip('abc', 'def'))
self.assertEqual([pair for pair in izip('abc', 'def')],
zip('abc', 'def'))
ids = map(id, izip('abc', 'def'))
self.assertEqual(min(ids), max(ids))
ids = map(id, list(izip('abc', 'def')))
self.assertEqual(len(dict.fromkeys(ids)), len(ids))
    def test_iziplongest(self):
        # izip_longest pads the shorter inputs; map(None, *args) is the
        # Python 2 reference behavior (pads with None to the longest input).
        for args in [
                ['abc', range(6)],
                [range(6), 'abc'],
                [range(1000), range(2000,2100), range(3000,3050)],
                [range(1000), range(0), range(3000,3050), range(1200), range(1500)],
                [range(1000), range(0), range(3000,3050), range(1200), range(1500), range(0)],
            ]:
            target = map(None, *args)
            self.assertEqual(list(izip_longest(*args)), target)
            self.assertEqual(list(izip_longest(*args, **{})), target)
            target = [tuple((e is None and 'X' or e) for e in t) for t in target]   # Replace None fills with 'X'
            self.assertEqual(list(izip_longest(*args, **dict(fillvalue='X'))), target)
        self.assertEqual(take(3,izip_longest('abcdef', count())), zip('abcdef', range(3))) # take 3 from infinite input
        # Degenerate argument lists behave like zip.
        self.assertEqual(list(izip_longest()), zip())
        self.assertEqual(list(izip_longest([])), zip([]))
        self.assertEqual(list(izip_longest('abcdef')), zip('abcdef'))
        self.assertEqual(list(izip_longest('abc', 'defg', **{})), map(None, 'abc', 'defg')) # empty keyword dict
        self.assertRaises(TypeError, izip_longest, 3)
        self.assertRaises(TypeError, izip_longest, range(3), 3)
        # Only 'fillvalue' is accepted as a keyword; anything else is an error.
        for stmt in [
            "izip_longest('abc', fv=1)",
            "izip_longest('abc', fillvalue=1, bogus_keyword=None)",
        ]:
            try:
                eval(stmt, globals(), locals())
            except TypeError:
                pass
            else:
                self.fail('Did not raise Type in:  ' + stmt)
        # Check tuple re-use (implementation detail): same recycling behavior
        # as izip — shared tuple when not referenced, distinct tuples when kept.
        self.assertEqual([tuple(list(pair)) for pair in izip_longest('abc', 'def')],
                         zip('abc', 'def'))
        self.assertEqual([pair for pair in izip_longest('abc', 'def')],
                         zip('abc', 'def'))
        ids = map(id, izip_longest('abc', 'def'))
        self.assertEqual(min(ids), max(ids))
        ids = map(id, list(izip_longest('abc', 'def')))
        self.assertEqual(len(dict.fromkeys(ids)), len(ids))
    def test_product(self):
        # Known small cases, including every position for a zero-length input
        # (any empty pool empties the whole product).
        for args, result in [
            ([], [()]),                     # zero iterables
            (['ab'], [('a',), ('b',)]),     # one iterable
            ([range(2), range(3)], [(0,0), (0,1), (0,2), (1,0), (1,1), (1,2)]),     # two iterables
            ([range(0), range(2), range(3)], []),           # first iterable with zero length
            ([range(2), range(0), range(3)], []),           # middle iterable with zero length
            ([range(2), range(3), range(0)], []),           # last iterable with zero length
            ]:
            self.assertEqual(list(product(*args)), result)
            # repeat=r must be equivalent to passing the argument list r times.
            for r in range(4):
                self.assertEqual(list(product(*(args*r))),
                                 list(product(*args, **dict(repeat=r))))
        self.assertEqual(len(list(product(*[range(7)]*6))), 7**6)
        self.assertRaises(TypeError, product, range(6), None)

        # Reference implementation using odometer-style index incrementing.
        def product1(*args, **kwds):
            pools = map(tuple, args) * kwds.get('repeat', 1)
            n = len(pools)
            if n == 0:
                yield ()
                return
            if any(len(pool) == 0 for pool in pools):
                return
            indices = [0] * n
            yield tuple(pool[i] for pool, i in zip(pools, indices))
            while 1:
                for i in reversed(range(n)):  # right to left
                    if indices[i] == len(pools[i]) - 1:
                        continue
                    indices[i] += 1
                    for j in range(i+1, n):
                        indices[j] = 0
                    yield tuple(pool[i] for pool, i in zip(pools, indices))
                    break
                else:
                    return

        def product2(*args, **kwds):
            'Pure python version used in docs'
            pools = map(tuple, args) * kwds.get('repeat', 1)
            result = [[]]
            for pool in pools:
                result = [x+[y] for x in result for y in pool]
            for prod in result:
                yield tuple(prod)

        # Randomized cross-check of product() against both pure-Python
        # reference implementations, over mixed argument types.
        argtypes = ['', 'abc', '', xrange(0), xrange(4), dict(a=1, b=2, c=3),
                    set('abcdefg'), range(11), tuple(range(13))]
        for i in range(100):
            args = [random.choice(argtypes) for j in range(random.randrange(5))]
            expected_len = prod(map(len, args))
            self.assertEqual(len(list(product(*args))), expected_len)
            self.assertEqual(list(product(*args)), list(product1(*args)))
            self.assertEqual(list(product(*args)), list(product2(*args)))
            # Same results when the arguments are one-shot iterators.
            args = map(iter, args)
            self.assertEqual(len(list(product(*args))), expected_len)

        # Test implementation detail:  tuple re-use (single shared tuple when
        # unreferenced, distinct tuples when the results are kept alive).
        self.assertEqual(len(set(map(id, product('abc', 'def')))), 1)
        self.assertNotEqual(len(set(map(id, list(product('abc', 'def'))))), 1)
    def test_repeat(self):
        # Bounded and unbounded repetition; non-positive counts yield nothing.
        self.assertEqual(zip(xrange(3),repeat('a')),
                         [(0, 'a'), (1, 'a'), (2, 'a')])
        self.assertEqual(list(repeat('a', 3)), ['a', 'a', 'a'])
        self.assertEqual(take(3, repeat('a')), ['a', 'a', 'a'])
        self.assertEqual(list(repeat('a', 0)), [])
        self.assertEqual(list(repeat('a', -3)), [])
        # Argument checking: no args, too many args, non-integer count.
        self.assertRaises(TypeError, repeat)
        self.assertRaises(TypeError, repeat, None, 3, 4)
        self.assertRaises(TypeError, repeat, None, 'a')
        # repr shows the element, and the remaining count when bounded;
        # the count in the repr drops to 0 once the iterator is exhausted.
        r = repeat(1+0j)
        self.assertEqual(repr(r), 'repeat((1+0j))')
        r = repeat(1+0j, 5)
        self.assertEqual(repr(r), 'repeat((1+0j), 5)')
        list(r)
        self.assertEqual(repr(r), 'repeat((1+0j), 0)')
    def test_imap(self):
        # Multi-argument mapping; func=None tuples the arguments (Py2 map(None)
        # behavior), and iteration stops with the shortest input.
        self.assertEqual(list(imap(operator.pow, range(3), range(1,7))),
                         [0**1, 1**2, 2**3])
        self.assertEqual(list(imap(None, 'abc', range(5))),
                         [('a',0),('b',1),('c',2)])
        self.assertEqual(list(imap(None, 'abc', count())),
                         [('a',0),('b',1),('c',2)])
        self.assertEqual(take(2,imap(None, 'abc', count())),
                         [('a',0),('b',1)])
        self.assertEqual(list(imap(operator.pow, [])), [])
        # Argument checking and error propagation from the mapped function.
        self.assertRaises(TypeError, imap)
        self.assertRaises(TypeError, imap, operator.neg)
        self.assertRaises(TypeError, imap(10, range(5)).next)
        self.assertRaises(ValueError, imap(errfunc, [4], [5]).next)
        self.assertRaises(TypeError, imap(onearg, [4], [5]).next)
    def test_starmap(self):
        # starmap unpacks each element as the argument tuple for func.
        self.assertEqual(list(starmap(operator.pow, zip(range(3), range(1,7)))),
                         [0**1, 1**2, 2**3])
        self.assertEqual(take(3, starmap(operator.pow, izip(count(), count(1)))),
                         [0**1, 1**2, 2**3])
        self.assertEqual(list(starmap(operator.pow, [])), [])
        # Elements may be any iterable, not just tuples.
        self.assertEqual(list(starmap(operator.pow, [iter([4,5])])), [4**5])
        self.assertRaises(TypeError, list, starmap(operator.pow, [None]))
        # Argument checking and error propagation from func.
        self.assertRaises(TypeError, starmap)
        self.assertRaises(TypeError, starmap, operator.pow, [(4,5)], 'extra')
        self.assertRaises(TypeError, starmap(10, [(4,5)]).next)
        self.assertRaises(ValueError, starmap(errfunc, [(4,5)]).next)
        self.assertRaises(TypeError, starmap(onearg, [(4,5)]).next)
    def test_islice(self):
        for args in [          # islice(args) should agree with range(args)
                (10, 20, 3),
                (10, 3, 20),
                (10, 20),
                (10, 3),
                (20,)
                ]:
            self.assertEqual(list(islice(xrange(100), *args)), range(*args))

        for args, tgtargs in [  # Stop when seqn is exhausted
                ((10, 110, 3), ((10, 100, 3))),
                ((10, 110), ((10, 100))),
                ((110,), (100,))
                ]:
            self.assertEqual(list(islice(xrange(100), *args)), range(*tgtargs))

        # Test stop=None: an unbounded stop in each argument position.
        self.assertEqual(list(islice(xrange(10), None)), range(10))
        self.assertEqual(list(islice(xrange(10), None, None)), range(10))
        self.assertEqual(list(islice(xrange(10), None, None, None)), range(10))
        self.assertEqual(list(islice(xrange(10), 2, None)), range(2, 10))
        self.assertEqual(list(islice(xrange(10), 1, None, 2)), range(1, 10, 2))

        # Test number of items consumed     SF #1171417
        # islice must consume only up to its stop from the underlying iterator.
        it = iter(range(10))
        self.assertEqual(list(islice(it, 3)), range(3))
        self.assertEqual(list(it), range(3, 10))

        # Test invalid arguments: negative indices, negative/zero step,
        # and non-integer index values are rejected.
        self.assertRaises(TypeError, islice, xrange(10))
        self.assertRaises(TypeError, islice, xrange(10), 1, 2, 3, 4)
        self.assertRaises(ValueError, islice, xrange(10), -5, 10, 1)
        self.assertRaises(ValueError, islice, xrange(10), 1, -5, -1)
        self.assertRaises(ValueError, islice, xrange(10), 1, 10, -1)
        self.assertRaises(ValueError, islice, xrange(10), 1, 10, 0)
        self.assertRaises(ValueError, islice, xrange(10), 'a')
        self.assertRaises(ValueError, islice, xrange(10), 'a', 1)
        self.assertRaises(ValueError, islice, xrange(10), 1, 'a')
        self.assertRaises(ValueError, islice, xrange(10), 'a', 1, 1)
        self.assertRaises(ValueError, islice, xrange(10), 1, 'a', 1)
        # A maxsize (sys.maxint) stop is accepted.
        self.assertEqual(len(list(islice(count(), 1, 10, maxsize))), 1)
    def test_takewhile(self):
        # Yields the leading run of elements for which the predicate is true.
        data = [1, 3, 5, 20, 2, 4, 6, 8]
        underten = lambda x: x<10
        self.assertEqual(list(takewhile(underten, data)), [1, 3, 5])
        self.assertEqual(list(takewhile(underten, [])), [])
        # Argument checking and error propagation.
        self.assertRaises(TypeError, takewhile)
        self.assertRaises(TypeError, takewhile, operator.pow)
        self.assertRaises(TypeError, takewhile, operator.pow, [(4,5)], 'extra')
        self.assertRaises(TypeError, takewhile(10, [(4,5)]).next)
        self.assertRaises(ValueError, takewhile(errfunc, [(4,5)]).next)
        # Once the predicate fails, the iterator is permanently exhausted.
        t = takewhile(bool, [1, 1, 1, 0, 0, 0])
        self.assertEqual(list(t), [1, 1, 1])
        self.assertRaises(StopIteration, t.next)
    def test_dropwhile(self):
        # Skips the leading run where the predicate holds, then yields the
        # rest unconditionally (no further predicate checks).
        data = [1, 3, 5, 20, 2, 4, 6, 8]
        underten = lambda x: x<10
        self.assertEqual(list(dropwhile(underten, data)), [20, 2, 4, 6, 8])
        self.assertEqual(list(dropwhile(underten, [])), [])
        # Argument checking and error propagation.
        self.assertRaises(TypeError, dropwhile)
        self.assertRaises(TypeError, dropwhile, operator.pow)
        self.assertRaises(TypeError, dropwhile, operator.pow, [(4,5)], 'extra')
        self.assertRaises(TypeError, dropwhile(10, [(4,5)]).next)
        self.assertRaises(ValueError, dropwhile(errfunc, [(4,5)]).next)
    def test_tee(self):
        n = 200
        def irange(n):
            # One-shot generator: forces tee to buffer rather than re-iterate.
            for i in xrange(n):
                yield i

        a, b = tee([])        # test empty iterator
        self.assertEqual(list(a), [])
        self.assertEqual(list(b), [])

        a, b = tee(irange(n)) # test 100% interleaved
        self.assertEqual(zip(a,b), zip(range(n),range(n)))

        a, b = tee(irange(n)) # test 0% interleaved
        self.assertEqual(list(a), range(n))
        self.assertEqual(list(b), range(n))

        a, b = tee(irange(n)) # test dealloc of leading iterator
        for i in xrange(100):
            self.assertEqual(a.next(), i)
        del a
        self.assertEqual(list(b), range(n))

        a, b = tee(irange(n)) # test dealloc of trailing iterator
        for i in xrange(100):
            self.assertEqual(a.next(), i)
        del b
        self.assertEqual(list(a), range(100, n))

        for j in xrange(5):   # test randomly interleaved
            order = [0]*n + [1]*n
            random.shuffle(order)
            lists = ([], [])
            its = tee(irange(n))
            for i in order:
                value = its[i].next()
                lists[i].append(value)
            self.assertEqual(lists[0], range(n))
            self.assertEqual(lists[1], range(n))

        # test argument format checking
        self.assertRaises(TypeError, tee)
        self.assertRaises(TypeError, tee, 3)
        self.assertRaises(TypeError, tee, [1,2], 'x')
        self.assertRaises(TypeError, tee, [1,2], 3, 'x')

        # tee object should be instantiable: type(a) is the tee-iterator type
        # and can be called directly with a new iterable.
        a, b = tee('abc')
        c = type(a)('def')
        self.assertEqual(list(c), list('def'))

        # test long-lagged and multi-way split
        a, b, c = tee(xrange(2000), 3)
        for i in xrange(100):
            self.assertEqual(a.next(), i)
        self.assertEqual(list(b), range(2000))
        self.assertEqual([c.next(), c.next()], range(2))
        self.assertEqual(list(a), range(100,2000))
        self.assertEqual(list(c), range(2,2000))

        # test values of n: n=0..4 all return a tuple of n equivalent iterators.
        self.assertRaises(TypeError, tee, 'abc', 'invalid')
        self.assertRaises(ValueError, tee, [], -1)
        for n in xrange(5):
            result = tee('abc', n)
            self.assertEqual(type(result), tuple)
            self.assertEqual(len(result), n)
            self.assertEqual(map(list, result), [list('abc')]*n)

        # tee pass-through to copyable iterator: tee-ing a tee iterator
        # returns the same object as one of the pair (it is self-copyable).
        a, b = tee('abc')
        c, d = tee(a)
        self.assert_(a is c)

        # test tee_new: the tee-iterator type's constructor accepts exactly
        # one iterable argument.
        t1, t2 = tee('abc')
        tnew = type(t1)
        self.assertRaises(TypeError, tnew)
        self.assertRaises(TypeError, tnew, 10)
        t3 = tnew(t1)
        self.assert_(list(t1) == list(t2) == list(t3) == list('abc'))

        # test that tee objects are weak referencable
        a, b = tee(xrange(10))
        p = proxy(a)
        self.assertEqual(getattr(p, '__class__'), type(b))
        del a
        # Once the referent is collected, the proxy raises ReferenceError.
        self.assertRaises(ReferenceError, getattr, p, '__class__')
    def test_StopIteration(self):
        # Every itertools iterator must raise StopIteration cleanly both for
        # an empty input and for an input that stops on the first next().
        self.assertRaises(StopIteration, izip().next)

        for f in (chain, cycle, izip, groupby):
            self.assertRaises(StopIteration, f([]).next)
            self.assertRaises(StopIteration, f(StopNow()).next)

        self.assertRaises(StopIteration, islice([], None).next)
        self.assertRaises(StopIteration, islice(StopNow(), None).next)

        # Both halves of a tee are exhausted when the source is.
        p, q = tee([])
        self.assertRaises(StopIteration, p.next)
        self.assertRaises(StopIteration, q.next)
        p, q = tee(StopNow())
        self.assertRaises(StopIteration, p.next)
        self.assertRaises(StopIteration, q.next)

        self.assertRaises(StopIteration, repeat(None, 0).next)

        # Iterators that take a function argument, with an identity function.
        for f in (ifilter, ifilterfalse, imap, takewhile, dropwhile, starmap):
            self.assertRaises(StopIteration, f(lambda x:x, []).next)
            self.assertRaises(StopIteration, f(lambda x:x, StopNow()).next)
class TestExamples(unittest.TestCase):
    """Verify the doc examples for each itertools function, one test apiece.

    These mirror the examples shown in the library reference so the docs
    stay honest; each asserts the exact documented output.
    """

    def test_chain(self):
        self.assertEqual(''.join(chain('ABC', 'DEF')), 'ABCDEF')

    def test_chain_from_iterable(self):
        self.assertEqual(''.join(chain.from_iterable(['ABC', 'DEF'])), 'ABCDEF')

    def test_combinations(self):
        self.assertEqual(list(combinations('ABCD', 2)),
                         [('A','B'), ('A','C'), ('A','D'), ('B','C'), ('B','D'), ('C','D')])
        self.assertEqual(list(combinations(range(4), 3)),
                         [(0,1,2), (0,1,3), (0,2,3), (1,2,3)])

    def test_count(self):
        self.assertEqual(list(islice(count(10), 5)), [10, 11, 12, 13, 14])

    def test_cycle(self):
        self.assertEqual(list(islice(cycle('ABCD'), 12)), list('ABCDABCDABCD'))

    def test_dropwhile(self):
        self.assertEqual(list(dropwhile(lambda x: x<5, [1,4,6,4,1])), [6,4,1])

    def test_groupby(self):
        self.assertEqual([k for k, g in groupby('AAAABBBCCDAABBB')],
                         list('ABCDAB'))
        self.assertEqual([(list(g)) for k, g in groupby('AAAABBBCCD')],
                         [list('AAAA'), list('BBB'), list('CC'), list('D')])

    def test_ifilter(self):
        self.assertEqual(list(ifilter(lambda x: x%2, range(10))), [1,3,5,7,9])

    def test_ifilterfalse(self):
        self.assertEqual(list(ifilterfalse(lambda x: x%2, range(10))), [0,2,4,6,8])

    def test_imap(self):
        self.assertEqual(list(imap(pow, (2,3,10), (5,2,3))), [32, 9, 1000])

    def test_islice(self):
        self.assertEqual(list(islice('ABCDEFG', 2)), list('AB'))
        self.assertEqual(list(islice('ABCDEFG', 2, 4)), list('CD'))
        self.assertEqual(list(islice('ABCDEFG', 2, None)), list('CDEFG'))
        self.assertEqual(list(islice('ABCDEFG', 0, None, 2)), list('ACEG'))

    def test_izip(self):
        self.assertEqual(list(izip('ABCD', 'xy')), [('A', 'x'), ('B', 'y')])

    def test_izip_longest(self):
        self.assertEqual(list(izip_longest('ABCD', 'xy', fillvalue='-')),
                         [('A', 'x'), ('B', 'y'), ('C', '-'), ('D', '-')])

    def test_permutations(self):
        self.assertEqual(list(permutations('ABCD', 2)),
                         map(tuple, 'AB AC AD BA BC BD CA CB CD DA DB DC'.split()))
        self.assertEqual(list(permutations(range(3))),
                         [(0,1,2), (0,2,1), (1,0,2), (1,2,0), (2,0,1), (2,1,0)])

    def test_product(self):
        self.assertEqual(list(product('ABCD', 'xy')),
                         map(tuple, 'Ax Ay Bx By Cx Cy Dx Dy'.split()))
        self.assertEqual(list(product(range(2), repeat=3)),
                        [(0,0,0), (0,0,1), (0,1,0), (0,1,1),
                         (1,0,0), (1,0,1), (1,1,0), (1,1,1)])

    def test_repeat(self):
        self.assertEqual(list(repeat(10, 3)), [10, 10, 10])

    def test_stapmap(self):
        # NOTE(review): method name has a typo ("stapmap" for "starmap");
        # kept as-is since unittest discovers it by the test_ prefix anyway.
        self.assertEqual(list(starmap(pow, [(2,5), (3,2), (10,3)])),
                         [32, 9, 1000])

    def test_takewhile(self):
        self.assertEqual(list(takewhile(lambda x: x<5, [1,4,6,4,1])), [1,4])
class TestGC(unittest.TestCase):
    """Check that every itertools iterator participates in cyclic GC.

    Each test builds a reference cycle that runs through the iterator
    (container -> iterator -> container); if the iterator type failed to
    implement GC support, the cycle would leak.
    """

    def makecycle(self, iterator, container):
        # Close the cycle, start the iterator, then drop both local refs;
        # only cyclic GC can reclaim the pair afterwards.
        container.append(iterator)
        iterator.next()
        del container, iterator

    def test_chain(self):
        a = []
        self.makecycle(chain(a), a)

    def test_chain_from_iterable(self):
        a = []
        self.makecycle(chain.from_iterable([a]), a)

    def test_combinations(self):
        a = []
        self.makecycle(combinations([1,2,a,3], 3), a)

    def test_cycle(self):
        a = []
        self.makecycle(cycle([a]*2), a)

    def test_dropwhile(self):
        a = []
        self.makecycle(dropwhile(bool, [0, a, a]), a)

    def test_groupby(self):
        a = []
        self.makecycle(groupby([a]*2, lambda x:x), a)

    def test_issue2246(self):
        # Issue 2246 -- the _grouper iterator was not included in GC
        n = 10
        keyfunc = lambda x: x
        for i, j in groupby(xrange(n), key=keyfunc):
            keyfunc.__dict__.setdefault('x',[]).append(j)

    def test_ifilter(self):
        a = []
        self.makecycle(ifilter(lambda x:True, [a]*2), a)

    def test_ifilterfalse(self):
        a = []
        self.makecycle(ifilterfalse(lambda x:False, a), a)

    def test_izip(self):
        a = []
        self.makecycle(izip([a]*2, [a]*3), a)

    def test_izip_longest(self):
        a = []
        self.makecycle(izip_longest([a]*2, [a]*3), a)
        # Also cycle through the fillvalue reference.
        b = [a, None]
        self.makecycle(izip_longest([a]*2, [a]*3, fillvalue=b), a)

    def test_imap(self):
        a = []
        self.makecycle(imap(lambda x:x, [a]*2), a)

    def test_islice(self):
        a = []
        self.makecycle(islice([a]*2, None), a)

    def test_permutations(self):
        a = []
        self.makecycle(permutations([1,2,a,3], 3), a)

    def test_product(self):
        a = []
        self.makecycle(product([1,2,a,3], repeat=3), a)

    def test_repeat(self):
        a = []
        self.makecycle(repeat(a), a)

    def test_starmap(self):
        a = []
        self.makecycle(starmap(lambda *t: t, [(a,a)]*2), a)

    def test_takewhile(self):
        a = []
        self.makecycle(takewhile(bool, [1, 0, a, a]), a)
def R(seqn):
    'Regular generator'
    # Simplest well-behaved iterable: a generator over the sequence.
    for i in seqn:
        yield i
class G:
    'Sequence using __getitem__'
    # Old-style iteration protocol: no __iter__, indexed from 0 until
    # IndexError; iter() falls back to this automatically.
    def __init__(self, seqn):
        self.seqn = seqn
    def __getitem__(self, i):
        return self.seqn[i]
class I:
    'Sequence using iterator protocol'
    # Hand-written iterator: __iter__ returns self, next() walks an index.
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __iter__(self):
        return self
    def next(self):
        if self.i >= len(self.seqn): raise StopIteration
        v = self.seqn[self.i]
        self.i += 1
        return v
class Ig:
    'Sequence using iterator protocol defined with a generator'
    # __iter__ is itself a generator function, so each iter() call yields
    # a fresh independent iterator.
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __iter__(self):
        for val in self.seqn:
            yield val
class X:
    'Missing __getitem__ and __iter__'
    # Deliberately not iterable (has next() but no __iter__/__getitem__);
    # passing an instance where an iterable is required must raise TypeError.
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def next(self):
        if self.i >= len(self.seqn): raise StopIteration
        v = self.seqn[self.i]
        self.i += 1
        return v
class N:
    'Iterator missing next()'
    # __iter__ returns self but there is no next(); consuming the "iterator"
    # must raise TypeError.
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __iter__(self):
        return self
class E:
    'Test propagation of exceptions'
    # next() always raises ZeroDivisionError; wrappers must let it through.
    def __init__(self, seqn):
        self.seqn = seqn
        self.i = 0
    def __iter__(self):
        return self
    def next(self):
        3 // 0
class S:
    'Test immediate stop'
    # An iterator that is empty from the start: first next() raises
    # StopIteration regardless of the constructor argument.
    def __init__(self, seqn):
        pass
    def __iter__(self):
        return self
    def next(self):
        raise StopIteration
def L(seqn):
    'Test multiple tiers of iterators'
    # Stacks several wrapper types so the data flows through chained iterators.
    return chain(imap(lambda x:x, R(Ig(G(seqn)))))
class TestVariousIteratorArgs(unittest.TestCase):
    """Feed every itertools function each flavor of iterable argument.

    Each test loops the same pattern: well-behaved iterables (G, I, Ig, S,
    L, R) must produce the documented result; X (not iterable) and N (no
    next) must raise TypeError; E must propagate its ZeroDivisionError.
    """

    def test_chain(self):
        for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                self.assertEqual(list(chain(g(s))), list(g(s)))
                self.assertEqual(list(chain(g(s), g(s))), list(g(s))+list(g(s)))
            self.assertRaises(TypeError, list, chain(X(s)))
            self.assertRaises(TypeError, list, chain(N(s)))
            self.assertRaises(ZeroDivisionError, list, chain(E(s)))

    def test_product(self):
        # product consumes its pools eagerly at construction time, so the
        # bad-argument errors surface on the call itself, not on next().
        for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)):
            self.assertRaises(TypeError, product, X(s))
            self.assertRaises(TypeError, product, N(s))
            self.assertRaises(ZeroDivisionError, product, E(s))

    def test_cycle(self):
        for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                tgtlen = len(s) * 3
                expected = list(g(s))*3
                actual = list(islice(cycle(g(s)), tgtlen))
                self.assertEqual(actual, expected)
            self.assertRaises(TypeError, cycle, X(s))
            self.assertRaises(TypeError, list, cycle(N(s)))
            self.assertRaises(ZeroDivisionError, list, cycle(E(s)))

    def test_groupby(self):
        for s in (range(10), range(0), range(1000), (7,11), xrange(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                self.assertEqual([k for k, sb in groupby(g(s))], list(g(s)))
            self.assertRaises(TypeError, groupby, X(s))
            self.assertRaises(TypeError, list, groupby(N(s)))
            self.assertRaises(ZeroDivisionError, list, groupby(E(s)))

    def test_ifilter(self):
        for s in (range(10), range(0), range(1000), (7,11), xrange(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                self.assertEqual(list(ifilter(isEven, g(s))), filter(isEven, g(s)))
            self.assertRaises(TypeError, ifilter, isEven, X(s))
            self.assertRaises(TypeError, list, ifilter(isEven, N(s)))
            self.assertRaises(ZeroDivisionError, list, ifilter(isEven, E(s)))

    def test_ifilterfalse(self):
        for s in (range(10), range(0), range(1000), (7,11), xrange(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                self.assertEqual(list(ifilterfalse(isEven, g(s))), filter(isOdd, g(s)))
            self.assertRaises(TypeError, ifilterfalse, isEven, X(s))
            self.assertRaises(TypeError, list, ifilterfalse(isEven, N(s)))
            self.assertRaises(ZeroDivisionError, list, ifilterfalse(isEven, E(s)))

    def test_izip(self):
        for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                self.assertEqual(list(izip(g(s))), zip(g(s)))
                self.assertEqual(list(izip(g(s), g(s))), zip(g(s), g(s)))
            self.assertRaises(TypeError, izip, X(s))
            self.assertRaises(TypeError, list, izip(N(s)))
            self.assertRaises(ZeroDivisionError, list, izip(E(s)))

    def test_iziplongest(self):
        for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                self.assertEqual(list(izip_longest(g(s))), zip(g(s)))
                self.assertEqual(list(izip_longest(g(s), g(s))), zip(g(s), g(s)))
            self.assertRaises(TypeError, izip_longest, X(s))
            self.assertRaises(TypeError, list, izip_longest(N(s)))
            self.assertRaises(ZeroDivisionError, list, izip_longest(E(s)))

    def test_imap(self):
        for s in (range(10), range(0), range(100), (7,11), xrange(20,50,5)):
            for g in (G, I, Ig, S, L, R):
                self.assertEqual(list(imap(onearg, g(s))), map(onearg, g(s)))
                self.assertEqual(list(imap(operator.pow, g(s), g(s))), map(operator.pow, g(s), g(s)))
            self.assertRaises(TypeError, imap, onearg, X(s))
            self.assertRaises(TypeError, list, imap(onearg, N(s)))
            self.assertRaises(ZeroDivisionError, list, imap(onearg, E(s)))

    def test_islice(self):
        for s in ("12345", "", range(1000), ('do', 1.2), xrange(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                self.assertEqual(list(islice(g(s),1,None,2)), list(g(s))[1::2])
            self.assertRaises(TypeError, islice, X(s), 10)
            self.assertRaises(TypeError, list, islice(N(s), 10))
            self.assertRaises(ZeroDivisionError, list, islice(E(s), 10))

    def test_starmap(self):
        for s in (range(10), range(0), range(100), (7,11), xrange(20,50,5)):
            for g in (G, I, Ig, S, L, R):
                ss = zip(s, s)
                self.assertEqual(list(starmap(operator.pow, g(ss))), map(operator.pow, g(s), g(s)))
            self.assertRaises(TypeError, starmap, operator.pow, X(ss))
            self.assertRaises(TypeError, list, starmap(operator.pow, N(ss)))
            self.assertRaises(ZeroDivisionError, list, starmap(operator.pow, E(ss)))

    def test_takewhile(self):
        for s in (range(10), range(0), range(1000), (7,11), xrange(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                # Expected: the leading run of even elements.
                tgt = []
                for elem in g(s):
                    if not isEven(elem): break
                    tgt.append(elem)
                self.assertEqual(list(takewhile(isEven, g(s))), tgt)
            self.assertRaises(TypeError, takewhile, isEven, X(s))
            self.assertRaises(TypeError, list, takewhile(isEven, N(s)))
            self.assertRaises(ZeroDivisionError, list, takewhile(isEven, E(s)))

    def test_dropwhile(self):
        for s in (range(10), range(0), range(1000), (7,11), xrange(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                # Expected: everything after the leading run of odd elements.
                tgt = []
                for elem in g(s):
                    if not tgt and isOdd(elem): continue
                    tgt.append(elem)
                self.assertEqual(list(dropwhile(isOdd, g(s))), tgt)
            self.assertRaises(TypeError, dropwhile, isOdd, X(s))
            self.assertRaises(TypeError, list, dropwhile(isOdd, N(s)))
            self.assertRaises(ZeroDivisionError, list, dropwhile(isOdd, E(s)))

    def test_tee(self):
        for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)):
            for g in (G, I, Ig, S, L, R):
                it1, it2 = tee(g(s))
                self.assertEqual(list(it1), list(g(s)))
                self.assertEqual(list(it2), list(g(s)))
            self.assertRaises(TypeError, tee, X(s))
            self.assertRaises(TypeError, list, tee(N(s))[0])
            self.assertRaises(ZeroDivisionError, list, tee(E(s))[0])
class LengthTransparency(unittest.TestCase):
    """repeat() exposes __length_hint__/len for bounded counts only."""

    def test_repeat(self):
        # test.test_iterlen's len consults the length hint; an unbounded
        # repeat has no length and must raise TypeError.
        from test.test_iterlen import len
        self.assertEqual(len(repeat(None, 50)), 50)
        self.assertRaises(TypeError, len, repeat(None))
class RegressionTests(unittest.TestCase):
    """Regression tests for historical itertools bugs (see SF tracker ids)."""

    def test_sf_793826(self):
        # Fix Armin Rigo's successful efforts to wreak havoc:
        # izip must tolerate its argument tuple being mutated mid-iteration
        # by a callback triggered from inside next().
        def mutatingtuple(tuple1, f, tuple2):
            # this builds a tuple t which is a copy of tuple1,
            # then calls f(t), then mutates t to be equal to tuple2
            # (needs len(tuple1) == len(tuple2)).
            def g(value, first=[1]):
                if first:
                    del first[:]
                    f(z.next())
                return value
            items = list(tuple2)
            items[1:1] = list(tuple1)
            gen = imap(g, items)
            z = izip(*[gen]*len(tuple1))
            z.next()

        def f(t):
            global T
            T = t
            first[:] = list(T)

        first = []
        mutatingtuple((1,2,3), f, (4,5,6))
        second = list(T)
        self.assertEqual(first, second)


    def test_sf_950057(self):
        # Make sure that chain() and cycle() catch exceptions immediately
        # rather than when shifting between input sources
        def gen1():
            hist.append(0)
            yield 1
            hist.append(1)
            raise AssertionError
            hist.append(2)          # unreachable: documents intended trace end
        def gen2(x):
            hist.append(3)
            yield 2
            hist.append(4)
            if x:
                raise StopIteration

        # In every case the AssertionError from gen1 must surface before
        # chain/cycle move on, so gen2 is never entered (hist stops at [0,1]).
        hist = []
        self.assertRaises(AssertionError, list, chain(gen1(), gen2(False)))
        self.assertEqual(hist, [0,1])

        hist = []
        self.assertRaises(AssertionError, list, chain(gen1(), gen2(True)))
        self.assertEqual(hist, [0,1])

        hist = []
        self.assertRaises(AssertionError, list, cycle(gen1()))
        self.assertEqual(hist, [0,1])
class SubclassWithKwargsTest(unittest.TestCase):
    """Subclasses of itertools types may add keyword args to __init__."""

    def test_keywords_in_subclass(self):
        # count is not subclassable...
        for cls in (repeat, izip, ifilter, ifilterfalse, chain, imap,
                    starmap, islice, takewhile, dropwhile, cycle):
            class Subclass(cls):
                def __init__(self, newarg=None, *args):
                    cls.__init__(self, *args)
            try:
                Subclass(newarg=1)
            except TypeError, err:
                # we expect type errors because of wrong argument count
                # (cls.__init__ is called with no data args), but NOT the
                # "does not take keyword arguments" rejection.
                self.failIf("does not take keyword arguments" in err.args[0])
libreftest = """ Doctest for examples in the library reference: libitertools.tex
>>> amounts = [120.15, 764.05, 823.14]
>>> for checknum, amount in izip(count(1200), amounts):
... print 'Check %d is for $%.2f' % (checknum, amount)
...
Check 1200 is for $120.15
Check 1201 is for $764.05
Check 1202 is for $823.14
>>> import operator
>>> for cube in imap(operator.pow, xrange(1,4), repeat(3)):
... print cube
...
1
8
27
>>> reportlines = ['EuroPython', 'Roster', '', 'alex', '', 'laura', '', 'martin', '', 'walter', '', 'samuele']
>>> for name in islice(reportlines, 3, None, 2):
... print name.title()
...
Alex
Laura
Martin
Walter
Samuele
>>> from operator import itemgetter
>>> d = dict(a=1, b=2, c=1, d=2, e=1, f=2, g=3)
>>> di = sorted(sorted(d.iteritems()), key=itemgetter(1))
>>> for k, g in groupby(di, itemgetter(1)):
... print k, map(itemgetter(0), g)
...
1 ['a', 'c', 'e']
2 ['b', 'd', 'f']
3 ['g']
# Find runs of consecutive numbers using groupby. The key to the solution
# is differencing with a range so that consecutive numbers all appear in
# same group.
>>> data = [ 1, 4,5,6, 10, 15,16,17,18, 22, 25,26,27,28]
>>> for k, g in groupby(enumerate(data), lambda (i,x):i-x):
... print map(operator.itemgetter(1), g)
...
[1]
[4, 5, 6]
[10]
[15, 16, 17, 18]
[22]
[25, 26, 27, 28]
>>> def take(n, iterable):
... "Return first n items of the iterable as a list"
... return list(islice(iterable, n))
>>> def enumerate(iterable, start=0):
... return izip(count(start), iterable)
>>> def tabulate(function, start=0):
... "Return function(0), function(1), ..."
... return imap(function, count(start))
>>> def nth(iterable, n):
... "Returns the nth item or empty list"
... return list(islice(iterable, n, n+1))
>>> def quantify(iterable, pred=bool):
... "Count how many times the predicate is true"
... return sum(imap(pred, iterable))
>>> def padnone(iterable):
... "Returns the sequence elements and then returns None indefinitely"
... return chain(iterable, repeat(None))
>>> def ncycles(iterable, n):
... "Returns the seqeuence elements n times"
... return chain(*repeat(iterable, n))
>>> def dotproduct(vec1, vec2):
... return sum(imap(operator.mul, vec1, vec2))
>>> def flatten(listOfLists):
... return list(chain.from_iterable(listOfLists))
>>> def repeatfunc(func, times=None, *args):
... "Repeat calls to func with specified arguments."
... " Example: repeatfunc(random.random)"
... if times is None:
... return starmap(func, repeat(args))
... else:
... return starmap(func, repeat(args, times))
>>> def pairwise(iterable):
... "s -> (s0,s1), (s1,s2), (s2, s3), ..."
... a, b = tee(iterable)
... for elem in b:
... break
... return izip(a, b)
>>> def grouper(n, iterable, fillvalue=None):
... "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
... args = [iter(iterable)] * n
... return izip_longest(fillvalue=fillvalue, *args)
>>> def roundrobin(*iterables):
... "roundrobin('ABC', 'D', 'EF') --> A D E B F C"
... # Recipe credited to George Sakkis
... pending = len(iterables)
... nexts = cycle(iter(it).next for it in iterables)
... while pending:
... try:
... for next in nexts:
... yield next()
... except StopIteration:
... pending -= 1
... nexts = cycle(islice(nexts, pending))
>>> def powerset(iterable):
... "powerset('ab') --> set([]), set(['a']), set(['b']), set(['a', 'b'])"
... # Recipe credited to Eric Raymond
... pairs = [(2**i, x) for i, x in enumerate(iterable)]
... for n in xrange(2**len(pairs)):
... yield set(x for m, x in pairs if m&n)
>>> def compress(data, selectors):
... "compress('ABCDEF', [1,0,1,0,1,1]) --> A C E F"
... return (d for d, s in izip(data, selectors) if s)
>>> def combinations_with_replacement(iterable, r):
... "combinations_with_replacement('ABC', 3) --> AA AB AC BB BC CC"
... pool = tuple(iterable)
... n = len(pool)
... indices = [0] * r
... yield tuple(pool[i] for i in indices)
... while 1:
... for i in reversed(range(r)):
... if indices[i] != n - 1:
... break
... else:
... return
... indices[i:] = [indices[i] + 1] * (r - i)
... yield tuple(pool[i] for i in indices)
This is not part of the examples but it tests to make sure the definitions
perform as purported.
>>> take(10, count())
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> list(enumerate('abc'))
[(0, 'a'), (1, 'b'), (2, 'c')]
>>> list(islice(tabulate(lambda x: 2*x), 4))
[0, 2, 4, 6]
>>> nth('abcde', 3)
['d']
>>> quantify(xrange(99), lambda x: x%2==0)
50
>>> a = [[1, 2, 3], [4, 5, 6]]
>>> flatten(a)
[1, 2, 3, 4, 5, 6]
>>> list(repeatfunc(pow, 5, 2, 3))
[8, 8, 8, 8, 8]
>>> import random
>>> take(5, imap(int, repeatfunc(random.random)))
[0, 0, 0, 0, 0]
>>> list(pairwise('abcd'))
[('a', 'b'), ('b', 'c'), ('c', 'd')]
>>> list(pairwise([]))
[]
>>> list(pairwise('a'))
[]
>>> list(islice(padnone('abc'), 0, 6))
['a', 'b', 'c', None, None, None]
>>> list(ncycles('abc', 3))
['a', 'b', 'c', 'a', 'b', 'c', 'a', 'b', 'c']
>>> dotproduct([1,2,3], [4,5,6])
32
>>> list(grouper(3, 'abcdefg', 'x'))
[('a', 'b', 'c'), ('d', 'e', 'f'), ('g', 'x', 'x')]
>>> list(roundrobin('abc', 'd', 'ef'))
['a', 'd', 'e', 'b', 'f', 'c']
>>> map(sorted, powerset('ab'))
[[], ['a'], ['b'], ['a', 'b']]
>>> list(compress('abcdef', [1,0,1,0,1,1]))
['a', 'c', 'e', 'f']
>>> list(combinations_with_replacement('abc', 2))
[('a', 'a'), ('a', 'b'), ('a', 'c'), ('b', 'b'), ('b', 'c'), ('c', 'c')]
"""
__test__ = {'libreftest' : libreftest}
def test_main(verbose=None):
    """Run all unittest classes, optionally check refcounts, then doctests.

    verbose: when truthy and the interpreter is a debug build (has
    sys.gettotalrefcount), repeat the suite five times and print the total
    refcounts so leaks show up as a rising trend.
    """
    test_classes = (TestBasicOps, TestVariousIteratorArgs, TestGC,
                    RegressionTests, LengthTransparency,
                    SubclassWithKwargsTest, TestExamples)
    test_support.run_unittest(*test_classes)

    # verify reference counting
    if verbose and hasattr(sys, "gettotalrefcount"):
        import gc
        counts = [None] * 5
        for i in xrange(len(counts)):
            test_support.run_unittest(*test_classes)
            gc.collect()
            counts[i] = sys.gettotalrefcount()
        print counts

    # doctest the examples in the library reference
    test_support.run_doctest(sys.modules[__name__], verbose)
# Script entry point: run the full suite with refcount checking enabled.
if __name__ == "__main__":
    test_main(verbose=True)
| 39.742502 | 120 | 0.530979 |
ace5332373afce0a35ba97e4825b96b2cb979b3d | 696 | py | Python | apps/camera/camera.py | squirrelcom/TYOS | 8fa140fe5c46e5af26a5b504bd6554664abff463 | [
"MIT"
] | 80 | 2015-03-30T12:30:01.000Z | 2022-01-16T14:30:41.000Z | apps/camera/camera.py | squirrelcom/TYOS | 8fa140fe5c46e5af26a5b504bd6554664abff463 | [
"MIT"
] | 2 | 2017-03-21T20:29:45.000Z | 2020-10-20T11:39:44.000Z | apps/camera/camera.py | squirrelcom/TYOS | 8fa140fe5c46e5af26a5b504bd6554664abff463 | [
"MIT"
] | 31 | 2015-04-14T03:15:44.000Z | 2022-01-15T20:18:24.000Z | #Camera App
#copyright (c) 2015 Tyler Spadgenske
# MIT License
###############################
#To be packaged with stock TYOS
###############################
from subprocess import Popen
import sys
import pygame
class Run():
    """Camera app entry class conforming to the TYOS app protocol.

    TYOS instantiates this class with a FONA handle and expects the
    attributes ``exit``, ``blit_one_surface``, ``blit`` and ``next_app``
    plus the ``run_app`` / ``get_events`` / ``on_first_run`` hooks.
    """

    def __init__(self, fona):
        # Protocol-mandated state; the FONA handle itself is not needed
        # by the camera app, so it is not stored.
        self.next_app = None
        self.exit = False
        self.blit = {'surfaces': [], 'rects': []}
        self.blit_one_surface = {'surface': [], 'rects': []}

    def run_app(self):
        """Hand control to the stand-alone camera script, then quit TYOS."""
        cam = Popen(['sudo', 'python', '/home/pi/tyos/apps/camera/app.py'])
        pygame.quit()
        sys.exit()

    def get_events(self, event):
        """Event hook required by the app protocol; camera ignores events."""
        pass

    def on_first_run(self):
        """First-run hook required by the app protocol; nothing to do."""
        pass
| 23.2 | 75 | 0.538793 |
ace533a67324ccb492acb94dc3eaffe48e35dcc9 | 652 | py | Python | basic_python/load_matlab_file.py | Zettergren-Courses/EP501_python | dabaa584e5158eb35197a43f38920a9ed7cc02b8 | [
"MIT"
] | null | null | null | basic_python/load_matlab_file.py | Zettergren-Courses/EP501_python | dabaa584e5158eb35197a43f38920a9ed7cc02b8 | [
"MIT"
] | 1 | 2020-10-06T13:29:01.000Z | 2020-10-06T13:29:01.000Z | basic_python/load_matlab_file.py | Zettergren-Courses/EP501_python | dabaa584e5158eb35197a43f38920a9ed7cc02b8 | [
"MIT"
] | 6 | 2020-09-01T10:35:59.000Z | 2020-09-18T10:12:59.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Sep 8 07:50:40 2020
This file shows briefly how to load data from a MATLAB .mat file and organize it
@author: zettergm
"""
import scipy.io as spio
# presumes that we've cloned the EP501_assignments repo into ../../

# HW1 test problem: coefficient matrix A plus three right-hand-side vectors.
filename = "../../EP501_assignments/assignments/HW1/testproblem.mat"
datadictionary = spio.loadmat(filename)
A, b, b2, b3 = (datadictionary[key] for key in ("A", "b", "b2", "b3"))

# HW2 test problem: system intended for iterative solvers.
filename = "../../EP501_assignments/assignments/HW2/iterative_testproblem.mat"
datadictionary = spio.loadmat(filename)
Ait = datadictionary["Ait"]
bit = datadictionary["bit"]
ace536077bffaa541c8916ac0f040e794c779988 | 862 | py | Python | summarizer/sentence_handler.py | yukku/bert-extractive-summarizer-gpu | 221a8c2f50385c0cf4ae9dce776b624423cc6881 | [
"MIT"
] | 2 | 2021-10-02T08:12:35.000Z | 2022-01-09T20:41:14.000Z | summarizer/sentence_handler.py | yukku/bert-extractive-summarizer-gpu | 221a8c2f50385c0cf4ae9dce776b624423cc6881 | [
"MIT"
] | null | null | null | summarizer/sentence_handler.py | yukku/bert-extractive-summarizer-gpu | 221a8c2f50385c0cf4ae9dce776b624423cc6881 | [
"MIT"
] | 1 | 2021-12-29T21:53:31.000Z | 2021-12-29T21:53:31.000Z | from spacy.lang.en import English
class SentenceHandler(object):
    """Split raw text into sentences using a spaCy sentencizer pipeline."""

    def __init__(self, language=English):
        # Instantiate the (blank) language model and attach only the
        # rule-based sentencizer component.
        self.nlp = language()
        self.nlp.add_pipe(self.nlp.create_pipe('sentencizer'))

    def process(self, body: str, min_length: int = 40, max_length: int = 600):
        """
        Processes the content sentences.

        :param body: The raw string body to process
        :param min_length: Minimum length that the sentences must be
        :param max_length: Max length that the sentences mus fall under
        :return: Returns a list of sentences.
        """
        doc = self.nlp(body)
        sentences = []
        for span in doc.sents:
            text = span.string.strip()
            # Keep only sentences strictly inside the (min, max) length window.
            if max_length > len(text) > min_length:
                sentences.append(text)
        return sentences

    def __call__(self, body: str, min_length: int = 40, max_length: int = 600):
        return self.process(body, min_length, max_length)
| 35.916667 | 103 | 0.655452 |
ace5361f7ff88a763cb55fd13543bbe02521cf99 | 20,018 | py | Python | train_180301_1_Dense_3rd_gen_1st_.py | OsciiArt/Cookpad | b2245f84db0650d6282c97c98600de825c6ed6e0 | [
"MIT"
] | null | null | null | train_180301_1_Dense_3rd_gen_1st_.py | OsciiArt/Cookpad | b2245f84db0650d6282c97c98600de825c6ed6e0 | [
"MIT"
] | null | null | null | train_180301_1_Dense_3rd_gen_1st_.py | OsciiArt/Cookpad | b2245f84db0650d6282c97c98600de825c6ed6e0 | [
"MIT"
] | null | null | null | import numpy as np # linear algebra
np.random.seed(42)
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
from sklearn.model_selection import train_test_split
from matplotlib import pyplot
import time
import os, glob
import cv2
# parameters
format = "%H%M"  # timestamp layout (HHMM); NOTE(review): shadows the builtin ``format``
ts = time.strftime(format)  # run timestamp baked into all output file names
base_name = os.path.splitext(__file__)[0] + "_ts" + ts  # prefix for logs / weights / prediction files
input_size = 221  # side length (px) every image is resized to before the network
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Dense, Dropout, Input, Flatten, GaussianNoise
from keras.layers import GlobalMaxPooling2D, Reshape, UpSampling3D, Activation
from keras.layers.normalization import BatchNormalization
from keras.layers.merge import Concatenate
from keras.models import Model
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint, Callback, EarlyStopping, CSVLogger, ReduceLROnPlateau, LearningRateScheduler
from keras import backend as K
def get_callbacks(save_path, lr=0.001, patience=64):
    """Build the Keras callback list used for each CV fold.

    Parameters
    ----------
    save_path : str
        Path of the checkpoint file; ``save_path + '_log.csv'`` receives
        the per-epoch CSV log.
    lr : float
        Unused; kept for backward compatibility (the learning rate is
        fully determined by the step schedule below).
    pat honoured now -- previously the EarlyStopping patience was
    patience : int
        Epochs without val_loss improvement before stopping (was
        hard-coded to 16, silently ignoring this parameter).

    Returns
    -------
    list of keras.callbacks.Callback
    """
    csv_logger = CSVLogger(save_path + '_log.csv', append=True)
    save_checkpoint = ModelCheckpoint(filepath=save_path, monitor='val_loss',
                                      save_best_only=True)

    def lrs(epoch):
        # Step schedule: 1e-3 until epoch 100, 1e-4 until 200, then 1e-5.
        if epoch < 100:
            return 1e-3
        elif epoch < 200:
            return 1e-4
        return 1e-5

    learning_rate_schedular = LearningRateScheduler(lambda epoch: lrs(epoch))
    early_stopping = EarlyStopping(monitor='val_loss',
                                   patience=patience,
                                   verbose=1,
                                   min_delta=1e-4,
                                   mode='min')
    return [csv_logger,
            save_checkpoint,
            learning_rate_schedular,
            early_stopping
            ]
def swish(x):
    """Swish activation: sigmoid(x) * x (Ramachandran et al., 2017)."""
    gate = K.sigmoid(x)
    return gate * x
from keras.applications.vgg16 import VGG16
from keras.applications.densenet import DenseNet121
from keras.optimizers import SGD, Adam
from keras.layers import GlobalAveragePooling2D
def get_model(num_class):
    """Build and compile a DenseNet121-based softmax classifier.

    The backbone is a DenseNet121 without its top, randomly initialised
    (``weights=None``), followed by global average pooling and a dense
    softmax head of ``num_class`` outputs.

    Parameters
    ----------
    num_class : int
        Number of output classes.

    Returns
    -------
    keras.models.Model
        Compiled with Adam(lr=1e-4) and categorical cross-entropy.
    """
    base_model = DenseNet121(weights=None, include_top=False,
                             input_shape=[input_size, input_size, 3], classes=1)
    # summary() prints itself and returns None; the old
    # ``print(base_model.summary())`` emitted a stray "None" line.
    base_model.summary()
    # "bn" is the final batch-norm layer of Keras' DenseNet121.
    x = base_model.get_layer("bn").output
    x = GlobalAveragePooling2D()(x)
    predictions = Dense(num_class, activation='softmax')(x)
    model = Model(inputs=base_model.input, outputs=predictions)
    optimizer = Adam(lr=0.0001)
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    return model
def randomHueSaturationValue(image, hue_shift_limit=(-180, 180),
                             sat_shift_limit=(-255, 255),
                             val_shift_limit=(-255, 255), u=0.5):
    # Color-jitter augmentation: with probability ``u`` shift the H, S and V
    # channels of a BGR image by uniform random offsets, then convert back.
    # NOTE(review): cv2.add saturates uint8 channels instead of wrapping, so
    # large hue shifts clip at the 0/179 HSV bounds -- confirm this is the
    # intended jitter behaviour.
    if np.random.random() < u:
        image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
        h, s, v = cv2.split(image) # hue, saturation, value
        hue_shift = np.random.uniform(hue_shift_limit[0], hue_shift_limit[1])
        h = cv2.add(h, hue_shift)
        sat_shift = np.random.uniform(sat_shift_limit[0], sat_shift_limit[1])
        s = cv2.add(s, sat_shift)
        val_shift = np.random.uniform(val_shift_limit[0], val_shift_limit[1])
        v = cv2.add(v, val_shift)
        image = cv2.merge((h, s, v))
        image = cv2.cvtColor(image, cv2.COLOR_HSV2BGR)
    return image
def randomShiftScaleRotate(image,
                           shift_limit=(-0.0625, 0.0625),
                           scale_limit=(-0.1, 0.1),
                           rotate_limit=(-45, 45), aspect_limit=(0, 0),
                           borderMode=cv2.BORDER_CONSTANT, u=0.5):
    # Affine augmentation: with probability ``u`` apply a random combined
    # shift / scale / rotation / aspect distortion about the image centre.
    # Limits are fractions of the image size (shift) or degrees (rotation).
    if np.random.random() < u:
        height, width, channel = image.shape

        angle = np.random.uniform(rotate_limit[0], rotate_limit[1])  # degree
        scale = np.random.uniform(1 + scale_limit[0], 1 + scale_limit[1])
        aspect = np.random.uniform(1 + aspect_limit[0], 1 + aspect_limit[1])
        # Split the scale factor asymmetrically over x/y to realise the
        # aspect-ratio distortion while keeping the overall area scale.
        sx = scale * aspect / (aspect ** 0.5)
        sy = scale / (aspect ** 0.5)
        dx = round(np.random.uniform(shift_limit[0], shift_limit[1]) * width)
        dy = round(np.random.uniform(shift_limit[0], shift_limit[1]) * height)

        # 2x2 rotation+scale matrix applied to the four image corners.
        cc = np.math.cos(angle / 180 * np.math.pi) * sx
        ss = np.math.sin(angle / 180 * np.math.pi) * sy
        rotate_matrix = np.array([[cc, -ss], [ss, cc]])

        # Transform corner box about the centre, then shift; the perspective
        # transform mapping old corners to new realises the full warp.
        box0 = np.array([[0, 0], [width, 0], [width, height], [0, height], ])
        box1 = box0 - np.array([width / 2, height / 2])
        box1 = np.dot(box1, rotate_matrix.T) + np.array([width / 2 + dx, height / 2 + dy])

        box0 = box0.astype(np.float32)
        box1 = box1.astype(np.float32)
        mat = cv2.getPerspectiveTransform(box0, box1)
        # Out-of-bounds pixels are filled with black (borderValue).
        image = cv2.warpPerspective(image, mat, (width, height), flags=cv2.INTER_LINEAR, borderMode=borderMode,
                                    borderValue=(
                                        0, 0,
                                        0,))
    return image
def randomHorizontalFlip(image, u=0.5):
    """Mirror the image left-right with probability ``u``."""
    if np.random.random() < u:
        return cv2.flip(image, 1)
    return image
def randomVerticalFlip(image, u=0.5):
    """Mirror the image top-bottom with probability ``u``."""
    if np.random.random() < u:
        return cv2.flip(image, 0)
    return image
def get_mixer(p=0.5, s_l=0.02, s_h=0.4, r_1=0.3, r_2=1/0.3):
    """Return a CutMix-style closure pasting a random patch of one image
    onto another and mixing the labels by patch area.

    BUG FIX: the original returned the undefined name ``mask1`` (a
    NameError whenever mixing fired) and never mixed the label; it now
    matches the corrected mixer defined later in this file.

    Parameters
    ----------
    p : float
        Probability of applying the mix.
    s_l, s_h : float
        Patch area bounds as fractions of the image area.
    r_1, r_2 : float
        Patch aspect-ratio bounds.

    Returns
    -------
    callable ``mixer(img1, img2, y1, y2) -> (img, y)`` where ``img1`` is
    modified in place.
    """
    def mixer(img1, img2, y1, y2):
        img_h, img_w, img_c = img1.shape
        p_1 = np.random.rand()
        if p_1 > p:
            return img1, y1
        # Resample until the random patch fits inside the image.
        while True:
            s = np.random.uniform(s_l, s_h) * img_h * img_w
            r = np.random.uniform(r_1, r_2)
            w = int(np.sqrt(s / r))
            h = int(np.sqrt(s * r))
            left = np.random.randint(0, img_w)
            top = np.random.randint(0, img_h)
            if left + w <= img_w and top + h <= img_h:
                break
        img1[top:top + h, left:left + w, :] = img2[top:top + h, left:left + w, :]
        # Mix labels proportionally to the pasted area (was: undefined mask1).
        area_frac = h / img_h * w / img_w
        y = (1 - area_frac) * y1 + area_frac * y2
        return img1, y
    return mixer
def get_random_eraser(p=0.5, s_l=0.02, s_h=0.4, r_1=0.3, r_2=1/0.3, v_l=0, v_h=255, pixel_level=False):
    """Return a Random Erasing closure (Zhong et al., 2017).

    With probability ``p`` the closure overwrites a random rectangle of
    the input image (in place) with a constant or per-pixel random fill
    drawn from ``[v_l, v_h]``. Patch area is ``[s_l, s_h]`` of the image
    area, aspect ratio in ``[r_1, r_2]``.
    """
    def eraser(input_img):
        height, width, channels = input_img.shape

        if np.random.rand() > p:
            return input_img

        # Resample until the random patch fits inside the image.
        while True:
            area = np.random.uniform(s_l, s_h) * height * width
            aspect = np.random.uniform(r_1, r_2)
            patch_w = int(np.sqrt(area / aspect))
            patch_h = int(np.sqrt(area * aspect))
            x0 = np.random.randint(0, width)
            y0 = np.random.randint(0, height)
            if x0 + patch_w <= width and y0 + patch_h <= height:
                break

        if pixel_level:
            fill = np.random.uniform(v_l, v_h, (patch_h, patch_w, channels))
        else:
            fill = np.random.uniform(v_l, v_h)

        input_img[y0:y0 + patch_h, x0:x0 + patch_w, :] = fill
        return input_img

    return eraser
from multiprocessing import Pool
def load_img(args):
    # Worker for multiprocessing.Pool: load one training image from disk,
    # resize to the network input size and apply the random augmentation
    # chain (HSV jitter, shift/scale/rotate, horizontal+vertical flip).
    img_path = args
    img = cv2.imread(img_path)
    # print("img shape", img.shape)
    img = cv2.resize(img, (input_size, input_size))
    img = randomHueSaturationValue(img,
                                   hue_shift_limit=(-50, 50),
                                   sat_shift_limit=(-5, 5),
                                   val_shift_limit=(-15, 15),
                                   u=0.5)
    img = randomShiftScaleRotate(img,
                                 shift_limit=(-0.2, 0.2),
                                 scale_limit=(-0.2, 0.2),
                                 rotate_limit=(-30, 30),
                                 aspect_limit=(-0.2, 0.2),
                                 u=0.5)
    img = randomHorizontalFlip(img)
    img = randomVerticalFlip(img)
    return img
def load_img_valid(args):
    """Load one image and resize it to the network input size (no augmentation)."""
    return cv2.resize(cv2.imread(args), (input_size, input_size))
def train_generator(x_train, y_train, img_dir, batch_size, shuffle=True):
    # Infinite batch generator for Keras fit_generator: one-hot encodes the
    # 55-class labels, optionally reshuffles at each epoch boundary, and
    # loads+augments images in parallel via a 16-worker process pool.
    # NOTE(review): the Pool is created per generator and never closed,
    # and ``eraser`` is built but unused (erasing lines are commented out).
    # x_train = x_train.as_matrix()
    # y_train = y_train.as_matrix()
    y_train = np.eye(55)[y_train]
    batch_index = 0
    n = x_train.shape[0]
    # print("n", n)
    eraser = get_random_eraser(v_h=0.)
    pool = Pool(16)
    while 1:
        if batch_index == 0:
            # New epoch: rebuild (and optionally shuffle) the index order.
            index_array = np.arange(n)
            if shuffle:
                index_array = np.random.permutation(n)
        current_index = (batch_index * batch_size) % n
        if n >= current_index + batch_size:
            current_batch_size = batch_size
            batch_index += 1
        else:
            # Short final batch of the epoch; restart indexing afterwards.
            current_batch_size = n - current_index
            batch_index = 0
        batch_id = index_array[current_index: current_index + current_batch_size]
        batch_x = pool.map(load_img,
                           [img_dir + '/{}'.format(x_train[id])
                            for id in batch_id])
        for id in range(len(batch_x)):
            img = batch_x[id]
            # img =eraser(img)
            # img =eraser(img)
            # img =eraser(img)
            # img =eraser(img)
            # img =eraser(img)
            batch_x[id] = img
        # Scale pixel values to [0, 1].
        batch_x = np.array(batch_x, np.float32) / 255
        batch_y = y_train[index_array[current_index: current_index + current_batch_size]]
        # print("batch shape", batch_x.shape, batch_y.shape)
        yield (batch_x, batch_y)
def valid_generator(x_train, y_train, img_dir, batch_size, shuffle=True):
    # Validation counterpart of train_generator: identical batching logic
    # but loads images with load_img_valid (resize only, no augmentation).
    # NOTE(review): near-duplicate of train_generator; the Pool is never
    # closed and ``eraser`` is unused here as well.
    # x_train = x_train.as_matrix()
    # y_train = y_train.as_matrix()
    y_train = np.eye(55)[y_train]
    batch_index = 0
    n = x_train.shape[0]
    # print("n", n)
    eraser = get_random_eraser(v_h=0.)
    pool = Pool(16)
    while 1:
        if batch_index == 0:
            # New epoch: rebuild (and optionally shuffle) the index order.
            index_array = np.arange(n)
            if shuffle:
                index_array = np.random.permutation(n)
        current_index = (batch_index * batch_size) % n
        if n >= current_index + batch_size:
            current_batch_size = batch_size
            batch_index += 1
        else:
            # Short final batch of the epoch; restart indexing afterwards.
            current_batch_size = n - current_index
            batch_index = 0
        batch_id = index_array[current_index: current_index + current_batch_size]
        batch_x = pool.map(load_img_valid,
                           [img_dir + '/{}'.format(x_train[id])
                            for id in batch_id])
        for id in range(len(batch_x)):
            img = batch_x[id]
            # img =eraser(img)
            # img =eraser(img)
            # img =eraser(img)
            # img =eraser(img)
            # img =eraser(img)
            batch_x[id] = img
        # Scale pixel values to [0, 1].
        batch_x = np.array(batch_x, np.float32) / 255
        batch_y = y_train[index_array[current_index: current_index + current_batch_size]]
        # print("batch shape", batch_x.shape, batch_y.shape)
        yield (batch_x, batch_y)
def get_mixer(p=0.5, s_l=0.02, s_h=0.4, r_1=0.3, r_2=1/0.3):
    """Return a CutMix-style closure: paste a random patch of ``img2``
    onto ``img1`` (in place) and mix the labels by the patch's area
    fraction. This definition supersedes the earlier ``get_mixer``.
    """
    def mixer(img1, img2, y1, y2):
        height, width, _ = img1.shape

        if np.random.rand() > p:
            return img1, y1

        # Resample until the random patch fits inside the image.
        while True:
            area = np.random.uniform(s_l, s_h) * height * width
            aspect = np.random.uniform(r_1, r_2)
            patch_w = int(np.sqrt(area / aspect))
            patch_h = int(np.sqrt(area * aspect))
            x0 = np.random.randint(0, width)
            y0 = np.random.randint(0, height)
            if x0 + patch_w <= width and y0 + patch_h <= height:
                break

        img1[y0:y0 + patch_h, x0:x0 + patch_w, :] = img2[y0:y0 + patch_h, x0:x0 + patch_w, :]
        frac = patch_h / height * patch_w / width
        y = (1 - frac) * y1 + frac * y2
        return img1, y

    return mixer
def mix_generator(X_train, Y_train, img_dir, batch_size, shuffle=True):
    # Mixup generator (Zhang et al., 2017): draw two independently shuffled
    # augmented batches and blend images/labels with per-sample Beta(0.2, 0.2)
    # mixing coefficients.
    alpha = 0.2
    gen1 = train_generator(X_train, Y_train, img_dir, batch_size, shuffle)
    gen2 = train_generator(X_train, Y_train, img_dir, batch_size, shuffle)
    while True:
        batch1 = next(gen1)
        batch2 = next(gen2)
        current_batch_size = batch1[0].shape[0]
        l = np.random.beta(alpha, alpha, current_batch_size)
        # Broadcast the coefficients over image (NHWC) and label (NC) shapes.
        X_l = l.reshape(current_batch_size, 1, 1, 1)
        Y_l = l.reshape(current_batch_size, 1)
        batch_x = batch1[0] * X_l + batch2[0] * (1 - X_l)
        batch_y = batch1[1] * Y_l + batch2[1] * (1 - Y_l)
        yield (batch_x, batch_y)
def mix_generator2(X_train, Y_train, img_dir, batch_size, shuffle=True):
    # Second-stage mixer: take batches from two independent mixup generators
    # and additionally apply the CutMix-style ``mixer`` closure per sample
    # (paste a patch of sample 2 onto sample 1 and area-mix the labels).
    alpha = 0.2
    gen1 = mix_generator(X_train, Y_train, img_dir, batch_size, shuffle)
    gen2 = mix_generator(X_train, Y_train, img_dir, batch_size, shuffle)
    mixer =get_mixer()
    while True:
        batch1 = next(gen1)
        batch2 = next(gen2)
        batch_x = []
        batch_y = []
        for i in range(batch1[0].shape[0]):
            x1, y1 = batch1[0][i], batch1[1][i]
            x2, y2 = batch2[0][i], batch2[1][i]
            new_x, new_y = mixer(x1, x2, y1, y2)
            batch_x.append(new_x)
            batch_y.append(new_y)
        batch_x = np.array(batch_x)
        batch_y = np.array(batch_y)
        batch = (batch_x, batch_y)
        yield batch
def noize_generator(X_train, Y_train, img_dir, batch_size, shuffle=True):
    """Yield mixup batches blended with pure-noise images that carry an
    extra 56th "noise" class.

    Labels are widened from 55 to 56 columns; column 55 is the noise
    class. Each real batch is mixed (Beta(0.2, 0.2) coefficients) with a
    batch of uniform random-noise images labelled 100% noise.

    BUG FIXES vs. original: ``np.random(...)`` (module is not callable)
    is now ``np.random.random``; the undefined ``batch2`` reference is
    replaced by the noise batch ``x2``/``y2`` actually constructed; the
    ``shuffle`` flag is forwarded instead of being ignored.
    """
    alpha = 0.2
    gen = mix_generator2(X_train, Y_train, img_dir, batch_size, shuffle=shuffle)
    while True:
        batch1 = next(gen)
        current_batch_size = batch1[0].shape[0]
        x1, y1_base = batch1[0], batch1[1]
        # Widen labels to 56 classes; column 55 marks "noise".
        y1 = np.zeros([current_batch_size, 56], np.float32)
        y1[:, :55] = y1_base
        # Pure-noise images in [0, 1), labelled entirely as the noise class.
        x2 = np.random.random(x1.shape)
        y2 = np.zeros_like(y1)
        y2[:, 55] = 1
        l = np.random.beta(alpha, alpha, current_batch_size)
        X_l = l.reshape(current_batch_size, 1, 1, 1)
        Y_l = l.reshape(current_batch_size, 1)
        batch_x = x1 * X_l + x2 * (1 - X_l)
        batch_y = y1 * Y_l + y2 * (1 - Y_l)
        yield (batch_x, batch_y)
def test_generator(x_train, img_dir, batch_size, shuffle=True):
    # Infinite image-only batch generator for prediction: same batching
    # scheme as train_generator but loads sequentially (no Pool) and yields
    # no labels. Callers pass shuffle=False so predictions stay aligned
    # with the input order.
    # x_train = x_train.as_matrix()
    # y_train = y_train.as_matrix()
    batch_index = 0
    n = x_train.shape[0]
    # print("n", n)
    eraser = get_random_eraser(v_h=0.)
    while 1:
        if batch_index == 0:
            index_array = np.arange(n)
            if shuffle:
                index_array = np.random.permutation(n)
        current_index = (batch_index * batch_size) % n
        if n >= current_index + batch_size:
            current_batch_size = batch_size
            batch_index += 1
        else:
            # Short final batch; restart indexing afterwards.
            current_batch_size = n - current_index
            batch_index = 0
        batch_x = []
        batch_id = index_array[current_index: current_index + current_batch_size]
        # print(batch_x_base)
        for id in batch_id:
            # print(x_train[0])
            # print(x_train[id])
            # print(img_dir + '/{}'.format(x_train[id]))
            img = cv2.imread(img_dir + '/{}'.format(x_train[id]))
            # print("img shape", img.shape)
            img = cv2.resize(img, (input_size, input_size))
            # img =eraser(img)
            batch_x.append(img)
        # Scale pixel values to [0, 1].
        batch_x = np.array(batch_x, np.float32) / 255
        # batch_y = y_train[index_array[current_index: current_index + current_batch_size]]
        # print("batch shape", batch_x.shape, batch_y.shape)
        yield batch_x
def load_data(train_path="input/train_master.tsv", test_path="input/sample_submit.tsv"):
    """Load the competition file lists.

    Parameters
    ----------
    train_path : str
        TSV with ``file_name`` and ``category_id`` columns.
    test_path : str
        Header-less TSV whose first column is the test file name.

    Returns
    -------
    (X_train, y_train, X_test)
        File-name array, integer label array, and test file-name Series.
    """
    train = pd.read_csv(train_path, delimiter="\t", index_col=False)
    test = pd.read_csv(test_path, delimiter="\t", index_col=False, header=None)
    print("train shape", train.shape)
    print(train.head())
    # .values replaces the deprecated DataFrame.as_matrix() (removed in
    # pandas 1.0) while returning the same ndarray.
    X_train = train['file_name'].values
    y_train = train['category_id'].values
    X_test = test.iloc[:, 0]
    return X_train, y_train, X_test
from sklearn.model_selection import StratifiedKFold, StratifiedShuffleSplit
from sklearn.metrics import log_loss
def reset_weights(model):
    # Re-run the kernel initializer of every layer that has one, so the
    # same model object can be retrained from scratch for each CV fold.
    # NOTE(review): relies on the TF1-style Keras backend session; biases
    # and batch-norm statistics are NOT re-initialized -- confirm that is
    # acceptable for the per-fold reset.
    session = K.get_session()
    for layer in model.layers:
        if hasattr(layer, 'kernel_initializer'):
            layer.kernel.initializer.run(session=session)
def train(epochs, seed):
    """Run 5-fold stratified cross-validation training and save the
    out-of-fold and test predictions.

    Parameters
    ----------
    epochs : int
        Maximum epochs per fold (early stopping may end a fold sooner).
        BUG FIX: previously this parameter was immediately overwritten
        by a hard-coded ``epochs = 300``.
    seed : int
        Currently unused; fold splits are fixed by ``random_state=42``.
    """
    # parameters
    num_fold = 5
    batch_size = 64
    num_class = 55
    save_path = base_name

    # Load data
    X_train, y_train, X_test = load_data()

    # get model (weights are re-initialized at the start of every fold)
    model = get_model(num_class)

    # CV
    skf = StratifiedKFold(n_splits=num_fold, random_state=42)
    skf.get_n_splits(X_train, y_train)
    y_preds_valid = np.zeros([X_train.shape[0], num_class], np.float32)
    y_preds_test = np.zeros([num_fold, X_test.shape[0], num_class], np.float32)
    k = 0
    for ids_train_split, ids_valid_split in skf.split(X_train, y_train):
        # fold data
        X_train_cv = X_train[ids_train_split]
        y_train_cv = y_train[ids_train_split]
        X_holdout = X_train[ids_valid_split]
        Y_holdout = y_train[ids_valid_split]

        # define file path and get callbacks
        weight_path = "model/" + save_path + "_fold" + str(k) + '.hdf5'
        callbacks = get_callbacks(weight_path, patience=16)
        gen = train_generator(X_train_cv, y_train_cv, "/home/akiyama/PycharmProjects/Cookpad/input/train_1/",
                              batch_size)
        # NOTE(review): validation batches also come from the augmenting
        # train_generator; valid_generator exists but is unused -- confirm
        # whether augmented validation is intended.
        gen_val = train_generator(X_holdout, Y_holdout, "/home/akiyama/PycharmProjects/Cookpad/input/train_1/",
                                  batch_size, shuffle=False)
        gen_val_pred = test_generator(X_holdout, "/home/akiyama/PycharmProjects/Cookpad/input/train_1/", batch_size,
                                      shuffle=False)
        gen_tst_pred = test_generator(X_test, "/home/akiyama/PycharmProjects/Cookpad/input/test/", batch_size,
                                      shuffle=False)

        reset_weights(model)
        # model.load_weights(filepath="model/train_180201_2_Dense_4th_training_ts2017.hdf5")
        model.fit_generator(generator=gen,
                            steps_per_epoch=np.ceil(X_train_cv.shape[0] / batch_size),
                            epochs=epochs,
                            verbose=1,
                            callbacks=callbacks,
                            validation_data=gen_val,
                            validation_steps=np.ceil(X_holdout.shape[0] / batch_size),
                            )

        # Getting the Best Model (the checkpoint saved at lowest val_loss)
        # model.save_weights(filepath=weight_path[:-4] + "_nostop.hdf5")
        model.load_weights(filepath=weight_path)

        # Getting validation prediction
        print("predicting valid...")
        y_preds_valid[ids_valid_split] = model.predict_generator(generator=gen_val_pred,
                                                                 steps=np.ceil(X_holdout.shape[0]/batch_size))
        # Getting Test prediction
        print("predicting test...")
        y_preds_test[k] = model.predict_generator(generator=gen_tst_pred,
                                                  steps=np.ceil(X_test.shape[0]/batch_size))
        k += 1

    # Getting Valid Score
    y_onehot = np.eye(num_class)[y_train]
    valid_loss = log_loss(y_onehot, y_preds_valid)
    valid_acc = np.mean(y_train == np.argmax(y_preds_valid, axis=1))
    print('Valid loss:', valid_loss)
    print('Valid accuracy:', valid_acc)

    # save out-of-fold and per-fold test predictions
    np.save("input/{}_valid_vloss{:.3f}_vcc{:.3f}.npy".format(base_name, valid_loss, valid_acc), y_preds_valid)
    # BUG FIX: test predictions previously overwrote the "_valid_" file.
    np.save("input/{}_test_vloss{:.3f}_vcc{:.3f}.npy".format(base_name, valid_loss, valid_acc), y_preds_test)
def main():
    """Script entry point: launch the 5-fold training run."""
    train(epochs=300, seed=0)


if __name__ == "__main__":
    main()
| 34.454389 | 121 | 0.583825 |
ace5363c979d4775a6fbfb77d2533ad365f8b4c3 | 34,232 | py | Python | plotapp/controllers/figure_controller.py | maldata/matplotlib_qtquick_playground | f7da94093315d8f540124d5037406d004574dede | [
"MIT"
] | null | null | null | plotapp/controllers/figure_controller.py | maldata/matplotlib_qtquick_playground | f7da94093315d8f540124d5037406d004574dede | [
"MIT"
] | null | null | null | plotapp/controllers/figure_controller.py | maldata/matplotlib_qtquick_playground | f7da94093315d8f540124d5037406d004574dede | [
"MIT"
] | null | null | null | import os
import sys
import traceback
import matplotlib
from matplotlib.backends.backend_agg import FigureCanvasAgg
from matplotlib.backend_bases import cursors
from matplotlib.figure import Figure
from matplotlib.backends.backend_qt5 import TimerQT, MODIFIER_KEYS, SPECIAL_KEYS
import six
from PyQt5 import QtCore, QtGui, QtQuick, QtWidgets
DEBUG = False
class MatplotlibIconProvider(QtQuick.QQuickImageProvider):
    """ This class provide the matplotlib icons for the navigation toolbar.

    Icons are served to QML from matplotlib's bundled ``images`` data
    directory, looked up by id with a ``.png`` suffix appended.
    """

    def __init__(self, img_type=QtQuick.QQuickImageProvider.Pixmap):
        QtQuick.QQuickImageProvider.__init__(self, img_type)
        self.basedir = os.path.join(matplotlib.rcParams['datapath'], 'images')

    def requestImage(self, id, size):
        """Return (QImage, QSize) for the icon named ``id``."""
        image = QtGui.QImage(os.path.join(self.basedir, id + '.png'))
        return image, image.size()

    def requestPixmap(self, id, size):
        """Return (QPixmap, QSize) for the icon named ``id``."""
        image, img_size = self.requestImage(id, size)
        return QtGui.QPixmap.fromImage(image), img_size
class FigureCanvasQtQuickAgg(QtQuick.QQuickPaintedItem, FigureCanvasAgg):
    """ This class creates a QtQuick Item encapsulating a Matplotlib
    Figure and all the functions to interact with the 'standard'
    Matplotlib navigation toolbar.

    Rendering is done offscreen by the Agg backend; paint() copies the
    Agg buffer into the QQuickPaintedItem. Qt input events are forwarded
    to the matplotlib event machinery (button_press_event etc.).
    """
    # map Qt button codes to MouseEvent's ones:
    buttond = {
        QtCore.Qt.LeftButton: 1,
        QtCore.Qt.MidButton: 2,
        QtCore.Qt.RightButton: 3,
        # QtCore.Qt.XButton1: None,
        # QtCore.Qt.XButton2: None,
    }
    # map matplotlib cursor ids to Qt cursor shapes
    cursord = {
        cursors.MOVE: QtCore.Qt.SizeAllCursor,
        cursors.HAND: QtCore.Qt.PointingHandCursor,
        cursors.POINTER: QtCore.Qt.ArrowCursor,
        cursors.SELECT_REGION: QtCore.Qt.CrossCursor,
    }
    # signals mirrored by the toolbar subclass; exposed to QML bindings
    messageChanged = QtCore.pyqtSignal(str)
    leftChanged = QtCore.pyqtSignal()
    rightChanged = QtCore.pyqtSignal()
    topChanged = QtCore.pyqtSignal()
    bottomChanged = QtCore.pyqtSignal()
    wspaceChanged = QtCore.pyqtSignal()
    hspaceChanged = QtCore.pyqtSignal()
    def __init__(self, figure, parent=None, coordinates=True):
        # ``figure`` may be None, in which case a default 6x4 inch Figure
        # is created. ``coordinates`` is accepted for toolbar parity but
        # unused here.
        if DEBUG:
            print('FigureCanvasQtQuickAgg qtquick5: ', figure)
        # _create_qApp()
        if figure is None:
            figure = Figure((6.0, 4.0))
        QtQuick.QQuickPaintedItem.__init__(self, parent=parent)
        FigureCanvasAgg.__init__(self, figure=figure)
        self._drawRect = None   # (x, y, w, h) rubber-band rectangle or None
        self.blitbox = None     # bbox of a pending partial (blit) repaint
        # Activate hover events and mouse press events
        self.setAcceptHoverEvents(True)
        self.setAcceptedMouseButtons(QtCore.Qt.AllButtons)
        self._agg_draw_pending = False
    def getFigure(self):
        """Return the wrapped matplotlib Figure."""
        return self.figure
    def drawRectangle(self, rect):
        """Set the rubber-band rectangle (or None) and request a repaint."""
        self._drawRect = rect
        self.update()
    def paint(self, p):
        """
        Copy the image from the Agg canvas to the qt.drawable.
        In Qt, all drawing should be done inside of here when a widget is
        shown onscreen.
        """
        # if the canvas does not have a renderer, then give up and wait for
        # FigureCanvasAgg.draw(self) to be called
        if not hasattr(self, 'renderer'):
            return
        if DEBUG:
            print('FigureCanvasQtQuickAgg.paint: ', self,
                  self.get_width_height())
        if self.blitbox is None:
            # matplotlib is in rgba byte order. QImage wants to put the bytes
            # into argb format and is in a 4 byte unsigned int. Little endian
            # system is LSB first and expects the bytes in reverse order
            # (bgra).
            if QtCore.QSysInfo.ByteOrder == QtCore.QSysInfo.LittleEndian:
                stringBuffer = self.buffer_rgba()
            else:
                stringBuffer = self.tostring_argb()
            refcnt = sys.getrefcount(stringBuffer)
            # convert the Agg rendered image -> qImage
            qImage = QtGui.QImage(stringBuffer, self.renderer.width,
                                  self.renderer.height,
                                  QtGui.QImage.Format_ARGB32)
            # get the rectangle for the image
            rect = qImage.rect()
            # p = QtGui.QPainter(self)
            # reset the image area of the canvas to be the back-ground color
            p.eraseRect(rect)
            # draw the rendered image on to the canvas
            p.drawPixmap(QtCore.QPoint(0, 0), QtGui.QPixmap.fromImage(qImage))
            # draw the zoom rectangle to the QPainter
            if self._drawRect is not None:
                p.setPen(QtGui.QPen(QtCore.Qt.black, 1, QtCore.Qt.DotLine))
                x, y, w, h = self._drawRect
                p.drawRect(x, y, w, h)
        else:
            # partial repaint: copy only the blit bbox region
            bbox = self.blitbox
            l, b, r, t = bbox.extents
            w = int(r) - int(l)
            h = int(t) - int(b)
            t = int(b) + h
            reg = self.copy_from_bbox(bbox)
            stringBuffer = reg.to_string_argb()
            qImage = QtGui.QImage(stringBuffer, w, h,
                                  QtGui.QImage.Format_ARGB32)
            pixmap = QtGui.QPixmap.fromImage(qImage)
            # flip y: Agg bbox origin is bottom-left, Qt's is top-left
            p.drawPixmap(QtCore.QPoint(l, self.renderer.height - t), pixmap)
            # draw the zoom rectangle to the QPainter
            if self._drawRect is not None:
                p.setPen(QtGui.QPen(QtCore.Qt.black, 1, QtCore.Qt.DotLine))
                x, y, w, h = self._drawRect
                p.drawRect(x, y, w, h)
            self.blitbox = None
    def draw(self):
        """
        Draw the figure with Agg, and queue a request for a Qt draw.
        """
        # The Agg draw is done here; delaying causes problems with code that
        # uses the result of the draw() to update plot elements.
        FigureCanvasAgg.draw(self)
        self.update()
    def draw_idle(self):
        """
        Queue redraw of the Agg buffer and request Qt paintEvent.
        """
        # The Agg draw needs to be handled by the same thread matplotlib
        # modifies the scene graph from. Post Agg draw request to the
        # current event loop in order to ensure thread affinity and to
        # accumulate multiple draw requests from event handling.
        # TODO: queued signal connection might be safer than singleShot
        if not self._agg_draw_pending:
            self._agg_draw_pending = True
            QtCore.QTimer.singleShot(0, self.__draw_idle_agg)
    def __draw_idle_agg(self, *args):
        # Deferred Agg draw posted by draw_idle(); skipped while the item
        # has no valid size yet.
        if self.height() < 0 or self.width() < 0:
            self._agg_draw_pending = False
            return
        try:
            FigureCanvasAgg.draw(self)
            self.update()
        except Exception:
            # Uncaught exceptions are fatal for PyQt5, so catch them instead.
            traceback.print_exc()
        finally:
            self._agg_draw_pending = False
    def blit(self, bbox=None):
        """
        Blit the region in bbox
        """
        # If bbox is None, blit the entire canvas. Otherwise
        # blit only the area defined by the bbox.
        # NOTE(review): QQuickPaintedItem does not document a repaint()
        # method -- confirm this call path is exercised/valid.
        if bbox is None and self.figure:
            bbox = self.figure.bbox
        self.blitbox = bbox
        l, b, w, h = bbox.bounds
        t = b + h
        self.repaint(l, self.renderer.height - t, w, h)
    def geometryChanged(self, new_geometry, old_geometry):
        """Resize the Figure (in inches) to track the QML item geometry."""
        w = new_geometry.width()
        h = new_geometry.height()
        if (w <= 0.0) and (h <= 0.0):
            return
        if DEBUG:
            print('resize (%d x %d)' % (w, h))
            print("FigureCanvasQtQuickAgg.geometryChanged(%d, %d)" % (w, h))
        dpival = self.figure.dpi
        winch = w / dpival
        hinch = h / dpival
        self.figure.set_size_inches(winch, hinch)
        FigureCanvasAgg.resize_event(self)
        self.draw_idle()
        QtQuick.QQuickPaintedItem.geometryChanged(self, new_geometry, old_geometry)
    def hoverEnterEvent(self, event):
        """Forward pointer-entered to matplotlib."""
        FigureCanvasAgg.enter_notify_event(self, guiEvent=event)
    def hoverLeaveEvent(self, event):
        """Forward pointer-left to matplotlib and restore the cursor."""
        QtWidgets.QApplication.restoreOverrideCursor()
        FigureCanvasAgg.leave_notify_event(self, guiEvent=event)
    def hoverMoveEvent(self, event):
        """Forward button-less pointer moves as motion_notify events."""
        x = event.pos().x()
        # flipy so y=0 is bottom of canvas
        y = self.figure.bbox.height - event.pos().y()
        FigureCanvasAgg.motion_notify_event(self, x, y, guiEvent=event)
        # if DEBUG:
        #     print('hover move')
    # hoverMoveEvent kicks in when no mouse buttons are pressed
    # otherwise mouseMoveEvent are emitted
    def mouseMoveEvent(self, event):
        """Forward drags (button held) as motion_notify events."""
        x = event.x()
        # flipy so y=0 is bottom of canvas
        y = self.figure.bbox.height - event.y()
        FigureCanvasAgg.motion_notify_event(self, x, y, guiEvent=event)
        # if DEBUG:
        #     print('mouse move')
    def mousePressEvent(self, event):
        """Translate a Qt press into a matplotlib button_press_event."""
        x = event.pos().x()
        # flipy so y=0 is bottom of canvas
        y = self.figure.bbox.height - event.pos().y()
        button = self.buttond.get(event.button())
        if button is not None:
            FigureCanvasAgg.button_press_event(self, x, y, button,
                                               guiEvent=event)
        if DEBUG:
            print('button pressed:', event.button())
    def mouseReleaseEvent(self, event):
        """Translate a Qt release into a matplotlib button_release_event."""
        x = event.x()
        # flipy so y=0 is bottom of canvas
        y = self.figure.bbox.height - event.y()
        button = self.buttond.get(event.button())
        if button is not None:
            FigureCanvasAgg.button_release_event(self, x, y, button,
                                                 guiEvent=event)
        if DEBUG:
            print('button released')
    def mouseDoubleClickEvent(self, event):
        """Translate a Qt double-click into a dblclick button_press_event."""
        x = event.pos().x()
        # flipy so y=0 is bottom of canvas
        y = self.figure.bbox.height - event.pos().y()
        button = self.buttond.get(event.button())
        if button is not None:
            FigureCanvasAgg.button_press_event(self, x, y,
                                               button, dblclick=True,
                                               guiEvent=event)
        if DEBUG:
            print('button doubleclicked:', event.button())
    def wheelEvent(self, event):
        """Translate wheel/touchpad scrolling into matplotlib scroll events."""
        x = event.x()
        # flipy so y=0 is bottom of canvas
        y = self.figure.bbox.height - event.y()
        # from QWheelEvent::delta doc
        if event.pixelDelta().x() == 0 and event.pixelDelta().y() == 0:
            steps = event.angleDelta().y() / 120
        else:
            steps = event.pixelDelta().y()
        if steps != 0:
            FigureCanvasAgg.scroll_event(self, x, y, steps, guiEvent=event)
            if DEBUG:
                print('scroll event: '
                      'steps = %i ' % (steps))
    def keyPressEvent(self, event):
        """Translate a Qt key press into a matplotlib key_press_event."""
        key = self._get_key(event)
        if key is None:
            return
        FigureCanvasAgg.key_press_event(self, key, guiEvent=event)
        if DEBUG:
            print('key press', key)
    def keyReleaseEvent(self, event):
        """Translate a Qt key release into a matplotlib key_release_event."""
        key = self._get_key(event)
        if key is None:
            return
        FigureCanvasAgg.key_release_event(self, key, guiEvent=event)
        if DEBUG:
            print('key release', key)
    def _get_key(self, event):
        # Build matplotlib's "mod1+mod2+key" string from a QKeyEvent, or
        # return None for auto-repeats and non-unicode multimedia keys.
        if event.isAutoRepeat():
            return None
        event_key = event.key()
        event_mods = int(event.modifiers())  # actually a bitmask
        # get names of the pressed modifier keys
        # bit twiddling to pick out modifier keys from event_mods bitmask,
        # if event_key is a MODIFIER, it should not be duplicated in mods
        mods = [name for name, mod_key, qt_key in MODIFIER_KEYS
                if event_key != qt_key and (event_mods & mod_key) == mod_key]
        try:
            # for certain keys (enter, left, backspace, etc) use a word for the
            # key, rather than unicode
            key = SPECIAL_KEYS[event_key]
        except KeyError:
            # unicode defines code points up to 0x0010ffff
            # QT will use Key_Codes larger than that for keyboard keys that are
            # are not unicode characters (like multimedia keys)
            # skip these
            # if you really want them, you should add them to SPECIAL_KEYS
            MAX_UNICODE = 0x10ffff
            if event_key > MAX_UNICODE:
                return None
            key = six.unichr(event_key)
            # qt delivers capitalized letters.  fix capitalization
            # note that capslock is ignored
            if 'shift' in mods:
                mods.remove('shift')
            else:
                key = key.lower()
        mods.reverse()
        return '+'.join(mods + [key])
    def new_timer(self, *args, **kwargs):
        """
        Creates a new backend-specific subclass of
        :class:`backend_bases.Timer`.  This is useful for getting
        periodic events through the backend's native event
        loop. Implemented only for backends with GUIs.
        optional arguments:
        *interval*
            Timer interval in milliseconds
        *callbacks*
            Sequence of (func, args, kwargs) where func(*args, **kwargs)
            will be executed by the timer every *interval*.
        """
        return TimerQT(*args, **kwargs)
    def flush_events(self):
        # NOTE(review): ``qApp`` is never assigned in this module as shown
        # (the _create_qApp() call in __init__ is commented out), so this
        # raises NameError if called -- confirm intended setup.
        global qApp
        qApp.processEvents()
    def start_event_loop(self, timeout):
        FigureCanvasAgg.start_event_loop(self, timeout)
    start_event_loop.__doc__ = \
        FigureCanvasAgg.start_event_loop.__doc__
    def stop_event_loop(self):
        FigureCanvasAgg.stop_event_loop(self)
    stop_event_loop.__doc__ = FigureCanvasAgg.stop_event_loop.__doc__
class FigureQtQuickAggToolbar(FigureCanvasQtQuickAgg):
    """ This class creates a QtQuick Item encapsulating a Matplotlib
        Figure and all the functions to interact with the 'standard'
        Matplotlib navigation toolbar.
    """
    # Maps matplotlib toolbar cursor enums to the Qt cursor shapes shown
    # while panning/zooming.
    cursord = {
        cursors.MOVE: QtCore.Qt.SizeAllCursor,
        cursors.HAND: QtCore.Qt.PointingHandCursor,
        cursors.POINTER: QtCore.Qt.ArrowCursor,
        cursors.SELECT_REGION: QtCore.Qt.CrossCursor,
    }
    # Qt signals exposed to QML; emitted when the corresponding
    # property changes.
    messageChanged = QtCore.pyqtSignal(str)
    leftChanged = QtCore.pyqtSignal()
    rightChanged = QtCore.pyqtSignal()
    topChanged = QtCore.pyqtSignal()
    bottomChanged = QtCore.pyqtSignal()
    wspaceChanged = QtCore.pyqtSignal()
    hspaceChanged = QtCore.pyqtSignal()

    def __init__(self, figure, parent=None, coordinates=True):
        """Wrap *figure* in a QtQuick canvas with toolbar state attached.

        :param figure: matplotlib Figure to display
        :param parent: optional parent QQuickItem
        :param coordinates: whether coordinate readout is enabled
        """
        if DEBUG:
            print('FigureQtQuickAggToolbar qtquick5: ', figure)
        FigureCanvasQtQuickAgg.__init__(self, figure=figure, parent=parent)
        self._message = ""
        #
        # Attributes from NavigationToolbar2QT
        #
        self.coordinates = coordinates
        self._actions = {}
        # reference holder for subplots_adjust window
        self.adj_window = None
        #
        # Attributes from NavigationToolbar2
        #
        self.canvas = self.figure.canvas
        self.toolbar = self
        # a dict from axes index to a list of view limits
        self._views = matplotlib.cbook.Stack()
        self._positions = matplotlib.cbook.Stack()  # stack of subplot positions
        self._xypress = None  # the location and axis info at the time
                              # of the press
        self._idPress = None
        self._idRelease = None
        self._active = None
        self._lastCursor = None
        self._idDrag = self.canvas.mpl_connect(
            'motion_notify_event', self.mouse_move)
        self._ids_zoom = []
        self._zoom_mode = None
        self._button_pressed = None  # determined by the button pressed
                                     # at start
        self.mode = ''  # a mode string for the status bar
        self.set_history_buttons()
        #
        # Store margins; assigning through setattr routes through the
        # pyqtProperty setters below so QML bindings are initialized.
        #
        self._defaults = {}
        for attr in ('left', 'bottom', 'right', 'top', 'wspace', 'hspace',):
            val = getattr(self.figure.subplotpars, attr)
            self._defaults[attr] = val
            setattr(self, attr, val)

    @QtCore.pyqtProperty('QString', notify=messageChanged)
    def message(self):
        return self._message

    @message.setter
    def message(self, msg):
        if msg != self._message:
            self._message = msg
            self.messageChanged.emit(msg)

    @QtCore.pyqtProperty('QString', constant=True)
    def defaultDirectory(self):
        startpath = matplotlib.rcParams.get('savefig.directory', '')
        return os.path.expanduser(startpath)

    @QtCore.pyqtProperty('QStringList', constant=True)
    def fileFilters(self):
        """File-dialog filter strings, one per supported file-type group."""
        filetypes = self.canvas.get_supported_filetypes_grouped()
        sorted_filetypes = list(six.iteritems(filetypes))
        sorted_filetypes.sort()
        filters = []
        for name, exts in sorted_filetypes:
            exts_list = " ".join(['*.%s' % ext for ext in exts])
            file_filter = '%s (%s)' % (name, exts_list)
            filters.append(file_filter)
        return filters

    @QtCore.pyqtProperty('QString', constant=True)
    def defaultFileFilter(self):
        """Filter entry matching the canvas' default file type.

        BUGFIX: the extensions parsed back out of a filter string carry a
        '*.' prefix (e.g. '*.png'), so the old comparison against the bare
        extension ('png') never matched and always fell back to the first
        filter. Compare against '*.<ext>' instead. Also use the positional
        maxsplit form of str.split for py2/six compatibility.
        """
        default_filetype = self.canvas.get_default_filetype()
        for file_filter in self.fileFilters:
            exts = file_filter.split('(', 1)[1]
            exts = exts[:-1].split()
            if '*.%s' % default_filetype in exts:
                return file_filter
        return self.fileFilters[0]

    # --- Subplot margin properties, mirrored to/from figure.subplotpars ---

    @QtCore.pyqtProperty(float, notify=leftChanged)
    def left(self):
        return self.figure.subplotpars.left

    @left.setter
    def left(self, value):
        if value != self.figure.subplotpars.left:
            self.figure.subplots_adjust(left=value)
            self.leftChanged.emit()
            self.figure.canvas.draw_idle()

    @QtCore.pyqtProperty(float, notify=rightChanged)
    def right(self):
        return self.figure.subplotpars.right

    @right.setter
    def right(self, value):
        if value != self.figure.subplotpars.right:
            self.figure.subplots_adjust(right=value)
            self.rightChanged.emit()
            self.figure.canvas.draw_idle()

    @QtCore.pyqtProperty(float, notify=topChanged)
    def top(self):
        return self.figure.subplotpars.top

    @top.setter
    def top(self, value):
        if value != self.figure.subplotpars.top:
            self.figure.subplots_adjust(top=value)
            self.topChanged.emit()
            self.figure.canvas.draw_idle()

    @QtCore.pyqtProperty(float, notify=bottomChanged)
    def bottom(self):
        return self.figure.subplotpars.bottom

    @bottom.setter
    def bottom(self, value):
        if value != self.figure.subplotpars.bottom:
            self.figure.subplots_adjust(bottom=value)
            self.bottomChanged.emit()
            self.figure.canvas.draw_idle()

    @QtCore.pyqtProperty(float, notify=hspaceChanged)
    def hspace(self):
        return self.figure.subplotpars.hspace

    @hspace.setter
    def hspace(self, value):
        if value != self.figure.subplotpars.hspace:
            self.figure.subplots_adjust(hspace=value)
            self.hspaceChanged.emit()
            self.figure.canvas.draw_idle()

    @QtCore.pyqtProperty(float, notify=wspaceChanged)
    def wspace(self):
        return self.figure.subplotpars.wspace

    @wspace.setter
    def wspace(self, value):
        if value != self.figure.subplotpars.wspace:
            self.figure.subplots_adjust(wspace=value)
            self.wspaceChanged.emit()
            self.figure.canvas.draw_idle()

    def mouse_move(self, event):
        """Update the cursor shape and the coordinate/status message."""
        self._set_cursor(event)
        if event.inaxes and event.inaxes.get_navigate():
            try:
                s = event.inaxes.format_coord(event.xdata, event.ydata)
            except (ValueError, OverflowError):
                pass
            else:
                artists = [a for a in event.inaxes.mouseover_set
                           if a.contains(event)]
                if artists:
                    # Report cursor data of the topmost artist under the mouse.
                    a = max(enumerate(artists), key=lambda x: x[1].zorder)[1]
                    if a is not event.inaxes.patch:
                        data = a.get_cursor_data(event)
                        if data is not None:
                            s += ' [{:s}]'.format(a.format_cursor_data(data))
                if len(self.mode):
                    self.message = '{:s}, {:s}'.format(self.mode, s)
                else:
                    self.message = s
        else:
            self.message = self.mode

    def dynamic_update(self):
        self.canvas.draw_idle()

    def push_current(self):
        """push the current view limits and position onto the stack"""
        views = []
        pos = []
        for a in self.canvas.figure.get_axes():
            views.append(a._get_view())
            # Store both the original and modified positions
            pos.append((
                a.get_position(True).frozen(),
                a.get_position().frozen()))
        self._views.push(views)
        self._positions.push(pos)
        self.set_history_buttons()

    def set_history_buttons(self):
        """Enable or disable back/forward button"""
        pass

    def _update_view(self):
        """Update the viewlim and position from the view and
        position stack for each axes
        """
        views = self._views()
        if views is None:
            return
        pos = self._positions()
        if pos is None:
            return
        for i, a in enumerate(self.canvas.figure.get_axes()):
            a._set_view(views[i])
            # Restore both the original and modified positions
            a.set_position(pos[i][0], 'original')
            a.set_position(pos[i][1], 'active')
        self.canvas.draw_idle()

    @QtCore.pyqtSlot()
    def home(self, *args):
        """Restore the original view"""
        self._views.home()
        self._positions.home()
        self.set_history_buttons()
        self._update_view()

    @QtCore.pyqtSlot()
    def forward(self, *args):
        """Move forward in the view lim stack"""
        self._views.forward()
        self._positions.forward()
        self.set_history_buttons()
        self._update_view()

    @QtCore.pyqtSlot()
    def back(self, *args):
        """move back up the view lim stack"""
        self._views.back()
        self._positions.back()
        self.set_history_buttons()
        self._update_view()

    def _set_cursor(self, event):
        # Choose a cursor shape matching the active tool and pointer location.
        if not event.inaxes or not self._active:
            if self._lastCursor != cursors.POINTER:
                self.set_cursor(cursors.POINTER)
                self._lastCursor = cursors.POINTER
        else:
            if self._active == 'ZOOM':
                if self._lastCursor != cursors.SELECT_REGION:
                    self.set_cursor(cursors.SELECT_REGION)
                    self._lastCursor = cursors.SELECT_REGION
            elif (self._active == 'PAN' and
                  self._lastCursor != cursors.MOVE):
                self.set_cursor(cursors.MOVE)
                self._lastCursor = cursors.MOVE

    def set_cursor(self, cursor):
        """
        Set the current cursor to one of the :class:`Cursors`
        enums values
        """
        if DEBUG:
            print('Set cursor', cursor)
        self.canvas.setCursor(self.cursord[cursor])

    def draw_with_locators_update(self):
        """Redraw the canvases, update the locators"""
        for a in self.canvas.figure.get_axes():
            xaxis = getattr(a, 'xaxis', None)
            yaxis = getattr(a, 'yaxis', None)
            locators = []
            if xaxis is not None:
                locators.append(xaxis.get_major_locator())
                locators.append(xaxis.get_minor_locator())
            if yaxis is not None:
                locators.append(yaxis.get_major_locator())
                locators.append(yaxis.get_minor_locator())
            for loc in locators:
                loc.refresh()
        self.canvas.draw_idle()

    def press(self, event):
        """Called whenever a mouse button is pressed."""
        pass

    def press_pan(self, event):
        """the press mouse button in pan/zoom mode callback"""
        if event.button == 1:
            self._button_pressed = 1
        elif event.button == 3:
            self._button_pressed = 3
        else:
            self._button_pressed = None
            return
        x, y = event.x, event.y
        # push the current view to define home if stack is empty
        if self._views.empty():
            self.push_current()
        self._xypress = []
        for i, a in enumerate(self.canvas.figure.get_axes()):
            if (x is not None and y is not None and a.in_axes(event) and
                    a.get_navigate() and a.can_pan()):
                a.start_pan(x, y, event.button)
                self._xypress.append((a, i))
                # Swap the motion handler from coordinate readout to panning.
                self.canvas.mpl_disconnect(self._idDrag)
                self._idDrag = self.canvas.mpl_connect('motion_notify_event',
                                                       self.drag_pan)
        self.press(event)

    def release(self, event):
        """this will be called whenever mouse button is released"""
        pass

    def release_pan(self, event):
        """the release mouse button callback in pan/zoom mode"""
        if self._button_pressed is None:
            return
        # Restore the coordinate-readout motion handler.
        self.canvas.mpl_disconnect(self._idDrag)
        self._idDrag = self.canvas.mpl_connect(
            'motion_notify_event', self.mouse_move)
        for a, ind in self._xypress:
            a.end_pan()
        if not self._xypress:
            return
        self._xypress = []
        self._button_pressed = None
        self.push_current()
        self.release(event)
        self.draw_with_locators_update()

    def drag_pan(self, event):
        """the drag callback in pan/zoom mode"""
        for a, ind in self._xypress:
            # safer to use the recorded button at the press than current button:
            # multiple button can get pressed during motion...
            a.drag_pan(self._button_pressed, event.key, event.x, event.y)
        self.dynamic_update()

    @QtCore.pyqtSlot()
    def pan(self, *args):
        """Activate the pan/zoom tool. pan with left button, zoom with right"""
        # set the pointer icon and button press funcs to the
        # appropriate callbacks
        if self._active == 'PAN':
            self._active = None
        else:
            self._active = 'PAN'
        if self._idPress is not None:
            self._idPress = self.canvas.mpl_disconnect(self._idPress)
            self.mode = ''
        if self._idRelease is not None:
            self._idRelease = self.canvas.mpl_disconnect(self._idRelease)
            self.mode = ''
        if self._active:
            self._idPress = self.canvas.mpl_connect(
                'button_press_event', self.press_pan)
            self._idRelease = self.canvas.mpl_connect(
                'button_release_event', self.release_pan)
            self.mode = 'pan/zoom'
            self.canvas.widgetlock(self)
        else:
            self.canvas.widgetlock.release(self)
        for a in self.canvas.figure.get_axes():
            a.set_navigate_mode(self._active)
        self.message = self.mode

    def draw_rubberband(self, event, x0, y0, x1, y1):
        """Draw a rectangle rubberband to indicate zoom limits"""
        # Qt's y axis grows downward; flip from matplotlib's convention.
        height = self.canvas.figure.bbox.height
        y1 = height - y1
        y0 = height - y0
        w = abs(x1 - x0)
        h = abs(y1 - y0)
        rect = [int(val) for val in (min(x0, x1), min(y0, y1), w, h)]
        self.canvas.drawRectangle(rect)

    def remove_rubberband(self):
        """Remove the rubberband"""
        self.canvas.drawRectangle(None)

    def _switch_on_zoom_mode(self, event):
        # Axis-constrained zoom while an 'x' or 'y' key is held.
        self._zoom_mode = event.key
        self.mouse_move(event)

    def _switch_off_zoom_mode(self, event):
        self._zoom_mode = None
        self.mouse_move(event)

    def drag_zoom(self, event):
        """the drag callback in zoom mode"""
        if self._xypress:
            x, y = event.x, event.y
            lastx, lasty, a, ind, view = self._xypress[0]
            # adjust x, last, y, last
            x1, y1, x2, y2 = a.bbox.extents
            x, lastx = max(min(x, lastx), x1), min(max(x, lastx), x2)
            y, lasty = max(min(y, lasty), y1), min(max(y, lasty), y2)
            if self._zoom_mode == "x":
                x1, y1, x2, y2 = a.bbox.extents
                y, lasty = y1, y2
            elif self._zoom_mode == "y":
                x1, y1, x2, y2 = a.bbox.extents
                x, lastx = x1, x2
            self.draw_rubberband(event, x, y, lastx, lasty)

    def press_zoom(self, event):
        """the press mouse button in zoom to rect mode callback"""
        # If we're already in the middle of a zoom, pressing another
        # button works to "cancel"
        if self._ids_zoom != []:
            for zoom_id in self._ids_zoom:
                self.canvas.mpl_disconnect(zoom_id)
            self.release(event)
            self.draw_with_locators_update()
            self._xypress = None
            self._button_pressed = None
            self._ids_zoom = []
            return
        if event.button == 1:
            self._button_pressed = 1
        elif event.button == 3:
            self._button_pressed = 3
        else:
            self._button_pressed = None
            return
        x, y = event.x, event.y
        # push the current view to define home if stack is empty
        if self._views.empty():
            self.push_current()
        self._xypress = []
        for i, a in enumerate(self.canvas.figure.get_axes()):
            if (x is not None and y is not None and a.in_axes(event) and
                    a.get_navigate() and a.can_zoom()):
                self._xypress.append((x, y, a, i, a._get_view()))
        id1 = self.canvas.mpl_connect('motion_notify_event', self.drag_zoom)
        id2 = self.canvas.mpl_connect('key_press_event',
                                      self._switch_on_zoom_mode)
        id3 = self.canvas.mpl_connect('key_release_event',
                                      self._switch_off_zoom_mode)
        self._ids_zoom = id1, id2, id3
        self._zoom_mode = event.key
        self.press(event)

    def release_zoom(self, event):
        """the release mouse button callback in zoom to rect mode"""
        for zoom_id in self._ids_zoom:
            self.canvas.mpl_disconnect(zoom_id)
        self._ids_zoom = []
        self.remove_rubberband()
        if not self._xypress:
            return
        last_a = []
        for cur_xypress in self._xypress:
            x, y = event.x, event.y
            lastx, lasty, a, ind, view = cur_xypress
            # ignore singular clicks - 5 pixels is a threshold
            # allows the user to "cancel" a zoom action
            # by zooming by less than 5 pixels
            if ((abs(x - lastx) < 5 and self._zoom_mode != "y") or
                    (abs(y - lasty) < 5 and self._zoom_mode != "x")):
                self._xypress = None
                self.release(event)
                self.draw_with_locators_update()
                return
            # detect twinx,y axes and avoid double zooming
            twinx, twiny = False, False
            if last_a:
                for la in last_a:
                    if a.get_shared_x_axes().joined(a, la):
                        twinx = True
                    if a.get_shared_y_axes().joined(a, la):
                        twiny = True
            last_a.append(a)
            if self._button_pressed == 1:
                direction = 'in'
            elif self._button_pressed == 3:
                direction = 'out'
            else:
                continue
            a._set_view_from_bbox((lastx, lasty, x, y), direction,
                                  self._zoom_mode, twinx, twiny)
        self.draw_with_locators_update()
        self._xypress = None
        self._button_pressed = None
        self._zoom_mode = None
        self.push_current()
        self.release(event)

    @QtCore.pyqtSlot()
    def zoom(self, *args):
        """Activate zoom to rect mode"""
        if self._active == 'ZOOM':
            self._active = None
        else:
            self._active = 'ZOOM'
        if self._idPress is not None:
            self._idPress = self.canvas.mpl_disconnect(self._idPress)
            self.mode = ''
        if self._idRelease is not None:
            self._idRelease = self.canvas.mpl_disconnect(self._idRelease)
            self.mode = ''
        if self._active:
            self._idPress = self.canvas.mpl_connect('button_press_event',
                                                    self.press_zoom)
            self._idRelease = self.canvas.mpl_connect('button_release_event',
                                                      self.release_zoom)
            self.mode = 'zoom rect'
            self.canvas.widgetlock(self)
        else:
            self.canvas.widgetlock.release(self)
        for a in self.canvas.figure.get_axes():
            a.set_navigate_mode(self._active)
        self.message = self.mode

    @QtCore.pyqtSlot()
    def tight_layout(self):
        self.figure.tight_layout()
        # self._setSliderPositions()
        self.draw_idle()

    @QtCore.pyqtSlot()
    def reset_margin(self):
        """Restore the subplot margins captured at construction time."""
        self.figure.subplots_adjust(**self._defaults)
        # self._setSliderPositions()
        self.draw_idle()

    @QtCore.pyqtSlot(str)
    def print_figure(self, fname, *args, **kwargs):
        """Save the figure to *fname* (a QML file URL) and redraw."""
        if fname:
            fname = QtCore.QUrl(fname).toLocalFile()
            # save dir for next time
            savefig_dir = os.path.dirname(six.text_type(fname))
            matplotlib.rcParams['savefig.directory'] = savefig_dir
            fname = six.text_type(fname)
            FigureCanvasAgg.print_figure(self, fname, *args, **kwargs)
            self.draw()
# Backwards-compatible aliases matching the standard Qt Agg backend names,
# so this module can be used as a drop-in matplotlib backend.
FigureCanvasQTAgg = FigureCanvasQtQuickAgg
FigureCanvasQTAggToolbar = FigureQtQuickAggToolbar
| 33.692913 | 83 | 0.580539 |
ace5377e7eb26160e1814b7fa18d4832324d33d8 | 818 | py | Python | module2-sql-for-analysis/elephant_queries.py | DAVIDCRUZ0202/DS-Unit-3-Sprint-2-SQL-and-Databases | 1e19166138552cece578a3b857de27c945a4d1bd | [
"MIT"
] | null | null | null | module2-sql-for-analysis/elephant_queries.py | DAVIDCRUZ0202/DS-Unit-3-Sprint-2-SQL-and-Databases | 1e19166138552cece578a3b857de27c945a4d1bd | [
"MIT"
] | null | null | null | module2-sql-for-analysis/elephant_queries.py | DAVIDCRUZ0202/DS-Unit-3-Sprint-2-SQL-and-Databases | 1e19166138552cece578a3b857de27c945a4d1bd | [
"MIT"
] | null | null | null | import psycopg2
# Demo script: connect to a Postgres instance using credentials from a .env
# file, read back the contents of test_table, then insert one row and commit.
from dotenv import load_dotenv
import os
load_dotenv() #> loads contents of the .env file into the script's environment
# "OOPS" defaults make a missing/incomplete .env obvious at connect time
# instead of passing None silently.
DB_NAME = os.getenv("DB_NAME", default="OOPS")
DB_USER = os.getenv("DB_USER", default="OOPS")
DB_PASSWORD = os.getenv("DB_PASSWORD", default="OOPS")
DB_HOST = os.getenv("DB_HOST", default="OOPS")
# psycopg2 is imported earlier in the file.
connection = psycopg2.connect(dbname=DB_NAME, user=DB_USER,
                              password=DB_PASSWORD, host=DB_HOST)
print("CONNECTION", type(connection))
cursor = connection.cursor()
print("CURSOR", type(cursor))
cursor.execute('SELECT * from test_table;')
result = cursor.fetchall()
print(result)
# Parameterized insert (%s placeholders) -- psycopg2 escapes the values.
insertion_sql= "INSERT INTO test_table (name, data) VALUES(%s, %s)"
cursor.execute(insertion_sql, ("A Row", "null"))
connection.commit()
cursor.close()
connection.close() | 23.371429 | 78 | 0.721271 |
ace537fb29bd1ea21197f2b83be5e2796e960095 | 3,144 | py | Python | apps/core/views/gmm.py | CosmosTUe/Cosmos | cec2541d3f8ea1944edfaab2090916ba66f9d2f3 | [
"MIT"
] | 1 | 2021-02-01T19:27:07.000Z | 2021-02-01T19:27:07.000Z | apps/core/views/gmm.py | CosmosTUe/Cosmos | cec2541d3f8ea1944edfaab2090916ba66f9d2f3 | [
"MIT"
] | 79 | 2020-08-05T09:01:00.000Z | 2022-03-24T11:27:21.000Z | apps/core/views/gmm.py | CosmosTUe/Cosmos | cec2541d3f8ea1944edfaab2090916ba66f9d2f3 | [
"MIT"
] | 3 | 2021-02-22T18:36:52.000Z | 2021-10-13T17:05:44.000Z | from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
from django.db import transaction
from django.shortcuts import render
from django.urls import reverse_lazy
from django.views.generic import CreateView, DeleteView, UpdateView
from apps.core.forms.gmm import GMMForm, GMMFormSet, GMMFormSetHelper
from apps.core.models.gmm import GMM
class GMMCreate(LoginRequiredMixin, PermissionRequiredMixin, CreateView):
    """Create a GMM together with its attached file formset.

    Requires the ``cosmos.add_gmm`` permission.
    """

    model = GMM
    template_name = "gmm/gmm_create.html"
    form_class = GMMForm
    success_url = None

    # Permissions
    permission_required = "cosmos.add_gmm"
    raise_exception = True

    def get_context_data(self, **kwargs):
        """Add the file formset (bound to POST data when present) and its helper."""
        data = super(GMMCreate, self).get_context_data(**kwargs)
        if self.request.POST:
            data["files"] = GMMFormSet(self.request.POST, self.request.FILES, instance=self.object)
        else:
            data["files"] = GMMFormSet(instance=self.object)
        data["helper"] = GMMFormSetHelper()
        return data

    def form_valid(self, form):
        """Persist the GMM and its files atomically.

        BUGFIX: the original returned ``False`` when the formset was invalid,
        which is not a valid HttpResponse and also left the already-saved GMM
        committed. Validate the formset before saving anything and re-render
        the form with errors instead.
        """
        context = self.get_context_data()
        files = context["files"]
        if not files.is_valid():
            return self.form_invalid(form)
        with transaction.atomic():
            self.object = form.save()
            files.instance = self.object
            files.save()
        return super(GMMCreate, self).form_valid(form)

    def get_success_url(self):
        return reverse_lazy("cosmos_core:gmm-list")
class GMMUpdate(LoginRequiredMixin, PermissionRequiredMixin, UpdateView):
    """Edit an existing GMM together with its attached file formset.

    Requires the ``cosmos.change_gmm`` permission.
    """

    model = GMM
    template_name = "gmm/gmm_update.html"
    form_class = GMMForm
    success_url = None

    # Permissions
    permission_required = "cosmos.change_gmm"
    raise_exception = True

    def get_context_data(self, **kwargs):
        """Add the file formset (bound to POST data when present) and its helper."""
        data = super(GMMUpdate, self).get_context_data(**kwargs)
        if self.request.POST:
            data["files"] = GMMFormSet(self.request.POST, self.request.FILES, instance=self.object)
        else:
            data["files"] = GMMFormSet(instance=self.object)
        data["helper"] = GMMFormSetHelper()
        return data

    def form_valid(self, form):
        """Persist the GMM and its files atomically.

        FIX: the original silently ignored an invalid file formset and still
        reported success; re-render the form with errors instead, matching
        the create view's behavior.
        """
        context = self.get_context_data()
        files = context["files"]
        if not files.is_valid():
            return self.form_invalid(form)
        with transaction.atomic():
            self.object = form.save()
            files.instance = self.object
            files.save()
        return super(GMMUpdate, self).form_valid(form)

    def get_success_url(self):
        return reverse_lazy("cosmos_core:gmm-list")
class GMMDelete(LoginRequiredMixin, PermissionRequiredMixin, DeleteView):
    """Confirm-and-delete view for a GMM.

    Requires the ``cosmos.delete_gmm`` permission; redirects to the GMM
    list on success.
    """

    model = GMM
    template_name = "gmm/gmm_confirm_delete.html"
    success_url = reverse_lazy("cosmos_core:gmm-list")
    # Permissions
    permission_required = "cosmos.delete_gmm"
    raise_exception = True
def gmm_list(request):
    """Render all GMMs in reverse chronological order."""
    context = {"gmm_list": GMM.objects.order_by("-date").all()}
    return render(request, "gmm/gmm_list.html", context)
| 32.412371 | 99 | 0.659351 |
ace538edbab695d2b02403029592c1a49add65c4 | 1,366 | py | Python | ros/src/twist_controller/yaw_controller.py | JJMats/programming_a_real_self_driving_car | 73fcc24826479774311c62c3eecf224966dc05a3 | [
"MIT"
] | null | null | null | ros/src/twist_controller/yaw_controller.py | JJMats/programming_a_real_self_driving_car | 73fcc24826479774311c62c3eecf224966dc05a3 | [
"MIT"
] | 11 | 2019-04-23T21:28:22.000Z | 2022-02-10T00:07:40.000Z | ros/src/twist_controller/yaw_controller.py | JJMats/programming_a_real_self_driving_car | 73fcc24826479774311c62c3eecf224966dc05a3 | [
"MIT"
] | 3 | 2019-04-27T03:56:13.000Z | 2020-05-06T14:17:35.000Z | import rospy
from math import atan
class YawController(object):
    """Convert twist targets (linear/angular velocity) into a steering command.

    Uses a bicycle model: steering angle = atan(wheel_base / turn_radius)
    scaled by the steering ratio and clamped to the wheel's physical limits.

    Cleanup: removed the unused ``ang_vel_orig`` local and the commented-out
    debug logging it existed for; behavior is unchanged.
    """

    def __init__(self, wheel_base, steer_ratio, min_speed, max_lat_accel, max_steer_angle):
        self.wheel_base = wheel_base
        self.steer_ratio = steer_ratio
        self.min_speed = min_speed
        self.max_lat_accel = max_lat_accel
        self.min_angle = -max_steer_angle
        self.max_angle = max_steer_angle

    def get_angle(self, radius):
        """Return the clamped steering-wheel angle for a given turn radius."""
        angle = atan(self.wheel_base / radius) * self.steer_ratio
        return max(self.min_angle, min(self.max_angle, angle))

    def get_steering(self, linear_velocity, angular_velocity, current_velocity):
        """Return the steering command for the requested twist.

        :param linear_velocity: target forward speed
        :param angular_velocity: target yaw rate at the target speed
        :param current_velocity: measured forward speed
        :return: steering-wheel angle, 0.0 when no turn is requested
        """
        # Rescale the commanded yaw rate to the vehicle's actual speed.
        if abs(linear_velocity) > 0.:
            yaw_rate = current_velocity * angular_velocity / linear_velocity
        else:
            yaw_rate = 0.
        # Cap the yaw rate so lateral acceleration stays within limits
        # (skipped at near-zero speed where the bound would blow up).
        if abs(current_velocity) > 0.1:
            max_yaw_rate = abs(self.max_lat_accel / current_velocity)
            yaw_rate = max(-max_yaw_rate, min(max_yaw_rate, yaw_rate))
        if abs(yaw_rate) > 0.:
            # Turn radius = speed / yaw rate; enforce min_speed to bound it.
            return self.get_angle(max(current_velocity, self.min_speed) / yaw_rate)
        return 0.0
ace5391be75d1acb5ace0883684fc01f58423332 | 52,737 | py | Python | anchore_engine/services/policy_engine/engine/loaders.py | octarinesec/anchore-engine | c045794dde9d282a6393c60febb4852fefe20353 | [
"Apache-2.0"
] | null | null | null | anchore_engine/services/policy_engine/engine/loaders.py | octarinesec/anchore-engine | c045794dde9d282a6393c60febb4852fefe20353 | [
"Apache-2.0"
] | null | null | null | anchore_engine/services/policy_engine/engine/loaders.py | octarinesec/anchore-engine | c045794dde9d282a6393c60febb4852fefe20353 | [
"Apache-2.0"
] | null | null | null | import base64
import hashlib
import json
import re
from anchore_engine.utils import ensure_str, ensure_bytes
from anchore_engine.db import DistroNamespace
from anchore_engine.db import (
Image,
ImagePackage,
FilesystemAnalysis,
ImageNpm,
ImageGem,
AnalysisArtifact,
ImagePackageManifestEntry,
ImageCpe,
) # , ImageJava, ImagePython
from anchore_engine.subsys import logger
from anchore_engine.util.rpm import split_rpm_filename
from anchore_engine.common.helpers import safe_extract_json_value
# Static mapping from known package names (keys, per ecosystem) to their
# official CPE product names, used when the auto-generated CPE would
# otherwise fail to match entries in the vulnerability data.
nomatch_inclusions = {
    "java": {
        "springframework": ["spring_framework", "springsource_spring_framework"],
        "spring-core": ["spring_framework", "springsource_spring_framework"],
    },
    "npm": {
        "hapi": ["hapi_server_framework"],
        "handlebars.js": ["handlebars"],
        "is-my-json-valid": ["is_my_json_valid"],
        "mustache": ["mustache.js"],
    },
    "gem": {
        "Arabic-Prawn": ["arabic_prawn"],
        "bio-basespace-sdk": ["basespace_ruby_sdk"],
        "cremefraiche": ["creme_fraiche"],
        "html-sanitizer": ["html_sanitizer"],
        "sentry-raven": ["raven-ruby"],
        "RedCloth": ["redcloth_library"],
        "VladTheEnterprising": ["vladtheenterprising"],
        "yajl-ruby": ["yajl-ruby_gem"],
    },
    "python": {
        "python-rrdtool": ["rrdtool"],
    },
}
class ImageLoader(object):
"""
Takes an image analysis json and converts it to a set of records for commit to the db.
Assumes there is a global session wrapper and will add items to the session but does not
commit the session itself.
"""
    def __init__(self, analysis_json):
        """Hold the raw analysis export until load() converts it to records.

        :param analysis_json: catalog analysis export (list) or direct
            image export (dict)
        """
        # Timing markers; not set by load() in the visible code -- presumably
        # populated by callers that measure load duration (TODO confirm).
        self.start_time = None
        self.stop_time = None
        self.image_export_json = analysis_json
    def load(self):
        """
        Loads the exported image data into this system for usage.

        Accepts either a catalog analysis export (a one-element list wrapping
        the image data) or a direct image export dict, builds an Image record,
        and populates packages, filesystem, CPEs, and analysis artifacts from
        the analyzer report sections.

        :return: an initialized Image() record, not persisted to DB yet
        """
        logger.info("Loading image json")
        # Unwrap the catalog export format: [{"image": {"imageId": ..., "imagedata": {...}}}]
        if type(self.image_export_json) == list and len(self.image_export_json) == 1:
            image_id = self.image_export_json[0].get("image", {}).get("imageId")
            assert image_id
            self.image_export_json = (
                self.image_export_json[0].get("image", {}).get("imagedata", {})
            )
            logger.info(
                "Detected a direct export format for image id: {} rather than a catalog analysis export".format(
                    image_id
                )
            )
        analysis_report = self.image_export_json.get("analysis_report")
        image_report = self.image_export_json.get("image_report")
        image = Image()
        image.id = image_report.get("meta", {}).get("imageId")
        image.size = int(image_report.get("meta", {}).get("sizebytes", -1))
        repo_digests = image_report.get("docker_data", {}).get("RepoDigests", [])
        repo_tags = image_report.get("docker_data", {}).get("RepoTags", [])
        if len(repo_digests) > 1:
            logger.warn(
                "Found more than one digest for the image {}. Using the first. Digests: {}, Tags: {}".format(
                    image.id, repo_digests, repo_tags
                )
            )
        # RepoDigests entries look like "repo@sha256:..."; keep only the digest part.
        image.digest = repo_digests[0].split("@", 1)[1] if repo_digests else None
        # Tags handled in another phase using the docker_data in the image record.
        # get initial metadata
        analyzer_meta = (
            analysis_report.get("analyzer_meta", {})
            .get("analyzer_meta", {})
            .get("base", {})
        )
        # Prefer LIKEDISTRO when the analyzer reports one; fall back to DISTRO.
        if "LIKEDISTRO" in analyzer_meta:
            like_dist = analyzer_meta.get("LIKEDISTRO")
        else:
            like_dist = analyzer_meta.get("DISTRO")
        image.distro_name = analyzer_meta.get("DISTRO")
        image.distro_version = analyzer_meta.get("DISTROVERS")
        image.like_distro = like_dist
        image.dockerfile_mode = image_report.get("dockerfile_mode")
        # JSON data
        image.docker_data_json = image_report.get("docker_data")
        image.docker_history_json = image_report.get("docker_history")
        image.dockerfile_contents = image_report.get("dockerfile_contents")
        image.layers_to_dockerfile_json = analysis_report.get("layer_info")
        image.layers_json = image_report.get("layers")
        image.familytree_json = image_report.get("familytree")
        image.analyzer_manifest = self.image_export_json.get("analyzer_manifest")
        # Image content. Each loader returns (packages, handled_package_types);
        # handled_ptypes accumulates types already covered so the generic
        # loader at the end does not double-count them.
        packages = []
        handled_ptypes = []
        # Packages
        logger.info("Loading image packages")
        os_packages, handled = self.load_and_normalize_packages(
            analysis_report.get("package_list", {}), image
        )
        packages = packages + os_packages
        handled_ptypes = handled_ptypes + handled
        # FileSystem
        logger.info("Loading image files")
        image.fs, handled = self.load_fsdump(analysis_report)
        handled_ptypes = handled_ptypes + handled
        # Npms
        logger.info("Loading image npms")
        npm_image_packages, handled = self.load_npms(analysis_report, image)
        packages = packages + npm_image_packages
        handled_ptypes = handled_ptypes + handled
        # Gems
        logger.info("Loading image gems")
        gem_image_packages, handled = self.load_gems(analysis_report, image)
        packages = packages + gem_image_packages
        handled_ptypes = handled_ptypes + handled
        ## Python
        logger.info("Loading image python packages")
        python_packages, handled = self.load_pythons(analysis_report, image)
        packages = packages + python_packages
        handled_ptypes = handled_ptypes + handled
        ## Java
        logger.info("Loading image java packages")
        java_packages, handled = self.load_javas(analysis_report, image)
        packages = packages + java_packages
        handled_ptypes = handled_ptypes + handled
        logger.info("Loading image generic package types")
        generic_packages, handled = self.load_generic_packages(
            analysis_report, image, excludes=handled_ptypes
        )
        packages = packages + generic_packages
        handled_ptypes = handled_ptypes + handled
        image.packages = packages
        # Package metadata
        logger.info("Loading image package db entries")
        self.load_package_verification(analysis_report, image)
        # CPEs
        logger.info("Loading image cpes")
        image.cpes = self.load_cpes(analysis_report, image)
        # Loaders that each return AnalysisArtifact records for the image.
        analysis_artifact_loaders = [
            self.load_retrieved_files,
            self.load_content_search,
            self.load_secret_search,
            self.load_malware_findings
            # self.load_package_verification
        ]
        # Content searches
        image.analysis_artifacts = []
        for loader in analysis_artifact_loaders:
            for r in loader(analysis_report, image):
                image.analysis_artifacts.append(r)
        image.state = "analyzed"
        return image
def load_package_verification(self, analysis_report, image_obj):
"""
Loads package verification analysis data.
Adds the package db metadata records to respective packages in the image_obj
:param analysis_report:
:param image_obj:
:return: True on success
"""
logger.info("Loading package verification data")
analyzer = "file_package_verify"
pkgfile_meta = "distro.pkgfilemeta"
verify_result = "distro.verifyresult"
digest_algos = ["sha1", "sha256", "md5"]
package_verify_json = analysis_report.get(analyzer)
if not package_verify_json:
return []
file_records = package_verify_json.get(pkgfile_meta, {}).get("base", {})
verify_records = package_verify_json.get(verify_result, {}).get("base", {})
# Re-organize the data from file-keyed to package keyed for efficient filtering
packages = {}
for path, file_meta in list(file_records.items()):
for r in safe_extract_json_value(file_meta):
pkg = r.pop("package")
if not pkg:
continue
if pkg not in packages:
packages[pkg] = {}
# Add the entry for the file in the package
packages[pkg][path] = r
for package in image_obj.packages:
pkg_entry = packages.get(package.name)
entries = []
if not pkg_entry:
continue
for f_name, entry in list(pkg_entry.items()):
meta = ImagePackageManifestEntry()
meta.pkg_name = package.name
meta.pkg_version = package.version
meta.pkg_type = package.pkg_type
meta.pkg_arch = package.arch
meta.image_id = package.image_id
meta.image_user_id = package.image_user_id
meta.file_path = f_name
meta.digest_algorithm = entry.get("digestalgo")
meta.digest = entry.get("digest")
meta.file_user_name = entry.get("user")
meta.file_group_name = entry.get("group")
meta.is_config_file = entry.get("conffile")
m = entry.get("mode")
s = entry.get("size")
meta.mode = (
int(m, 8) if m is not None else m
) # Convert from octal to decimal int
meta.size = int(s) if s is not None else None
entries.append(meta)
package.pkg_db_entries = entries
return True
# records = []
# for pkg_name, paths in packages.items():
#
# r = AnalysisArtifact()
# r.image_user_id = image_obj.user_id
# r.image_id = image_obj.id
# r.analyzer_type = 'base'
# r.analyzer_id = 'file_package_verify'
# r.analyzer_artifact = 'distro.pkgfilemeta'
# r.artifact_key = pkg_name
# r.json_value = paths
# records.append(r)
# return records
def load_retrieved_files(self, analysis_report, image_obj):
"""
Loads the analyzer retrieved files from the image, saves them in the db
:param retrieve_files_json:
:param image_obj:
:return:
"""
logger.info("Loading retrieved files")
retrieve_files_json = analysis_report.get("retrieve_files")
if not retrieve_files_json:
return []
matches = retrieve_files_json.get("file_content.all", {}).get("base", {})
records = []
for filename, match_string in list(matches.items()):
match = AnalysisArtifact()
match.image_user_id = image_obj.user_id
match.image_id = image_obj.id
match.analyzer_id = "retrieve_files"
match.analyzer_type = "base"
match.analyzer_artifact = "file_content.all"
match.artifact_key = filename
try:
match.binary_value = base64.b64decode(ensure_bytes(match_string))
except:
logger.exception(
"Could not b64 decode the file content for {}".format(filename)
)
raise
records.append(match)
return records
def load_content_search(self, analysis_report, image_obj):
"""
Load content search results from analysis if present
:param content_search_json:
:param image_obj:
:return:
"""
logger.info("Loading content search results")
content_search_json = analysis_report.get("content_search")
if not content_search_json:
return []
matches = content_search_json.get("regexp_matches.all", {}).get("base", {})
records = []
for filename, match_string in list(matches.items()):
match = AnalysisArtifact()
match.image_user_id = image_obj.user_id
match.image_id = image_obj.id
match.analyzer_id = "content_search"
match.analyzer_type = "base"
match.analyzer_artifact = "regexp_matches.all"
match.artifact_key = filename
try:
match.json_value = safe_extract_json_value(match_string)
except:
logger.exception(
"json decode failed for regex match record on {}. Saving as raw text".format(
filename
)
)
match.str_value = match_string
records.append(match)
return records
def load_secret_search(self, analysis_report, image_obj):
"""
Load content search results from analysis if present
:param content_search_json:
:param image_obj:
:return:
"""
logger.info("Loading secret search results")
content_search_json = analysis_report.get("secret_search")
if not content_search_json:
return []
matches = content_search_json.get("regexp_matches.all", {}).get("base", {})
records = []
for filename, match_string in list(matches.items()):
match = AnalysisArtifact()
match.image_user_id = image_obj.user_id
match.image_id = image_obj.id
match.analyzer_id = "secret_search"
match.analyzer_type = "base"
match.analyzer_artifact = "regexp_matches.all"
match.artifact_key = filename
try:
match.json_value = safe_extract_json_value(match_string)
except:
logger.exception(
"json decode failed for regex match record on {}. Saving as raw text".format(
filename
)
)
match.str_value = match_string
records.append(match)
return records
def load_malware_findings(self, analysis_report, image_obj):
"""
Load malware results from analysis if present.
Example malware analysis result:
{
...
"malware": {
"malware": {
"base": {
"clamav": "{\"scanner\": \"clamav\", \"findings\": [{\"path\": \"elf_payload1\", \"signature\": \"Unix.Trojan.MSShellcode-40\"}], \"metadata\": {\"db_version\": {\"daily\": \"\", \"main\": \"59\", \"bytecode\": \"331\"}}}"
}
}
},
...
}
The key is the scanner name, and the result is the findings for that scanner (e.g. clamav)
:param analysis_report:
:param image_obj:
:return:
"""
malware_analyzer_name = "malware"
base_default = "base"
logger.info("Loading malware scan findings")
malware_json = analysis_report.get(malware_analyzer_name)
if not malware_json:
return []
matches = malware_json.get(malware_analyzer_name, {}).get(base_default, {})
records = []
for scanner_name, scan_result in matches.items():
scan_artifact = AnalysisArtifact()
scan_artifact.image_user_id = image_obj.user_id
scan_artifact.image_id = image_obj.id
scan_artifact.analyzer_id = malware_analyzer_name
scan_artifact.analyzer_type = "base"
scan_artifact.analyzer_artifact = malware_analyzer_name
scan_artifact.artifact_key = scanner_name
try:
scan_artifact.json_value = safe_extract_json_value(scan_result)
except:
logger.exception(
"json decode failed for malware scan result on {}. Saving as raw text".format(
scan_result
)
)
scan_artifact.str_value = scan_result
records.append(scan_artifact)
return records
def load_and_normalize_packages(self, package_analysis_json, image_obj):
"""
Loads and normalizes package data from all distros
:param image_obj:
:param package_analysis_json:
:return: list of Package objects that can be added to an image
"""
pkgs = []
handled_pkgtypes = ["pkgs.allinfo", "pkgs.all"]
img_distro = DistroNamespace.for_obj(image_obj)
# pkgs.allinfo handling
pkgs_all = list(package_analysis_json.get("pkgs.allinfo", {}).values())
if not pkgs_all:
return [], handled_pkgtypes
else:
pkgs_all = pkgs_all[0]
for pkg_name, metadata_str in list(pkgs_all.items()):
metadata = safe_extract_json_value(metadata_str)
p = ImagePackage()
p.distro_name = image_obj.distro_name
p.distro_version = image_obj.distro_version
p.like_distro = image_obj.like_distro
p.name = pkg_name
p.version = metadata.get("version")
p.origin = metadata.get("origin")
try:
psize = int(metadata.get("size", 0))
except:
psize = 0
p.size = psize
# p.size = metadata.get('size')
p.arch = metadata.get("arch")
p.license = (
metadata.get("license")
if metadata.get("license")
else metadata.get("lics")
)
p.release = metadata.get("release", "N/A")
p.pkg_type = metadata.get("type")
p.src_pkg = metadata.get("sourcepkg")
p.image_user_id = image_obj.user_id
p.image_id = image_obj.id
# if 'files' in metadata:
# # Handle file data
# p.files = metadata.get('files')
if p.release != "N/A":
p.fullversion = p.version + "-" + p.release
else:
p.fullversion = p.version
if img_distro.flavor == "DEB":
cleanvers = re.sub(re.escape("+b") + "\d+.*", "", p.version)
spkg = re.sub(re.escape("-" + cleanvers), "", p.src_pkg)
else:
spkg = re.sub(re.escape("-" + p.version) + ".*", "", p.src_pkg)
p.normalized_src_pkg = spkg
pkgs.append(p)
if pkgs:
return pkgs, handled_pkgtypes
else:
logger.warn("Pkg Allinfo not found, reverting to using pkgs.all")
all_pkgs = package_analysis_json["pkgs.all"]["base"]
all_pkgs_src = package_analysis_json["pkgs_plus_source.all"]["base"]
for pkg_name, version in list(all_pkgs.items()):
p = ImagePackage()
p.image_user_id = image_obj.user_id
p.image_id = image_obj.id
p.name = pkg_name
p.version = version
p.fullversion = all_pkgs_src[pkg_name]
if img_distro.flavor == "RHEL":
name, parsed_version, release, epoch, arch = split_rpm_filename(
pkg_name + "-" + version + ".tmparch.rpm"
)
p.version = parsed_version
p.release = release
p.pkg_type = "RPM"
p.origin = "N/A"
p.src_pkg = "N/A"
p.license = "N/A"
p.arch = "N/A"
elif img_distro.flavor == "DEB":
try:
p.version, p.release = version.split("-")
except:
p.version = version
p.release = None
return pkgs, handled_pkgtypes
    def load_fsdump(self, analysis_report_json):
        """
        Returns a single FSDump entity composed of the compressed and hashed json of the fs entries along with some statistics.
        This function will pull the necessary bits from the full analysis to construct a view of the FS suitable for gate eval.

        :param analysis_report_json: the full json analysis report
        :return: tuple of (FilesystemAnalysis entity, list of analyzer keys handled by this loader)
        """
        # Analyzer output sections consumed by this loader.
        handled_pkgtypes = [
            "files.allinfo",
            "files.all",
            "files.md5sums",
            "files.sha256sums",
            "files.sha1sums",
            "files.suids",
            "pkgfiles.all",
        ]

        file_entries = {}
        # Per-path maps from the report; any section may be empty if the
        # corresponding analyzer did not run.
        all_infos = (
            analysis_report_json.get("file_list", {})
            .get("files.allinfo", {})
            .get("base", {})
        )
        file_perms = (
            analysis_report_json.get("file_list", {})
            .get("files.all", {})
            .get("base", {})
        )
        md5_checksums = (
            analysis_report_json.get("file_checksums", {})
            .get("files.md5sums", {})
            .get("base", {})
        )
        sha256_checksums = (
            analysis_report_json.get("file_checksums", {})
            .get("files.sha256sums", {})
            .get("base", {})
        )
        sha1_checksums = (
            analysis_report_json.get("file_checksums", {})
            .get("files.sha1sums", {})
            .get("base", {})
        )
        suids = (
            analysis_report_json.get("file_suids", {})
            .get("files.suids", {})
            .get("base", {})
        )
        pkgd = (
            analysis_report_json.get("package_list", {})
            .get("pkgfiles.all", {})
            .get("base", {})
        )

        # Decode the per-path metadata strings into dicts.
        path_map = {
            path: safe_extract_json_value(value)
            for path, value in list(all_infos.items())
        }

        # Zeroed counters; incremented per-entry in the loop below.
        entry = FilesystemAnalysis()
        entry.file_count = 0
        entry.directory_count = 0
        entry.non_packaged_count = 0
        entry.suid_count = 0
        entry.total_entry_count = 0

        # TODO: replace this with the load_fs_item call and convert the returned items to JSON for clarity and consistency.
        # items = self.load_files(all_infos, suids, checksums, pkgd)
        # for item in items:
        #    f = item.json()

        for path, metadata in list(path_map.items()):
            try:
                full_path = metadata["fullpath"]
                f = {
                    "fullpath": full_path,
                    "name": metadata["name"],
                    "mode": metadata["mode"],
                    "permissions": file_perms.get(path),
                    "linkdst_fullpath": metadata["linkdst_fullpath"],
                    "linkdst": metadata["linkdst"],
                    "size": metadata["size"],
                    "entry_type": metadata["type"],
                    "is_packaged": path in pkgd,
                    # "DIRECTORY_OR_OTHER" marks entries with no checksum
                    # (directories, links, devices, etc.).
                    "md5_checksum": md5_checksums.get(path, "DIRECTORY_OR_OTHER"),
                    "sha256_checksum": sha256_checksums.get(path, "DIRECTORY_OR_OTHER"),
                    "sha1_checksum": sha1_checksums.get(path, "DIRECTORY_OR_OTHER")
                    if sha1_checksums
                    else None,
                    "othernames": [],
                    "suid": suids.get(path),
                }
            except KeyError as e:
                # Required metadata key missing -- abort the load loudly rather
                # than persist a partial filesystem view.
                logger.exception("Could not find data for {}".format(e))
                raise

            # Increment counters as needed
            if f["suid"]:
                entry.suid_count += 1
            if not f["is_packaged"]:
                entry.non_packaged_count += 1
            if f["entry_type"] == "file":
                entry.file_count += 1
            elif f["entry_type"] == "dir":
                entry.directory_count += 1

            file_entries[path] = f

        # Compress and set the data
        entry.total_entry_count = len(file_entries)
        entry.files = file_entries

        return entry, handled_pkgtypes
def load_npms(self, analysis_json, containing_image):
handled_pkgtypes = ["pkgs.npms"]
npms_json = (
analysis_json.get("package_list", {}).get("pkgs.npms", {}).get("base")
)
if not npms_json:
return [], handled_pkgtypes
npms = []
image_packages = []
for path, npm_str in list(npms_json.items()):
npm_json = safe_extract_json_value(npm_str)
# TODO: remove this usage of ImageNPM, that is deprecated
n = ImageNpm()
n.path_hash = hashlib.sha256(ensure_bytes(path)).hexdigest()
n.path = path
n.name = npm_json.get("name")
n.src_pkg = npm_json.get("src_pkg")
n.origins_json = npm_json.get("origins")
n.licenses_json = npm_json.get("lics")
n.latest = npm_json.get("latest")
n.versions_json = npm_json.get("versions")
n.image_user_id = containing_image.user_id
n.image_id = containing_image.id
# npms.append(n)
np = ImagePackage()
# primary keys
np.name = n.name
if len(n.versions_json):
version = n.versions_json[0]
else:
version = "N/A"
np.version = version
np.pkg_type = "npm"
np.arch = "N/A"
np.image_user_id = n.image_user_id
np.image_id = n.image_id
np.pkg_path = n.path
# other
np.pkg_path_hash = n.path_hash
np.distro_name = "npm"
np.distro_version = "N/A"
np.like_distro = "npm"
np.fullversion = np.version
np.license = " ".join(n.licenses_json)
np.origin = " ".join(n.origins_json)
fullname = np.name
np.normalized_src_pkg = fullname
np.src_pkg = fullname
image_packages.append(np)
return image_packages, handled_pkgtypes
def load_gems(self, analysis_json, containing_image):
handled_pkgtypes = ["pkgs.gems"]
gems_json = (
analysis_json.get("package_list", {}).get("pkgs.gems", {}).get("base")
)
if not gems_json:
return [], handled_pkgtypes
gems = []
image_packages = []
for path, gem_str in list(gems_json.items()):
gem_json = safe_extract_json_value(gem_str)
# TODO: remove this usage of ImageGem, that is deprecated
n = ImageGem()
n.path_hash = hashlib.sha256(ensure_bytes(path)).hexdigest()
n.path = path
n.name = gem_json.get("name")
n.src_pkg = gem_json.get("src_pkg")
n.origins_json = gem_json.get("origins")
n.licenses_json = gem_json.get("lics")
n.versions_json = gem_json.get("versions")
n.latest = gem_json.get("latest")
n.image_user_id = containing_image.user_id
n.image_id = containing_image.id
# gems.append(n)
np = ImagePackage()
# primary keys
np.name = n.name
if len(n.versions_json):
version = n.versions_json[0]
else:
version = "N/A"
np.version = version
np.pkg_type = "gem"
np.arch = "N/A"
np.image_user_id = n.image_user_id
np.image_id = n.image_id
np.pkg_path = n.path
# other
np.pkg_path_hash = n.path_hash
np.distro_name = "gem"
np.distro_version = "N/A"
np.like_distro = "gem"
np.fullversion = np.version
np.license = " ".join(n.licenses_json)
np.origin = " ".join(n.origins_json)
fullname = np.name
np.normalized_src_pkg = fullname
np.src_pkg = fullname
image_packages.append(np)
return image_packages, handled_pkgtypes
def load_pythons(self, analysis_json, containing_image):
handled_pkgtypes = ["pkgs.python"]
pkgs_json = (
analysis_json.get("package_list", {}).get("pkgs.python", {}).get("base")
)
if not pkgs_json:
return [], handled_pkgtypes
pkgs = []
for path, pkg_str in list(pkgs_json.items()):
pkg_json = safe_extract_json_value(pkg_str)
n = ImagePackage()
# primary keys
n.name = pkg_json.get("name")
n.pkg_path = path
n.version = pkg_json.get("version")
n.pkg_type = "python"
n.arch = "N/A"
n.image_user_id = n.image_user_id
n.image_id = n.image_id
# other
n.pkg_path_hash = hashlib.sha256(ensure_bytes(path)).hexdigest()
n.distro_name = "python"
n.distro_version = "N/A"
n.like_distro = "python"
n.fullversion = n.version
n.license = pkg_json.get("license")
n.origin = pkg_json.get("origin")
m = {"files": pkg_json.get("files")}
n.metadata_json = m
fullname = n.name
n.normalized_src_pkg = fullname
n.src_pkg = fullname
pkgs.append(n)
return pkgs, handled_pkgtypes
    def load_javas(self, analysis_json, containing_image):
        """
        Convert java package analyzer output into ImagePackage records.

        Version precedence: pom.properties version, then the first non-"N/A"
        metadata version (maven, implementation, specification -- in that order),
        then a version parsed off the artifact name, else "N/A".

        :param analysis_json: full analysis report dict
        :param containing_image: image record the packages belong to
        :return: tuple of (list of ImagePackage records, list of handled analyzer keys)
        """
        handled_pkgtypes = ["pkgs.java"]
        pkgs_json = (
            analysis_json.get("package_list", {}).get("pkgs.java", {}).get("base")
        )
        if not pkgs_json:
            return [], handled_pkgtypes
        pkgs = []
        for path, pkg_str in list(pkgs_json.items()):
            pkg_json = safe_extract_json_value(pkg_str)
            n = ImagePackage()
            # primary keys
            # TODO - some java names have a version in it, need to clean that up
            n.name = pkg_json.get("name")
            n.pkg_type = "java"
            n.arch = "N/A"
            n.pkg_path = path
            # Pick the first available metadata version (precedence order below)
            # and record all three variants. NOTE(review): versions_json is built
            # but never attached to the package -- presumably vestigial; confirm.
            metaversion = None
            versions_json = {}
            for k in [
                "maven-version",
                "implementation-version",
                "specification-version",
            ]:
                if not metaversion and pkg_json.get(k, "N/A") != "N/A":
                    metaversion = pkg_json.get(k)
                versions_json[k] = pkg_json.get(k, "N/A")
            n.image_user_id = containing_image.user_id
            n.image_id = containing_image.id
            # other non-PK values
            n.pkg_path_hash = hashlib.sha256(ensure_bytes(path)).hexdigest()
            n.distro_name = "java"
            n.distro_version = "N/A"
            n.like_distro = "java"
            fullname = n.name
            # pom.properties (when present) supplies canonical maven coordinates.
            pomprops = n.get_pom_properties()
            pomversion = None
            if pomprops:
                fullname = "{}:{}".format(
                    pomprops.get("groupId"), pomprops.get("artifactId")
                )
                pomversion = pomprops.get("version", None)
            n.normalized_src_pkg = fullname
            n.src_pkg = fullname
            # final version decision - try our best to get an accurate version/name pair
            n.version = "N/A"
            if pomversion:
                n.version = pomversion
            elif metaversion:
                n.version = metaversion
            else:
                # Last resort: split a trailing dotted-number version off the name.
                try:
                    patt = re.match(r"(.*)-(([\d]\.)+.*)", n.name)
                    if patt and patt.group(1):
                        n.version = patt.group(2)
                        n.name = patt.group(1)
                except Exception as err:
                    # Best-effort only; version stays "N/A" on any parse failure.
                    pass
            n.fullversion = n.version
            pkgs.append(n)
        return pkgs, handled_pkgtypes
def load_generic_packages(self, analysis_json, containing_image, excludes=[]):
pkgs = []
handled_pkgtypes = []
package_types = analysis_json.get("package_list", {})
for package_type in package_types:
if package_type not in excludes:
patt = re.match(r"pkgs\.(.*)", package_type)
if patt:
ptype = patt.group(1)
handled_pkgtypes.append(ptype)
pkgs_json = (
analysis_json.get("package_list", {})
.get(package_type, {})
.get("base", {})
)
if not pkgs_json:
return [], handled_pkgtypes
for path, pkg_str in list(pkgs_json.items()):
pkg_json = safe_extract_json_value(pkg_str)
n = ImagePackage()
# primary keys
n.name = pkg_json.get("name")
n.pkg_path = path
n.version = pkg_json.get("version")
n.pkg_type = pkg_json.get("type", "N/A")
n.arch = "N/A"
n.image_user_id = n.image_user_id
n.image_id = n.image_id
# other
n.pkg_path_hash = hashlib.sha256(ensure_bytes(path)).hexdigest()
n.distro_name = n.pkg_type
n.distro_version = "N/A"
n.like_distro = n.pkg_type
n.fullversion = n.version
n.license = pkg_json.get("license", "N/A")
n.origin = pkg_json.get("origin", "N/A")
fullname = n.name
n.normalized_src_pkg = fullname
n.src_pkg = fullname
pkgs.append(n)
return pkgs, handled_pkgtypes
def _fuzzy_go(self, input_el_name, input_el_version):
ret_names = [input_el_name]
ret_versions = [input_el_version]
patt = re.match(".*([0-9]+\.[0-9]+\.[0-9]+).*", input_el_version)
if patt:
candidate_version = patt.group(1)
if candidate_version not in ret_versions:
ret_versions.append(candidate_version)
return ret_names, ret_versions
def _fuzzy_python(self, input_el):
global nomatch_inclusions
known_nomatch_inclusions = nomatch_inclusions.get("python", {})
ret_names = [input_el]
if input_el in known_nomatch_inclusions:
for n in known_nomatch_inclusions[input_el]:
if n not in ret_names:
ret_names.append(n)
return ret_names
def _fuzzy_npm(self, input_el):
global nomatch_inclusions
known_nomatch_inclusions = nomatch_inclusions.get("npm", {})
ret_names = [input_el]
if input_el in known_nomatch_inclusions:
for n in known_nomatch_inclusions[input_el]:
if n not in ret_names:
ret_names.append(n)
return ret_names
def _fuzzy_gem(self, input_el):
global nomatch_inclusions
known_nomatch_inclusions = nomatch_inclusions.get("gem", {})
ret_names = [input_el]
if input_el in known_nomatch_inclusions:
for n in known_nomatch_inclusions[input_el]:
if n not in ret_names:
ret_names.append(n)
return ret_names
def _fuzzy_java(self, input_el):
global nomatch_inclusions
known_nomatch_inclusions = nomatch_inclusions.get("java", {})
ret_names = []
ret_versions = []
iversion = input_el.get("implementation-version", "N/A")
if iversion != "N/A":
ret_versions.append(iversion)
sversion = input_el.get("specification-version", "N/A")
if sversion != "N/A":
if sversion not in ret_versions:
ret_versions.append(sversion)
mversion = input_el.get("maven-version", "N/A")
if mversion != "N/A" and mversion not in ret_versions:
if mversion not in ret_versions:
ret_versions.append(mversion)
for rversion in ret_versions:
clean_version = re.sub("\.(RELEASE|GA|SEC.*)$", "", rversion)
if clean_version not in ret_versions:
ret_versions.append(clean_version)
# do some heuristic tokenizing
try:
toks = re.findall("[^-]+", input_el["name"])
firstname = None
fullname = []
firstversion = None
fullversion = []
doingname = True
for tok in toks:
if re.match("^[0-9]", tok):
doingname = False
if doingname:
if not firstname:
firstname = tok
else:
fullname.append(tok)
else:
if not firstversion:
firstversion = tok
else:
fullversion.append(tok)
if firstname:
firstname_nonums = re.sub("[0-9].*$", "", firstname)
for gthing in [firstname, firstname_nonums]:
if gthing not in ret_names:
ret_names.append(gthing)
if "-".join([gthing] + fullname) not in ret_names:
ret_names.append("-".join([gthing] + fullname))
if firstversion:
firstversion_nosuffix = re.sub(
"\.(RELEASE|GA|SEC.*)$", "", firstversion
)
for gthing in [firstversion, firstversion_nosuffix]:
if gthing not in ret_versions:
ret_versions.append(gthing)
if "-".join([gthing] + fullversion) not in ret_versions:
ret_versions.append("-".join([gthing] + fullversion))
# attempt to get some hints from the manifest, if available
try:
manifest = input_el["metadata"].get("MANIFEST.MF", None)
if manifest:
pnames = []
manifest = re.sub("\r\n ", "", manifest)
for mline in manifest.splitlines():
if mline:
key, val = mline.split(" ", 1)
if key.lower() == "export-package:":
val = re.sub(';uses:=".*?"', "", val)
val = re.sub(';version=".*?"', "", val)
val = val.split(";")[0]
pnames = pnames + val.split(",")
# elif key.lower() == 'bundle-symbolicname:':
# pnames.append(val)
# elif key.lower() == 'name:':
# tmp = val.split("/")
# pnames.append('.'.join(tmp[:-1]))
packagename = None
if pnames:
shortest = min(pnames)
longest = max(pnames)
if shortest == longest:
packagename = shortest
else:
for i in range(0, len(shortest)):
if i > 0 and shortest[i] != longest[i]:
packagename = shortest[: i - 1]
break
if packagename:
candidate = packagename.split(".")[-1]
if candidate in list(known_nomatch_inclusions.keys()):
for matchmap_candidate in known_nomatch_inclusions[
candidate
]:
if matchmap_candidate not in ret_names:
ret_names.append(matchmap_candidate)
elif (
candidate not in ["com", "org", "net"]
and len(candidate) > 2
):
for r in list(ret_names):
if r in candidate and candidate not in ret_names:
ret_names.append(candidate)
except Exception as err:
logger.err(err)
except Exception as err:
logger.warn(
"failed to detect java package name/version guesses - exception: "
+ str(err)
)
for rname in list(ret_names):
underscore_name = re.sub("-", "_", rname)
if underscore_name not in ret_names:
ret_names.append(underscore_name)
for rname in list(ret_names):
if rname in list(known_nomatch_inclusions.keys()):
for matchmap_candidate in known_nomatch_inclusions[rname]:
if matchmap_candidate not in ret_names:
ret_names.append(matchmap_candidate)
return ret_names, ret_versions
def load_cpes(self, analysis_json, containing_image):
allcpes = {}
cpes = []
# do java first (from analysis)
java_json_raw = (
analysis_json.get("package_list", {}).get("pkgs.java", {}).get("base")
)
if java_json_raw:
for path, java_str in list(java_json_raw.items()):
java_json = safe_extract_json_value(java_str)
try:
guessed_names, guessed_versions = self._fuzzy_java(java_json)
except Exception as err:
guessed_names = guessed_versions = []
for n in guessed_names:
for v in guessed_versions:
rawcpe = "cpe:/a:-:{}:{}".format(n, v)
toks = rawcpe.split(":")
final_cpe = ["cpe", "-", "-", "-", "-", "-", "-"]
for i in range(1, len(final_cpe)):
try:
if toks[i]:
final_cpe[i] = toks[i]
else:
final_cpe[i] = "-"
except:
final_cpe[i] = "-"
cpekey = ":".join(final_cpe + [path])
if cpekey not in allcpes:
allcpes[cpekey] = True
cpe = ImageCpe()
cpe.pkg_type = "java"
cpe.pkg_path = path
cpe.cpetype = final_cpe[1]
cpe.vendor = final_cpe[2]
cpe.name = final_cpe[3]
cpe.version = final_cpe[4]
cpe.update = final_cpe[5]
cpe.meta = final_cpe[6]
cpe.image_user_id = containing_image.user_id
cpe.image_id = containing_image.id
cpes.append(cpe)
python_json_raw = (
analysis_json.get("package_list", {}).get("pkgs.python", {}).get("base")
)
if python_json_raw:
for path, python_str in list(python_json_raw.items()):
python_json = safe_extract_json_value(python_str)
guessed_names = self._fuzzy_python(python_json["name"])
guessed_versions = [python_json["version"]]
for n in guessed_names:
for v in guessed_versions:
rawcpe = "cpe:/a:-:{}:{}:-:~~~python~~".format(n, v)
toks = rawcpe.split(":")
final_cpe = ["cpe", "-", "-", "-", "-", "-", "-"]
for i in range(1, len(final_cpe)):
try:
if toks[i]:
final_cpe[i] = toks[i]
else:
final_cpe[i] = "-"
except:
final_cpe[i] = "-"
cpekey = ":".join(final_cpe + [path])
if cpekey not in allcpes:
allcpes[cpekey] = True
cpe = ImageCpe()
cpe.pkg_type = "python"
cpe.pkg_path = path
cpe.cpetype = final_cpe[1]
cpe.vendor = final_cpe[2]
cpe.name = final_cpe[3]
cpe.version = final_cpe[4]
cpe.update = final_cpe[5]
cpe.meta = final_cpe[6]
cpe.image_user_id = containing_image.user_id
cpe.image_id = containing_image.id
cpes.append(cpe)
gem_json_raw = (
analysis_json.get("package_list", {}).get("pkgs.gems", {}).get("base")
)
if gem_json_raw:
for path, gem_str in list(gem_json_raw.items()):
gem_json = safe_extract_json_value(gem_str)
guessed_names = self._fuzzy_gem(gem_json["name"])
guessed_versions = gem_json["versions"]
for n in guessed_names:
for v in guessed_versions:
rawcpe = "cpe:/a:-:{}:{}:-:~~~ruby~~".format(n, v)
toks = rawcpe.split(":")
final_cpe = ["cpe", "-", "-", "-", "-", "-", "-"]
for i in range(1, len(final_cpe)):
try:
if toks[i]:
final_cpe[i] = toks[i]
else:
final_cpe[i] = "-"
except:
final_cpe[i] = "-"
cpekey = ":".join(final_cpe + [path])
if cpekey not in allcpes:
allcpes[cpekey] = True
cpe = ImageCpe()
cpe.pkg_type = "gem"
cpe.pkg_path = path
cpe.cpetype = final_cpe[1]
cpe.vendor = final_cpe[2]
cpe.name = final_cpe[3]
cpe.version = final_cpe[4]
cpe.update = final_cpe[5]
cpe.meta = final_cpe[6]
cpe.image_user_id = containing_image.user_id
cpe.image_id = containing_image.id
cpes.append(cpe)
npm_json_raw = (
analysis_json.get("package_list", {}).get("pkgs.npms", {}).get("base")
)
if npm_json_raw:
for path, npm_str in list(npm_json_raw.items()):
npm_json = safe_extract_json_value(npm_str)
guessed_names = self._fuzzy_npm(npm_json["name"])
guessed_versions = npm_json["versions"]
for n in guessed_names:
for v in guessed_versions:
rawcpe = "cpe:/a:-:{}:{}:-:~~~node.js~~".format(n, v)
toks = rawcpe.split(":")
final_cpe = ["cpe", "-", "-", "-", "-", "-", "-"]
for i in range(1, len(final_cpe)):
try:
if toks[i]:
final_cpe[i] = toks[i]
else:
final_cpe[i] = "-"
except:
final_cpe[i] = "-"
cpekey = ":".join(final_cpe + [path])
if cpekey not in allcpes:
allcpes[cpekey] = True
cpe = ImageCpe()
cpe.pkg_type = "npm"
cpe.pkg_path = path
cpe.cpetype = final_cpe[1]
cpe.vendor = final_cpe[2]
cpe.name = final_cpe[3]
cpe.version = final_cpe[4]
cpe.update = final_cpe[5]
cpe.meta = final_cpe[6]
cpe.image_user_id = containing_image.user_id
cpe.image_id = containing_image.id
cpes.append(cpe)
go_json_raw = (
analysis_json.get("package_list", {}).get("pkgs.go", {}).get("base")
)
if go_json_raw:
for path, go_str in list(go_json_raw.items()):
go_json = safe_extract_json_value(go_str)
guessed_names, guessed_versions = self._fuzzy_go(
go_json["name"], go_json["version"]
)
# guessed_names = [go_json['name']]
# guessed_versions = [go_json['version']]
for n in guessed_names:
for v in guessed_versions:
rawcpe = "cpe:/a:-:{}:{}:-:".format(n, v)
toks = rawcpe.split(":")
final_cpe = ["cpe", "-", "-", "-", "-", "-", "-"]
for i in range(1, len(final_cpe)):
try:
if toks[i]:
final_cpe[i] = toks[i]
else:
final_cpe[i] = "-"
except:
final_cpe[i] = "-"
cpekey = ":".join(final_cpe + [path])
if cpekey not in allcpes:
allcpes[cpekey] = True
cpe = ImageCpe()
cpe.pkg_type = "go"
cpe.pkg_path = path
cpe.cpetype = final_cpe[1]
cpe.vendor = final_cpe[2]
cpe.name = final_cpe[3]
cpe.version = final_cpe[4]
cpe.update = final_cpe[5]
cpe.meta = final_cpe[6]
cpe.image_user_id = containing_image.user_id
cpe.image_id = containing_image.id
cpes.append(cpe)
bin_json_raw = (
analysis_json.get("package_list", {}).get("pkgs.binary", {}).get("base")
)
if bin_json_raw:
for path, bin_str in list(bin_json_raw.items()):
bin_json = safe_extract_json_value(bin_str)
guessed_names = [bin_json["name"]]
guessed_versions = [bin_json["version"]]
for n in guessed_names:
for v in guessed_versions:
rawcpe = "cpe:/a:-:{}:{}:-:".format(n, v)
toks = rawcpe.split(":")
final_cpe = ["cpe", "-", "-", "-", "-", "-", "-"]
for i in range(1, len(final_cpe)):
try:
if toks[i]:
final_cpe[i] = toks[i]
else:
final_cpe[i] = "-"
except:
final_cpe[i] = "-"
cpekey = ":".join(final_cpe + [path])
if cpekey not in allcpes:
allcpes[cpekey] = True
cpe = ImageCpe()
cpe.pkg_type = "binary"
cpe.pkg_path = path
cpe.cpetype = final_cpe[1]
cpe.vendor = final_cpe[2]
cpe.name = final_cpe[3]
cpe.version = final_cpe[4]
cpe.update = final_cpe[5]
cpe.meta = final_cpe[6]
cpe.image_user_id = containing_image.user_id
cpe.image_id = containing_image.id
cpes.append(cpe)
return cpes
| 37.750179 | 240 | 0.497563 |
ace53932eb8dde0c24bf5e78c63902e80984eb0c | 138 | py | Python | courses/test-1113-review/3_logic_function.py | GalvinGao/2019-ProgrammingCourse | b668bc9bab902959a574aa3db73ae481131c0c27 | [
"MIT"
] | null | null | null | courses/test-1113-review/3_logic_function.py | GalvinGao/2019-ProgrammingCourse | b668bc9bab902959a574aa3db73ae481131c0c27 | [
"MIT"
] | null | null | null | courses/test-1113-review/3_logic_function.py | GalvinGao/2019-ProgrammingCourse | b668bc9bab902959a574aa3db73ae481131c0c27 | [
"MIT"
] | null | null | null | # def (function) - Logic Control
def add(first_number, second_number):
return first_number + second_number
x = add(1, 2)
print(x)
| 13.8 | 39 | 0.702899 |
ace539a952dd06853860e9cd5e1d55275479cb68 | 4,522 | py | Python | tests/test_scancode_glc_plugin.py | nexB/scancodeio.io-pipeline-glc_scan | 9c02d7993911fe8113bffda455e66fa74243b8d0 | [
"Apache-2.0"
] | null | null | null | tests/test_scancode_glc_plugin.py | nexB/scancodeio.io-pipeline-glc_scan | 9c02d7993911fe8113bffda455e66fa74243b8d0 | [
"Apache-2.0"
] | 3 | 2021-07-27T05:17:56.000Z | 2021-07-28T14:56:14.000Z | tests/test_scancode_glc_plugin.py | nexB/scancode.io-pipeline-glc_scan | 9c02d7993911fe8113bffda455e66fa74243b8d0 | [
"Apache-2.0"
] | 2 | 2021-09-17T14:22:31.000Z | 2022-01-22T17:49:39.000Z | #
# Copyright (c) nexB Inc. and others.
# SPDX-License-Identifier: Apache-2.0
#
# Visit https://aboutcode.org and https://github.com/nexB/ for support and download.
# ScanCode is a trademark of nexB Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.test import TestCase
from pathlib import Path
from LicenseClassifier.classifier import LicenseClassifier
from scancode_glc_plugin.pipes import glc
from scanpipe.models import Project
from scanpipe.models import CodebaseResource
from scanpipe.pipes.input import copy_inputs
class GLCPipeTest(TestCase):
    """Tests for the GLC (license classifier) scanpipe pipe wrappers."""

    # Directory holding test fixtures; expects 'apache-1.1.txt'.
    data_location = Path(__file__).parent / "data"

    def test_scanpipe_pipes_glc_scan_file(self):
        """glc.scan_file returns license/copyright/holder matches for one file."""
        input_location = str(self.data_location / "apache-1.1.txt")
        classifier = LicenseClassifier()
        scan_results = glc.scan_file(classifier, input_location)
        expected_result = {
            "path": input_location,
            "licenses": [
                {
                    "key": "apache-1.1",
                    "score": 1,
                    "start_line": 1,
                    "end_line": 39,
                    "start_index": 0,
                    "end_index": 338,
                    "category": "Permissive",
                }
            ],
            "license_expressions": ["apache-1.1"],
            "copyrights": [
                {
                    "value": "Copyright (c) 2000 The Apache Software"
                    " Foundation. All rights reserved.",
                    "start_index": 25,
                    "end_index": 96,
                }
            ],
            "holders": [
                {
                    "value": "The Apache Software Foundation",
                    "start_index": 44,
                    "end_index": 74,
                }
            ],
            "scan_errors": [],
        }
        self.assertEqual(expected_result, scan_results)

    def test_scanpipe_pipes_glc_scan_directory(self):
        """glc.scan_directory returns a scancode-style header/files payload."""
        input_location = str(self.data_location)
        scan_results = glc.scan_directory(input_location)
        expected_keys = [
            "header",
            "files",
        ]
        self.assertEqual(sorted(expected_keys), sorted(scan_results.keys()))
        # The fixture directory contains exactly one scannable file.
        self.assertEqual(1, len(scan_results.get("files", [])))

    def test_scanpipe_pipes_glc_scan_and_update_codebase_resources(self):
        """Resources missing on disk are flagged with errors; present ones get scan data."""
        project1 = Project.objects.create(name="Analysis")
        codebase_resource1 = CodebaseResource.objects.create(
            project=project1, path="not available"
        )

        self.assertEqual(0, project1.projecterrors.count())
        glc.scan_and_update_codebase_resources(project1)
        codebase_resource1.refresh_from_db()
        # A resource whose path does not exist is marked errored and project
        # errors are recorded.
        self.assertEqual("scanned-with-error", codebase_resource1.status)
        self.assertEqual(2, project1.projecterrors.count())

        copy_inputs([self.data_location / "apache-1.1.txt"], project1.codebase_path)
        codebase_resource2 = CodebaseResource.objects.create(
            project=project1, path="apache-1.1.txt"
        )
        glc.scan_and_update_codebase_resources(project1)
        codebase_resource2.refresh_from_db()
        self.assertEqual("scanned", codebase_resource2.status)

        expected = {
            "licenses": ["apache-1.1"],
            "holders": [
                {
                    "value": "The Apache Software Foundation",
                    "start_index": 44,
                    "end_index": 74,
                }
            ],
            "copyrights": [
                {
                    "value": "Copyright (c) 2000 The Apache Software Foundation."
                    " All rights reserved.",
                    "start_index": 25,
                    "end_index": 96,
                }
            ],
        }
        self.assertEqual(expected["licenses"], codebase_resource2.license_expressions)
        self.assertEqual(expected["copyrights"], codebase_resource2.copyrights)
        self.assertEqual(expected["holders"], codebase_resource2.holders)
ace53ac3f65bf1d2a880458346278a29ff0ad641 | 12,561 | py | Python | pytype/tests/test_typevar.py | CraftSpider/pytype | 9e1fa17e56944df311f244281015ef1551fd38a2 | [
"Apache-2.0"
] | 1 | 2020-03-31T13:05:05.000Z | 2020-03-31T13:05:05.000Z | pytype/tests/test_typevar.py | CraftSpider/pytype | 9e1fa17e56944df311f244281015ef1551fd38a2 | [
"Apache-2.0"
] | null | null | null | pytype/tests/test_typevar.py | CraftSpider/pytype | 9e1fa17e56944df311f244281015ef1551fd38a2 | [
"Apache-2.0"
] | null | null | null | """Tests for TypeVar."""
from pytype import file_utils
from pytype.tests import test_base
class TypeVarTest(test_base.TargetIndependentTest):
"""Tests for TypeVar."""
  def testUnusedTypeVar(self):
    """An unused module-level TypeVar is still preserved in the inferred pytd."""
    ty = self.Infer("""
      from typing import TypeVar
      T = TypeVar("T")
    """, deep=False)
    self.assertTypesMatchPytd(ty, """
      from typing import TypeVar
      T = TypeVar("T")
    """)
  def testImportTypeVar(self):
    """A TypeVar imported from a pyi dependency round-trips into the pytd."""
    with file_utils.Tempdir() as d:
      d.create_file("a.pyi", """T = TypeVar("T")""")
      ty = self.Infer("""\
        from a import T
      """, deep=False, pythonpath=[d.path])
      self.assertTypesMatchPytd(ty, """
        from typing import TypeVar
        T = TypeVar("T")
      """)
  def testInvalidTypeVar(self):
    """Malformed TypeVar() calls are reported as invalid-typevar errors."""
    ty, errors = self.InferWithErrors("""\
      from typing import TypeVar
      typevar = TypeVar
      T = typevar()
      T = typevar("T")  # ok
      T = typevar(42)
      T = typevar(str())
      T = typevar("T", str, int if __random__ else float)
      T = typevar("T", 0, float)
      T = typevar("T", str)
      # pytype: disable=not-supported-yet
      S = typevar("S", covariant=False)  # ok
      T = typevar("T", covariant=False)  # duplicate ok
      # pytype: enable=not-supported-yet
    """)
    self.assertTypesMatchPytd(ty, """
      from typing import TypeVar
      typevar = ...  # type: type
      S = TypeVar("S")
      T = TypeVar("T")
    """)
    # Each bad call above maps to one expected error below, by line number.
    self.assertErrorLogIs(errors, [
        (3, "invalid-typevar", r"wrong arguments"),
        (5, "invalid-typevar", r"Expected.*str.*Actual.*int"),
        (6, "invalid-typevar", r"constant str"),
        (7, "invalid-typevar", r"must be constant"),
        (8, "invalid-typevar", r"Expected.*_1:.*type.*Actual.*_1: int"),
        (9, "invalid-typevar", r"0 or more than 1"),
    ])
  def testPrintConstraints(self):
    """Constrained TypeVars round-trip their constraints into the pytd."""
    ty = self.Infer("""
      from typing import List, TypeVar
      S = TypeVar("S", int, float, covariant=True)  # pytype: disable=not-supported-yet
      T = TypeVar("T", int, float)
      U = TypeVar("U", List[int], List[float])
    """, deep=False)
    # The "covariant" keyword is ignored for now.
    self.assertTypesMatchPytd(ty, """
      from typing import List, TypeVar
      S = TypeVar("S", int, float)
      T = TypeVar("T", int, float)
      U = TypeVar("U", List[int], List[float])
    """)
  def testInferTypeVars(self):
    """Generic identity/wrapper functions get fresh TypeVars in their signatures."""
    ty = self.Infer("""
      def id(x):
        return x
      def wrap_tuple(x, y):
        return (x, y)
      def wrap_list(x, y):
        return [x, y]
      def wrap_dict(x, y):
        return {x: y}
      def return_second(x, y):
        return y
    """)
    self.assertTypesMatchPytd(ty, """
      from typing import Dict, List, Tuple, Union
      _T0 = TypeVar("_T0")
      _T1 = TypeVar("_T1")
      def id(x: _T0) -> _T0
      def wrap_tuple(x: _T0, y: _T1) -> Tuple[_T0, _T1]
      def wrap_list(x: _T0, y: _T1) -> List[Union[_T0, _T1]]
      def wrap_dict(x: _T0, y: _T1) -> Dict[_T0, _T1]
      def return_second(x, y: _T1) -> _T1
    """)
def testInferUnion(self):
ty = self.Infer("""
def return_either(x, y):
return x or y
def return_arg_or_42(x):
return x or 42
""")
self.assertTypesMatchPytd(ty, """
from typing import Union
_T0 = TypeVar("_T0")
_T1 = TypeVar("_T1")
def return_either(x: _T0, y: _T1) -> Union[_T0, _T1]
def return_arg_or_42(x: _T0) -> Union[_T0, int]
""")
# TypeVars in `# type:` comments are not supported yet; expect
# not-supported-yet errors on both annotated lines.
def testTypeVarInTypeComment(self):
_, errors = self.InferWithErrors("""\
from typing import List, TypeVar
T = TypeVar("T")
x = None # type: T
y = None # type: List[T]
""")
self.assertErrorLogIs(errors, [(3, "not-supported-yet"),
(4, "not-supported-yet")])
# A class may inherit from a parameterized base using a TypeVar.
def testBaseClassWithTypeVar(self):
ty = self.Infer("""\
from typing import List, TypeVar
T = TypeVar("T")
class A(List[T]): pass
""")
self.assertTypesMatchPytd(ty, """
from typing import List, TypeVar
T = TypeVar("T")
class A(List[T]): ...
""")
# Rebinding the name used as a base (List[T] -> list) must still check.
def testOverwriteBaseClassWithTypeVar(self):
self.Check("""
from typing import List, TypeVar
T = TypeVar("T")
l = List[T]
l = list
class X(l): pass
""")
# `bound=` validation: mutually exclusive with constraints, must be a
# non-empty constant; a plain type bound is accepted.
def testBound(self):
_, errors = self.InferWithErrors("""\
from typing import TypeVar
T = TypeVar("T", int, float, bound=str)
S = TypeVar("S", bound="")
U = TypeVar("U", bound=str) # ok
V = TypeVar("V", bound=int if __random__ else float)
""")
self.assertErrorLogIs(errors, [
(2, "invalid-typevar", r"mutually exclusive"),
(3, "invalid-typevar", r"empty string"),
(5, "invalid-typevar", r"must be constant")])
# `covariant=` validation: True is not supported yet; the value must be a
# constant bool.
def testCovariant(self):
_, errors = self.InferWithErrors("""\
from typing import TypeVar
T = TypeVar("T", covariant=True)
S = TypeVar("S", covariant=42)
U = TypeVar("U", covariant=True if __random__ else False)
""")
self.assertErrorLogIs(errors, [
(2, "not-supported-yet"),
(3, "invalid-typevar", r"Expected.*bool.*Actual.*int"),
(4, "invalid-typevar", r"constant")])
# `contravariant=` validation mirrors the covariant checks above.
def testContravariant(self):
_, errors = self.InferWithErrors("""\
from typing import TypeVar
T = TypeVar("T", contravariant=True)
S = TypeVar("S", contravariant=42)
U = TypeVar("U", contravariant=True if __random__ else False)
""")
self.assertErrorLogIs(errors, [
(2, "not-supported-yet"),
(3, "invalid-typevar", r"Expected.*bool.*Actual.*int"),
(4, "invalid-typevar", r"constant")])
# A constrained TypeVar must not copy the concrete value (e.g. the literal
# 0 vs 1) between instances — both a.f(...) results stay truthy-checkable
# and both branches are inferred.
def testDontPropagatePyval(self):
# in functions like f(x: T) -> T, if T has constraints we should not copy
# the value of constant types between instances of the typevar.
with file_utils.Tempdir() as d:
d.create_file("a.pyi", """
from typing import TypeVar
AnyInt = TypeVar('AnyInt', int)
def f(x: AnyInt) -> AnyInt
""")
ty = self.Infer("""
import a
if a.f(0):
x = 3
if a.f(1):
y = 3
""", pythonpath=[d.path])
self.assertTypesMatchPytd(ty, """
a = ... # type: module
x = ... # type: int
y = ... # type: int
""")
# Property signatures of the form foo(self: T) -> List[T] bind T to the
# concrete receiver class, including subclasses.
def testPropertyTypeParam(self):
# We should allow property signatures of the form f(self: T) -> X[T]
# without complaining about the class not being parametrised over T
with file_utils.Tempdir() as d:
d.create_file("a.pyi", """
from typing import TypeVar, List
T = TypeVar('T')
class A(object):
@property
def foo(self: T) -> List[T]: ...
class B(A): ...
""")
ty = self.Infer("""
import a
x = a.A().foo
y = a.B().foo
""", pythonpath=[d.path])
self.assertTypesMatchPytd(ty, """
from typing import List
import a
a = ... # type: module
x = ... # type: List[a.A]
y = ... # type: List[a.B]
""")
# Same as above, but the classes inherit from Generic[U]; the property's T
# binds to the full parameterized receiver (A[int], B[int]).
def testPropertyTypeParam2(self):
# Test for classes inheriting from Generic[X]
with file_utils.Tempdir() as d:
d.create_file("a.pyi", """
from typing import TypeVar, List, Generic
T = TypeVar('T')
U = TypeVar('U')
class A(Generic[U]):
@property
def foo(self: T) -> List[T]: ...
class B(A, Generic[U]): ...
def make_A() -> A[int]: ...
def make_B() -> B[int]: ...
""")
ty = self.Infer("""
import a
x = a.make_A().foo
y = a.make_B().foo
""", pythonpath=[d.path])
self.assertTypesMatchPytd(ty, """
from typing import List
import a
a = ... # type: module
x = ... # type: List[a.A[int]]
y = ... # type: List[a.B[int]]
""")
# Skipped regression test: the class parameter U and the property's own
# parameter must not be conflated (tracked in b/66005735).
# Skipping due to b/66005735
@test_base.skip("Type parameter bug")
def testPropertyTypeParam3(self):
# Don't mix up the class parameter and the property parameter
with file_utils.Tempdir() as d:
d.create_file("a.pyi", """
from typing import TypeVar, List, Generic
T = TypeVar('T')
U = TypeVar('U')
class A(Generic[U]):
@property
def foo(self: T) -> List[U]: ...
def make_A() -> A[int]: ...
""")
ty = self.Infer("""
import a
x = a.make_A().foo
""", pythonpath=[d.path])
self.assertTypesMatchPytd(ty, """
a = ... # type: module
x = ... # type: List[int]
""")
# `self` may be annotated with a constrained parameterization (A[X]); the
# property result picks up the matching constraint (int here).
def testPropertyTypeParamWithConstraints(self):
# Test setting self to a constrained type
with file_utils.Tempdir() as d:
d.create_file("a.pyi", """
from typing import TypeVar, List, Generic
T = TypeVar('T')
U = TypeVar('U', int, str)
X = TypeVar('X', int)
class A(Generic[U]):
@property
def foo(self: A[X]) -> List[X]: ...
def make_A() -> A[int]: ...
""")
ty = self.Infer("""
import a
x = a.make_A().foo
""", pythonpath=[d.path])
self.assertTypesMatchPytd(ty, """
from typing import List
a = ... # type: module
x = ... # type: List[int]
""")
# classmethod signatures foo(self: Type[T]) -> List[T] bind T to the class
# the method is invoked on, whether called on the class or an instance.
def testClassMethodTypeParam(self):
with file_utils.Tempdir() as d:
d.create_file("a.pyi", """
from typing import TypeVar, List, Type
T = TypeVar('T')
class A(object):
@classmethod
def foo(self: Type[T]) -> List[T]: ...
class B(A): ...
""")
ty = self.Infer("""
import a
v = a.A.foo()
w = a.B.foo()
x = a.A().foo()
y = a.B().foo()
""", pythonpath=[d.path])
self.assertTypesMatchPytd(ty, """
from typing import List
import a
a = ... # type: module
v = ... # type: List[a.A]
w = ... # type: List[a.B]
x = ... # type: List[a.A]
y = ... # type: List[a.B]
""")
# A metaclass property with self: Type[T] binds T to the class using the
# metaclass (a.A), so A.foo is List[a.A].
def testMetaclassPropertyTypeParam(self):
with file_utils.Tempdir() as d:
d.create_file("a.pyi", """
from typing import TypeVar, Type, List
T = TypeVar('T')
class Meta():
@property
def foo(self: Type[T]) -> List[T]
class A(metaclass=Meta):
pass
""")
ty = self.Infer("""
import a
x = a.A.foo
""", pythonpath=[d.path])
self.assertTypesMatchPytd(ty, """
from typing import List
import a
a = ... # type: module
x = ... # type: List[a.A]
""")
# A name that is only sometimes a TypeVar degrades to Any at module level.
def testTopLevelUnion(self):
ty = self.Infer("""
from typing import TypeVar
if __random__:
T = TypeVar("T")
else:
T = 42
""")
self.assertTypesMatchPytd(ty, """
from typing import Any
T = ... # type: Any
""")
# Storing a TypeVar as a dict value converts that value slot to Any.
def testStoreTypeVarInDict(self):
"""Convert a typevar to Any when stored as a dict value."""
# See abstract.Dict.setitem_slot for why this is needed.
ty = self.Infer("""
from typing import TypeVar
T = TypeVar("T")
a = {'key': T}
""")
self.assertTypesMatchPytd(ty, """
from typing import Any, Dict, TypeVar
a = ... # type: Dict[str, Any]
T = TypeVar('T')
""")
# String-valued (late/forward) bounds get the same validation as direct
# bounds, plus a name-error for a forward reference that never resolves.
def testLateBound(self):
_, errors = self.InferWithErrors("""\
from typing import TypeVar, Union
T = TypeVar("T", int, float, bound="str")
S = TypeVar("S", bound="")
U = TypeVar("U", bound="str") # ok
V = TypeVar("V", bound="int if __random__ else float")
W = TypeVar("W", bound="Foo") # ok, forward reference
X = TypeVar("X", bound="Bar")
class Foo:
pass
""")
self.assertErrorLogIs(errors, [
(2, "invalid-typevar", r"mutually exclusive"),
(3, "invalid-typevar", r"empty string"),
(5, "invalid-typevar", r"Must be constant"),
(7, "name-error", r"Name.*Bar")])
# String-valued constraints (including forward references to Foo) resolve
# to the same pytd as their direct-type equivalents.
def testLateConstraints(self):
ty = self.Infer("""
from typing import List, TypeVar
S = TypeVar("S", int, float)
T = TypeVar("T", "int", "float")
U = TypeVar("U", "List[int]", List[float])
V = TypeVar("V", "Foo", "List[Foo]")
class Foo:
pass
""", deep=False)
self.assertTypesMatchPytd(ty, """
from typing import List, TypeVar
S = TypeVar("S", int, float)
T = TypeVar("T", int, float)
U = TypeVar("U", List[int], List[float])
V = TypeVar("V", Foo, List[Foo])
class Foo:
pass
""")
# Module entry point: run the tests when executed directly.
test_base.main(globals(), __name__ == "__main__")
| 29.907143 | 87 | 0.536661 |
ace53b7ba584037d54361650ebad29b018f29c1a | 2,632 | py | Python | basic/list2.py | zffx/GooglePythonExercises | 51973edb0b7d019a951efe87e54d6bc67224b00f | [
"Apache-2.0"
] | null | null | null | basic/list2.py | zffx/GooglePythonExercises | 51973edb0b7d019a951efe87e54d6bc67224b00f | [
"Apache-2.0"
] | null | null | null | basic/list2.py | zffx/GooglePythonExercises | 51973edb0b7d019a951efe87e54d6bc67224b00f | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Additional basic list exercises
# D. Given a list of numbers, return a list where
# all adjacent == elements have been reduced to a single element,
# so [1, 2, 2, 3] returns [1, 2, 3]. You may create a new list or
# modify the passed in list.
def remove_adjacent(nums):
  """Collapse runs of equal adjacent values into a single element.

  For example [1, 2, 2, 3] -> [1, 2, 3].  A new list is returned; the
  input list is left untouched.
  """
  collapsed = []
  for value in nums:
    if collapsed and collapsed[-1] == value:
      continue  # same as the value just emitted; drop it
    collapsed.append(value)
  return collapsed
# E. Given two lists sorted in increasing order, create and return a merged
# list of all the elements in sorted order. You may modify the passed in lists.
# Ideally, the solution should work in "linear" time, making a single
# pass of both lists.
def linear_merge(list1, list2):
  """Merge two sorted lists into a single sorted list in linear time.

  Makes one pass over both inputs, appending the smaller head element at
  each step, then appends whatever remains of the non-exhausted list.

  Args:
    list1: list sorted in increasing order.
    list2: list sorted in increasing order.

  Returns:
    A new list containing every element of both inputs, in sorted order.
  """
  ret = []
  x = 0
  y = 0
  while x < len(list1) and y < len(list2):
    if list1[x] < list2[y]:
      ret.append(list1[x])
      x = x + 1
    else:
      ret.append(list2[y])
      y = y + 1
  # Bug fix: the unconsumed tails start at the CURRENT indexes x and y.
  # The previous code sliced from x + 1 / y + 1, silently dropping one
  # element (e.g. linear_merge(['aa', 'xx', 'zz'], ['bb', 'cc']) lost 'xx').
  # Exactly one of these extends is non-empty after the loop.
  ret.extend(list1[x:])
  ret.extend(list2[y:])
  return ret
# Note: a popular alternative solution repeatedly calls list.pop(0), which
# reads nicely but is not constant time with the standard python list
# implementation, so that approach is not strictly linear time.  The
# index-based solution above avoids pop(0) entirely.
# Yet another approach uses pop(-1) to remove the endmost elements
# from each list, building a solution list which is backwards.
# Then use reversed() to put the result back in the correct order. That
# solution also works in linear time, but is more ugly.
# Simple provided test() function used in main() to print
# what each function returns vs. what it's supposed to return.
# Prints ' OK ' when `got` equals `expected`, otherwise ' X ', followed by
# both values.  (Python 2 print statement — this file targets Python 2.)
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected))
# Calls the above functions with interesting inputs.
# Runs the exercise functions against known inputs and prints OK/X per case.
# NOTE(review): with the original linear_merge tail slicing, the three
# linear_merge cases below print ' X ' — each drops one element.
def main():
print 'remove_adjacent'
test(remove_adjacent([1, 2, 2, 3]), [1, 2, 3])
test(remove_adjacent([2, 2, 3, 3, 3]), [2, 3])
test(remove_adjacent([]), [])
print
print 'linear_merge'
test(linear_merge(['aa', 'xx', 'zz'], ['bb', 'cc']),
['aa', 'bb', 'cc', 'xx', 'zz'])
test(linear_merge(['aa', 'xx'], ['bb', 'cc', 'zz']),
['aa', 'bb', 'cc', 'xx', 'zz'])
test(linear_merge(['aa', 'aa'], ['aa', 'bb', 'bb']),
['aa', 'aa', 'aa', 'bb', 'bb'])
# Standard script entry point.
if __name__ == '__main__':
main()
| 30.964706 | 79 | 0.614742 |
ace53cedc68d06b1e6f18755b222427392a79b12 | 6,005 | py | Python | openstack/tests/functional/object_store/v1/test_obj.py | teresa-ho/stx-openstacksdk | 7d723da3ffe9861e6e9abcaeadc1991689f782c5 | [
"Apache-2.0"
] | 43 | 2018-12-19T08:39:15.000Z | 2021-07-21T02:45:43.000Z | openstack/tests/functional/object_store/v1/test_obj.py | teresa-ho/stx-openstacksdk | 7d723da3ffe9861e6e9abcaeadc1991689f782c5 | [
"Apache-2.0"
] | 11 | 2019-03-17T13:28:56.000Z | 2020-09-23T23:57:50.000Z | openstack/tests/functional/object_store/v1/test_obj.py | teresa-ho/stx-openstacksdk | 7d723da3ffe9861e6e9abcaeadc1991689f782c5 | [
"Apache-2.0"
] | 47 | 2018-12-19T05:14:25.000Z | 2022-03-19T15:28:30.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
import uuid
from openstack.tests.functional import base
# Functional tests for the object-store (Swift) object API: upload one
# object into a fresh container, then exercise get/list/metadata calls
# against a live endpoint.  Skipped when no object-store service exists.
@unittest.skipUnless(base.service_exists(service_type='object-store'),
'Object Storage service does not exist')
class TestObject(base.BaseFunctionalTest):
FOLDER = uuid.uuid4().hex
FILE = uuid.uuid4().hex
DATA = 'abc'
# Create the container and upload the shared test object once per class.
@classmethod
def setUpClass(cls):
super(TestObject, cls).setUpClass()
cls.conn.object_store.create_container(name=cls.FOLDER)
cls.sot = cls.conn.object_store.upload_object(
container=cls.FOLDER, name=cls.FILE, data=cls.DATA)
# Remove the object and its container after all tests have run.
@classmethod
def tearDownClass(cls):
super(TestObject, cls).tearDownClass()
cls.conn.object_store.delete_object(cls.sot, ignore_missing=False)
cls.conn.object_store.delete_container(cls.FOLDER)
# Listing the container must include the uploaded object name.
def test_list(self):
names = [o.name for o
in self.conn.object_store.objects(container=self.FOLDER)]
self.assertIn(self.FILE, names)
# The object body can be fetched both by name+container and by resource.
def test_get_object(self):
result = self.conn.object_store.get_object(
self.FILE, container=self.FOLDER)
self.assertEqual(self.DATA, result)
result = self.conn.object_store.get_object(self.sot)
self.assertEqual(self.DATA, result)
# Sequentially sets/updates/unsets system metadata (content-disposition,
# content-encoding); each step asserts the cumulative server state, so
# statement order matters.
# NOTE(review): assertGreaterEqual(0, obj.bytes) asserts 0 >= obj.bytes,
# i.e. the object is at most 0 bytes; the intent was probably
# obj.bytes >= 0 — verify argument order.
def test_system_metadata(self):
# get system metadata
obj = self.conn.object_store.get_object_metadata(
self.FILE, container=self.FOLDER)
self.assertGreaterEqual(0, obj.bytes)
self.assertIsNotNone(obj.etag)
# set system metadata
obj = self.conn.object_store.get_object_metadata(
self.FILE, container=self.FOLDER)
self.assertIsNone(obj.content_disposition)
self.assertIsNone(obj.content_encoding)
self.conn.object_store.set_object_metadata(
obj, content_disposition='attachment', content_encoding='gzip')
obj = self.conn.object_store.get_object_metadata(obj)
self.assertEqual('attachment', obj.content_disposition)
self.assertEqual('gzip', obj.content_encoding)
# update system metadata
self.conn.object_store.set_object_metadata(
obj, content_encoding='deflate')
obj = self.conn.object_store.get_object_metadata(obj)
self.assertEqual('attachment', obj.content_disposition)
self.assertEqual('deflate', obj.content_encoding)
# set custom metadata
self.conn.object_store.set_object_metadata(obj, k0='v0')
obj = self.conn.object_store.get_object_metadata(obj)
self.assertIn('k0', obj.metadata)
self.assertEqual('v0', obj.metadata['k0'])
self.assertEqual('attachment', obj.content_disposition)
self.assertEqual('deflate', obj.content_encoding)
# unset more system metadata
self.conn.object_store.delete_object_metadata(
obj, keys=['content_disposition'])
obj = self.conn.object_store.get_object_metadata(obj)
self.assertIn('k0', obj.metadata)
self.assertEqual('v0', obj.metadata['k0'])
self.assertIsNone(obj.content_disposition)
self.assertEqual('deflate', obj.content_encoding)
self.assertIsNone(obj.delete_at)
# Sequentially sets/updates/unsets custom (user) metadata keys and asserts
# the cumulative server state after each mutation; order matters here too.
def test_custom_metadata(self):
# get custom metadata
obj = self.conn.object_store.get_object_metadata(
self.FILE, container=self.FOLDER)
self.assertFalse(obj.metadata)
# set no custom metadata
self.conn.object_store.set_object_metadata(obj)
obj = self.conn.object_store.get_object_metadata(obj)
self.assertFalse(obj.metadata)
# set empty custom metadata
self.conn.object_store.set_object_metadata(obj, k0='')
obj = self.conn.object_store.get_object_metadata(obj)
self.assertFalse(obj.metadata)
# set custom metadata
self.conn.object_store.set_object_metadata(obj, k1='v1')
obj = self.conn.object_store.get_object_metadata(obj)
self.assertTrue(obj.metadata)
self.assertEqual(1, len(obj.metadata))
self.assertIn('k1', obj.metadata)
self.assertEqual('v1', obj.metadata['k1'])
# set more custom metadata by named object and container
self.conn.object_store.set_object_metadata(self.FILE, self.FOLDER,
k2='v2')
obj = self.conn.object_store.get_object_metadata(obj)
self.assertTrue(obj.metadata)
self.assertEqual(2, len(obj.metadata))
self.assertIn('k1', obj.metadata)
self.assertEqual('v1', obj.metadata['k1'])
self.assertIn('k2', obj.metadata)
self.assertEqual('v2', obj.metadata['k2'])
# update custom metadata
self.conn.object_store.set_object_metadata(obj, k1='v1.1')
obj = self.conn.object_store.get_object_metadata(obj)
self.assertTrue(obj.metadata)
self.assertEqual(2, len(obj.metadata))
self.assertIn('k1', obj.metadata)
self.assertEqual('v1.1', obj.metadata['k1'])
self.assertIn('k2', obj.metadata)
self.assertEqual('v2', obj.metadata['k2'])
# unset custom metadata
self.conn.object_store.delete_object_metadata(obj, keys=['k1'])
obj = self.conn.object_store.get_object_metadata(obj)
self.assertTrue(obj.metadata)
self.assertEqual(1, len(obj.metadata))
self.assertIn('k2', obj.metadata)
self.assertEqual('v2', obj.metadata['k2'])
| 40.85034 | 75 | 0.673106 |
ace53dc65436df177ea4c5835a03b4121c9a3adf | 1,656 | py | Python | backend/sharing.py | illicitonion/edfringeplanner | ab6d4a3218ee211de5078b3205fd39da1fbdfb50 | [
"BSD-3-Clause"
] | null | null | null | backend/sharing.py | illicitonion/edfringeplanner | ab6d4a3218ee211de5078b3205fd39da1fbdfb50 | [
"BSD-3-Clause"
] | null | null | null | backend/sharing.py | illicitonion/edfringeplanner | ab6d4a3218ee211de5078b3205fd39da1fbdfb50 | [
"BSD-3-Clause"
] | null | null | null | from db import cursor
def share(config, *, shared_by, shared_with_email):
    """Record that user id `shared_by` shares with `shared_with_email`.

    Re-sharing an existing pair is a no-op (ON CONFLICT DO NOTHING).
    """
    row = (shared_by, shared_with_email)
    with cursor(config) as cur:
        cur.execute(
            "INSERT INTO shares (shared_by, shared_with_email) VALUES (%s, %s) ON CONFLICT DO NOTHING",
            row,
        )
def unshare(config, *, shared_by, shared_with_email):
    """Delete the share from user id `shared_by` to `shared_with_email`, if any."""
    delete_sql = "DELETE FROM shares WHERE shared_by = %s AND shared_with_email = %s"
    params = (shared_by, shared_with_email)
    with cursor(config) as cur:
        cur.execute(delete_sql, params)
def get_shared_by_user_ids_and_emails(config, user_id):
    """Return (id, email) pairs of users sharing with `user_id`, ordered by email.

    Raises:
        ValueError: if `user_id` has no row in the users table.
    """
    with cursor(config) as cur:
        cur.execute("SELECT email FROM users WHERE id = %s", (user_id,))
        first = cur.fetchone()
        if first is None:
            raise ValueError("Couldn't find email address for user {}".format(user_id))
        user_email = first[0]
        cur.execute(
            "SELECT users.id, users.email FROM users INNER JOIN shares ON users.id = shares.shared_by WHERE shares.shared_with_email = %s ORDER BY users.email ASC",
            (user_email,),
        )
        return [(sharer_id, sharer_email) for sharer_id, sharer_email in cur.fetchall()]
def get_share_emails(config, user_id):
    """Return (shared_by_user, shared_with_user) email lists for `user_id`.

    shared_by_user: emails of users who share their data with `user_id`.
    shared_with_user: emails that `user_id` has shared their own data with.
    """
    with cursor(config) as cur:
        cur.execute(
            "SELECT shared_with_email FROM shares WHERE shared_by = %s ORDER BY shared_with_email ASC",
            (user_id,),
        )
        shared_with_user = [record[0] for record in cur.fetchall()]
        sharer_pairs = get_shared_by_user_ids_and_emails(config, user_id)
        shared_by_user = [sharer_email for _sharer_id, sharer_email in sharer_pairs]
        return shared_by_user, shared_with_user
| 33.12 | 164 | 0.643116 |
ace53e75246dc5099504e202e6464efb09f51aa4 | 10,506 | py | Python | anuga/parallel/parallel_meshes.py | samcom12/anuga_core | f4378114dbf02d666fe6423de45798add5c42806 | [
"Python-2.0",
"OLDAP-2.7"
] | null | null | null | anuga/parallel/parallel_meshes.py | samcom12/anuga_core | f4378114dbf02d666fe6423de45798add5c42806 | [
"Python-2.0",
"OLDAP-2.7"
] | null | null | null | anuga/parallel/parallel_meshes.py | samcom12/anuga_core | f4378114dbf02d666fe6423de45798add5c42806 | [
"Python-2.0",
"OLDAP-2.7"
] | null | null | null | """parallel-meshes -
2D triangular domains for parallel finite-volume computations of
the advection equation, with extra structures to define the
sending and receiving communications define in dictionaries
full_send_dict and ghost_recv_dict
Ole Nielsen, Stephen Roberts, Duncan Gray, Christopher Zoppou
Geoscience Australia, 2005
Modified by Linda Stals, March 2006, to include ghost boundaries
"""
from __future__ import absolute_import
from builtins import range
from builtins import object
import sys
import numpy as num
from anuga.config import epsilon
from .parallel_api import distribute
from .parallel_api import myid, numprocs, get_processor_name
from .parallel_api import send, receive
from .parallel_api import pypar_available, barrier, finalize
# Builds this processor's vertical strip of an m_g x n_g rectangular
# triangle mesh, widened by one column on each side (ghost columns), and
# the send/receive index dictionaries for exchanging ghost triangles with
# the neighbouring processors.  Uses module-level `myid`/`numprocs` from
# parallel_api.
def parallel_rectangle(m_g, n_g, len1_g=1.0, len2_g=1.0, origin_g = (0.0, 0.0)):
"""Setup a rectangular grid of triangles
with m+1 by n+1 grid points
and side lengths len1, len2. If side lengths are omitted
the mesh defaults to the unit square, divided between all the
processors
len1: x direction (left to right)
len2: y direction (bottom to top)
"""
from anuga.utilities import parallel_abstraction as pypar
# Partition the m_g columns over the processors, then widen this strip by
# one column on each side to hold the ghost layers.
m_low, m_high = pypar.balance(m_g, numprocs, myid)
n = n_g
m_low = m_low-1
m_high = m_high+1
#print 'm_low, m_high', m_low, m_high
m = m_high - m_low
# Grid spacing is global; length/origin are rescaled to the local strip.
delta1 = float(len1_g)/m_g
delta2 = float(len2_g)/n_g
len1 = len1_g*float(m)/float(m_g)
len2 = len2_g
origin = ( origin_g[0]+float(m_low)/float(m_g)*len1_g, origin_g[1] )
#Calculate number of points
Np = (m+1)*(n+1)
# VIndex: (i, j) grid position -> flat vertex index.
class VIndex(object):
def __init__(self, n,m):
self.n = n
self.m = m
def __call__(self, i,j):
return j+i*(self.n+1)
# EIndex: (i, j) cell position -> flat index of the cell's lower triangle
# (the upper triangle is this value + 1).
class EIndex(object):
def __init__(self, n,m):
self.n = n
self.m = m
def __call__(self, i,j):
return 2*(j+i*self.n)
I = VIndex(n,m)
E = EIndex(n,m)
points = num.zeros( (Np,2), float)
for i in range(m+1):
for j in range(n+1):
points[I(i,j),:] = [i*delta1 + origin[0], j*delta2 + origin[1]]
#Construct 2 triangles per rectangular element and assign tags to boundary
#Calculate number of triangles
Nt = 2*m*n
elements = num.zeros( (Nt,3), int)
boundary = {}
# Idgl/Idgr: ghost triangles on the left/right edge columns (i == 0 / m-1);
# Idfl/Idfr: the full (owned) triangles one column in (i == 1 / m-2) that
# are sent to neighbours to populate their ghost layers.
Idgl = []
Idfl = []
Idgr = []
Idfr = []
full_send_dict = {}
ghost_recv_dict = {}
nt = -1
for i in range(m):
for j in range(n):
i1 = I(i,j+1)
i2 = I(i,j)
i3 = I(i+1,j+1)
i4 = I(i+1,j)
#Lower Element
nt = E(i,j)
if i == 0:
Idgl.append(nt)
if i == 1:
Idfl.append(nt)
if i == m-2:
Idfr.append(nt)
if i == m-1:
Idgr.append(nt)
if i == m-1:
if myid == numprocs-1:
boundary[nt, 2] = 'right'
else:
boundary[nt, 2] = 'ghost'
if j == 0:
boundary[nt, 1] = 'bottom'
elements[nt,:] = [i4,i3,i2]
#Upper Element
nt = E(i,j)+1
if i == 0:
Idgl.append(nt)
if i == 1:
Idfl.append(nt)
if i == m-2:
Idfr.append(nt)
if i == m-1:
Idgr.append(nt)
if i == 0:
if myid == 0:
boundary[nt, 2] = 'left'
else:
boundary[nt, 2] = 'ghost'
if j == n-1:
boundary[nt, 1] = 'top'
elements[nt,:] = [i1,i2,i3]
# Communication maps: proc id -> [triangle ids, triangle ids].  With one
# processor everything wraps to self; with two, both neighbours are the
# same process; otherwise left/right neighbours are (myid -/+ 1) mod P.
if numprocs==1:
Idfl.extend(Idfr)
Idgr.extend(Idgl)
#print Idfl
#print Idgr
Idfl = num.array(Idfl,int)
Idgr = num.array(Idgr,int)
#print Idfl
#print Idgr
full_send_dict[myid] = [Idfl, Idfl]
ghost_recv_dict[myid] = [Idgr, Idgr]
elif numprocs == 2:
Idfl.extend(Idfr)
Idgr.extend(Idgl)
Idfl = num.array(Idfl,int)
Idgr = num.array(Idgr,int)
full_send_dict[(myid-1)%numprocs] = [Idfl, Idfl]
ghost_recv_dict[(myid-1)%numprocs] = [Idgr, Idgr]
else:
Idfl = num.array(Idfl,int)
Idgl = num.array(Idgl,int)
Idfr = num.array(Idfr,int)
Idgr = num.array(Idgr,int)
full_send_dict[(myid-1)%numprocs] = [Idfl, Idfl]
ghost_recv_dict[(myid-1)%numprocs] = [Idgl, Idgl]
full_send_dict[(myid+1)%numprocs] = [Idfr, Idfr]
ghost_recv_dict[(myid+1)%numprocs] = [Idgr, Idgr]
#print full_send_dict
#print ghost_recv_dict
return points, elements, boundary, full_send_dict, ghost_recv_dict
# Builds an m x n rectangular triangle mesh where boundary triangles on all
# four sides (and the four corners) are mapped, via the `ghosts` dict, to
# their periodic partner one cell in from the opposite side.  Reads
# module-level `myid`/`numprocs` for the left/right boundary tags.
def rectangular_periodic(m, n, len1=1.0, len2=1.0, origin = (0.0, 0.0)):
"""Setup a rectangular grid of triangles
with m+1 by n+1 grid points
and side lengths len1, len2. If side lengths are omitted
the mesh defaults to the unit square.
len1: x direction (left to right)
len2: y direction (bottom to top)
Return to lists: points and elements suitable for creating a Mesh or
FVMesh object, e.g. Mesh(points, elements)
"""
delta1 = float(len1)/m
delta2 = float(len2)/n
#Calculate number of points
Np = (m+1)*(n+1)
# VIndex: (i, j) grid position -> flat vertex index.
class VIndex(object):
def __init__(self, n,m):
self.n = n
self.m = m
def __call__(self, i,j):
return j+i*(self.n+1)
# EIndex: (i, j) cell position -> flat index of the cell's lower triangle.
class EIndex(object):
def __init__(self, n,m):
self.n = n
self.m = m
def __call__(self, i,j):
return 2*(j+i*self.n)
I = VIndex(n,m)
E = EIndex(n,m)
points = num.zeros( (Np,2), float)
for i in range(m+1):
for j in range(n+1):
points[I(i,j),:] = [i*delta1 + origin[0], j*delta2 + origin[1]]
#Construct 2 triangles per rectangular element and assign tags to boundary
#Calculate number of triangles
Nt = 2*m*n
elements = num.zeros( (Nt,3), int)
boundary = {}
# ghosts maps a boundary triangle id to the id of its periodic partner.
ghosts = {}
nt = -1
for i in range(m):
for j in range(n):
i1 = I(i,j+1)
i2 = I(i,j)
i3 = I(i+1,j+1)
i4 = I(i+1,j)
#Lower Element
nt = E(i,j)
if i == m-1:
ghosts[nt] = E(1,j)
if i == 0:
ghosts[nt] = E(m-2,j)
if j == n-1:
ghosts[nt] = E(i,1)
if j == 0:
ghosts[nt] = E(i,n-2)
if i == m-1:
if myid == numprocs-1:
boundary[nt, 2] = 'right'
else:
boundary[nt, 2] = 'ghost'
if j == 0:
boundary[nt, 1] = 'bottom'
elements[nt,:] = [i4,i3,i2]
#Upper Element
nt = E(i,j)+1
if i == m-1:
ghosts[nt] = E(1,j)+1
if i == 0:
ghosts[nt] = E(m-2,j)+1
if j == n-1:
ghosts[nt] = E(i,1)+1
if j == 0:
ghosts[nt] = E(i,n-2)+1
if i == 0:
if myid == 0:
boundary[nt, 2] = 'left'
else:
boundary[nt, 2] = 'ghost'
if j == n-1:
boundary[nt, 1] = 'top'
elements[nt,:] = [i1,i2,i3]
# Corner cells are doubly periodic: remap both triangles of each corner to
# the diagonally opposite interior cell.
#bottom left
nt = E(0,0)
nf = E(m-2,n-2)
ghosts[nt] = nf
ghosts[nt+1] = nf+1
#bottom right
nt = E(m-1,0)
nf = E(1,n-2)
ghosts[nt] = nf
ghosts[nt+1] = nf+1
#top left
nt = E(0,n-1)
nf = E(m-2,1)
ghosts[nt] = nf
ghosts[nt+1] = nf+1
#top right
nt = E(m-1,n-1)
nf = E(1,1)
ghosts[nt] = nf
ghosts[nt+1] = nf+1
return points, elements, boundary, ghosts
# Like rectangular_periodic, but periodic in the left/right (x) direction
# only: `ghosts` maps triangles in the first and last columns to partners
# near the opposite side; top/bottom remain ordinary tagged boundaries.
def rectangular_periodic_lr(m, n, len1=1.0, len2=1.0, origin = (0.0, 0.0)):
"""Setup a rectangular grid of triangles
with m+1 by n+1 grid points
and side lengths len1, len2. If side lengths are omitted
the mesh defaults to the unit square.
len1: x direction (left to right)
len2: y direction (bottom to top)
Return to lists: points and elements suitable for creating a Mesh or
Domain object, e.g. Mesh(points, elements)
"""
delta1 = float(len1)/m
delta2 = float(len2)/n
#Calculate number of points
Np = (m+1)*(n+1)
# VIndex: (i, j) grid position -> flat vertex index.
class VIndex(object):
def __init__(self, n,m):
self.n = n
self.m = m
def __call__(self, i,j):
return j+i*(self.n+1)
# EIndex: (i, j) cell position -> flat index of the cell's lower triangle.
class EIndex(object):
def __init__(self, n,m):
self.n = n
self.m = m
def __call__(self, i,j):
return 2*(j+i*self.n)
I = VIndex(n,m)
E = EIndex(n,m)
points = num.zeros( (Np,2), float)
for i in range(m+1):
for j in range(n+1):
points[I(i,j),:] = [i*delta1 + origin[0], j*delta2 + origin[1]]
#Construct 2 triangles per rectangular element and assign tags to boundary
#Calculate number of triangles
Nt = 2*m*n
elements = num.zeros( (Nt,3), int)
boundary = {}
# ghosts maps a left/right edge triangle id to its periodic partner.
ghosts = {}
nt = -1
for i in range(m):
for j in range(n):
i1 = I(i,j+1)
i2 = I(i,j)
i3 = I(i+1,j+1)
i4 = I(i+1,j)
#Lower Element
nt = E(i,j)
if i == m-1:
ghosts[nt] = E(1,j)
if i == 0:
ghosts[nt] = E(m-2,j)
if i == m-1:
if myid == numprocs-1:
boundary[nt, 2] = 'right'
else:
boundary[nt, 2] = 'ghost'
if j == 0:
boundary[nt, 1] = 'bottom'
elements[nt,:] = [i4,i3,i2]
#Upper Element
nt = E(i,j)+1
if i == m-1:
ghosts[nt] = E(1,j)+1
if i == 0:
ghosts[nt] = E(m-2,j)+1
if i == 0:
if myid == 0:
boundary[nt, 2] = 'left'
else:
boundary[nt, 2] = 'ghost'
if j == n-1:
boundary[nt, 1] = 'top'
elements[nt,:] = [i1,i2,i3]
return points, elements, boundary, ghosts
| 23.398664 | 80 | 0.489054 |
ace53ed20a97f08894eb176a2621130de6ebc5cc | 1,981 | py | Python | crowd_anki/history/archiver_vendor.py | lukas-mertens/CrowdAnki | 2961f9c073bc799f192235414408907632fc8211 | [
"MIT"
] | null | null | null | crowd_anki/history/archiver_vendor.py | lukas-mertens/CrowdAnki | 2961f9c073bc799f192235414408907632fc8211 | [
"MIT"
] | null | null | null | crowd_anki/history/archiver_vendor.py | lukas-mertens/CrowdAnki | 2961f9c073bc799f192235414408907632fc8211 | [
"MIT"
] | null | null | null | from dataclasses import field, dataclass
from pathlib import Path
from typing import Any
from .anki_deck_archiver import AnkiDeckArchiver
from .archiver import AllDeckArchiver
from .dulwich_repo import DulwichAnkiRepo
from ..anki.adapters.deck_manager import AnkiStaticDeckManager, DeckManager
from ..anki.ui.utils import progress_indicator
from ..export.anki_exporter import AnkiJsonExporter
from ..utils.notifier import Notifier, AnkiUiNotifier
from ..config.config_settings import ConfigSettings
# Wires together the pieces needed to snapshot all Anki decks to git-backed
# CrowdAnki exports: deck discovery, per-deck archiving, progress UI and
# user notification.  `window` is the Anki main window (provides `col`,
# `pm`); `config` supplies the snapshot path and options.
@dataclass
class ArchiverVendor:
window: Any
config: ConfigSettings
notifier: Notifier = field(default_factory=AnkiUiNotifier)
# Deck manager over the current collection's decks.
@property
def deck_manager(self) -> DeckManager:
return AnkiStaticDeckManager(self.window.col.decks)
# Builds the archiver that walks every deck; each deck gets an
# AnkiDeckArchiver writing under <snapshot_path>/<profile name> via a
# dulwich git repo.
def all_deck_archiver(self):
return AllDeckArchiver(
self.deck_manager,
lambda deck: AnkiDeckArchiver(deck,
self.snapshot_path().joinpath(self.window.pm.name),
AnkiJsonExporter(self.window.col, self.config),
DulwichAnkiRepo))
# Configured snapshot root as a Path.
def snapshot_path(self):
return Path(self.config.snapshot_path)
# User-triggered snapshot (menu action).
def do_manual_snapshot(self):
self.do_snapshot('CrowdAnki: Manual snapshot')
# Snapshot hook run on sync; does nothing unless enabled in config.
def snapshot_on_sync(self):
if self.config.automated_snapshot:
self.do_snapshot('CrowdAnki: Snapshot on sync')
# Archives all decks (with a progress indicator), using `reason` as the
# commit message, then notifies the user of success.
def do_snapshot(self, reason):
with progress_indicator(self.window, 'Taking CrowdAnki snapshot of all decks'):
self.all_deck_archiver().archive(overrides=self.overrides(),
reason=reason)
self.notifier.info("Snapshot successful",
f"The CrowdAnki snapshot to {self.snapshot_path().resolve()} successfully completed")
# Deck overrides restricting the snapshot to the configured root decks.
def overrides(self):
return self.deck_manager.for_names(self.config.snapshot_root_decks)
| 38.096154 | 116 | 0.676931 |
ace53f9d0933b4e990cb0829696d53f4d1586bb3 | 2,410 | py | Python | api/accounts/models.py | ucpr/onlinejudge | 472b4671dc8fde8bd2f2b139ce61bc52e8137fcc | [
"MIT"
] | null | null | null | api/accounts/models.py | ucpr/onlinejudge | 472b4671dc8fde8bd2f2b139ce61bc52e8137fcc | [
"MIT"
] | null | null | null | api/accounts/models.py | ucpr/onlinejudge | 472b4671dc8fde8bd2f2b139ce61bc52e8137fcc | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager, _user_has_perm
from django.core import validators
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
# Create your models here.
# Manager for the Account model.  Unusually, create_user takes a single
# `request_data` dict (keys: 'username', 'email', 'password') rather than
# separate keyword arguments.
class AccountManager(BaseUserManager):
# Create and save an active user from `request_data`.  Raises ValueError
# when 'email' is missing or empty.  `kwargs` is accepted but unused.
def create_user(self, request_data, **kwargs):
now = timezone.now()
if not request_data['email']:
raise ValueError('Users must have an email address')
user = self.model(
username=request_data['username'],
email=self.normalize_email(request_data['email']),
is_active=True,
last_login=now,
date_joined=now,
)
user.set_password(request_data['password'])
user.save(using=self._db)
return user
# Create a superuser (used by `createsuperuser`): builds the request_data
# dict, delegates to create_user, then raises the staff/admin flags.
# `extra_fields` is accepted but unused.
def create_superuser(self, username, email, password, **extra_fields):
request_data = {
'username': username,
'email': email,
'password': password
}
user = self.create_user(request_data)
user.is_active = True
user.is_staff = True
user.is_admin = True
# user.is_superuser = True
user.save(using=self._db)
return user
# Custom user model authenticated by email (USERNAME_FIELD = 'email');
# superuser status is derived from the `is_admin` flag rather than a
# stored `is_superuser` column.
class Account(AbstractBaseUser):
username = models.CharField(('username'), max_length=30, unique=True)
first_name = models.CharField(('first name'), max_length=30, blank=True)
last_name = models.CharField(('last name'), max_length=30, blank=True)
email = models.EmailField(verbose_name='email address', max_length=255, unique=True)
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
is_admin = models.BooleanField(default=False)
date_joined = models.DateTimeField(('date joined'), default=timezone.now)
objects = AccountManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['username']
# NOTE(review): defined as an instance method, so the first positional
# argument (named `user` here) receives the instance — effectively `self`
# under a different name.  Verify this shadowing of Django's permission
# helpers is intentional.
def user_has_perm(user, perm, obj):
return _user_has_perm(user, perm, obj)
# Delegates permission checks to django.contrib.auth's _user_has_perm.
def has_perm(self, perm, obj=None):
return _user_has_perm(self, perm, obj=obj)
# Admins have access to every app's module permissions.
def has_module_perms(self, app_label):
return self.is_admin
def get_short_name(self):
return self.first_name
# Superuser status mirrors the is_admin flag (no separate DB column).
@property
def is_superuser(self):
return self.is_admin
class Meta:
db_table = 'api_user'
swappable = 'AUTH_USER_MODEL'
| 31.710526 | 88 | 0.6639 |
ace540924b45ae8eca1549a35b400ab40da41774 | 2,789 | py | Python | tests/adspygoogle/adwords/oauth2_from_file_integration_test.py | cherry-wb/googleads-python-lib | 24a1ecb7c1cca5af3624a3b03ebaa7f5147b4a04 | [
"Apache-2.0"
] | null | null | null | tests/adspygoogle/adwords/oauth2_from_file_integration_test.py | cherry-wb/googleads-python-lib | 24a1ecb7c1cca5af3624a3b03ebaa7f5147b4a04 | [
"Apache-2.0"
] | null | null | null | tests/adspygoogle/adwords/oauth2_from_file_integration_test.py | cherry-wb/googleads-python-lib | 24a1ecb7c1cca5af3624a3b03ebaa7f5147b4a04 | [
"Apache-2.0"
] | 2 | 2020-04-02T19:00:31.000Z | 2020-08-06T03:28:38.000Z | #!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration test for the AdWords API library using cached OAuth2 values."""
__author__ = 'api.jdilallo@gmail.com (Joseph DiLallo)'
import os
import pickle
import sys
import tempfile
import unittest
sys.path.insert(0, os.path.join('..', '..', '..'))
from adspygoogle.adwords.AdWordsClient import AdWordsClient
from adspygoogle.common import Utils
# Values used for the user agent, developer token, client customer ID, and
# OAuth2 credentials in our test code.
USER_AGENT = 'oauth2_from_file_integration_test'
DEVELOPER_TOKEN = 'INSERT_DEVELOPER_TOKEN_HERE'
CLIENT_CUSTOMER_ID = 'INSERT_CLIENT_CUSTOMER_ID_HERE'
CLIENT_ID = 'INSERT_CLIENT_ID_HERE'
CLIENT_SECRET = 'INSERT_CLIENT_SECRET_HERE'
REFRESH_TOKEN = 'INSERT_REFRESH_TOKEN_HERE'
class AdWordsIntegrationTest(unittest.TestCase):
  """Tests end-to-end usage of the AdWords library."""

  def testRequestWithOAuth2FromFile(self):
    """Tests making a request against AdWords using cached OAuth2 values."""
    # Write the auth and (empty) config pickles to a temp dir so that
    # AdWordsClient(path=...) can load cached credentials from disk.
    path = tempfile.mkdtemp()
    auth_credentials = {
        'clientCustomerId': CLIENT_CUSTOMER_ID,
        'developerToken': DEVELOPER_TOKEN,
        'userAgent': USER_AGENT,
        'clientId': CLIENT_ID,
        'clientSecret': CLIENT_SECRET,
        'refreshToken': REFRESH_TOKEN
    }
    with open(os.path.join(path, 'adwords_api_auth.pkl'), 'w') as handle:
      pickle.dump(auth_credentials, handle)
      # Redundant: the `with` block already closes the file on exit.
      handle.close()
    with open(os.path.join(path, 'adwords_api_config.pkl'), 'w') as handle:
      pickle.dump({}, handle)
      handle.close()
    # Budget mutation used purely as a cheap round-trip API call.
    budget = {
        'name': 'Interplanetary budget #%s' % Utils.GetUniqueName(),
        'amount': {
            'microAmount': '50000000'
        },
        'deliveryMethod': 'STANDARD',
        'period': 'DAILY'
    }
    budget_operations = [{
        'operator': 'ADD',
        'operand': budget
    }]
    client = AdWordsClient(path=path)
    budget_service = client.GetBudgetService()
    response = budget_service.Mutate(budget_operations)[0]
    self.assertEqual('BudgetReturnValue', response.get('ListReturnValue_Type'))
    self.assertEqual(1, len(response.get('value', [])))


if __name__ == '__main__':
  unittest.main()
| 31.693182 | 79 | 0.708856 |
ace541646bb335f8bc9f1309aa39abf82c82d8e6 | 3,818 | py | Python | tests/job/test_jobs.py | CPWstatic/nebula | 4d6da3aac0b9aa3db1eaf7251ef5bd2700ef7af0 | [
"Apache-2.0"
] | 816 | 2020-08-17T09:51:45.000Z | 2022-03-31T11:04:38.000Z | tests/job/test_jobs.py | zzl200012/nebula-graph | c08b248c69d7db40c0ba9011d4429083cf539bbd | [
"Apache-2.0"
] | 615 | 2020-08-18T01:26:52.000Z | 2022-02-18T08:19:54.000Z | tests/job/test_jobs.py | zzl200012/nebula-graph | c08b248c69d7db40c0ba9011d4429083cf539bbd | [
"Apache-2.0"
] | 147 | 2020-08-17T09:40:52.000Z | 2022-03-15T06:21:27.000Z | # --coding:utf-8--
#
# Copyright (c) 2020 vesoft inc. All rights reserved.
#
# This source code is licensed under Apache 2.0 License,
# attached with Common Clause Condition 1.0, found in the LICENSES directory.
import re
import time
from nebula2.common import ttypes
from tests.common.nebula_test_suite import NebulaTestSuite
class TestJobs(NebulaTestSuite):
    """Integration tests for job management statements (SUBMIT/SHOW/STOP JOB)."""

    def test_failed(self):
        """Job statements must fail outside a space or for unknown job ids."""
        # submit without space
        resp = self.client.execute('SUBMIT JOB COMPACT;')
        self.check_resp_failed(resp, ttypes.ErrorCode.E_SEMANTIC_ERROR)
        # show one not exists
        resp = self.client.execute('SHOW JOB 233;')
        self.check_resp_failed(resp, ttypes.ErrorCode.E_EXECUTION_ERROR)
        # stop one not exists
        resp = self.client.execute('STOP JOB 233;')
        self.check_resp_failed(resp, ttypes.ErrorCode.E_EXECUTION_ERROR)

    def test_succeeded(self):
        """Submit COMPACT/FLUSH/STATS jobs, then verify SHOW JOBS/JOB and STOP JOB."""

        def check_jobs_resp_obj(resp_row, job_name):
            # Row layout: [id, command, status, start time, stop time].
            assert resp_row[1].as_string() == job_name
            assert resp_row[2].is_string()
            assert resp_row[3].is_datetime()
            assert resp_row[4].is_datetime()

        resp = self.client.execute('CREATE SPACE IF NOT EXISTS space_for_jobs(partition_num=9, replica_factor=1, vid_type=FIXED_STRING(20));'
                                   'USE space_for_jobs;')
        self.check_resp_succeeded(resp)

        resp = self.client.execute('SUBMIT JOB COMPACT;')
        self.check_resp_succeeded(resp)
        expect_col_names = ['New Job Id']
        self.check_column_names(resp, expect_col_names)
        expect_values = [[re.compile(r'\d+')]]
        self.check_result(resp, expect_values, is_regex=True)
        time.sleep(1)

        resp = self.client.execute('SUBMIT JOB FLUSH;')
        self.check_resp_succeeded(resp)
        expect_col_names = ['New Job Id']
        self.check_column_names(resp, expect_col_names)
        expect_values = [[re.compile(r'\d+')]]
        self.check_result(resp, expect_values, is_regex=True)
        time.sleep(1)

        resp = self.client.execute('SUBMIT JOB STATS;')
        self.check_resp_succeeded(resp)
        expect_col_names = ['New Job Id']
        self.check_column_names(resp, expect_col_names)
        expect_values = [[re.compile(r'\d+')]]
        self.check_result(resp, expect_values, is_regex=True)
        # Give the submitted jobs time to run before inspecting them.
        time.sleep(10)

        resp = self.client.execute('SHOW JOBS;')
        self.check_resp_succeeded(resp)
        expect_col_names = ['Job Id', 'Command', 'Status', 'Start Time', 'Stop Time']
        self.check_column_names(resp, expect_col_names)
        # Jobs are listed newest first: STATS, FLUSH, COMPACT.
        check_jobs_resp_obj(resp.row_values(0), 'STATS')
        check_jobs_resp_obj(resp.row_values(1), 'FLUSH')
        check_jobs_resp_obj(resp.row_values(2), 'COMPACT')

        job_id = resp.row_values(0)[0].as_int()
        resp = self.client.execute('SHOW JOB {};'.format(job_id))
        self.check_resp_succeeded(resp)
        expect_col_names = ['Job Id(TaskId)', 'Command(Dest)', 'Status', 'Start Time', 'Stop Time']
        check_jobs_resp_obj(resp.row_values(0), 'STATS')

        job_id = resp.row_values(0)[0].as_int()
        stop_job_resp = self.client.execute('STOP JOB {};'.format(job_id))
        if resp.row_values(0)[2].as_string() == "FINISHED":
            # Execution error is expected when stopping an already-finished job.
            self.check_resp_failed(stop_job_resp, ttypes.ErrorCode.E_EXECUTION_ERROR)
        else:
            self.check_resp_succeeded(stop_job_resp)

        # This is skipped because it is hard to simulate the situation
        # resp = self.client.execute('RECOVER JOB;')
        # self.check_resp_succeeded(resp)
        # expect_col_names = ['Recovered job num']
        # self.check_column_names(resp, expect_col_names)
        # expect_values = [[0]]
        # self.check_result(resp, expect_values)
| 42.422222 | 141 | 0.657936 |
ace5419c29241d66e4c1a2d0e51ce5919c875020 | 3,294 | py | Python | backend/animal_adoption/apps/account/tests/validator_test.py | PedroHenriqueDevBR/aplicacao-para-adocao-de-animais | 041f041ebbc86147b0192734fd9651fb317fbe36 | [
"MIT"
] | 3 | 2021-08-31T13:27:17.000Z | 2021-09-06T12:04:03.000Z | backend/animal_adoption/apps/account/tests/validator_test.py | PedroHenriqueDevBR/carteira-de-vacinacao-animal | 50cd7b7f53ff0409768e182474d8ffabdb6b9ce0 | [
"MIT"
] | null | null | null | backend/animal_adoption/apps/account/tests/validator_test.py | PedroHenriqueDevBR/carteira-de-vacinacao-animal | 50cd7b7f53ff0409768e182474d8ffabdb6b9ce0 | [
"MIT"
] | null | null | null | from django.contrib.auth.models import User
from django.test import TestCase
from apps.core.models import City, State
from apps.account.validators.user_validators import (
person_register_is_valid_or_errors,
person_update_is_valid_or_errors,
)
class ValidatorTestCase(TestCase):
    """Tests for person_register_is_valid_or_errors (registration payloads)."""

    def setUp(self) -> None:
        # One State/City pair so city id 1 is a valid foreign key.
        state = State.objects.create(name="Estado")
        City.objects.create(name="cidade", state=state)

    def test_person_validator_should_errrors_caused_no_data(self):
        # Every required field missing -> one error per required field.
        person = {}
        self.assertEqual(len(person_register_is_valid_or_errors(person)), 5)

    def test_person_validator_errrors(self):
        # Each field is individually invalid (too short / bad city id).
        person = {
            "name": "Pe",
            "username": "Pers",
            "password": "passw12",
            "contact": "1234567",
            "city": 2,
        }
        self.assertEqual(len(person_register_is_valid_or_errors(person)), 5)

    def test_person_validator_should_be_valid(self):
        person = {
            "name": "Per",
            "username": "Perso",
            "password": "passw123",
            "contact": "123456789",
            "city": 1,
        }
        self.assertEqual(len(person_register_is_valid_or_errors(person)), 0)

    def test_username_in_use_error(self):
        # A duplicate username must produce exactly one error.
        User.objects.create(first_name="person", username="person", password="passw123")
        person = {
            "name": "Per",
            "username": "person",
            "password": "passw123",
            "contact": "123456789",
            "city": 1,
        }
        self.assertEqual(len(person_register_is_valid_or_errors(person)), 1)
class UpdateValidatorTestCase(TestCase):
    """Tests for person_update_is_valid_or_errors (partial update payloads)."""

    def setUp(self) -> None:
        # One State/City pair so city id 1 is a valid foreign key.
        state = State.objects.create(name="Estado")
        City.objects.create(name="cidade", state=state)

    def test_update_validator_should_be_valid_with_no_data(self):
        # Updates are partial: an empty payload is valid.
        person = {}
        self.assertEqual(len(person_update_is_valid_or_errors(person)), 0)

    def test_person_validator_errrors(self):
        # Each provided field is individually invalid.
        person = {
            "name": "Pe",
            "password": "passw12",
            "contact": "1234567",
            "city": 10,
        }
        self.assertEqual(len(person_update_is_valid_or_errors(person)), 4)

    def test_person_validator_should_be_valid(self):
        person = {
            "name": "Per",
            "password": "passw123",
            "contact": "123456789",
            "city": 1,
        }
        self.assertEqual(len(person_update_is_valid_or_errors(person)), 0)

    def test_person_validator_should_be_valid_name_only(self):
        person = {
            "name": "Per",
        }
        self.assertEqual(len(person_update_is_valid_or_errors(person)), 0)

    def test_person_validator_should_be_valid_password_only(self):
        person = {
            "password": "passw123",
        }
        self.assertEqual(len(person_update_is_valid_or_errors(person)), 0)

    def test_person_validator_should_be_valid_contact_only(self):
        person = {
            "contact": "123456789",
        }
        self.assertEqual(len(person_update_is_valid_or_errors(person)), 0)

    def test_person_validator_should_be_valid_city_only(self):
        person = {
            "city": 1,
        }
        self.assertEqual(len(person_update_is_valid_or_errors(person)), 0)
| 32.613861 | 88 | 0.619004 |
ace54338bd96fcfbae293b87eeed8c72e77865c0 | 48 | py | Python | python/hello-world/hello_world.py | sudopluto/exercism | d18f637c6ab0c857ea8623dacb47e73af74dfa96 | [
"MIT"
] | null | null | null | python/hello-world/hello_world.py | sudopluto/exercism | d18f637c6ab0c857ea8623dacb47e73af74dfa96 | [
"MIT"
] | null | null | null | python/hello-world/hello_world.py | sudopluto/exercism | d18f637c6ab0c857ea8623dacb47e73af74dfa96 | [
"MIT"
] | null | null | null | def hello(name=''):
return("Hello, World!")
| 16 | 27 | 0.583333 |
ace545356daa4575fd841ba7425354e6705ace43 | 171 | py | Python | problem0632.py | kmarcini/Project-Euler-Python | d644e8e1ec4fac70a9ab407ad5e1f0a75547c8d3 | [
"BSD-3-Clause"
] | null | null | null | problem0632.py | kmarcini/Project-Euler-Python | d644e8e1ec4fac70a9ab407ad5e1f0a75547c8d3 | [
"BSD-3-Clause"
] | null | null | null | problem0632.py | kmarcini/Project-Euler-Python | d644e8e1ec4fac70a9ab407ad5e1f0a75547c8d3 | [
"BSD-3-Clause"
] | null | null | null | ###########################
#
# #632 Square prime factors - Project Euler
# https://projecteuler.net/problem=632
#
# Code by Kevin Marciniak
#
###########################
| 19 | 43 | 0.48538 |
ace545bfb0e3b9a938e4d2ccac83d5410c5797e7 | 2,792 | py | Python | aliyun-python-sdk-polardb/aliyunsdkpolardb/request/v20170801/DescribeAutoRenewAttributeRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | 1,001 | 2015-07-24T01:32:41.000Z | 2022-03-25T01:28:18.000Z | aliyun-python-sdk-polardb/aliyunsdkpolardb/request/v20170801/DescribeAutoRenewAttributeRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | 363 | 2015-10-20T03:15:00.000Z | 2022-03-08T12:26:19.000Z | aliyun-python-sdk-polardb/aliyunsdkpolardb/request/v20170801/DescribeAutoRenewAttributeRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | 682 | 2015-09-22T07:19:02.000Z | 2022-03-22T09:51:46.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkpolardb.endpoint import endpoint_data
class DescribeAutoRenewAttributeRequest(RpcRequest):
	"""RPC request for PolarDB DescribeAutoRenewAttribute (API version 2017-08-01).

	Each get_/set_ pair below maps one query parameter of the API; setters
	add the value to the request's query-parameter dict.
	"""

	def __init__(self):
		RpcRequest.__init__(self, 'polardb', '2017-08-01', 'DescribeAutoRenewAttribute','polardb')
		self.set_method('POST')
		# Resolve the regional endpoint from generated endpoint data when available.
		if hasattr(self, "endpoint_map"):
			setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
		if hasattr(self, "endpoint_regional"):
			setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

	def get_ResourceOwnerId(self):
		return self.get_query_params().get('ResourceOwnerId')

	def set_ResourceOwnerId(self,ResourceOwnerId):
		self.add_query_param('ResourceOwnerId',ResourceOwnerId)

	def get_PageNumber(self):
		return self.get_query_params().get('PageNumber')

	def set_PageNumber(self,PageNumber):
		self.add_query_param('PageNumber',PageNumber)

	def get_ResourceGroupId(self):
		return self.get_query_params().get('ResourceGroupId')

	def set_ResourceGroupId(self,ResourceGroupId):
		self.add_query_param('ResourceGroupId',ResourceGroupId)

	def get_PageSize(self):
		return self.get_query_params().get('PageSize')

	def set_PageSize(self,PageSize):
		self.add_query_param('PageSize',PageSize)

	def get_ResourceOwnerAccount(self):
		return self.get_query_params().get('ResourceOwnerAccount')

	def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
		self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)

	def get_OwnerAccount(self):
		return self.get_query_params().get('OwnerAccount')

	def set_OwnerAccount(self,OwnerAccount):
		self.add_query_param('OwnerAccount',OwnerAccount)

	def get_OwnerId(self):
		return self.get_query_params().get('OwnerId')

	def set_OwnerId(self,OwnerId):
		self.add_query_param('OwnerId',OwnerId)

	def get_DBClusterIds(self):
		return self.get_query_params().get('DBClusterIds')

	def set_DBClusterIds(self,DBClusterIds):
		self.add_query_param('DBClusterIds',DBClusterIds)
ace545c24c56f6a867d7e000fff49d5876dd2ef6 | 487 | py | Python | final_project/machinetranslation/tests/tests.py | parsae80/xzceb-flask_eng_fr | 90f1d2a45c971ec04b64a526e5928be8175651be | [
"Apache-2.0"
] | null | null | null | final_project/machinetranslation/tests/tests.py | parsae80/xzceb-flask_eng_fr | 90f1d2a45c971ec04b64a526e5928be8175651be | [
"Apache-2.0"
] | null | null | null | final_project/machinetranslation/tests/tests.py | parsae80/xzceb-flask_eng_fr | 90f1d2a45c971ec04b64a526e5928be8175651be | [
"Apache-2.0"
] | null | null | null | import unittest
from translator import english_to_french, french_to_english
class TestEnglish(unittest.TestCase):
    """Tests English-to-French translation."""

    def test1(self):
        # A misspelled input must not translate to the expected French word.
        self.assertNotEqual(english_to_french('Helo'), 'Hello')
        self.assertEqual(english_to_french('Hello'), 'Bonjour')


class TestFrench(unittest.TestCase):
    """Tests French-to-English translation."""

    def test1(self):
        # The input should actually be translated, not echoed back.
        self.assertNotEqual(french_to_english('Bonjour'), 'Bonjour')
        self.assertEqual(french_to_english('Bonjour'), 'Hello')


unittest.main()
ace545f6f0c76a82e46368f05f39c932d4abd10b | 14,327 | py | Python | SafeCV/MCTS.py | matthewwicker/SafeCV | 22eafb8c61003c290fdab1ca42eab4eac675575c | [
"MIT"
] | 12 | 2017-10-29T10:30:38.000Z | 2021-06-30T08:38:08.000Z | SafeCV/MCTS.py | matthewwicker/SafeCV | 22eafb8c61003c290fdab1ca42eab4eac675575c | [
"MIT"
] | 3 | 2018-08-01T08:43:29.000Z | 2019-12-10T12:39:30.000Z | SafeCV/MCTS.py | matthewwicker/SafeCV | 22eafb8c61003c290fdab1ca42eab4eac675575c | [
"MIT"
] | 5 | 2017-11-22T03:00:01.000Z | 2019-12-10T12:46:23.000Z | import cv2
import numpy as np
from pomegranate import *
import numpy as np
import copy
from copy import deepcopy
import DFMCS
from DFMCS import DFMCS
from DFMCS import DFMCS_Parameters
import math
def RUN_UCB(keypoint_distribution, plays_per_node, TOTAL_PLAYS):
    """Blend per-keypoint scores with an exploration bonus and renormalize.

    Parameters
    ----------
    keypoint_distribution : sequence of float
        Current score per keypoint (a probability-like weight).
    plays_per_node : sequence of float
        Visit count per keypoint; entries start at 1, so the log is defined.
    TOTAL_PLAYS : int
        Total number of plays performed so far.

    Returns
    -------
    numpy.ndarray
        Bonus-adjusted scores normalized to sum to 1.
    """
    # Fix: the original used a bare `log`, which only resolved through the
    # `from pomegranate import *` star import; use math.log explicitly.
    # NOTE(review): textbook UCB uses sqrt(log(TOTAL_PLAYS) / plays_i); this
    # code computes sqrt(log(plays_i) / TOTAL_PLAYS). Preserved as written --
    # confirm the intended orientation before changing the search behavior.
    retval = [
        score + math.sqrt(math.log(plays) / TOTAL_PLAYS)
        for score, plays in zip(keypoint_distribution, plays_per_node)
    ]
    retval = np.asarray(retval)
    return retval / sum(retval)
class TreeNode(object):
    """One node of the MCTS tree: a keypoint distribution plus visit stats.

    NOTE(review): the class-level attributes below are mutable defaults shared
    by all instances until __init__ overwrites them; they appear to be unused
    placeholders -- confirm before relying on them.
    """
    visited = False
    num_visits = 1
    visits_per_node = []
    lst = []
    dst = []
    lvl = []
    id_num = 0

    def __init__(self, lst, dst, lvl, id_num, params):
        # NOTE(review): this branch falls through (no return), so the None/-1
        # assignments are immediately overwritten below; id still ends up as
        # id_num (-1), which is what MCTS checks for placeholder nodes.
        if(id_num == -1):
            self.kp_list = None
            self.kp_dist = None
            self.level = -1
            self.id = -1
        """ Creates an node object with a di"""
        self.kp_list = lst          # SIFT keypoints
        self.kp_dist = dst          # normalized score per keypoint
        self.level = lvl            # depth in the tree
        self.id = id_num            # index of the keypoint this node represents
        self.visits_per_node = np.ones(len(lst))
        self.params = params

    def selection(self):
        """ Returns a selection from the list of keypoints (sampled by kp_dist)."""
        val = np.random.choice(range(len(self.kp_list)), p=self.kp_dist)
        self.visits_per_node[val]+=1
        return val

    def exploration(self):
        """ Returns a keypoint sampled from the UCB-adjusted distribution."""
        ucb = RUN_UCB(self.kp_dist, self.visits_per_node, self.num_visits)
        return np.random.choice(range(len(self.kp_list)), p=ucb)

    def visit_helper(self, k):
        """ Returns a tuple x,y that coresponds
        to the coords which we will manipulate.

        Samples a pixel near keypoint ``k`` from independent normals centered
        on the keypoint, then clamps the result into the image bounds.
        """
        mu_x, mu_y, sigma = int(round(k.pt[0])), int(round(k.pt[1])), k.size
        # Remember, it may be wise to expand simga - greater varience = less honed attack
        sigma += self.params.SIGMA_CONSTANT
        d_x = NormalDistribution(mu_x, sigma)
        d_y = NormalDistribution(mu_y, sigma)
        x = d_x.sample()
        y = d_y.sample()
        # Keypoints were detected on an inflated image; scale back down.
        if(self.params.small_image):
            x/=self.params.inflation_constant
            y/=self.params.inflation_constant
        # Clamp into [0, X_SHAPE) x [0, Y_SHAPE).
        if(x >= self.params.X_SHAPE):
            x = self.params.X_SHAPE-1
        elif(x < 0):
            x = 0
        if(y >= self.params.Y_SHAPE):
            y = self.params.Y_SHAPE-1
        elif(y < 0):
            y = 0
        return int(x), int(y)

    def visit(self, im, vc, manip_list):
        """Apply ``vc`` pixel manipulations near this node's keypoint.

        Each manipulation picks a not-yet-touched pixel whose value actually
        changes under params.MANIP; gives up on a pixel after 5 failed draws.
        The try/except handles both array-valued (color) and scalar (gray)
        pixel comparisons. Mutates ``im`` and ``manip_list`` in place.
        """
        for i in range(vc):
            attempts = 0
            while(True):
                if(attempts == 5):
                    break
                x, y = self.visit_helper(self.kp_list[self.id])
                try:
                    if(((x,y) not in manip_list) and (list(self.params.MANIP(im[y][x], 3)) != list(im[y][x]))):
                        manip_list.append((x,y))
                        im[y][x] = self.params.MANIP(im[y][x], 3)
                        attempts = 0
                        break
                    else:
                        attempts +=1
                except:
                    # Scalar-pixel (grayscale) fallback: list() raised above.
                    if(((x,y) not in manip_list) and ((self.params.MANIP(im[y][x], 3)) != (im[y][x]))):
                        manip_list.append((x,y))
                        im[y][x] = self.params.MANIP(im[y][x], 3)
                        attempts = 0
                        break
                    else:
                        attempts +=1
        return im, manip_list

    def visit_random(self, im, vc):
        """Manipulate ``vc`` pixels near a randomly drawn keypoint.

        NOTE(review): references a bare ``MANIP`` (not self.params.MANIP);
        this raises NameError unless a global MANIP exists -- looks stale.
        """
        val = np.random.choice(range(len(self.kp_list)), p=self.kp_dist)
        for i in range(vc):
            x, y = self.visit_helper(self.kp_list[val])
            im[y][x] = MANIP(im[y][x], 3)
        return im

    def backprop(self, index, reward, severity):
        """ Updates the distribution based upon the
        reward passed in.

        Adds reward/severity to the child's weight, floors at 0, and
        renormalizes so kp_dist remains a probability vector.
        """
        #severity /=10
        self.kp_dist[index] += (float(reward)/severity)
        if(self.kp_dist[index] < 0):
            self.kp_dist[index] = 0
        self.kp_dist = self.kp_dist/sum(self.kp_dist)
def white_manipulation(val, dim):
    """Pixel manipulation that always returns pure white as a 3-channel list.

    Both arguments (current pixel value and dimension) are ignored; this is
    the default MANIP used by MCTS_Parameters.
    """
    white = 255
    return [white, white, white]
class MCTS_Parameters(object):
    """Configuration bundle for the MCTS adversarial search.

    Wraps the model, the original image, and tunables; also builds
    preprocess/predict closures and records the model's initial confidence.
    """

    def __init__(self, image, true_class, model, predshape = (1,224, 224, 3)):
        self.model = model
        self.ORIGINAL_IMAGE = copy.deepcopy(image)
        self.TRUE_CLASS = true_class
        # Pixel manipulation function; defaults to painting pixels white.
        self.MANIP = white_manipulation
        # Pixels manipulated per node visit.
        self.VISIT_CONSTANT = 100
        # Added to each keypoint's sigma when sampling pixels.
        self.SIGMA_CONSTANT = 15
        self.X_SHAPE = 224
        self.Y_SHAPE = 224
        self.predshape = predshape
        # Filled in later by SIFT_Filtered: keypoints, descriptors, responses.
        self.kp, self.des, self.r = [],[],[]
        self.verbose = False
        # Set True for images small enough to need inflation before SIFT.
        self.small_image = False
        self.inflation_constant = 15
        # Maximum number of MCTS simulations.
        self.simulations_cutoff = 10

        def preproc(im):
            # Reshape to the model's batch shape and cast to float.
            im_pred = im.reshape(self.predshape)
            im_pred = im_pred.astype('float')
            return im_pred
        self.preprocess = preproc

        def predi(im):
            # Returns (argmax class index, raw probability array).
            # NOTE(review): assumes a Keras-style model.predict API -- confirm.
            im_pred = self.preprocess(im)
            prob = self.model.predict(im_pred, batch_size=1, verbose=0)
            pred = np.argmax(np.asarray(prob))
            return pred, prob
        self.predict = predi

        # Model's confidence on the unmodified image.
        pred, prob = self.predict(image)
        self.PROBABILITY = max(max(prob))
        self.backtracking_constant = 10
def SIFT_Filtered(image, parameters, threshold=0.00):
    """Detect SIFT keypoints and return (keypoints, descriptors, responses).

    Keypoints with response below ``threshold`` are dropped; results are
    sorted by response ascending and returned as three parallel sequences
    via zip(*...). NOTE(review): raises on zero surviving keypoints (zip of
    an empty list cannot be unpacked into three values).
    """
    # We need to expand the image to get good keypoints
    if(parameters.small_image):
        xs = parameters.X_SHAPE * parameters.inflation_constant;
        ys = parameters.Y_SHAPE * parameters.inflation_constant;
        image = cv2.resize(image, (xs,ys))
    sift = cv2.xfeatures2d.SIFT_create()
    kp, des = sift.detectAndCompute(image,None)
    #FILTER RESPONSES:
    responses = []
    for x in kp:
        responses.append(x.response)
    responses.sort()
    ret = []
    index_tracker = 0
    for x in kp:
        if(x.response >= threshold):
            ret.append((x, des[index_tracker], x.response))
        index_tracker = index_tracker + 1
    # Sort surviving keypoints by response (ascending).
    retval = sorted(ret, key=lambda tup: tup[2])
    return zip(*retval)
def MCTS(params):
    """Monte Carlo tree search for a minimal-severity adversarial example.

    Builds a tree over SIFT keypoints of params.ORIGINAL_IMAGE; each node
    visit manipulates VISIT_CONSTANT pixels near its keypoint, then a DFMCS
    simulation finishes the attack. Rewards (confidence drops) are
    back-propagated into each node's keypoint distribution.

    Returns (best adversarial image, best severity, last probability array,
    (severities_over_time, raw_severities, avg_severities)).
    """
    params.kp, params.des, params.r = SIFT_Filtered(params.ORIGINAL_IMAGE, params)
    params.r = np.asarray(params.r)
    params.r = params.r/sum(params.r)
    root = TreeNode(params.kp, params.r, 0, 0, params)
    levels = [[root]]               # levels[d] = nodes at tree depth d
    current_level = 0
    node = root
    manipulation = []               # (x, y) pixels changed so far on IMAGE
    visited = [root]                # path from root to current node
    severities_over_time = []
    IMAGE = copy.deepcopy(params.ORIGINAL_IMAGE)
    MISCLASSIFIED = False
    raw_searches = params.simulations_cutoff
    count_searches = 0
    min_severity = -1               # -1 means "no adversarial example yet"
    best_image = None
    severities_over_time = []
    raw_severities = []
    avg_severities = []
    count_prior_saturation = 0
    while(True):
        #print(count_searches)
        if(count_searches == raw_searches):
            break
        count_searches += 1
        explored = False
        nxt = node.exploration()
        # First, we need to make sure that the layer we are going to initialize even exists
        try:
            test = levels[current_level+1]
        except:
            # If the layer does not exist, then we create the layers
            # (placeholder nodes use id -1 until actually expanded).
            levels.append([TreeNode(params.kp, params.r, -1, -1, params) for i in range(len(params.kp))])
            if(params.verbose == True):
                print("Exploring new keypoints on a new layer: %s on node: %s"%(current_level+1, nxt))
            #Initialize the new node in the tree
            levels[current_level+1][nxt] = TreeNode(params.kp, params.r, current_level+1, nxt, params)
            IMAGE, manipulation = levels[current_level+1][nxt].visit(IMAGE, params.VISIT_CONSTANT, manipulation)
            visited.append(levels[current_level+1][nxt])
            #Visit the new node
            pred, prob = params.predict(IMAGE)
            NEW_PROBABILITY = prob[0][pred]
            if(pred != int(params.TRUE_CLASS)):
                MISCLASSIFIED = True
                break
            #Generate a DFS Adversarial example
            prior_severity = len(manipulation)
            if(prior_severity > min_severity):
                count_prior_saturation +=1
            if(MISCLASSIFIED == True):
                print("Satisfied before simulation")
                adv = copy.deepcopy(IMAGE)
                softmax = copy.deepcopy(prob)
                severity = prior_severity
            else:
                # Finish the attack with a depth-first simulation; -1 means
                # the simulation failed to find an adversarial example.
                dparams = DFMCS_Parameters(params, IMAGE)
                adv, softmax, severity, kpd = DFMCS(dparams, cutoff=min_severity)
                if(severity != -1):
                    severity += prior_severity
                elif(severity == -1 and count_searches == 1):
                    break
            # Track the best (lowest-severity) example found so far.
            if((severity < min_severity or min_severity == -1) and severity != -1):
                severities_over_time.append(severity)
                min_severity = severity
                best_image = copy.deepcopy(adv)
            else:
                severities_over_time.append(min_severity)
            if(severity != -1):
                raw_severities.append(severity)
            else:
                raw_severities.append(min_severity)
            avg_severities.append(np.average(raw_severities[-10:]))
            if(params.verbose == True):
                print("Back propogating and restarting search. Current Severity: %s"%(severity))
                print("Best severity: %s"%(min_severity))
                print("=================================================================\n")
            #Backprop
            for i in range(len(visited)):
                if(i == (len(visited)-1)):
                    break
                else:
                    visited[i].backprop(visited[i+1].id, params.PROBABILITY - NEW_PROBABILITY, current_level+1)
            # Reset to the root for the next simulation.
            IMAGE = copy.deepcopy(params.ORIGINAL_IMAGE)
            current_level = 0
            visited = [root]
            manipulations = []
            node = root
            explored = True
        if(not explored):
            if( not (levels[current_level+1][nxt].id == -1)):
                # Node already expanded; fall through to descend into it below.
                pass
            else:
                if(params.verbose == True):
                    print("Exploring new keypoints on an existing layer: %s on node: %s"%(current_level+1, nxt))
                #Initialize the new node in the tree
                levels[current_level+1][nxt] = TreeNode(params.kp, params.r, current_level+1, nxt, params)
                IMAGE, manipulation = levels[current_level+1][nxt].visit(IMAGE, params.VISIT_CONSTANT, manipulation)
                visited.append(levels[current_level+1][nxt])
                pred, prob = params.predict(IMAGE)
                NEW_PROBABILITY = prob[0][pred]
                if(pred != int(params.TRUE_CLASS)):
                    MISCLASSIFIED = True
                    break
                #Generate a DFS Adversarial example
                # NOTE(review): here severity is estimated from depth, not
                # len(manipulation) as in the new-layer branch above.
                prior_severity = (current_level+1)*params.VISIT_CONSTANT
                if(prior_severity > min_severity):
                    count_prior_saturation +=1
                if(MISCLASSIFIED == True):
                    adv = copy.deepcopy(IMAGE)
                    softmax = copy.deepcopy(prob)
                    severity = prior_severity
                else:
                    dparams = DFMCS_Parameters(params, IMAGE)
                    adv, softmax, severity, kpd = DFMCS(dparams, cutoff=min_severity)
                    if(severity != -1):
                        severity += prior_severity
                if((severity < min_severity or min_severity == -1) and severity != -1):
                    severities_over_time.append(severity)
                    min_severity = severity
                    best_image = copy.deepcopy(adv)
                else:
                    severities_over_time.append(min_severity)
                if(severity != -1):
                    raw_severities.append(severity)
                else:
                    raw_severities.append(min_severity)
                avg_severities.append(np.average(raw_severities[-10:]))
                if(params.verbose == True):
                    print("Back propogating and restarting search. Current Severity: %s"%(severity))
                    print("Best severity: %s"%(min_severity))
                    print("=================================================================\n")
                for i in range(len(visited)):
                    if(i == (len(visited)-1)):
                        break
                    else:
                        visited[i].backprop(visited[i+1].id, params.PROBABILITY - NEW_PROBABILITY, current_level+1)
                IMAGE = copy.deepcopy(params.ORIGINAL_IMAGE)
                current_level = 0
                visited = [root]
                manipulations = []
                node = root
                explored = True
        if(not explored):
            # Node already existed: manipulate and descend one level deeper
            # without spending a simulation (count_searches is refunded).
            if(params.verbose == True):
                print("manipulating and continuing the search: %s"%(nxt))
            # Visit this node
            count_searches -= 1
            IMAGE, manipulation = levels[current_level+1][nxt].visit(IMAGE, params.VISIT_CONSTANT, manipulation)
            # Predict image class
            pred, prob = params.predict(IMAGE)
            NEW_PROBABILITY = prob[0][pred]
            if(pred != int(params.TRUE_CLASS)):
                MISCLASSIFIED = True
                break
            visited.append(node)
            node = levels[current_level+1][nxt]
            current_level = current_level + 1
        # Stop once every keypoint's prior severity exceeds the best found.
        if(count_prior_saturation >= float(len(params.kp))):
            break
    # NOTE(review): both branches re-predict the working IMAGE (not
    # best_image); prob therefore reflects the last search state.
    if best_image is not None:
        pred, prob = params.predict(IMAGE)
        NEW_PROBABILITY = prob[0][pred]
    else:
        pred, prob = params.predict(IMAGE)
        NEW_PROBABILITY = prob[0][pred]
        best_image = params.ORIGINAL_IMAGE
        min_severity = 0
    stats = (severities_over_time, raw_severities, avg_severities)
    return best_image, min_severity, prob, stats
ace5464ca32f173a7bf94bfd12fe529de7012304 | 1,529 | py | Python | QWeb/internal/alert.py | kivipe/qweb | abf5881aa67412e4a243b13a59528a3c80aa2f52 | [
"Apache-2.0"
] | 33 | 2021-03-16T12:26:44.000Z | 2022-03-30T17:44:57.000Z | QWeb/internal/alert.py | kivipe/qweb | abf5881aa67412e4a243b13a59528a3c80aa2f52 | [
"Apache-2.0"
] | 24 | 2021-03-18T16:21:37.000Z | 2022-03-24T17:52:14.000Z | QWeb/internal/alert.py | kivipe/qweb | abf5881aa67412e4a243b13a59528a3c80aa2f52 | [
"Apache-2.0"
] | 13 | 2021-03-24T17:48:50.000Z | 2022-02-25T03:22:01.000Z | # -*- coding: utf-8 -*-
# --------------------------
# Copyright © 2014 - Qentinel Group.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ---------------------------
from QWeb.internal.exceptions import QWebDriverError
from QWeb.internal import browser, decorators
@decorators.timeout_decorator_for_actions
def close_alert(alert, action):  # pylint: disable=unused-argument
    """Close (or deliberately leave open) a browser alert.

    Parameters
    ----------
    alert
        Selenium Alert object to act on.
    action : str
        One of ACCEPT, DISMISS or NOTHING (case-insensitive). NOTHING leaves
        the alert open.

    Raises
    ------
    QWebDriverError
        If *action* is not one of the accepted values.
    """
    if action.upper() == 'ACCEPT':
        alert.accept()
    elif action.upper() == 'DISMISS':
        alert.dismiss()
    elif action.upper() == 'NOTHING':
        return
    else:
        # Fix: the message previously said "LEAVE", which is not an accepted
        # value; the code only recognizes ACCEPT, DISMISS and NOTHING.
        raise QWebDriverError(
            "Invalid alert action '{}'. Must be ACCEPT, DISMISS or NOTHING".
            format(action))
@decorators.timeout_decorator_for_actions
def wait_alert(timeout):  # pylint: disable=unused-argument
    """Return the currently open alert; retried until *timeout* by the decorator."""
    driver = browser.get_current_browser()
    return driver.switch_to.alert
@decorators.timeout_decorator_for_actions
def type_alert(alert, text, timeout):  # pylint: disable=unused-argument
    """Type *text* into an alert prompt; retried until *timeout* by the decorator."""
    alert.send_keys(text)
| 33.977778 | 74 | 0.691956 |
ace54745d5a2ead53c745bdda6d733b8ff0098b5 | 1,139 | py | Python | setup.py | rionagreally/SingleWebPageApp | 5b6bd393ab08a1b2a778e7bd7ac31f583e13217b | [
"MIT"
] | null | null | null | setup.py | rionagreally/SingleWebPageApp | 5b6bd393ab08a1b2a778e7bd7ac31f583e13217b | [
"MIT"
] | null | null | null | setup.py | rionagreally/SingleWebPageApp | 5b6bd393ab08a1b2a778e7bd7ac31f583e13217b | [
"MIT"
] | null | null | null | #Ríona Greally - G00325504
#3rd Yr Software Development, GMIT
import couchdb
import time
# Connect to the local CouchDB server.
couch = couchdb.Server()
# NOTE(review): this second assignment immediately replaces the first
# connection above; the default-constructed Server() is unused.
couch = couchdb.Server('http://127.0.0.1:5984/')
# creating the database called 'stories'
db = couch.create('stories')
# formatting Date and Time for the document timestamps
currDate = time.strftime("%c")
# creating Documents in stories
doc = {'story': 'This is my First Story', 'Date': currDate}
doc1 = {'story': 'Three Irishmen, Paddy, Sean and Seamus, were stumbling home from the pub late one night and found themselves on the road which led past the old graveyard."Come have a look over here," says Paddy, "Its Michael O Gradys grave, God bless his soul. He lived to the ripe old age of 87." "That is nothing," says Sean, "here is one named Patrick O Toole, it says here that he was 95 when he died." Just then, Seamus yells out, "Good God, heres a fella that got to be 145!" "What was his name?" asks Paddy. Seamus stumbles around a bit, awkwardly lights a match to see what else is written on the stone marker and exclaims: "Miles, from Dublin." - Timmy K, Kerry', 'Date': currDate}
# saving the documents to database
db.save(doc)
db.save(doc1)
ace547dc76927a439eefe656ae440276ae68a4a3 | 3,409 | py | Python | cfgov/jobmanager/tests/models/test_panels.py | adebisi-aden/consumerfinance.gov | 8c0f5afac341823c59f73b0c6bd60592e0f5eaca | [
"CC0-1.0"
] | 37 | 2020-08-18T19:52:39.000Z | 2022-03-23T08:08:41.000Z | cfgov/jobmanager/tests/models/test_panels.py | adebisi-aden/consumerfinance.gov | 8c0f5afac341823c59f73b0c6bd60592e0f5eaca | [
"CC0-1.0"
] | 338 | 2020-08-14T20:46:36.000Z | 2022-03-31T20:49:32.000Z | cfgov/jobmanager/tests/models/test_panels.py | adebisi-aden/consumerfinance.gov | 8c0f5afac341823c59f73b0c6bd60592e0f5eaca | [
"CC0-1.0"
] | 14 | 2020-10-21T15:27:03.000Z | 2022-03-17T03:16:36.000Z | import unittest
from unittest.mock import Mock
from django.core.exceptions import ValidationError
from django.test import TestCase
from wagtail.core.models import Locale, Page
from model_bakery import baker
from jobmanager.models.django import ApplicantType, Grade
from jobmanager.models.pages import JobListingPage
from jobmanager.models.panels import (
EmailApplicationLink, GradePanel, USAJobsApplicationLink
)
class GradePanelTests(unittest.TestCase):
    """Unit tests for GradePanel's string representation."""

    def test_str(self):
        # str(GradePanel) should delegate to the grade's own string form.
        grade = Grade(grade='53', salary_min=1, salary_max=100)
        self.assertEqual(
            str(GradePanel(grade=grade, job_listing_id=123)),
            '53'
        )
class ApplicationLinkTestCaseMixin(object):
    """Shared setup/helpers for application-link model validation tests.

    Subclasses set ``link_cls`` to the model under test; ``check_clean``
    builds an instance attached to the shared job listing and runs
    ``full_clean`` on it.
    """
    link_cls = None

    @classmethod
    def setUpClass(cls):
        super(ApplicationLinkTestCaseMixin, cls).setUpClass()
        cls.root = Page.objects.get(slug='root')

    def setUp(self):
        locale = Locale.objects.get(pk=1)
        self.job_listing = baker.prepare(
            JobListingPage, description='foo', locale=locale)
        # Mock full_clean so the (unsaved) baked page attaches without validation.
        self.job_listing.full_clean = Mock(return_value=None)
        self.root.add_child(instance=self.job_listing)

    def check_clean(self, **kwargs):
        link = self.link_cls(job_listing=self.job_listing, **kwargs)
        link.full_clean()

    def test_no_inputs_fails_validation(self):
        with self.assertRaises(ValidationError):
            self.check_clean()
class USAJobsApplicationLinkTestCase(ApplicationLinkTestCaseMixin, TestCase):
link_cls = USAJobsApplicationLink
def setUp(self):
super(USAJobsApplicationLinkTestCase, self).setUp()
self.applicant_type = baker.make(ApplicantType)
def test_all_fields_passes_validation(self):
self.check_clean(
announcement_number='abc123',
url='http://www.xyz',
applicant_type=self.applicant_type
)
def test_requires_url(self):
with self.assertRaises(ValidationError):
self.check_clean(
announcement_number='abc123',
url='this-is-not-a-url',
applicant_type=self.applicant_type
)
class EmailApplicationLinkTestCase(ApplicationLinkTestCaseMixin, TestCase):
link_cls = EmailApplicationLink
def test_all_fields_passes_validation(self):
self.check_clean(
address='user@example.com',
label='Heading',
description='Description'
)
def test_requires_address(self):
with self.assertRaises(ValidationError):
self.check_clean(
address='this-is-not-an-email-address',
label='Heading',
description='Description'
)
def test_description_optional(self):
self.check_clean(
address='user@example.com',
label='Heading'
)
def test_mailto_link(self):
job = baker.prepare(
JobListingPage,
title='This is a page title!',
description='This is a page description'
)
address = 'user@example.com'
link = EmailApplicationLink(address=address, job_listing=job)
self.assertEqual(
link.mailto_link,
'mailto:{}?subject=Application for Position: {}'.format(
address,
'This%20is%20a%20page%20title%21'
)
)
| 29.643478 | 77 | 0.645937 |
ace54819a67ea561ea22f882cd3a8d40607285a1 | 1,555 | py | Python | time_series_comparison/tests.py | BCCN-Prog/analysis_2018 | e29d18bb621d83c3dd3e9f7dbe481bd0fea6f616 | [
"BSD-3-Clause"
] | null | null | null | time_series_comparison/tests.py | BCCN-Prog/analysis_2018 | e29d18bb621d83c3dd3e9f7dbe481bd0fea6f616 | [
"BSD-3-Clause"
] | null | null | null | time_series_comparison/tests.py | BCCN-Prog/analysis_2018 | e29d18bb621d83c3dd3e9f7dbe481bd0fea6f616 | [
"BSD-3-Clause"
] | 4 | 2018-04-24T09:07:23.000Z | 2018-07-10T08:57:28.000Z | import AN_comp_utilities as utils
import numpy as np
# import traj_dist.distance as tdist
# create random weathor conditions to scale parameters in methods according to
# what is subjectively assumed a "good prediction"
true = np.random.normal(1.5,2,1000)
true[true<0] = 0
prediction = true + np.random.normal(0,.5,1000)
prediction[prediction<0] = 0
# not realistic prediction data
p = 0.5
rain = np.random.choice(a=[False, True], size=1000, p=[p, 1-p])
prob_rain_sham = p*100 + np.random.normal(0,2,1000)
prob_rain_sham[prob_rain_sham<0] = 0
prob_rain_sham[prob_rain_sham>100] = 100
# more realistic
prob_rain = rain*100 + np.random.normal(0,15,1000)
prob_rain[prob_rain<0] = 0
prob_rain[prob_rain>100] = 100
# load and use real data
real = 0
pred = 0
data_type = 'temperature'
days_ahead = 1
measure, value, differences, per = utils.compare_time_series(prediction,true,days_ahead,utils.variance,data_type)
print('Variance')
print(measure)
print(value)
measure, value, differences, per = utils.compare_time_series(prediction,true,days_ahead,utils.norm1,data_type)
print('Norm1')
print(measure)
print(value)
measure, value, differences, per = utils.compare_time_series(prediction,true,days_ahead,utils.outlier,data_type)
print('Outlier')
print(measure)
print(value)
measure, value, differences, per = utils.compare_time_series(prob_rain,rain,1,utils.cross_entropy,'prob_rain')
print('Cross-entropy')
print(measure)
print(value)
utils.fit_distr(differences,data_type = 'temperature',fit_with='norm')
utils.plot_histograms_rain(prob_rain,rain)
| 28.272727 | 113 | 0.775563 |
ace54864908a11616a5296b656dfdee3ded68f0c | 2,518 | py | Python | tests/utils/readme_generator.py | monkeydevtools/treepath-python | 56f6cbf662f8a4c13f0c9e753a839fc9f6323dba | [
"Apache-2.0"
] | 2 | 2021-05-26T08:26:25.000Z | 2021-09-24T21:26:01.000Z | tests/utils/readme_generator.py | monkeydevtools/treepath-python | 56f6cbf662f8a4c13f0c9e753a839fc9f6323dba | [
"Apache-2.0"
] | null | null | null | tests/utils/readme_generator.py | monkeydevtools/treepath-python | 56f6cbf662f8a4c13f0c9e753a839fc9f6323dba | [
"Apache-2.0"
] | null | null | null | import inspect
import os
import textwrap
class Readme:
def __init__(self, readme_file: str):
self._readme_file = open(readme_file, 'w')
def append(self, data):
self._readme_file.write(data)
def append_doc(self, data):
dedent_data = textwrap.dedent(data)
self.append(f"{dedent_data}{os.linesep}")
def append_python_src(self, python_src):
dedent_python_src = textwrap.dedent(python_src)
self.append(f"```python{os.linesep}{dedent_python_src}```{os.linesep}")
@staticmethod
def extract_doc_string(python_entity):
doc_string = python_entity.__doc__
return doc_string
@staticmethod
def extract_python_src(python_entity):
doc_string = python_entity.__doc__
source = inspect.getsource(python_entity)
index_of_doc = source.index(doc_string)
source_start = index_of_doc + len(doc_string) + 3
python_src = source[source_start:]
return python_src
def process_python_src(self, python_src: str):
dedent_python_src = textwrap.dedent(python_src)
lines_itr = iter(dedent_python_src.splitlines(keepends=True))
line = next(lines_itr)
self.process_python_src_segment(line, lines_itr)
def process_python_src_segment(self, line, lines_itr):
buffer = line
for line in lines_itr:
if not line.startswith('#'):
buffer += line
else:
if not buffer.isspace():
self.append_python_src(buffer)
self.process_comment_segment(line, lines_itr)
return
self.append_python_src(buffer)
def process_comment_segment(self, line, lines_itr):
buffer = line[1:]
for line in lines_itr:
if line.startswith('#'):
buffer += line[1:]
elif not line.strip():
buffer += line
else:
self.append_doc(buffer)
self.process_python_src_segment(line, lines_itr)
return
self.append_doc(buffer)
def append_function(self, function):
doc_string = self.extract_doc_string(function)
self.append_doc(doc_string)
python_src = self.extract_python_src(function)
self.process_python_src(python_src)
return function
def __iadd__(self, p2):
dedent_txt = textwrap.dedent(p2)
self.append(dedent_txt)
self.append(os.linesep)
return self
| 29.97619 | 79 | 0.627879 |
ace549e7f0a9a15b642ca96c4986e3a86ba9d853 | 1,142 | py | Python | app/taskmanager-db.py | codezeeker/task-mgmt-service | c9212b5e93f89ae964e1514976742082d52be34b | [
"MIT"
] | null | null | null | app/taskmanager-db.py | codezeeker/task-mgmt-service | c9212b5e93f89ae964e1514976742082d52be34b | [
"MIT"
] | 19 | 2020-11-01T15:13:17.000Z | 2020-12-11T15:03:14.000Z | app/taskmanager-db.py | codezeeker/task-mgmt-service | c9212b5e93f89ae964e1514976742082d52be34b | [
"MIT"
] | 1 | 2020-11-02T15:19:57.000Z | 2020-11-02T15:19:57.000Z | import sqlite3
from sqlite3 import Error
def create_connection():
""" create a database connection to a SQLite database """
conn = None
try:
# conn = sqlite3.connect(':memory:')
conn = sqlite3.connect('mydatabase.db')
curs = conn.cursor()
curs.execute("CREATE TABLE IF NOT EXISTS todo (item TEXT)")
list_item = input("add the task: ")
curs.execute("INSERT INTO todo(item) VALUES(?)", [list_item])
curs.execute("COMMIT")
except Error as e:
print(e)
finally:
if conn:
conn.close()
def retrieve_rows():
conn = None
try:
conn = sqlite3.connect('mydatabase.db')
# conn = sqlite3.connect(':memory:')
curs = conn.cursor()
curs.execute("SELECT * from todo")
rows = curs.fetchall()
print(rows)
except Error as e:
print(e)
finally:
if conn:
conn.close()
# Main loop
cont = "y"
while cont == "y":
create_connection()
cont = input("\nDo you want to continue[y/n]: ")
if cont == "n":
print("Here is your list:")
retrieve_rows() | 25.954545 | 69 | 0.564799 |
ace54ab69ec3bd70f9a5483f08a1482c3019f040 | 4,356 | py | Python | dictfire/fire/bing.py | HeywoodKing/dictcmd | bce97846b7d377588abee017901e4ccd1c1b8d69 | [
"MIT"
] | 1 | 2020-06-03T06:41:56.000Z | 2020-06-03T06:41:56.000Z | dictfire/fire/bing.py | HeywoodKing/dictcmd | bce97846b7d377588abee017901e4ccd1c1b8d69 | [
"MIT"
] | null | null | null | dictfire/fire/bing.py | HeywoodKing/dictcmd | bce97846b7d377588abee017901e4ccd1c1b8d69 | [
"MIT"
] | 1 | 2020-06-03T06:41:59.000Z | 2020-06-03T06:41:59.000Z | # !/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
dictfire
@File : bing.py
@Time : 2020/3/16 19:33
@Author : hywell
@Email : opencoding@hotmail.com
@ide : PyCharm
@project : dictfire
@description : Chinese/English Translation
@homepage : https://github.com/HeywoodKing/dictfire.git
@license : MIT, see LICENSE for more details.
@copyright : Copyright (c) 2020 hywell. All rights reserved
"""
from __future__ import absolute_import, unicode_literals
import re
# import asyncio
# import aiohttp
from urllib.parse import quote
import requests
from dictfire.setting import *
class Bing:
"""
必应翻译服务
"""
def __init__(self):
self.src = None
self.url = BING_URL
self.header = {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "zh-CN,zh;q=0.9,en-GB;q=0.8,en;q=0.7",
# "Cache-Control": "max-age=0",
# "Connection": "keep-alive",
"Host": "fanyi.youdao.com",
# "Upgrade-Insecure-Requests": "1",
"User-Agent": UA.random,
}
# self.session = aiohttp.ClientSession(headers=self.header)
def _parse(self, content):
"""
解析内容
"""
code = content['errorCode']
try:
src = content['translateResult'][0][0]['src'] # source
if code == 0: # Success
tgt = content['translateResult'][0][0]['tgt'] # result
msg = '获取成功'
elif code == 20:
# print('WORD TO LONG')
tgt = None
msg = 'WORD TO LONG'
elif code == 30:
# print('TRANSLATE ERROR')
tgt = None
msg = 'TRANSLATE ERROR'
elif code == 40:
# print('DON\'T SUPPORT THIS LANGUAGE')
tgt = None
msg = 'DON\'T SUPPORT THIS LANGUAGE'
elif code == 50:
# print('KEY FAILED')
tgt = None
msg = 'KEY FAILED'
elif code == 60:
# print('DON\'T HAVE THIS WORD')
tgt = None
msg = 'DON\'T HAVE THIS WORD'
else:
# print('UNKOWN')
tgt = None
msg = 'UNKOWN'
except Exception as ex:
code = -1
src = self.src
tgt = None
msg = ex
return {
"code": code,
"type": content['type'],
"src": src,
"tgt": tgt,
"msg": msg
}
def _request(self, url=None, text=None):
"""
请求远程api服务
"""
try:
if url is None:
url = self.url
if text is not None:
url = url + quote(text.encode('utf-8'))
# async with self.session.get(self.url) as resp:
# content = await resp.json(encoding='utf8')
resp = requests.get(url)
content = resp.json(encoding='utf8')
code = 0
else:
code = 1
content = 'Usage: dict fire'
except Exception as ex:
code = -1
content = 'ERROR: Network or remote service error! {}'.format(ex)
return {
"code": code,
"content": content
}
def translate(self, text):
"""
根据输入内容翻译并返回翻译结果
:param text:
:return:
"""
try:
self.src = text
resp = self._request(YOUDAO_URL, text)
if resp['code'] == 0:
result = self._parse(resp['content'])
else:
result = {
"code": resp['code'],
"type": None,
"src": text,
"tgt": text,
"msg": resp['content']
}
return result
except Exception as ex:
raise Exception('ERROR: remote service error! {}'.format(ex))
def main():
Bing().translate("hello world")
if __name__ == '__main__':
main()
| 28.470588 | 143 | 0.459596 |
ace54b0937a662233646069765c6007f82c83593 | 494 | py | Python | profiles_api/urls.py | Tejkumar-G/Profile-rest-api | 984e607b20b3eb96e246adc6c83f26bf7f4f3bbf | [
"MIT"
] | null | null | null | profiles_api/urls.py | Tejkumar-G/Profile-rest-api | 984e607b20b3eb96e246adc6c83f26bf7f4f3bbf | [
"MIT"
] | 6 | 2020-06-06T01:51:01.000Z | 2022-02-10T11:41:40.000Z | profiles_api/urls.py | Tejkumar-G/Profile-rest-api | 984e607b20b3eb96e246adc6c83f26bf7f4f3bbf | [
"MIT"
] | null | null | null | from django.urls import path, include
from rest_framework.routers import DefaultRouter
from profiles_api import views
router = DefaultRouter()
router.register('hello-viewset', views.HelloViewSet, base_name='hello-viewset')
router.register('profile', views.UserProfileViewSet)
router.register('feed', views.UserProfileFeedViewSet)
urlpatterns = [
path('hello-view/', views.HelloApiView.as_view()),
path('login/', views.UserLoginApiView.as_view()),
path('', include(router.urls)),
]
| 35.285714 | 79 | 0.769231 |
ace54bf54065084355cc2a4c588deebb7fd71312 | 132,461 | py | Python | tests/examples/minlplib/crudeoil_pooling_ct3.py | ouyang-w-19/decogo | 52546480e49776251d4d27856e18a46f40c824a1 | [
"MIT"
] | 2 | 2021-07-03T13:19:10.000Z | 2022-02-06T10:48:13.000Z | tests/examples/minlplib/crudeoil_pooling_ct3.py | ouyang-w-19/decogo | 52546480e49776251d4d27856e18a46f40c824a1 | [
"MIT"
] | 1 | 2021-07-04T14:52:14.000Z | 2021-07-15T10:17:11.000Z | tests/examples/minlplib/crudeoil_pooling_ct3.py | ouyang-w-19/decogo | 52546480e49776251d4d27856e18a46f40c824a1 | [
"MIT"
] | null | null | null | # MINLP written by GAMS Convert at 04/21/18 13:51:21
#
# Equation counts
# Total E G L N X C B
# 1199 480 371 348 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 730 602 128 0 0 0 0 0
# FX 2 2 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 4845 4376 469 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.b1 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b2 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b3 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b4 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b5 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b6 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b7 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b8 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b9 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b10 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b11 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b12 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b13 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b14 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b15 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b16 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b17 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b18 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b19 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b20 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b21 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b22 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b23 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b24 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b25 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b26 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b27 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b28 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b29 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b30 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b31 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b32 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b33 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b34 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b35 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b36 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b37 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b38 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b39 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b40 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b41 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b42 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b43 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b44 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b45 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b46 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b47 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b48 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b49 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b50 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b51 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b52 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b53 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b54 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b55 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b56 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b57 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b58 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b59 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b60 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b61 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b62 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b63 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b64 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b65 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b66 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b67 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b68 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b69 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b70 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b71 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b72 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b73 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b74 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b75 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b76 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b77 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b78 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b79 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b80 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b81 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b82 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b83 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b84 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b85 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b86 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b87 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b88 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b89 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b90 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b91 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b92 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b93 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b94 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b95 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b96 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b97 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b98 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b99 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b100 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b101 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b102 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b103 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b104 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b105 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b106 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b107 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b108 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b109 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b110 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b111 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b112 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b113 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b114 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b115 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b116 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b117 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b118 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b119 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b120 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b121 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b122 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b123 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b124 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b125 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b126 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b127 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b128 = Var(within=Binary,bounds=(0,1),initialize=0)
m.x129 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x130 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x131 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x132 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x133 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x134 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x135 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x136 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x137 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x138 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x139 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x140 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x141 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x142 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x143 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x144 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x145 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x146 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x147 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x148 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x149 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x150 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x151 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x152 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x153 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x154 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x155 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x156 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x157 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x158 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x159 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x160 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x161 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x162 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x163 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x164 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x165 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x166 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x167 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x168 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x169 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x170 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x171 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x172 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x173 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x174 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x175 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x176 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x177 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x178 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x179 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x180 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x181 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x182 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x183 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x184 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x185 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x186 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x187 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x188 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x189 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x190 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x191 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x192 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x193 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x194 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x195 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x196 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x197 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x198 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x199 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x200 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x201 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x202 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x203 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x204 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x205 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x206 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x207 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x208 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x209 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x210 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x211 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x212 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x213 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x214 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x215 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x216 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x217 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x218 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x219 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x220 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x221 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x222 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x223 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x224 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x225 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x226 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x227 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x228 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x229 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x230 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x231 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x232 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x233 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x234 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x235 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x236 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x237 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x238 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x239 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x240 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x241 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x242 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x243 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x244 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x245 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x246 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x247 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x248 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x249 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x250 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x251 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x252 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x253 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x254 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x255 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x256 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x257 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x258 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x259 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x260 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x261 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x262 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x263 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x264 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x265 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x266 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x267 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x268 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x269 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x270 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x271 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x272 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x273 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x274 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x275 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x276 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x277 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x278 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x279 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x280 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x281 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x282 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x283 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x284 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x285 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x286 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x287 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x288 = Var(within=Reals,bounds=(0,700),initialize=0)
m.x289 = Var(within=Reals,bounds=(0,700),initialize=0)
m.x290 = Var(within=Reals,bounds=(0,700),initialize=0)
m.x291 = Var(within=Reals,bounds=(0,700),initialize=0)
m.x292 = Var(within=Reals,bounds=(0,700),initialize=0)
m.x293 = Var(within=Reals,bounds=(0,700),initialize=0)
m.x294 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x295 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x296 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x297 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x298 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x299 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x300 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x301 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x302 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x303 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x304 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x305 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x306 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x307 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x308 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x309 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x310 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x311 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x312 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x313 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x314 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x315 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x316 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x317 = Var(within=Reals,bounds=(0,200),initialize=0)
m.x318 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x319 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x320 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x321 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x322 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x323 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x324 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x325 = Var(within=Reals,bounds=(0,500),initialize=0)
m.x326 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x327 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x328 = Var(within=Reals,bounds=(0,300),initialize=0)
m.x329 = Var(within=Reals,bounds=(0,300),initialize=0)
m.x330 = Var(within=Reals,bounds=(0,300),initialize=0)
m.x331 = Var(within=Reals,bounds=(0,300),initialize=0)
m.x332 = Var(within=Reals,bounds=(0,300),initialize=0)
m.x333 = Var(within=Reals,bounds=(0,300),initialize=0)
# Continuous variables x334..x600: the original flat file declares each one
# on its own line with identical arguments (nonnegative, unbounded above,
# initialized at 0). Declare them in a single loop instead.
# `setattr(m, name, component)` is exactly what the attribute assignment
# `m.xN = component` does on a Pyomo model, so the resulting components
# (names, bounds, domains, initial values) are identical to the original.
for _i in range(334, 601):
    setattr(m, 'x%d' % _i, Var(within=Reals, bounds=(0, None), initialize=0))
# Continuous variables x601..x666: all declared with identical arguments in
# the original (bounded to the unit interval [0,1], initialized at 0), so
# collapse the 66 one-per-line declarations into a loop. setattr on the
# model is equivalent to the original `m.xN = Var(...)` assignments, so the
# generated components are unchanged.
# NOTE(review): [0,1] bounds suggest relaxed binary/indicator variables —
# TODO confirm against the model generator.
for _i in range(601, 667):
    setattr(m, 'x%d' % _i, Var(within=Reals, bounds=(0, 1), initialize=0))
# --- Continuous variables x667..x729 (auto-generated, mixed bounds) ---
# x667 is fixed to 0 via bounds (0,0); x674 is fixed to 12 via bounds (12,12).
m.x667 = Var(within=Reals,bounds=(0,0),initialize=0)
m.x668 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x669 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x670 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x671 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x672 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x673 = Var(within=Reals,bounds=(0,None),initialize=0)
# x674 fixed at 12; x675..x681 each bounded by 12 and appear as the common
# multipliers in the bilinear objective below — presumably period
# lengths/durations summing to a 12-unit horizon; TODO confirm.
m.x674 = Var(within=Reals,bounds=(12,12),initialize=12)
m.x675 = Var(within=Reals,bounds=(0,12),initialize=0)
m.x676 = Var(within=Reals,bounds=(0,12),initialize=0)
m.x677 = Var(within=Reals,bounds=(0,12),initialize=0)
m.x678 = Var(within=Reals,bounds=(0,12),initialize=0)
m.x679 = Var(within=Reals,bounds=(0,12),initialize=0)
m.x680 = Var(within=Reals,bounds=(0,12),initialize=0)
m.x681 = Var(within=Reals,bounds=(0,12),initialize=0)
m.x682 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x683 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x684 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x685 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x686 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x687 = Var(within=Reals,bounds=(0,None),initialize=0)
# x688..x729: seven groups of (one [lo,lo+500]-bounded variable followed by
# six (0,1000)-bounded ones); these are the second factors in the bilinear
# objective terms below. Lower bounds vary per group (100/150/250).
m.x688 = Var(within=Reals,bounds=(100,600),initialize=100)
m.x689 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x690 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x691 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x692 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x693 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x694 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x695 = Var(within=Reals,bounds=(100,600),initialize=100)
m.x696 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x697 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x698 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x699 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x700 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x701 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x702 = Var(within=Reals,bounds=(100,600),initialize=100)
m.x703 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x704 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x705 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x706 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x707 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x708 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x709 = Var(within=Reals,bounds=(150,650),initialize=150)
m.x710 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x711 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x712 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x713 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x714 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x715 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x716 = Var(within=Reals,bounds=(250,750),initialize=250)
m.x717 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x718 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x719 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x720 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x721 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x722 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x723 = Var(within=Reals,bounds=(150,650),initialize=150)
m.x724 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x725 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x726 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x727 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x728 = Var(within=Reals,bounds=(0,1000),initialize=0)
m.x729 = Var(within=Reals,bounds=(0,1000),initialize=0)
# Minimization objective (nonconvex, bilinear): sum of products
# x{675..681} * x{688..729} with weights 4 (first three variable groups) and
# 8 (last three groups), plus linear cost terms 50000*(x141 + x142) — both
# declared before this chunk — and 5000/10000 * x682..x687, minus a constant
# 100000.
# NOTE(review): x141/x142 are not visible in this chunk; their role is
# assumed from the coefficients only.
m.obj = Objective(expr=4*m.x675*m.x688 + 4*m.x676*m.x689 + 4*m.x677*m.x690 + 4*m.x678*m.x691 + 4*m.x679*m.x692 + 4*
m.x680*m.x693 + 4*m.x681*m.x694 + 4*m.x675*m.x695 + 4*m.x676*m.x696 + 4*m.x677*m.x697 + 4*m.x678*
m.x698 + 4*m.x679*m.x699 + 4*m.x680*m.x700 + 4*m.x681*m.x701 + 4*m.x675*m.x702 + 4*m.x676*m.x703
+ 4*m.x677*m.x704 + 4*m.x678*m.x705 + 4*m.x679*m.x706 + 4*m.x680*m.x707 + 4*m.x681*m.x708 + 8*
m.x675*m.x709 + 8*m.x676*m.x710 + 8*m.x677*m.x711 + 8*m.x678*m.x712 + 8*m.x679*m.x713 + 8*m.x680*
m.x714 + 8*m.x681*m.x715 + 8*m.x675*m.x716 + 8*m.x676*m.x717 + 8*m.x677*m.x718 + 8*m.x678*m.x719
+ 8*m.x679*m.x720 + 8*m.x680*m.x721 + 8*m.x681*m.x722 + 8*m.x675*m.x723 + 8*m.x676*m.x724 + 8*
m.x677*m.x725 + 8*m.x678*m.x726 + 8*m.x679*m.x727 + 8*m.x680*m.x728 + 8*m.x681*m.x729
+ 50000*m.x141 + 50000*m.x142 + 5000*m.x682 + 5000*m.x683 + 5000*m.x684 + 10000*m.x685
+ 10000*m.x686 + 10000*m.x687 - 100000, sense=minimize)
# --- Equality constraints c2..c174 (auto-generated) ---
# NOTE(review): each group fixes an initial value (== 500 / 200 / 0), then
# chains 7 equalities of the form  -x_prev + x_next (+/- coupling vars) == 0.
# This is a flow/inventory-balance recursion over 7 steps per node, with the
# x335+ variables acting as transfers between node chains — inferred from
# the index pattern only; confirm against the model generator.
# Chain starting at x143 (initial value 500).
m.c2 = Constraint(expr= m.x143 == 500)
m.c3 = Constraint(expr= - m.x143 + m.x144 + m.x335 == 0)
m.c4 = Constraint(expr= - m.x144 + m.x145 + m.x336 == 0)
m.c5 = Constraint(expr= - m.x145 + m.x146 + m.x337 == 0)
m.c6 = Constraint(expr= - m.x146 + m.x147 + m.x338 == 0)
m.c7 = Constraint(expr= - m.x147 + m.x148 + m.x339 == 0)
m.c8 = Constraint(expr= - m.x148 + m.x149 + m.x340 == 0)
m.c9 = Constraint(expr= - m.x149 + m.x150 + m.x341 == 0)
# Chain starting at x151 (initial value 0); receives x335.. from the
# previous chain and emits x342.., x349..
m.c10 = Constraint(expr= m.x151 == 0)
m.c11 = Constraint(expr= - m.x151 + m.x152 - m.x335 + m.x342 + m.x349 == 0)
m.c12 = Constraint(expr= - m.x152 + m.x153 - m.x336 + m.x343 + m.x350 == 0)
m.c13 = Constraint(expr= - m.x153 + m.x154 - m.x337 + m.x344 + m.x351 == 0)
m.c14 = Constraint(expr= - m.x154 + m.x155 - m.x338 + m.x345 + m.x352 == 0)
m.c15 = Constraint(expr= - m.x155 + m.x156 - m.x339 + m.x346 + m.x353 == 0)
m.c16 = Constraint(expr= - m.x156 + m.x157 - m.x340 + m.x347 + m.x354 == 0)
m.c17 = Constraint(expr= - m.x157 + m.x158 - m.x341 + m.x348 + m.x355 == 0)
# Chain starting at x159 (initial value 0).
m.c18 = Constraint(expr= m.x159 == 0)
m.c19 = Constraint(expr= - m.x159 + m.x160 - m.x342 + m.x356 == 0)
m.c20 = Constraint(expr= - m.x160 + m.x161 - m.x343 + m.x357 == 0)
m.c21 = Constraint(expr= - m.x161 + m.x162 - m.x344 + m.x358 == 0)
m.c22 = Constraint(expr= - m.x162 + m.x163 - m.x345 + m.x359 == 0)
m.c23 = Constraint(expr= - m.x163 + m.x164 - m.x346 + m.x360 == 0)
m.c24 = Constraint(expr= - m.x164 + m.x165 - m.x347 + m.x361 == 0)
m.c25 = Constraint(expr= - m.x165 + m.x166 - m.x348 + m.x362 == 0)
# Chain starting at x167 (initial value 0).
m.c26 = Constraint(expr= m.x167 == 0)
m.c27 = Constraint(expr= - m.x167 + m.x168 - m.x349 + m.x363 + m.x370 == 0)
m.c28 = Constraint(expr= - m.x168 + m.x169 - m.x350 + m.x364 + m.x371 == 0)
m.c29 = Constraint(expr= - m.x169 + m.x170 - m.x351 + m.x365 + m.x372 == 0)
m.c30 = Constraint(expr= - m.x170 + m.x171 - m.x352 + m.x366 + m.x373 == 0)
m.c31 = Constraint(expr= - m.x171 + m.x172 - m.x353 + m.x367 + m.x374 == 0)
m.c32 = Constraint(expr= - m.x172 + m.x173 - m.x354 + m.x368 + m.x375 == 0)
m.c33 = Constraint(expr= - m.x173 + m.x174 - m.x355 + m.x369 + m.x376 == 0)
# Chain starting at x175 (initial value 500).
m.c34 = Constraint(expr= m.x175 == 500)
m.c35 = Constraint(expr= - m.x175 + m.x176 + m.x377 == 0)
m.c36 = Constraint(expr= - m.x176 + m.x177 + m.x378 == 0)
m.c37 = Constraint(expr= - m.x177 + m.x178 + m.x379 == 0)
m.c38 = Constraint(expr= - m.x178 + m.x179 + m.x380 == 0)
m.c39 = Constraint(expr= - m.x179 + m.x180 + m.x381 == 0)
m.c40 = Constraint(expr= - m.x180 + m.x181 + m.x382 == 0)
m.c41 = Constraint(expr= - m.x181 + m.x182 + m.x383 == 0)
# Chain starting at x183 (initial value 0).
m.c42 = Constraint(expr= m.x183 == 0)
m.c43 = Constraint(expr= - m.x183 + m.x184 - m.x377 + m.x384 + m.x391 == 0)
m.c44 = Constraint(expr= - m.x184 + m.x185 - m.x378 + m.x385 + m.x392 == 0)
m.c45 = Constraint(expr= - m.x185 + m.x186 - m.x379 + m.x386 + m.x393 == 0)
m.c46 = Constraint(expr= - m.x186 + m.x187 - m.x380 + m.x387 + m.x394 == 0)
m.c47 = Constraint(expr= - m.x187 + m.x188 - m.x381 + m.x388 + m.x395 == 0)
m.c48 = Constraint(expr= - m.x188 + m.x189 - m.x382 + m.x389 + m.x396 == 0)
m.c49 = Constraint(expr= - m.x189 + m.x190 - m.x383 + m.x390 + m.x397 == 0)
# Chain starting at x191 (initial value 0).
m.c50 = Constraint(expr= m.x191 == 0)
m.c51 = Constraint(expr= - m.x191 + m.x192 - m.x384 + m.x398 + m.x405 == 0)
m.c52 = Constraint(expr= - m.x192 + m.x193 - m.x385 + m.x399 + m.x406 == 0)
m.c53 = Constraint(expr= - m.x193 + m.x194 - m.x386 + m.x400 + m.x407 == 0)
m.c54 = Constraint(expr= - m.x194 + m.x195 - m.x387 + m.x401 + m.x408 == 0)
m.c55 = Constraint(expr= - m.x195 + m.x196 - m.x388 + m.x402 + m.x409 == 0)
m.c56 = Constraint(expr= - m.x196 + m.x197 - m.x389 + m.x403 + m.x410 == 0)
m.c57 = Constraint(expr= - m.x197 + m.x198 - m.x390 + m.x404 + m.x411 == 0)
# Chain starting at x199 (initial value 0).
m.c58 = Constraint(expr= m.x199 == 0)
m.c59 = Constraint(expr= - m.x199 + m.x200 - m.x391 + m.x412 == 0)
m.c60 = Constraint(expr= - m.x200 + m.x201 - m.x392 + m.x413 == 0)
m.c61 = Constraint(expr= - m.x201 + m.x202 - m.x393 + m.x414 == 0)
m.c62 = Constraint(expr= - m.x202 + m.x203 - m.x394 + m.x415 == 0)
m.c63 = Constraint(expr= - m.x203 + m.x204 - m.x395 + m.x416 == 0)
m.c64 = Constraint(expr= - m.x204 + m.x205 - m.x396 + m.x417 == 0)
m.c65 = Constraint(expr= - m.x205 + m.x206 - m.x397 + m.x418 == 0)
# Chain starting at x207 (initial value 500).
m.c66 = Constraint(expr= m.x207 == 500)
m.c67 = Constraint(expr= - m.x207 + m.x208 + m.x419 == 0)
m.c68 = Constraint(expr= - m.x208 + m.x209 + m.x420 == 0)
m.c69 = Constraint(expr= - m.x209 + m.x210 + m.x421 == 0)
m.c70 = Constraint(expr= - m.x210 + m.x211 + m.x422 == 0)
m.c71 = Constraint(expr= - m.x211 + m.x212 + m.x423 == 0)
m.c72 = Constraint(expr= - m.x212 + m.x213 + m.x424 == 0)
m.c73 = Constraint(expr= - m.x213 + m.x214 + m.x425 == 0)
# Chain starting at x215 (initial value 0); three outgoing transfer groups.
m.c74 = Constraint(expr= m.x215 == 0)
m.c75 = Constraint(expr= - m.x215 + m.x216 - m.x419 + m.x426 + m.x433 + m.x440 == 0)
m.c76 = Constraint(expr= - m.x216 + m.x217 - m.x420 + m.x427 + m.x434 + m.x441 == 0)
m.c77 = Constraint(expr= - m.x217 + m.x218 - m.x421 + m.x428 + m.x435 + m.x442 == 0)
m.c78 = Constraint(expr= - m.x218 + m.x219 - m.x422 + m.x429 + m.x436 + m.x443 == 0)
m.c79 = Constraint(expr= - m.x219 + m.x220 - m.x423 + m.x430 + m.x437 + m.x444 == 0)
m.c80 = Constraint(expr= - m.x220 + m.x221 - m.x424 + m.x431 + m.x438 + m.x445 == 0)
m.c81 = Constraint(expr= - m.x221 + m.x222 - m.x425 + m.x432 + m.x439 + m.x446 == 0)
# Chain starting at x223 (initial value 0).
m.c82 = Constraint(expr= m.x223 == 0)
m.c83 = Constraint(expr= - m.x223 + m.x224 - m.x426 + m.x447 == 0)
m.c84 = Constraint(expr= - m.x224 + m.x225 - m.x427 + m.x448 == 0)
m.c85 = Constraint(expr= - m.x225 + m.x226 - m.x428 + m.x449 == 0)
m.c86 = Constraint(expr= - m.x226 + m.x227 - m.x429 + m.x450 == 0)
m.c87 = Constraint(expr= - m.x227 + m.x228 - m.x430 + m.x451 == 0)
m.c88 = Constraint(expr= - m.x228 + m.x229 - m.x431 + m.x452 == 0)
m.c89 = Constraint(expr= - m.x229 + m.x230 - m.x432 + m.x453 == 0)
# Chain starting at x231 (initial value 0).
m.c90 = Constraint(expr= m.x231 == 0)
m.c91 = Constraint(expr= - m.x231 + m.x232 - m.x433 + m.x454 + m.x461 == 0)
m.c92 = Constraint(expr= - m.x232 + m.x233 - m.x434 + m.x455 + m.x462 == 0)
m.c93 = Constraint(expr= - m.x233 + m.x234 - m.x435 + m.x456 + m.x463 == 0)
m.c94 = Constraint(expr= - m.x234 + m.x235 - m.x436 + m.x457 + m.x464 == 0)
m.c95 = Constraint(expr= - m.x235 + m.x236 - m.x437 + m.x458 + m.x465 == 0)
m.c96 = Constraint(expr= - m.x236 + m.x237 - m.x438 + m.x459 + m.x466 == 0)
m.c97 = Constraint(expr= - m.x237 + m.x238 - m.x439 + m.x460 + m.x467 == 0)
# Chain starting at x239 (initial value 0).
m.c98 = Constraint(expr= m.x239 == 0)
m.c99 = Constraint(expr= - m.x239 + m.x240 - m.x440 + m.x468 == 0)
m.c100 = Constraint(expr= - m.x240 + m.x241 - m.x441 + m.x469 == 0)
m.c101 = Constraint(expr= - m.x241 + m.x242 - m.x442 + m.x470 == 0)
m.c102 = Constraint(expr= - m.x242 + m.x243 - m.x443 + m.x471 == 0)
m.c103 = Constraint(expr= - m.x243 + m.x244 - m.x444 + m.x472 == 0)
m.c104 = Constraint(expr= - m.x244 + m.x245 - m.x445 + m.x473 == 0)
m.c105 = Constraint(expr= - m.x245 + m.x246 - m.x446 + m.x474 == 0)
# Chain starting at x247 (initial value 200).
m.c106 = Constraint(expr= m.x247 == 200)
m.c107 = Constraint(expr= - m.x247 + m.x248 + m.x475 + m.x482 == 0)
m.c108 = Constraint(expr= - m.x248 + m.x249 + m.x476 + m.x483 == 0)
m.c109 = Constraint(expr= - m.x249 + m.x250 + m.x477 + m.x484 == 0)
m.c110 = Constraint(expr= - m.x250 + m.x251 + m.x478 + m.x485 == 0)
m.c111 = Constraint(expr= - m.x251 + m.x252 + m.x479 + m.x486 == 0)
m.c112 = Constraint(expr= - m.x252 + m.x253 + m.x480 + m.x487 == 0)
m.c113 = Constraint(expr= - m.x253 + m.x254 + m.x481 + m.x488 == 0)
# Chain starting at x255 (initial value 0).
m.c114 = Constraint(expr= m.x255 == 0)
m.c115 = Constraint(expr= - m.x255 + m.x256 - m.x475 + m.x489 == 0)
m.c116 = Constraint(expr= - m.x256 + m.x257 - m.x476 + m.x490 == 0)
m.c117 = Constraint(expr= - m.x257 + m.x258 - m.x477 + m.x491 == 0)
m.c118 = Constraint(expr= - m.x258 + m.x259 - m.x478 + m.x492 == 0)
m.c119 = Constraint(expr= - m.x259 + m.x260 - m.x479 + m.x493 == 0)
m.c120 = Constraint(expr= - m.x260 + m.x261 - m.x480 + m.x494 == 0)
m.c121 = Constraint(expr= - m.x261 + m.x262 - m.x481 + m.x495 == 0)
# Chain starting at x263 (initial value 0).
m.c122 = Constraint(expr= m.x263 == 0)
m.c123 = Constraint(expr= - m.x263 + m.x264 - m.x482 + m.x496 + m.x503 == 0)
m.c124 = Constraint(expr= - m.x264 + m.x265 - m.x483 + m.x497 + m.x504 == 0)
m.c125 = Constraint(expr= - m.x265 + m.x266 - m.x484 + m.x498 + m.x505 == 0)
m.c126 = Constraint(expr= - m.x266 + m.x267 - m.x485 + m.x499 + m.x506 == 0)
m.c127 = Constraint(expr= - m.x267 + m.x268 - m.x486 + m.x500 + m.x507 == 0)
m.c128 = Constraint(expr= - m.x268 + m.x269 - m.x487 + m.x501 + m.x508 == 0)
m.c129 = Constraint(expr= - m.x269 + m.x270 - m.x488 + m.x502 + m.x509 == 0)
# Chain starting at x271 (initial value 200); three outgoing transfer groups.
m.c130 = Constraint(expr= m.x271 == 200)
m.c131 = Constraint(expr= - m.x271 + m.x272 + m.x510 + m.x517 + m.x524 == 0)
m.c132 = Constraint(expr= - m.x272 + m.x273 + m.x511 + m.x518 + m.x525 == 0)
m.c133 = Constraint(expr= - m.x273 + m.x274 + m.x512 + m.x519 + m.x526 == 0)
m.c134 = Constraint(expr= - m.x274 + m.x275 + m.x513 + m.x520 + m.x527 == 0)
m.c135 = Constraint(expr= - m.x275 + m.x276 + m.x514 + m.x521 + m.x528 == 0)
m.c136 = Constraint(expr= - m.x276 + m.x277 + m.x515 + m.x522 + m.x529 == 0)
m.c137 = Constraint(expr= - m.x277 + m.x278 + m.x516 + m.x523 + m.x530 == 0)
# Chain starting at x279 (initial value 0).
m.c138 = Constraint(expr= m.x279 == 0)
m.c139 = Constraint(expr= - m.x279 + m.x280 - m.x510 + m.x531 == 0)
m.c140 = Constraint(expr= - m.x280 + m.x281 - m.x511 + m.x532 == 0)
m.c141 = Constraint(expr= - m.x281 + m.x282 - m.x512 + m.x533 == 0)
m.c142 = Constraint(expr= - m.x282 + m.x283 - m.x513 + m.x534 == 0)
m.c143 = Constraint(expr= - m.x283 + m.x284 - m.x514 + m.x535 == 0)
m.c144 = Constraint(expr= - m.x284 + m.x285 - m.x515 + m.x536 == 0)
m.c145 = Constraint(expr= - m.x285 + m.x286 - m.x516 + m.x537 == 0)
# Chain starting at x287 (initial value 500).
m.c146 = Constraint(expr= m.x287 == 500)
m.c147 = Constraint(expr= - m.x287 + m.x288 - m.x517 + m.x538 + m.x545 == 0)
m.c148 = Constraint(expr= - m.x288 + m.x289 - m.x518 + m.x539 + m.x546 == 0)
m.c149 = Constraint(expr= - m.x289 + m.x290 - m.x519 + m.x540 + m.x547 == 0)
m.c150 = Constraint(expr= - m.x290 + m.x291 - m.x520 + m.x541 + m.x548 == 0)
m.c151 = Constraint(expr= - m.x291 + m.x292 - m.x521 + m.x542 + m.x549 == 0)
m.c152 = Constraint(expr= - m.x292 + m.x293 - m.x522 + m.x543 + m.x550 == 0)
m.c153 = Constraint(expr= - m.x293 + m.x294 - m.x523 + m.x544 + m.x551 == 0)
# Chain starting at x295 (initial value 0).
m.c154 = Constraint(expr= m.x295 == 0)
m.c155 = Constraint(expr= - m.x295 + m.x296 - m.x524 + m.x552 == 0)
m.c156 = Constraint(expr= - m.x296 + m.x297 - m.x525 + m.x553 == 0)
m.c157 = Constraint(expr= - m.x297 + m.x298 - m.x526 + m.x554 == 0)
m.c158 = Constraint(expr= - m.x298 + m.x299 - m.x527 + m.x555 == 0)
m.c159 = Constraint(expr= - m.x299 + m.x300 - m.x528 + m.x556 == 0)
m.c160 = Constraint(expr= - m.x300 + m.x301 - m.x529 + m.x557 == 0)
m.c161 = Constraint(expr= - m.x301 + m.x302 - m.x530 + m.x558 == 0)
# Chain starting at x303 (initial value 200).
m.c162 = Constraint(expr= m.x303 == 200)
m.c163 = Constraint(expr= - m.x303 + m.x304 + m.x559 + m.x566 == 0)
m.c164 = Constraint(expr= - m.x304 + m.x305 + m.x560 + m.x567 == 0)
m.c165 = Constraint(expr= - m.x305 + m.x306 + m.x561 + m.x568 == 0)
m.c166 = Constraint(expr= - m.x306 + m.x307 + m.x562 + m.x569 == 0)
m.c167 = Constraint(expr= - m.x307 + m.x308 + m.x563 + m.x570 == 0)
m.c168 = Constraint(expr= - m.x308 + m.x309 + m.x564 + m.x571 == 0)
m.c169 = Constraint(expr= - m.x309 + m.x310 + m.x565 + m.x572 == 0)
# Chain starting at x311 (initial value 0); group continues past this chunk.
m.c170 = Constraint(expr= m.x311 == 0)
m.c171 = Constraint(expr= - m.x311 + m.x312 - m.x559 + m.x573 + m.x580 == 0)
m.c172 = Constraint(expr= - m.x312 + m.x313 - m.x560 + m.x574 + m.x581 == 0)
m.c173 = Constraint(expr= - m.x313 + m.x314 - m.x561 + m.x575 + m.x582 == 0)
m.c174 = Constraint(expr= - m.x314 + m.x315 - m.x562 + m.x576 + m.x583 == 0)
m.c175 = Constraint(expr= - m.x315 + m.x316 - m.x563 + m.x577 + m.x584 == 0)
m.c176 = Constraint(expr= - m.x316 + m.x317 - m.x564 + m.x578 + m.x585 == 0)
m.c177 = Constraint(expr= - m.x317 + m.x318 - m.x565 + m.x579 + m.x586 == 0)
m.c178 = Constraint(expr= m.x319 == 300)
m.c179 = Constraint(expr= - m.x319 + m.x320 - m.x566 + m.x587 == 0)
m.c180 = Constraint(expr= - m.x320 + m.x321 - m.x567 + m.x588 == 0)
m.c181 = Constraint(expr= - m.x321 + m.x322 - m.x568 + m.x589 == 0)
m.c182 = Constraint(expr= - m.x322 + m.x323 - m.x569 + m.x590 == 0)
m.c183 = Constraint(expr= - m.x323 + m.x324 - m.x570 + m.x591 == 0)
m.c184 = Constraint(expr= - m.x324 + m.x325 - m.x571 + m.x592 == 0)
m.c185 = Constraint(expr= - m.x325 + m.x326 - m.x572 + m.x593 == 0)
m.c186 = Constraint(expr= m.x327 == 300)
m.c187 = Constraint(expr= - m.x327 + m.x328 + m.x594 == 0)
m.c188 = Constraint(expr= - m.x328 + m.x329 + m.x595 == 0)
m.c189 = Constraint(expr= - m.x329 + m.x330 + m.x596 == 0)
m.c190 = Constraint(expr= - m.x330 + m.x331 + m.x597 == 0)
m.c191 = Constraint(expr= - m.x331 + m.x332 + m.x598 == 0)
m.c192 = Constraint(expr= - m.x332 + m.x333 + m.x599 == 0)
m.c193 = Constraint(expr= - m.x333 + m.x334 + m.x600 == 0)
# --- c194-c235: upper-bound constraints — each sum of parallel-period
# variables must stay at or below 1000 (presumably a shared capacity per
# period; verify against the originating model).
m.c194 = Constraint(expr= m.x152 + m.x248 <= 1000)
m.c195 = Constraint(expr= m.x153 + m.x249 <= 1000)
m.c196 = Constraint(expr= m.x154 + m.x250 <= 1000)
m.c197 = Constraint(expr= m.x155 + m.x251 <= 1000)
m.c198 = Constraint(expr= m.x156 + m.x252 <= 1000)
m.c199 = Constraint(expr= m.x157 + m.x253 <= 1000)
m.c200 = Constraint(expr= m.x158 + m.x254 <= 1000)
m.c201 = Constraint(expr= m.x216 + m.x272 <= 1000)
m.c202 = Constraint(expr= m.x217 + m.x273 <= 1000)
m.c203 = Constraint(expr= m.x218 + m.x274 <= 1000)
m.c204 = Constraint(expr= m.x219 + m.x275 <= 1000)
m.c205 = Constraint(expr= m.x220 + m.x276 <= 1000)
m.c206 = Constraint(expr= m.x221 + m.x277 <= 1000)
m.c207 = Constraint(expr= m.x222 + m.x278 <= 1000)
m.c208 = Constraint(expr= m.x184 + m.x304 <= 1000)
m.c209 = Constraint(expr= m.x185 + m.x305 <= 1000)
m.c210 = Constraint(expr= m.x186 + m.x306 <= 1000)
m.c211 = Constraint(expr= m.x187 + m.x307 <= 1000)
m.c212 = Constraint(expr= m.x188 + m.x308 <= 1000)
m.c213 = Constraint(expr= m.x189 + m.x309 <= 1000)
m.c214 = Constraint(expr= m.x190 + m.x310 <= 1000)
m.c215 = Constraint(expr= m.x160 + m.x224 + m.x256 + m.x280 + m.x328 <= 1000)
m.c216 = Constraint(expr= m.x161 + m.x225 + m.x257 + m.x281 + m.x329 <= 1000)
m.c217 = Constraint(expr= m.x162 + m.x226 + m.x258 + m.x282 + m.x330 <= 1000)
m.c218 = Constraint(expr= m.x163 + m.x227 + m.x259 + m.x283 + m.x331 <= 1000)
m.c219 = Constraint(expr= m.x164 + m.x228 + m.x260 + m.x284 + m.x332 <= 1000)
m.c220 = Constraint(expr= m.x165 + m.x229 + m.x261 + m.x285 + m.x333 <= 1000)
m.c221 = Constraint(expr= m.x166 + m.x230 + m.x262 + m.x286 + m.x334 <= 1000)
m.c222 = Constraint(expr= m.x168 + m.x192 + m.x232 + m.x264 + m.x288 + m.x312 <= 1000)
m.c223 = Constraint(expr= m.x169 + m.x193 + m.x233 + m.x265 + m.x289 + m.x313 <= 1000)
m.c224 = Constraint(expr= m.x170 + m.x194 + m.x234 + m.x266 + m.x290 + m.x314 <= 1000)
m.c225 = Constraint(expr= m.x171 + m.x195 + m.x235 + m.x267 + m.x291 + m.x315 <= 1000)
m.c226 = Constraint(expr= m.x172 + m.x196 + m.x236 + m.x268 + m.x292 + m.x316 <= 1000)
m.c227 = Constraint(expr= m.x173 + m.x197 + m.x237 + m.x269 + m.x293 + m.x317 <= 1000)
m.c228 = Constraint(expr= m.x174 + m.x198 + m.x238 + m.x270 + m.x294 + m.x318 <= 1000)
m.c229 = Constraint(expr= m.x200 + m.x240 + m.x296 + m.x320 <= 1000)
m.c230 = Constraint(expr= m.x201 + m.x241 + m.x297 + m.x321 <= 1000)
m.c231 = Constraint(expr= m.x202 + m.x242 + m.x298 + m.x322 <= 1000)
m.c232 = Constraint(expr= m.x203 + m.x243 + m.x299 + m.x323 <= 1000)
m.c233 = Constraint(expr= m.x204 + m.x244 + m.x300 + m.x324 <= 1000)
m.c234 = Constraint(expr= m.x205 + m.x245 + m.x301 + m.x325 <= 1000)
m.c235 = Constraint(expr= m.x206 + m.x246 + m.x302 + m.x326 <= 1000)
# --- c236-c277: matching lower-bound constraints — the same sums as
# c194-c235 must be >= 0 (generated as explicit two-sided bounds by the
# model translator; kept as emitted).
m.c236 = Constraint(expr= m.x152 + m.x248 >= 0)
m.c237 = Constraint(expr= m.x153 + m.x249 >= 0)
m.c238 = Constraint(expr= m.x154 + m.x250 >= 0)
m.c239 = Constraint(expr= m.x155 + m.x251 >= 0)
m.c240 = Constraint(expr= m.x156 + m.x252 >= 0)
m.c241 = Constraint(expr= m.x157 + m.x253 >= 0)
m.c242 = Constraint(expr= m.x158 + m.x254 >= 0)
m.c243 = Constraint(expr= m.x216 + m.x272 >= 0)
m.c244 = Constraint(expr= m.x217 + m.x273 >= 0)
m.c245 = Constraint(expr= m.x218 + m.x274 >= 0)
m.c246 = Constraint(expr= m.x219 + m.x275 >= 0)
m.c247 = Constraint(expr= m.x220 + m.x276 >= 0)
m.c248 = Constraint(expr= m.x221 + m.x277 >= 0)
m.c249 = Constraint(expr= m.x222 + m.x278 >= 0)
m.c250 = Constraint(expr= m.x184 + m.x304 >= 0)
m.c251 = Constraint(expr= m.x185 + m.x305 >= 0)
m.c252 = Constraint(expr= m.x186 + m.x306 >= 0)
m.c253 = Constraint(expr= m.x187 + m.x307 >= 0)
m.c254 = Constraint(expr= m.x188 + m.x308 >= 0)
m.c255 = Constraint(expr= m.x189 + m.x309 >= 0)
m.c256 = Constraint(expr= m.x190 + m.x310 >= 0)
m.c257 = Constraint(expr= m.x160 + m.x224 + m.x256 + m.x280 + m.x328 >= 0)
m.c258 = Constraint(expr= m.x161 + m.x225 + m.x257 + m.x281 + m.x329 >= 0)
m.c259 = Constraint(expr= m.x162 + m.x226 + m.x258 + m.x282 + m.x330 >= 0)
m.c260 = Constraint(expr= m.x163 + m.x227 + m.x259 + m.x283 + m.x331 >= 0)
m.c261 = Constraint(expr= m.x164 + m.x228 + m.x260 + m.x284 + m.x332 >= 0)
m.c262 = Constraint(expr= m.x165 + m.x229 + m.x261 + m.x285 + m.x333 >= 0)
m.c263 = Constraint(expr= m.x166 + m.x230 + m.x262 + m.x286 + m.x334 >= 0)
m.c264 = Constraint(expr= m.x168 + m.x192 + m.x232 + m.x264 + m.x288 + m.x312 >= 0)
m.c265 = Constraint(expr= m.x169 + m.x193 + m.x233 + m.x265 + m.x289 + m.x313 >= 0)
m.c266 = Constraint(expr= m.x170 + m.x194 + m.x234 + m.x266 + m.x290 + m.x314 >= 0)
m.c267 = Constraint(expr= m.x171 + m.x195 + m.x235 + m.x267 + m.x291 + m.x315 >= 0)
m.c268 = Constraint(expr= m.x172 + m.x196 + m.x236 + m.x268 + m.x292 + m.x316 >= 0)
m.c269 = Constraint(expr= m.x173 + m.x197 + m.x237 + m.x269 + m.x293 + m.x317 >= 0)
m.c270 = Constraint(expr= m.x174 + m.x198 + m.x238 + m.x270 + m.x294 + m.x318 >= 0)
m.c271 = Constraint(expr= m.x200 + m.x240 + m.x296 + m.x320 >= 0)
m.c272 = Constraint(expr= m.x201 + m.x241 + m.x297 + m.x321 >= 0)
m.c273 = Constraint(expr= m.x202 + m.x242 + m.x298 + m.x322 >= 0)
m.c274 = Constraint(expr= m.x203 + m.x243 + m.x299 + m.x323 >= 0)
m.c275 = Constraint(expr= m.x204 + m.x244 + m.x300 + m.x324 >= 0)
m.c276 = Constraint(expr= m.x205 + m.x245 + m.x301 + m.x325 >= 0)
m.c277 = Constraint(expr= m.x206 + m.x246 + m.x302 + m.x326 >= 0)
# --- c278-c319: small-coefficient "<= 0" inequalities over the same variable
# groups as c194-c235.  The coefficient pattern (0.005..0.055, mixed signs)
# suggests blending/quality-spec bounds relative to a target — assumption;
# confirm against the originating model.  Do not hand-edit coefficients.
m.c278 = Constraint(expr= - 0.02*m.x152 - 0.01*m.x248 <= 0)
m.c279 = Constraint(expr= - 0.02*m.x153 - 0.01*m.x249 <= 0)
m.c280 = Constraint(expr= - 0.02*m.x154 - 0.01*m.x250 <= 0)
m.c281 = Constraint(expr= - 0.02*m.x155 - 0.01*m.x251 <= 0)
m.c282 = Constraint(expr= - 0.02*m.x156 - 0.01*m.x252 <= 0)
m.c283 = Constraint(expr= - 0.02*m.x157 - 0.01*m.x253 <= 0)
m.c284 = Constraint(expr= - 0.02*m.x158 - 0.01*m.x254 <= 0)
m.c285 = Constraint(expr= - 0.01*m.x272 <= 0)
m.c286 = Constraint(expr= - 0.01*m.x273 <= 0)
m.c287 = Constraint(expr= - 0.01*m.x274 <= 0)
m.c288 = Constraint(expr= - 0.01*m.x275 <= 0)
m.c289 = Constraint(expr= - 0.01*m.x276 <= 0)
m.c290 = Constraint(expr= - 0.01*m.x277 <= 0)
m.c291 = Constraint(expr= - 0.01*m.x278 <= 0)
# NOTE(review): 0.00499999999999999 is the translator's float rendering of
# 0.005 — left exactly as generated to preserve the emitted model.
m.c292 = Constraint(expr= - 0.00499999999999999*m.x184 - 0.01*m.x304 <= 0)
m.c293 = Constraint(expr= - 0.00499999999999999*m.x185 - 0.01*m.x305 <= 0)
m.c294 = Constraint(expr= - 0.00499999999999999*m.x186 - 0.01*m.x306 <= 0)
m.c295 = Constraint(expr= - 0.00499999999999999*m.x187 - 0.01*m.x307 <= 0)
m.c296 = Constraint(expr= - 0.00499999999999999*m.x188 - 0.01*m.x308 <= 0)
m.c297 = Constraint(expr= - 0.00499999999999999*m.x189 - 0.01*m.x309 <= 0)
m.c298 = Constraint(expr= - 0.00499999999999999*m.x190 - 0.01*m.x310 <= 0)
m.c299 = Constraint(expr= - 0.025*m.x160 + 0.025*m.x224 - 0.015*m.x256 + 0.015*m.x280 - 0.005*m.x328 <= 0)
m.c300 = Constraint(expr= - 0.025*m.x161 + 0.025*m.x225 - 0.015*m.x257 + 0.015*m.x281 - 0.005*m.x329 <= 0)
m.c301 = Constraint(expr= - 0.025*m.x162 + 0.025*m.x226 - 0.015*m.x258 + 0.015*m.x282 - 0.005*m.x330 <= 0)
m.c302 = Constraint(expr= - 0.025*m.x163 + 0.025*m.x227 - 0.015*m.x259 + 0.015*m.x283 - 0.005*m.x331 <= 0)
m.c303 = Constraint(expr= - 0.025*m.x164 + 0.025*m.x228 - 0.015*m.x260 + 0.015*m.x284 - 0.005*m.x332 <= 0)
m.c304 = Constraint(expr= - 0.025*m.x165 + 0.025*m.x229 - 0.015*m.x261 + 0.015*m.x285 - 0.005*m.x333 <= 0)
m.c305 = Constraint(expr= - 0.025*m.x166 + 0.025*m.x230 - 0.015*m.x262 + 0.015*m.x286 - 0.005*m.x334 <= 0)
m.c306 = Constraint(expr= - 0.055*m.x168 + 0.02*m.x192 - 0.005*m.x232 - 0.045*m.x264 - 0.015*m.x288 + 0.015*m.x312 <= 0)
m.c307 = Constraint(expr= - 0.055*m.x169 + 0.02*m.x193 - 0.005*m.x233 - 0.045*m.x265 - 0.015*m.x289 + 0.015*m.x313 <= 0)
m.c308 = Constraint(expr= - 0.055*m.x170 + 0.02*m.x194 - 0.005*m.x234 - 0.045*m.x266 - 0.015*m.x290 + 0.015*m.x314 <= 0)
m.c309 = Constraint(expr= - 0.055*m.x171 + 0.02*m.x195 - 0.005*m.x235 - 0.045*m.x267 - 0.015*m.x291 + 0.015*m.x315 <= 0)
m.c310 = Constraint(expr= - 0.055*m.x172 + 0.02*m.x196 - 0.005*m.x236 - 0.045*m.x268 - 0.015*m.x292 + 0.015*m.x316 <= 0)
m.c311 = Constraint(expr= - 0.055*m.x173 + 0.02*m.x197 - 0.005*m.x237 - 0.045*m.x269 - 0.015*m.x293 + 0.015*m.x317 <= 0)
m.c312 = Constraint(expr= - 0.055*m.x174 + 0.02*m.x198 - 0.005*m.x238 - 0.045*m.x270 - 0.015*m.x294 + 0.015*m.x318 <= 0)
m.c313 = Constraint(expr= - 0.025*m.x240 - 0.035*m.x296 - 0.005*m.x320 <= 0)
m.c314 = Constraint(expr= - 0.025*m.x241 - 0.035*m.x297 - 0.005*m.x321 <= 0)
m.c315 = Constraint(expr= - 0.025*m.x242 - 0.035*m.x298 - 0.005*m.x322 <= 0)
m.c316 = Constraint(expr= - 0.025*m.x243 - 0.035*m.x299 - 0.005*m.x323 <= 0)
m.c317 = Constraint(expr= - 0.025*m.x244 - 0.035*m.x300 - 0.005*m.x324 <= 0)
m.c318 = Constraint(expr= - 0.025*m.x245 - 0.035*m.x301 - 0.005*m.x325 <= 0)
m.c319 = Constraint(expr= - 0.025*m.x246 - 0.035*m.x302 - 0.005*m.x326 <= 0)
# --- c320-c361: companion ">= 0" inequalities over the same groups
# (the other side of the spec band).
m.c320 = Constraint(expr= 0.01*m.x248 >= 0)
m.c321 = Constraint(expr= 0.01*m.x249 >= 0)
m.c322 = Constraint(expr= 0.01*m.x250 >= 0)
m.c323 = Constraint(expr= 0.01*m.x251 >= 0)
m.c324 = Constraint(expr= 0.01*m.x252 >= 0)
m.c325 = Constraint(expr= 0.01*m.x253 >= 0)
m.c326 = Constraint(expr= 0.01*m.x254 >= 0)
m.c327 = Constraint(expr= 0.02*m.x216 + 0.01*m.x272 >= 0)
m.c328 = Constraint(expr= 0.02*m.x217 + 0.01*m.x273 >= 0)
m.c329 = Constraint(expr= 0.02*m.x218 + 0.01*m.x274 >= 0)
m.c330 = Constraint(expr= 0.02*m.x219 + 0.01*m.x275 >= 0)
m.c331 = Constraint(expr= 0.02*m.x220 + 0.01*m.x276 >= 0)
m.c332 = Constraint(expr= 0.02*m.x221 + 0.01*m.x277 >= 0)
m.c333 = Constraint(expr= 0.02*m.x222 + 0.01*m.x278 >= 0)
m.c334 = Constraint(expr= 0.015*m.x184 + 0.01*m.x304 >= 0)
m.c335 = Constraint(expr= 0.015*m.x185 + 0.01*m.x305 >= 0)
m.c336 = Constraint(expr= 0.015*m.x186 + 0.01*m.x306 >= 0)
m.c337 = Constraint(expr= 0.015*m.x187 + 0.01*m.x307 >= 0)
m.c338 = Constraint(expr= 0.015*m.x188 + 0.01*m.x308 >= 0)
m.c339 = Constraint(expr= 0.015*m.x189 + 0.01*m.x309 >= 0)
m.c340 = Constraint(expr= 0.015*m.x190 + 0.01*m.x310 >= 0)
m.c341 = Constraint(expr= - 0.015*m.x160 + 0.035*m.x224 - 0.005*m.x256 + 0.025*m.x280 + 0.005*m.x328 >= 0)
m.c342 = Constraint(expr= - 0.015*m.x161 + 0.035*m.x225 - 0.005*m.x257 + 0.025*m.x281 + 0.005*m.x329 >= 0)
m.c343 = Constraint(expr= - 0.015*m.x162 + 0.035*m.x226 - 0.005*m.x258 + 0.025*m.x282 + 0.005*m.x330 >= 0)
m.c344 = Constraint(expr= - 0.015*m.x163 + 0.035*m.x227 - 0.005*m.x259 + 0.025*m.x283 + 0.005*m.x331 >= 0)
m.c345 = Constraint(expr= - 0.015*m.x164 + 0.035*m.x228 - 0.005*m.x260 + 0.025*m.x284 + 0.005*m.x332 >= 0)
m.c346 = Constraint(expr= - 0.015*m.x165 + 0.035*m.x229 - 0.005*m.x261 + 0.025*m.x285 + 0.005*m.x333 >= 0)
m.c347 = Constraint(expr= - 0.015*m.x166 + 0.035*m.x230 - 0.005*m.x262 + 0.025*m.x286 + 0.005*m.x334 >= 0)
m.c348 = Constraint(expr= - 0.035*m.x168 + 0.04*m.x192 + 0.015*m.x232 - 0.025*m.x264 + 0.005*m.x288 + 0.035*m.x312 >= 0)
m.c349 = Constraint(expr= - 0.035*m.x169 + 0.04*m.x193 + 0.015*m.x233 - 0.025*m.x265 + 0.005*m.x289 + 0.035*m.x313 >= 0)
m.c350 = Constraint(expr= - 0.035*m.x170 + 0.04*m.x194 + 0.015*m.x234 - 0.025*m.x266 + 0.005*m.x290 + 0.035*m.x314 >= 0)
m.c351 = Constraint(expr= - 0.035*m.x171 + 0.04*m.x195 + 0.015*m.x235 - 0.025*m.x267 + 0.005*m.x291 + 0.035*m.x315 >= 0)
m.c352 = Constraint(expr= - 0.035*m.x172 + 0.04*m.x196 + 0.015*m.x236 - 0.025*m.x268 + 0.005*m.x292 + 0.035*m.x316 >= 0)
m.c353 = Constraint(expr= - 0.035*m.x173 + 0.04*m.x197 + 0.015*m.x237 - 0.025*m.x269 + 0.005*m.x293 + 0.035*m.x317 >= 0)
m.c354 = Constraint(expr= - 0.035*m.x174 + 0.04*m.x198 + 0.015*m.x238 - 0.025*m.x270 + 0.005*m.x294 + 0.035*m.x318 >= 0)
m.c355 = Constraint(expr= 0.01*m.x200 - 0.015*m.x240 - 0.025*m.x296 + 0.005*m.x320 >= 0)
m.c356 = Constraint(expr= 0.01*m.x201 - 0.015*m.x241 - 0.025*m.x297 + 0.005*m.x321 >= 0)
m.c357 = Constraint(expr= 0.01*m.x202 - 0.015*m.x242 - 0.025*m.x298 + 0.005*m.x322 >= 0)
m.c358 = Constraint(expr= 0.01*m.x203 - 0.015*m.x243 - 0.025*m.x299 + 0.005*m.x323 >= 0)
m.c359 = Constraint(expr= 0.01*m.x204 - 0.015*m.x244 - 0.025*m.x300 + 0.005*m.x324 >= 0)
m.c360 = Constraint(expr= 0.01*m.x205 - 0.015*m.x245 - 0.025*m.x301 + 0.005*m.x325 >= 0)
m.c361 = Constraint(expr= 0.01*m.x206 - 0.015*m.x246 - 0.025*m.x302 + 0.005*m.x326 >= 0)
# --- c362-c459: binary mutual-exclusion constraints.  Each says at most one
# of the listed binaries (b1..b98) may be 1, repeated across 7 parallel
# indices per family.  Presumably "two tasks cannot share a slot" logic —
# confirm against the originating model.
m.c362 = Constraint(expr= m.b1 + m.b22 <= 1)
m.c363 = Constraint(expr= m.b2 + m.b23 <= 1)
m.c364 = Constraint(expr= m.b3 + m.b24 <= 1)
m.c365 = Constraint(expr= m.b4 + m.b25 <= 1)
m.c366 = Constraint(expr= m.b5 + m.b26 <= 1)
m.c367 = Constraint(expr= m.b6 + m.b27 <= 1)
m.c368 = Constraint(expr= m.b7 + m.b28 <= 1)
m.c369 = Constraint(expr= m.b1 + m.b29 <= 1)
m.c370 = Constraint(expr= m.b2 + m.b30 <= 1)
m.c371 = Constraint(expr= m.b3 + m.b31 <= 1)
m.c372 = Constraint(expr= m.b4 + m.b32 <= 1)
m.c373 = Constraint(expr= m.b5 + m.b33 <= 1)
m.c374 = Constraint(expr= m.b6 + m.b34 <= 1)
m.c375 = Constraint(expr= m.b7 + m.b35 <= 1)
m.c376 = Constraint(expr= m.b15 + m.b36 <= 1)
m.c377 = Constraint(expr= m.b16 + m.b37 <= 1)
m.c378 = Constraint(expr= m.b17 + m.b38 <= 1)
m.c379 = Constraint(expr= m.b18 + m.b39 <= 1)
m.c380 = Constraint(expr= m.b19 + m.b40 <= 1)
m.c381 = Constraint(expr= m.b20 + m.b41 <= 1)
m.c382 = Constraint(expr= m.b21 + m.b42 <= 1)
m.c383 = Constraint(expr= m.b15 + m.b43 <= 1)
m.c384 = Constraint(expr= m.b16 + m.b44 <= 1)
m.c385 = Constraint(expr= m.b17 + m.b45 <= 1)
m.c386 = Constraint(expr= m.b18 + m.b46 <= 1)
m.c387 = Constraint(expr= m.b19 + m.b47 <= 1)
m.c388 = Constraint(expr= m.b20 + m.b48 <= 1)
m.c389 = Constraint(expr= m.b21 + m.b49 <= 1)
m.c390 = Constraint(expr= m.b15 + m.b50 <= 1)
m.c391 = Constraint(expr= m.b16 + m.b51 <= 1)
m.c392 = Constraint(expr= m.b17 + m.b52 <= 1)
m.c393 = Constraint(expr= m.b18 + m.b53 <= 1)
m.c394 = Constraint(expr= m.b19 + m.b54 <= 1)
m.c395 = Constraint(expr= m.b20 + m.b55 <= 1)
m.c396 = Constraint(expr= m.b21 + m.b56 <= 1)
m.c397 = Constraint(expr= m.b8 + m.b57 <= 1)
m.c398 = Constraint(expr= m.b9 + m.b58 <= 1)
m.c399 = Constraint(expr= m.b10 + m.b59 <= 1)
m.c400 = Constraint(expr= m.b11 + m.b60 <= 1)
m.c401 = Constraint(expr= m.b12 + m.b61 <= 1)
m.c402 = Constraint(expr= m.b13 + m.b62 <= 1)
m.c403 = Constraint(expr= m.b14 + m.b63 <= 1)
m.c404 = Constraint(expr= m.b8 + m.b64 <= 1)
m.c405 = Constraint(expr= m.b9 + m.b65 <= 1)
m.c406 = Constraint(expr= m.b10 + m.b66 <= 1)
m.c407 = Constraint(expr= m.b11 + m.b67 <= 1)
m.c408 = Constraint(expr= m.b12 + m.b68 <= 1)
m.c409 = Constraint(expr= m.b13 + m.b69 <= 1)
m.c410 = Constraint(expr= m.b14 + m.b70 <= 1)
m.c411 = Constraint(expr= m.b22 + m.b71 <= 1)
m.c412 = Constraint(expr= m.b23 + m.b72 <= 1)
m.c413 = Constraint(expr= m.b24 + m.b73 <= 1)
m.c414 = Constraint(expr= m.b25 + m.b74 <= 1)
m.c415 = Constraint(expr= m.b26 + m.b75 <= 1)
m.c416 = Constraint(expr= m.b27 + m.b76 <= 1)
m.c417 = Constraint(expr= m.b28 + m.b77 <= 1)
m.c418 = Constraint(expr= m.b36 + m.b71 <= 1)
m.c419 = Constraint(expr= m.b37 + m.b72 <= 1)
m.c420 = Constraint(expr= m.b38 + m.b73 <= 1)
m.c421 = Constraint(expr= m.b39 + m.b74 <= 1)
m.c422 = Constraint(expr= m.b40 + m.b75 <= 1)
m.c423 = Constraint(expr= m.b41 + m.b76 <= 1)
m.c424 = Constraint(expr= m.b42 + m.b77 <= 1)
# c425-c445: three-way exclusions — at most one of each triple may be active.
m.c425 = Constraint(expr= m.b29 + m.b78 + m.b85 <= 1)
m.c426 = Constraint(expr= m.b30 + m.b79 + m.b86 <= 1)
m.c427 = Constraint(expr= m.b31 + m.b80 + m.b87 <= 1)
m.c428 = Constraint(expr= m.b32 + m.b81 + m.b88 <= 1)
m.c429 = Constraint(expr= m.b33 + m.b82 + m.b89 <= 1)
m.c430 = Constraint(expr= m.b34 + m.b83 + m.b90 <= 1)
m.c431 = Constraint(expr= m.b35 + m.b84 + m.b91 <= 1)
m.c432 = Constraint(expr= m.b43 + m.b78 + m.b85 <= 1)
m.c433 = Constraint(expr= m.b44 + m.b79 + m.b86 <= 1)
m.c434 = Constraint(expr= m.b45 + m.b80 + m.b87 <= 1)
m.c435 = Constraint(expr= m.b46 + m.b81 + m.b88 <= 1)
m.c436 = Constraint(expr= m.b47 + m.b82 + m.b89 <= 1)
m.c437 = Constraint(expr= m.b48 + m.b83 + m.b90 <= 1)
m.c438 = Constraint(expr= m.b49 + m.b84 + m.b91 <= 1)
m.c439 = Constraint(expr= m.b57 + m.b78 + m.b85 <= 1)
m.c440 = Constraint(expr= m.b58 + m.b79 + m.b86 <= 1)
m.c441 = Constraint(expr= m.b59 + m.b80 + m.b87 <= 1)
m.c442 = Constraint(expr= m.b60 + m.b81 + m.b88 <= 1)
m.c443 = Constraint(expr= m.b61 + m.b82 + m.b89 <= 1)
m.c444 = Constraint(expr= m.b62 + m.b83 + m.b90 <= 1)
m.c445 = Constraint(expr= m.b63 + m.b84 + m.b91 <= 1)
m.c446 = Constraint(expr= m.b50 + m.b92 <= 1)
m.c447 = Constraint(expr= m.b51 + m.b93 <= 1)
m.c448 = Constraint(expr= m.b52 + m.b94 <= 1)
m.c449 = Constraint(expr= m.b53 + m.b95 <= 1)
m.c450 = Constraint(expr= m.b54 + m.b96 <= 1)
m.c451 = Constraint(expr= m.b55 + m.b97 <= 1)
m.c452 = Constraint(expr= m.b56 + m.b98 <= 1)
m.c453 = Constraint(expr= m.b64 + m.b92 <= 1)
m.c454 = Constraint(expr= m.b65 + m.b93 <= 1)
m.c455 = Constraint(expr= m.b66 + m.b94 <= 1)
m.c456 = Constraint(expr= m.b67 + m.b95 <= 1)
m.c457 = Constraint(expr= m.b68 + m.b96 <= 1)
m.c458 = Constraint(expr= m.b69 + m.b97 <= 1)
m.c459 = Constraint(expr= m.b70 + m.b98 <= 1)
# --- c460-c462: fix x150, x182, x214 to zero via equality constraints
# (the translator emits fixings as constraints rather than Var.fix()).
m.c460 = Constraint(expr= m.x150 == 0)
m.c461 = Constraint(expr= m.x182 == 0)
m.c462 = Constraint(expr= m.x214 == 0)
# --- c463-c465: aggregate totals — each large sum of flow-type variables
# must equal 500 (presumably total demand/supply per group; verify against
# the originating model).
m.c463 = Constraint(expr= m.x356 + m.x357 + m.x358 + m.x359 + m.x360 + m.x361 + m.x362 + m.x447 + m.x448 + m.x449
                          + m.x450 + m.x451 + m.x452 + m.x453 + m.x489 + m.x490 + m.x491 + m.x492 + m.x493 + m.x494
                          + m.x495 + m.x531 + m.x532 + m.x533 + m.x534 + m.x535 + m.x536 + m.x537 + m.x594 + m.x595
                          + m.x596 + m.x597 + m.x598 + m.x599 + m.x600 == 500)
m.c464 = Constraint(expr= m.x363 + m.x364 + m.x365 + m.x366 + m.x367 + m.x368 + m.x369 + m.x370 + m.x371 + m.x372
                          + m.x373 + m.x374 + m.x375 + m.x376 + m.x398 + m.x399 + m.x400 + m.x401 + m.x402 + m.x403
                          + m.x404 + m.x405 + m.x406 + m.x407 + m.x408 + m.x409 + m.x410 + m.x411 + m.x454 + m.x455
                          + m.x456 + m.x457 + m.x458 + m.x459 + m.x460 + m.x461 + m.x462 + m.x463 + m.x464 + m.x465
                          + m.x466 + m.x467 + m.x496 + m.x497 + m.x498 + m.x499 + m.x500 + m.x501 + m.x502 + m.x503
                          + m.x504 + m.x505 + m.x506 + m.x507 + m.x508 + m.x509 + m.x538 + m.x539 + m.x540 + m.x541
                          + m.x542 + m.x543 + m.x544 + m.x545 + m.x546 + m.x547 + m.x548 + m.x549 + m.x550 + m.x551
                          + m.x573 + m.x574 + m.x575 + m.x576 + m.x577 + m.x578 + m.x579 + m.x580 + m.x581 + m.x582
                          + m.x583 + m.x584 + m.x585 + m.x586 == 500)
m.c465 = Constraint(expr= m.x412 + m.x413 + m.x414 + m.x415 + m.x416 + m.x417 + m.x418 + m.x468 + m.x469 + m.x470
                          + m.x471 + m.x472 + m.x473 + m.x474 + m.x552 + m.x553 + m.x554 + m.x555 + m.x556 + m.x557
                          + m.x558 + m.x587 + m.x588 + m.x589 + m.x590 + m.x591 + m.x592 + m.x593 == 500)
# --- c466-c471: exactly-one selection constraints — each group of five
# binaries (b99..b128) must sum to 1 (SOS1-style choice).
m.c466 = Constraint(expr= m.b99 + m.b100 + m.b102 + m.b105 + m.b108 == 1)
m.c467 = Constraint(expr= m.b101 + m.b103 + m.b106 + m.b109 + m.b111 == 1)
m.c468 = Constraint(expr= m.b104 + m.b107 + m.b110 + m.b112 + m.b113 == 1)
m.c469 = Constraint(expr= m.b114 + m.b115 + m.b117 + m.b120 + m.b123 == 1)
m.c470 = Constraint(expr= m.b116 + m.b118 + m.b121 + m.b124 + m.b126 == 1)
m.c471 = Constraint(expr= m.b119 + m.b122 + m.b125 + m.b127 + m.b128 == 1)
# --- c472-c478: at most one of b1/b8/b15 (per parallel index) may be active.
m.c472 = Constraint(expr= m.b1 + m.b8 + m.b15 <= 1)
m.c473 = Constraint(expr= m.b2 + m.b9 + m.b16 <= 1)
m.c474 = Constraint(expr= m.b3 + m.b10 + m.b17 <= 1)
m.c475 = Constraint(expr= m.b4 + m.b11 + m.b18 <= 1)
m.c476 = Constraint(expr= m.b5 + m.b12 + m.b19 <= 1)
m.c477 = Constraint(expr= m.b6 + m.b13 + m.b20 <= 1)
m.c478 = Constraint(expr= m.b7 + m.b14 + m.b21 <= 1)
# --- c479-c492: exactly-one-of-a-pair constraints (b71/b78, ..., b91/b98):
# each pair acts as a binary complement.
m.c479 = Constraint(expr= m.b71 + m.b78 == 1)
m.c480 = Constraint(expr= m.b72 + m.b79 == 1)
m.c481 = Constraint(expr= m.b73 + m.b80 == 1)
m.c482 = Constraint(expr= m.b74 + m.b81 == 1)
m.c483 = Constraint(expr= m.b75 + m.b82 == 1)
m.c484 = Constraint(expr= m.b76 + m.b83 == 1)
m.c485 = Constraint(expr= m.b77 + m.b84 == 1)
m.c486 = Constraint(expr= m.b85 + m.b92 == 1)
m.c487 = Constraint(expr= m.b86 + m.b93 == 1)
m.c488 = Constraint(expr= m.b87 + m.b94 == 1)
m.c489 = Constraint(expr= m.b88 + m.b95 == 1)
m.c490 = Constraint(expr= m.b89 + m.b96 == 1)
m.c491 = Constraint(expr= m.b90 + m.b97 == 1)
m.c492 = Constraint(expr= m.b91 + m.b98 == 1)
# --- c493-c534: precedence-style implications — bK can only be 1 if at
# least one of the subtracted selector binaries is 1; the selector set
# shrinks (or grows) monotonically along each run, which encodes an ordering.
# Note c500, c507, c508, c519, c520, c527 force their binary to 0 outright.
m.c493 = Constraint(expr= m.b1 - m.b99 <= 0)
m.c494 = Constraint(expr= m.b2 - m.b99 - m.b100 <= 0)
m.c495 = Constraint(expr= m.b3 - m.b99 - m.b100 - m.b102 <= 0)
m.c496 = Constraint(expr= m.b4 - m.b99 - m.b100 - m.b102 - m.b105 <= 0)
m.c497 = Constraint(expr= m.b5 - m.b99 - m.b100 - m.b102 - m.b105 - m.b108 <= 0)
m.c498 = Constraint(expr= m.b6 - m.b99 - m.b100 - m.b102 - m.b105 - m.b108 <= 0)
m.c499 = Constraint(expr= m.b7 - m.b99 - m.b100 - m.b102 - m.b105 - m.b108 <= 0)
m.c500 = Constraint(expr= m.b8 <= 0)
m.c501 = Constraint(expr= m.b9 - m.b101 <= 0)
m.c502 = Constraint(expr= m.b10 - m.b101 - m.b103 <= 0)
m.c503 = Constraint(expr= m.b11 - m.b101 - m.b103 - m.b106 <= 0)
m.c504 = Constraint(expr= m.b12 - m.b101 - m.b103 - m.b106 - m.b109 <= 0)
m.c505 = Constraint(expr= m.b13 - m.b101 - m.b103 - m.b106 - m.b109 - m.b111 <= 0)
m.c506 = Constraint(expr= m.b14 - m.b101 - m.b103 - m.b106 - m.b109 - m.b111 <= 0)
m.c507 = Constraint(expr= m.b15 <= 0)
m.c508 = Constraint(expr= m.b16 <= 0)
m.c509 = Constraint(expr= m.b17 - m.b104 <= 0)
m.c510 = Constraint(expr= m.b18 - m.b104 - m.b107 <= 0)
m.c511 = Constraint(expr= m.b19 - m.b104 - m.b107 - m.b110 <= 0)
m.c512 = Constraint(expr= m.b20 - m.b104 - m.b107 - m.b110 - m.b112 <= 0)
m.c513 = Constraint(expr= m.b21 - m.b104 - m.b107 - m.b110 - m.b112 - m.b113 <= 0)
m.c514 = Constraint(expr= m.b1 - m.b114 - m.b115 - m.b117 - m.b120 - m.b123 <= 0)
m.c515 = Constraint(expr= m.b2 - m.b115 - m.b117 - m.b120 - m.b123 <= 0)
m.c516 = Constraint(expr= m.b3 - m.b117 - m.b120 - m.b123 <= 0)
m.c517 = Constraint(expr= m.b4 - m.b120 - m.b123 <= 0)
m.c518 = Constraint(expr= m.b5 - m.b123 <= 0)
m.c519 = Constraint(expr= m.b6 <= 0)
m.c520 = Constraint(expr= m.b7 <= 0)
m.c521 = Constraint(expr= m.b8 - m.b116 - m.b118 - m.b121 - m.b124 - m.b126 <= 0)
m.c522 = Constraint(expr= m.b9 - m.b116 - m.b118 - m.b121 - m.b124 - m.b126 <= 0)
m.c523 = Constraint(expr= m.b10 - m.b118 - m.b121 - m.b124 - m.b126 <= 0)
m.c524 = Constraint(expr= m.b11 - m.b121 - m.b124 - m.b126 <= 0)
m.c525 = Constraint(expr= m.b12 - m.b124 - m.b126 <= 0)
m.c526 = Constraint(expr= m.b13 - m.b126 <= 0)
m.c527 = Constraint(expr= m.b14 <= 0)
m.c528 = Constraint(expr= m.b15 - m.b119 - m.b122 - m.b125 - m.b127 - m.b128 <= 0)
m.c529 = Constraint(expr= m.b16 - m.b119 - m.b122 - m.b125 - m.b127 - m.b128 <= 0)
m.c530 = Constraint(expr= m.b17 - m.b119 - m.b122 - m.b125 - m.b127 - m.b128 <= 0)
m.c531 = Constraint(expr= m.b18 - m.b122 - m.b125 - m.b127 - m.b128 <= 0)
m.c532 = Constraint(expr= m.b19 - m.b125 - m.b127 - m.b128 <= 0)
m.c533 = Constraint(expr= m.b20 - m.b127 - m.b128 <= 0)
m.c534 = Constraint(expr= m.b21 - m.b128 <= 0)
# --- c535-c558: logic linking — each constraint x >= bA + bB - 1 forces the
# indicator xN (x129..x140) to 1 whenever both listed binaries are 1
# (linearized AND).  Two overlapping binary pairs feed each indicator.
m.c535 = Constraint(expr= - m.b71 - m.b79 + m.x129 >= -1)
m.c536 = Constraint(expr= - m.b72 - m.b80 + m.x130 >= -1)
m.c537 = Constraint(expr= - m.b73 - m.b81 + m.x131 >= -1)
m.c538 = Constraint(expr= - m.b74 - m.b82 + m.x132 >= -1)
m.c539 = Constraint(expr= - m.b75 - m.b83 + m.x133 >= -1)
m.c540 = Constraint(expr= - m.b76 - m.b84 + m.x134 >= -1)
m.c541 = Constraint(expr= - m.b85 - m.b93 + m.x135 >= -1)
m.c542 = Constraint(expr= - m.b86 - m.b94 + m.x136 >= -1)
m.c543 = Constraint(expr= - m.b87 - m.b95 + m.x137 >= -1)
m.c544 = Constraint(expr= - m.b88 - m.b96 + m.x138 >= -1)
m.c545 = Constraint(expr= - m.b89 - m.b97 + m.x139 >= -1)
m.c546 = Constraint(expr= - m.b90 - m.b98 + m.x140 >= -1)
m.c547 = Constraint(expr= - m.b72 - m.b78 + m.x129 >= -1)
m.c548 = Constraint(expr= - m.b73 - m.b79 + m.x130 >= -1)
m.c549 = Constraint(expr= - m.b74 - m.b80 + m.x131 >= -1)
m.c550 = Constraint(expr= - m.b75 - m.b81 + m.x132 >= -1)
m.c551 = Constraint(expr= - m.b76 - m.b82 + m.x133 >= -1)
m.c552 = Constraint(expr= - m.b77 - m.b83 + m.x134 >= -1)
m.c553 = Constraint(expr= - m.b86 - m.b92 + m.x135 >= -1)
m.c554 = Constraint(expr= - m.b87 - m.b93 + m.x136 >= -1)
m.c555 = Constraint(expr= - m.b88 - m.b94 + m.x137 >= -1)
m.c556 = Constraint(expr= - m.b89 - m.b95 + m.x138 >= -1)
m.c557 = Constraint(expr= - m.b90 - m.b96 + m.x139 >= -1)
m.c558 = Constraint(expr= - m.b91 - m.b97 + m.x140 >= -1)
# --- c559-c560: counting — x141/x142 equal the number of active indicators
# in their group plus one.
m.c559 = Constraint(expr= m.x129 + m.x130 + m.x131 + m.x132 + m.x133 + m.x134 - m.x141 == -1)
m.c560 = Constraint(expr= m.x135 + m.x136 + m.x137 + m.x138 + m.x139 + m.x140 - m.x142 == -1)
# --- c561-c602: defining equalities for x688..x729 — each equals 0.5 times
# the sum of consecutive-index variable pairs from several groups
# (a trapezoidal/midpoint average across adjacent periods; assumption —
# confirm against the originating model).
m.c561 = Constraint(expr= - 0.5*m.x151 - 0.5*m.x152 - 0.5*m.x247 - 0.5*m.x248 + m.x688 == 0)
m.c562 = Constraint(expr= - 0.5*m.x152 - 0.5*m.x153 - 0.5*m.x248 - 0.5*m.x249 + m.x689 == 0)
m.c563 = Constraint(expr= - 0.5*m.x153 - 0.5*m.x154 - 0.5*m.x249 - 0.5*m.x250 + m.x690 == 0)
m.c564 = Constraint(expr= - 0.5*m.x154 - 0.5*m.x155 - 0.5*m.x250 - 0.5*m.x251 + m.x691 == 0)
m.c565 = Constraint(expr= - 0.5*m.x155 - 0.5*m.x156 - 0.5*m.x251 - 0.5*m.x252 + m.x692 == 0)
m.c566 = Constraint(expr= - 0.5*m.x156 - 0.5*m.x157 - 0.5*m.x252 - 0.5*m.x253 + m.x693 == 0)
m.c567 = Constraint(expr= - 0.5*m.x157 - 0.5*m.x158 - 0.5*m.x253 - 0.5*m.x254 + m.x694 == 0)
m.c568 = Constraint(expr= - 0.5*m.x215 - 0.5*m.x216 - 0.5*m.x271 - 0.5*m.x272 + m.x695 == 0)
m.c569 = Constraint(expr= - 0.5*m.x216 - 0.5*m.x217 - 0.5*m.x272 - 0.5*m.x273 + m.x696 == 0)
m.c570 = Constraint(expr= - 0.5*m.x217 - 0.5*m.x218 - 0.5*m.x273 - 0.5*m.x274 + m.x697 == 0)
m.c571 = Constraint(expr= - 0.5*m.x218 - 0.5*m.x219 - 0.5*m.x274 - 0.5*m.x275 + m.x698 == 0)
m.c572 = Constraint(expr= - 0.5*m.x219 - 0.5*m.x220 - 0.5*m.x275 - 0.5*m.x276 + m.x699 == 0)
m.c573 = Constraint(expr= - 0.5*m.x220 - 0.5*m.x221 - 0.5*m.x276 - 0.5*m.x277 + m.x700 == 0)
m.c574 = Constraint(expr= - 0.5*m.x221 - 0.5*m.x222 - 0.5*m.x277 - 0.5*m.x278 + m.x701 == 0)
m.c575 = Constraint(expr= - 0.5*m.x183 - 0.5*m.x184 - 0.5*m.x303 - 0.5*m.x304 + m.x702 == 0)
m.c576 = Constraint(expr= - 0.5*m.x184 - 0.5*m.x185 - 0.5*m.x304 - 0.5*m.x305 + m.x703 == 0)
m.c577 = Constraint(expr= - 0.5*m.x185 - 0.5*m.x186 - 0.5*m.x305 - 0.5*m.x306 + m.x704 == 0)
m.c578 = Constraint(expr= - 0.5*m.x186 - 0.5*m.x187 - 0.5*m.x306 - 0.5*m.x307 + m.x705 == 0)
m.c579 = Constraint(expr= - 0.5*m.x187 - 0.5*m.x188 - 0.5*m.x307 - 0.5*m.x308 + m.x706 == 0)
m.c580 = Constraint(expr= - 0.5*m.x188 - 0.5*m.x189 - 0.5*m.x308 - 0.5*m.x309 + m.x707 == 0)
m.c581 = Constraint(expr= - 0.5*m.x189 - 0.5*m.x190 - 0.5*m.x309 - 0.5*m.x310 + m.x708 == 0)
m.c582 = Constraint(expr= - 0.5*m.x159 - 0.5*m.x160 - 0.5*m.x223 - 0.5*m.x224 - 0.5*m.x255 - 0.5*m.x256 - 0.5*m.x279
                          - 0.5*m.x280 - 0.5*m.x327 - 0.5*m.x328 + m.x709 == 0)
m.c583 = Constraint(expr= - 0.5*m.x160 - 0.5*m.x161 - 0.5*m.x224 - 0.5*m.x225 - 0.5*m.x256 - 0.5*m.x257 - 0.5*m.x280
                          - 0.5*m.x281 - 0.5*m.x328 - 0.5*m.x329 + m.x710 == 0)
m.c584 = Constraint(expr= - 0.5*m.x161 - 0.5*m.x162 - 0.5*m.x225 - 0.5*m.x226 - 0.5*m.x257 - 0.5*m.x258 - 0.5*m.x281
                          - 0.5*m.x282 - 0.5*m.x329 - 0.5*m.x330 + m.x711 == 0)
m.c585 = Constraint(expr= - 0.5*m.x162 - 0.5*m.x163 - 0.5*m.x226 - 0.5*m.x227 - 0.5*m.x258 - 0.5*m.x259 - 0.5*m.x282
                          - 0.5*m.x283 - 0.5*m.x330 - 0.5*m.x331 + m.x712 == 0)
m.c586 = Constraint(expr= - 0.5*m.x163 - 0.5*m.x164 - 0.5*m.x227 - 0.5*m.x228 - 0.5*m.x259 - 0.5*m.x260 - 0.5*m.x283
                          - 0.5*m.x284 - 0.5*m.x331 - 0.5*m.x332 + m.x713 == 0)
m.c587 = Constraint(expr= - 0.5*m.x164 - 0.5*m.x165 - 0.5*m.x228 - 0.5*m.x229 - 0.5*m.x260 - 0.5*m.x261 - 0.5*m.x284
                          - 0.5*m.x285 - 0.5*m.x332 - 0.5*m.x333 + m.x714 == 0)
m.c588 = Constraint(expr= - 0.5*m.x165 - 0.5*m.x166 - 0.5*m.x229 - 0.5*m.x230 - 0.5*m.x261 - 0.5*m.x262 - 0.5*m.x285
                          - 0.5*m.x286 - 0.5*m.x333 - 0.5*m.x334 + m.x715 == 0)
m.c589 = Constraint(expr= - 0.5*m.x167 - 0.5*m.x168 - 0.5*m.x191 - 0.5*m.x192 - 0.5*m.x231 - 0.5*m.x232 - 0.5*m.x263
                          - 0.5*m.x264 - 0.5*m.x287 - 0.5*m.x288 - 0.5*m.x311 - 0.5*m.x312 + m.x716 == 0)
m.c590 = Constraint(expr= - 0.5*m.x168 - 0.5*m.x169 - 0.5*m.x192 - 0.5*m.x193 - 0.5*m.x232 - 0.5*m.x233 - 0.5*m.x264
                          - 0.5*m.x265 - 0.5*m.x288 - 0.5*m.x289 - 0.5*m.x312 - 0.5*m.x313 + m.x717 == 0)
m.c591 = Constraint(expr= - 0.5*m.x169 - 0.5*m.x170 - 0.5*m.x193 - 0.5*m.x194 - 0.5*m.x233 - 0.5*m.x234 - 0.5*m.x265
                          - 0.5*m.x266 - 0.5*m.x289 - 0.5*m.x290 - 0.5*m.x313 - 0.5*m.x314 + m.x718 == 0)
m.c592 = Constraint(expr= - 0.5*m.x170 - 0.5*m.x171 - 0.5*m.x194 - 0.5*m.x195 - 0.5*m.x234 - 0.5*m.x235 - 0.5*m.x266
                          - 0.5*m.x267 - 0.5*m.x290 - 0.5*m.x291 - 0.5*m.x314 - 0.5*m.x315 + m.x719 == 0)
m.c593 = Constraint(expr= - 0.5*m.x171 - 0.5*m.x172 - 0.5*m.x195 - 0.5*m.x196 - 0.5*m.x235 - 0.5*m.x236 - 0.5*m.x267
                          - 0.5*m.x268 - 0.5*m.x291 - 0.5*m.x292 - 0.5*m.x315 - 0.5*m.x316 + m.x720 == 0)
m.c594 = Constraint(expr= - 0.5*m.x172 - 0.5*m.x173 - 0.5*m.x196 - 0.5*m.x197 - 0.5*m.x236 - 0.5*m.x237 - 0.5*m.x268
                          - 0.5*m.x269 - 0.5*m.x292 - 0.5*m.x293 - 0.5*m.x316 - 0.5*m.x317 + m.x721 == 0)
m.c595 = Constraint(expr= - 0.5*m.x173 - 0.5*m.x174 - 0.5*m.x197 - 0.5*m.x198 - 0.5*m.x237 - 0.5*m.x238 - 0.5*m.x269
                          - 0.5*m.x270 - 0.5*m.x293 - 0.5*m.x294 - 0.5*m.x317 - 0.5*m.x318 + m.x722 == 0)
m.c596 = Constraint(expr= - 0.5*m.x199 - 0.5*m.x200 - 0.5*m.x239 - 0.5*m.x240 - 0.5*m.x295 - 0.5*m.x296 - 0.5*m.x319
                          - 0.5*m.x320 + m.x723 == 0)
m.c597 = Constraint(expr= - 0.5*m.x200 - 0.5*m.x201 - 0.5*m.x240 - 0.5*m.x241 - 0.5*m.x296 - 0.5*m.x297 - 0.5*m.x320
                          - 0.5*m.x321 + m.x724 == 0)
m.c598 = Constraint(expr= - 0.5*m.x201 - 0.5*m.x202 - 0.5*m.x241 - 0.5*m.x242 - 0.5*m.x297 - 0.5*m.x298 - 0.5*m.x321
                          - 0.5*m.x322 + m.x725 == 0)
m.c599 = Constraint(expr= - 0.5*m.x202 - 0.5*m.x203 - 0.5*m.x242 - 0.5*m.x243 - 0.5*m.x298 - 0.5*m.x299 - 0.5*m.x322
                          - 0.5*m.x323 + m.x726 == 0)
m.c600 = Constraint(expr= - 0.5*m.x203 - 0.5*m.x204 - 0.5*m.x243 - 0.5*m.x244 - 0.5*m.x299 - 0.5*m.x300 - 0.5*m.x323
                          - 0.5*m.x324 + m.x727 == 0)
m.c601 = Constraint(expr= - 0.5*m.x204 - 0.5*m.x205 - 0.5*m.x244 - 0.5*m.x245 - 0.5*m.x300 - 0.5*m.x301 - 0.5*m.x324
                          - 0.5*m.x325 + m.x728 == 0)
m.c602 = Constraint(expr= - 0.5*m.x205 - 0.5*m.x206 - 0.5*m.x245 - 0.5*m.x246 - 0.5*m.x301 - 0.5*m.x302 - 0.5*m.x325
                          - 0.5*m.x326 + m.x729 == 0)
# NOTE(review): machine-generated block — do not hand-edit.
# c603-c623: big-M upper bounds x <= 500*b tying each continuous variable to one binary
# (x forced to 0 when its binary is 0; 500 is presumably the generator's upper bound —
# verify against the source model).
m.c603 = Constraint(expr= - 500*m.b1 + m.x335 <= 0)
m.c604 = Constraint(expr= - 500*m.b2 + m.x336 <= 0)
m.c605 = Constraint(expr= - 500*m.b3 + m.x337 <= 0)
m.c606 = Constraint(expr= - 500*m.b4 + m.x338 <= 0)
m.c607 = Constraint(expr= - 500*m.b5 + m.x339 <= 0)
m.c608 = Constraint(expr= - 500*m.b6 + m.x340 <= 0)
m.c609 = Constraint(expr= - 500*m.b7 + m.x341 <= 0)
m.c610 = Constraint(expr= - 500*m.b8 + m.x377 <= 0)
m.c611 = Constraint(expr= - 500*m.b9 + m.x378 <= 0)
m.c612 = Constraint(expr= - 500*m.b10 + m.x379 <= 0)
m.c613 = Constraint(expr= - 500*m.b11 + m.x380 <= 0)
m.c614 = Constraint(expr= - 500*m.b12 + m.x381 <= 0)
m.c615 = Constraint(expr= - 500*m.b13 + m.x382 <= 0)
m.c616 = Constraint(expr= - 500*m.b14 + m.x383 <= 0)
m.c617 = Constraint(expr= - 500*m.b15 + m.x419 <= 0)
m.c618 = Constraint(expr= - 500*m.b16 + m.x420 <= 0)
m.c619 = Constraint(expr= - 500*m.b17 + m.x421 <= 0)
m.c620 = Constraint(expr= - 500*m.b18 + m.x422 <= 0)
m.c621 = Constraint(expr= - 500*m.b19 + m.x423 <= 0)
m.c622 = Constraint(expr= - 500*m.b20 + m.x424 <= 0)
m.c623 = Constraint(expr= - 500*m.b21 + m.x425 <= 0)
# c624-c700: same big-M pattern with M=1000 over sums of 2-6 continuous variables per binary.
m.c624 = Constraint(expr= - 1000*m.b22 + m.x342 + m.x475 <= 0)
m.c625 = Constraint(expr= - 1000*m.b23 + m.x343 + m.x476 <= 0)
m.c626 = Constraint(expr= - 1000*m.b24 + m.x344 + m.x477 <= 0)
m.c627 = Constraint(expr= - 1000*m.b25 + m.x345 + m.x478 <= 0)
m.c628 = Constraint(expr= - 1000*m.b26 + m.x346 + m.x479 <= 0)
m.c629 = Constraint(expr= - 1000*m.b27 + m.x347 + m.x480 <= 0)
m.c630 = Constraint(expr= - 1000*m.b28 + m.x348 + m.x481 <= 0)
m.c631 = Constraint(expr= - 1000*m.b29 + m.x349 + m.x482 <= 0)
m.c632 = Constraint(expr= - 1000*m.b30 + m.x350 + m.x483 <= 0)
m.c633 = Constraint(expr= - 1000*m.b31 + m.x351 + m.x484 <= 0)
m.c634 = Constraint(expr= - 1000*m.b32 + m.x352 + m.x485 <= 0)
m.c635 = Constraint(expr= - 1000*m.b33 + m.x353 + m.x486 <= 0)
m.c636 = Constraint(expr= - 1000*m.b34 + m.x354 + m.x487 <= 0)
m.c637 = Constraint(expr= - 1000*m.b35 + m.x355 + m.x488 <= 0)
m.c638 = Constraint(expr= - 1000*m.b36 + m.x426 + m.x510 <= 0)
m.c639 = Constraint(expr= - 1000*m.b37 + m.x427 + m.x511 <= 0)
m.c640 = Constraint(expr= - 1000*m.b38 + m.x428 + m.x512 <= 0)
m.c641 = Constraint(expr= - 1000*m.b39 + m.x429 + m.x513 <= 0)
m.c642 = Constraint(expr= - 1000*m.b40 + m.x430 + m.x514 <= 0)
m.c643 = Constraint(expr= - 1000*m.b41 + m.x431 + m.x515 <= 0)
m.c644 = Constraint(expr= - 1000*m.b42 + m.x432 + m.x516 <= 0)
m.c645 = Constraint(expr= - 1000*m.b43 + m.x433 + m.x517 <= 0)
m.c646 = Constraint(expr= - 1000*m.b44 + m.x434 + m.x518 <= 0)
m.c647 = Constraint(expr= - 1000*m.b45 + m.x435 + m.x519 <= 0)
m.c648 = Constraint(expr= - 1000*m.b46 + m.x436 + m.x520 <= 0)
m.c649 = Constraint(expr= - 1000*m.b47 + m.x437 + m.x521 <= 0)
m.c650 = Constraint(expr= - 1000*m.b48 + m.x438 + m.x522 <= 0)
m.c651 = Constraint(expr= - 1000*m.b49 + m.x439 + m.x523 <= 0)
m.c652 = Constraint(expr= - 1000*m.b50 + m.x440 + m.x524 <= 0)
m.c653 = Constraint(expr= - 1000*m.b51 + m.x441 + m.x525 <= 0)
m.c654 = Constraint(expr= - 1000*m.b52 + m.x442 + m.x526 <= 0)
m.c655 = Constraint(expr= - 1000*m.b53 + m.x443 + m.x527 <= 0)
m.c656 = Constraint(expr= - 1000*m.b54 + m.x444 + m.x528 <= 0)
m.c657 = Constraint(expr= - 1000*m.b55 + m.x445 + m.x529 <= 0)
m.c658 = Constraint(expr= - 1000*m.b56 + m.x446 + m.x530 <= 0)
m.c659 = Constraint(expr= - 1000*m.b57 + m.x384 + m.x559 <= 0)
m.c660 = Constraint(expr= - 1000*m.b58 + m.x385 + m.x560 <= 0)
m.c661 = Constraint(expr= - 1000*m.b59 + m.x386 + m.x561 <= 0)
m.c662 = Constraint(expr= - 1000*m.b60 + m.x387 + m.x562 <= 0)
m.c663 = Constraint(expr= - 1000*m.b61 + m.x388 + m.x563 <= 0)
m.c664 = Constraint(expr= - 1000*m.b62 + m.x389 + m.x564 <= 0)
m.c665 = Constraint(expr= - 1000*m.b63 + m.x390 + m.x565 <= 0)
m.c666 = Constraint(expr= - 1000*m.b64 + m.x391 + m.x566 <= 0)
m.c667 = Constraint(expr= - 1000*m.b65 + m.x392 + m.x567 <= 0)
m.c668 = Constraint(expr= - 1000*m.b66 + m.x393 + m.x568 <= 0)
m.c669 = Constraint(expr= - 1000*m.b67 + m.x394 + m.x569 <= 0)
m.c670 = Constraint(expr= - 1000*m.b68 + m.x395 + m.x570 <= 0)
m.c671 = Constraint(expr= - 1000*m.b69 + m.x396 + m.x571 <= 0)
m.c672 = Constraint(expr= - 1000*m.b70 + m.x397 + m.x572 <= 0)
m.c673 = Constraint(expr= - 1000*m.b71 + m.x356 + m.x447 + m.x489 + m.x531 + m.x594 <= 0)
m.c674 = Constraint(expr= - 1000*m.b72 + m.x357 + m.x448 + m.x490 + m.x532 + m.x595 <= 0)
m.c675 = Constraint(expr= - 1000*m.b73 + m.x358 + m.x449 + m.x491 + m.x533 + m.x596 <= 0)
m.c676 = Constraint(expr= - 1000*m.b74 + m.x359 + m.x450 + m.x492 + m.x534 + m.x597 <= 0)
m.c677 = Constraint(expr= - 1000*m.b75 + m.x360 + m.x451 + m.x493 + m.x535 + m.x598 <= 0)
m.c678 = Constraint(expr= - 1000*m.b76 + m.x361 + m.x452 + m.x494 + m.x536 + m.x599 <= 0)
m.c679 = Constraint(expr= - 1000*m.b77 + m.x362 + m.x453 + m.x495 + m.x537 + m.x600 <= 0)
m.c680 = Constraint(expr= - 1000*m.b78 + m.x363 + m.x398 + m.x454 + m.x496 + m.x538 + m.x573 <= 0)
m.c681 = Constraint(expr= - 1000*m.b79 + m.x364 + m.x399 + m.x455 + m.x497 + m.x539 + m.x574 <= 0)
m.c682 = Constraint(expr= - 1000*m.b80 + m.x365 + m.x400 + m.x456 + m.x498 + m.x540 + m.x575 <= 0)
m.c683 = Constraint(expr= - 1000*m.b81 + m.x366 + m.x401 + m.x457 + m.x499 + m.x541 + m.x576 <= 0)
m.c684 = Constraint(expr= - 1000*m.b82 + m.x367 + m.x402 + m.x458 + m.x500 + m.x542 + m.x577 <= 0)
m.c685 = Constraint(expr= - 1000*m.b83 + m.x368 + m.x403 + m.x459 + m.x501 + m.x543 + m.x578 <= 0)
m.c686 = Constraint(expr= - 1000*m.b84 + m.x369 + m.x404 + m.x460 + m.x502 + m.x544 + m.x579 <= 0)
m.c687 = Constraint(expr= - 1000*m.b85 + m.x370 + m.x405 + m.x461 + m.x503 + m.x545 + m.x580 <= 0)
m.c688 = Constraint(expr= - 1000*m.b86 + m.x371 + m.x406 + m.x462 + m.x504 + m.x546 + m.x581 <= 0)
m.c689 = Constraint(expr= - 1000*m.b87 + m.x372 + m.x407 + m.x463 + m.x505 + m.x547 + m.x582 <= 0)
m.c690 = Constraint(expr= - 1000*m.b88 + m.x373 + m.x408 + m.x464 + m.x506 + m.x548 + m.x583 <= 0)
m.c691 = Constraint(expr= - 1000*m.b89 + m.x374 + m.x409 + m.x465 + m.x507 + m.x549 + m.x584 <= 0)
m.c692 = Constraint(expr= - 1000*m.b90 + m.x375 + m.x410 + m.x466 + m.x508 + m.x550 + m.x585 <= 0)
m.c693 = Constraint(expr= - 1000*m.b91 + m.x376 + m.x411 + m.x467 + m.x509 + m.x551 + m.x586 <= 0)
m.c694 = Constraint(expr= - 1000*m.b92 + m.x412 + m.x468 + m.x552 + m.x587 <= 0)
m.c695 = Constraint(expr= - 1000*m.b93 + m.x413 + m.x469 + m.x553 + m.x588 <= 0)
m.c696 = Constraint(expr= - 1000*m.b94 + m.x414 + m.x470 + m.x554 + m.x589 <= 0)
m.c697 = Constraint(expr= - 1000*m.b95 + m.x415 + m.x471 + m.x555 + m.x590 <= 0)
m.c698 = Constraint(expr= - 1000*m.b96 + m.x416 + m.x472 + m.x556 + m.x591 <= 0)
m.c699 = Constraint(expr= - 1000*m.b97 + m.x417 + m.x473 + m.x557 + m.x592 <= 0)
m.c700 = Constraint(expr= - 1000*m.b98 + m.x418 + m.x474 + m.x558 + m.x593 <= 0)
# NOTE(review): machine-generated block — do not hand-edit.
# c701-c798: lower-bound companions to c603-c700 — the same variable sums must be >= b,
# so each sum is strictly positive whenever its binary is switched on
# (together with the big-M bounds this encodes an on/off semicontinuous pattern —
# verify against the source model).
m.c701 = Constraint(expr= - m.b1 + m.x335 >= 0)
m.c702 = Constraint(expr= - m.b2 + m.x336 >= 0)
m.c703 = Constraint(expr= - m.b3 + m.x337 >= 0)
m.c704 = Constraint(expr= - m.b4 + m.x338 >= 0)
m.c705 = Constraint(expr= - m.b5 + m.x339 >= 0)
m.c706 = Constraint(expr= - m.b6 + m.x340 >= 0)
m.c707 = Constraint(expr= - m.b7 + m.x341 >= 0)
m.c708 = Constraint(expr= - m.b8 + m.x377 >= 0)
m.c709 = Constraint(expr= - m.b9 + m.x378 >= 0)
m.c710 = Constraint(expr= - m.b10 + m.x379 >= 0)
m.c711 = Constraint(expr= - m.b11 + m.x380 >= 0)
m.c712 = Constraint(expr= - m.b12 + m.x381 >= 0)
m.c713 = Constraint(expr= - m.b13 + m.x382 >= 0)
m.c714 = Constraint(expr= - m.b14 + m.x383 >= 0)
m.c715 = Constraint(expr= - m.b15 + m.x419 >= 0)
m.c716 = Constraint(expr= - m.b16 + m.x420 >= 0)
m.c717 = Constraint(expr= - m.b17 + m.x421 >= 0)
m.c718 = Constraint(expr= - m.b18 + m.x422 >= 0)
m.c719 = Constraint(expr= - m.b19 + m.x423 >= 0)
m.c720 = Constraint(expr= - m.b20 + m.x424 >= 0)
m.c721 = Constraint(expr= - m.b21 + m.x425 >= 0)
m.c722 = Constraint(expr= - m.b22 + m.x342 + m.x475 >= 0)
m.c723 = Constraint(expr= - m.b23 + m.x343 + m.x476 >= 0)
m.c724 = Constraint(expr= - m.b24 + m.x344 + m.x477 >= 0)
m.c725 = Constraint(expr= - m.b25 + m.x345 + m.x478 >= 0)
m.c726 = Constraint(expr= - m.b26 + m.x346 + m.x479 >= 0)
m.c727 = Constraint(expr= - m.b27 + m.x347 + m.x480 >= 0)
m.c728 = Constraint(expr= - m.b28 + m.x348 + m.x481 >= 0)
m.c729 = Constraint(expr= - m.b29 + m.x349 + m.x482 >= 0)
m.c730 = Constraint(expr= - m.b30 + m.x350 + m.x483 >= 0)
m.c731 = Constraint(expr= - m.b31 + m.x351 + m.x484 >= 0)
m.c732 = Constraint(expr= - m.b32 + m.x352 + m.x485 >= 0)
m.c733 = Constraint(expr= - m.b33 + m.x353 + m.x486 >= 0)
m.c734 = Constraint(expr= - m.b34 + m.x354 + m.x487 >= 0)
m.c735 = Constraint(expr= - m.b35 + m.x355 + m.x488 >= 0)
m.c736 = Constraint(expr= - m.b36 + m.x426 + m.x510 >= 0)
m.c737 = Constraint(expr= - m.b37 + m.x427 + m.x511 >= 0)
m.c738 = Constraint(expr= - m.b38 + m.x428 + m.x512 >= 0)
m.c739 = Constraint(expr= - m.b39 + m.x429 + m.x513 >= 0)
m.c740 = Constraint(expr= - m.b40 + m.x430 + m.x514 >= 0)
m.c741 = Constraint(expr= - m.b41 + m.x431 + m.x515 >= 0)
m.c742 = Constraint(expr= - m.b42 + m.x432 + m.x516 >= 0)
m.c743 = Constraint(expr= - m.b43 + m.x433 + m.x517 >= 0)
m.c744 = Constraint(expr= - m.b44 + m.x434 + m.x518 >= 0)
m.c745 = Constraint(expr= - m.b45 + m.x435 + m.x519 >= 0)
m.c746 = Constraint(expr= - m.b46 + m.x436 + m.x520 >= 0)
m.c747 = Constraint(expr= - m.b47 + m.x437 + m.x521 >= 0)
m.c748 = Constraint(expr= - m.b48 + m.x438 + m.x522 >= 0)
m.c749 = Constraint(expr= - m.b49 + m.x439 + m.x523 >= 0)
m.c750 = Constraint(expr= - m.b50 + m.x440 + m.x524 >= 0)
m.c751 = Constraint(expr= - m.b51 + m.x441 + m.x525 >= 0)
m.c752 = Constraint(expr= - m.b52 + m.x442 + m.x526 >= 0)
m.c753 = Constraint(expr= - m.b53 + m.x443 + m.x527 >= 0)
m.c754 = Constraint(expr= - m.b54 + m.x444 + m.x528 >= 0)
m.c755 = Constraint(expr= - m.b55 + m.x445 + m.x529 >= 0)
m.c756 = Constraint(expr= - m.b56 + m.x446 + m.x530 >= 0)
m.c757 = Constraint(expr= - m.b57 + m.x384 + m.x559 >= 0)
m.c758 = Constraint(expr= - m.b58 + m.x385 + m.x560 >= 0)
m.c759 = Constraint(expr= - m.b59 + m.x386 + m.x561 >= 0)
m.c760 = Constraint(expr= - m.b60 + m.x387 + m.x562 >= 0)
m.c761 = Constraint(expr= - m.b61 + m.x388 + m.x563 >= 0)
m.c762 = Constraint(expr= - m.b62 + m.x389 + m.x564 >= 0)
m.c763 = Constraint(expr= - m.b63 + m.x390 + m.x565 >= 0)
m.c764 = Constraint(expr= - m.b64 + m.x391 + m.x566 >= 0)
m.c765 = Constraint(expr= - m.b65 + m.x392 + m.x567 >= 0)
m.c766 = Constraint(expr= - m.b66 + m.x393 + m.x568 >= 0)
m.c767 = Constraint(expr= - m.b67 + m.x394 + m.x569 >= 0)
m.c768 = Constraint(expr= - m.b68 + m.x395 + m.x570 >= 0)
m.c769 = Constraint(expr= - m.b69 + m.x396 + m.x571 >= 0)
m.c770 = Constraint(expr= - m.b70 + m.x397 + m.x572 >= 0)
m.c771 = Constraint(expr= - m.b71 + m.x356 + m.x447 + m.x489 + m.x531 + m.x594 >= 0)
m.c772 = Constraint(expr= - m.b72 + m.x357 + m.x448 + m.x490 + m.x532 + m.x595 >= 0)
m.c773 = Constraint(expr= - m.b73 + m.x358 + m.x449 + m.x491 + m.x533 + m.x596 >= 0)
m.c774 = Constraint(expr= - m.b74 + m.x359 + m.x450 + m.x492 + m.x534 + m.x597 >= 0)
m.c775 = Constraint(expr= - m.b75 + m.x360 + m.x451 + m.x493 + m.x535 + m.x598 >= 0)
m.c776 = Constraint(expr= - m.b76 + m.x361 + m.x452 + m.x494 + m.x536 + m.x599 >= 0)
m.c777 = Constraint(expr= - m.b77 + m.x362 + m.x453 + m.x495 + m.x537 + m.x600 >= 0)
m.c778 = Constraint(expr= - m.b78 + m.x363 + m.x398 + m.x454 + m.x496 + m.x538 + m.x573 >= 0)
m.c779 = Constraint(expr= - m.b79 + m.x364 + m.x399 + m.x455 + m.x497 + m.x539 + m.x574 >= 0)
m.c780 = Constraint(expr= - m.b80 + m.x365 + m.x400 + m.x456 + m.x498 + m.x540 + m.x575 >= 0)
m.c781 = Constraint(expr= - m.b81 + m.x366 + m.x401 + m.x457 + m.x499 + m.x541 + m.x576 >= 0)
m.c782 = Constraint(expr= - m.b82 + m.x367 + m.x402 + m.x458 + m.x500 + m.x542 + m.x577 >= 0)
m.c783 = Constraint(expr= - m.b83 + m.x368 + m.x403 + m.x459 + m.x501 + m.x543 + m.x578 >= 0)
m.c784 = Constraint(expr= - m.b84 + m.x369 + m.x404 + m.x460 + m.x502 + m.x544 + m.x579 >= 0)
m.c785 = Constraint(expr= - m.b85 + m.x370 + m.x405 + m.x461 + m.x503 + m.x545 + m.x580 >= 0)
m.c786 = Constraint(expr= - m.b86 + m.x371 + m.x406 + m.x462 + m.x504 + m.x546 + m.x581 >= 0)
m.c787 = Constraint(expr= - m.b87 + m.x372 + m.x407 + m.x463 + m.x505 + m.x547 + m.x582 >= 0)
m.c788 = Constraint(expr= - m.b88 + m.x373 + m.x408 + m.x464 + m.x506 + m.x548 + m.x583 >= 0)
m.c789 = Constraint(expr= - m.b89 + m.x374 + m.x409 + m.x465 + m.x507 + m.x549 + m.x584 >= 0)
m.c790 = Constraint(expr= - m.b90 + m.x375 + m.x410 + m.x466 + m.x508 + m.x550 + m.x585 >= 0)
m.c791 = Constraint(expr= - m.b91 + m.x376 + m.x411 + m.x467 + m.x509 + m.x551 + m.x586 >= 0)
m.c792 = Constraint(expr= - m.b92 + m.x412 + m.x468 + m.x552 + m.x587 >= 0)
m.c793 = Constraint(expr= - m.b93 + m.x413 + m.x469 + m.x553 + m.x588 >= 0)
m.c794 = Constraint(expr= - m.b94 + m.x414 + m.x470 + m.x554 + m.x589 >= 0)
m.c795 = Constraint(expr= - m.b95 + m.x415 + m.x471 + m.x555 + m.x590 >= 0)
m.c796 = Constraint(expr= - m.b96 + m.x416 + m.x472 + m.x556 + m.x591 >= 0)
m.c797 = Constraint(expr= - m.b97 + m.x417 + m.x473 + m.x557 + m.x592 >= 0)
m.c798 = Constraint(expr= - m.b98 + m.x418 + m.x474 + m.x558 + m.x593 >= 0)
# NOTE(review): machine-generated block — do not hand-edit.
# c799-c889: lower bounds on successive differences of the chain x667..x674:
# each step (x_{k+1} - x_k) must be at least 0.002 times the associated variable sum.
# c890-c903: matching upper bounds with coefficient 0.02 on the same differences
# (presumably min/max rate constraints along a stage sequence — verify against generator).
m.c799 = Constraint(expr= - 0.002*m.x335 - m.x667 + m.x668 >= 0)
m.c800 = Constraint(expr= - 0.002*m.x336 - m.x668 + m.x669 >= 0)
m.c801 = Constraint(expr= - 0.002*m.x337 - m.x669 + m.x670 >= 0)
m.c802 = Constraint(expr= - 0.002*m.x338 - m.x670 + m.x671 >= 0)
m.c803 = Constraint(expr= - 0.002*m.x339 - m.x671 + m.x672 >= 0)
m.c804 = Constraint(expr= - 0.002*m.x340 - m.x672 + m.x673 >= 0)
m.c805 = Constraint(expr= - 0.002*m.x341 - m.x673 + m.x674 >= 0)
m.c806 = Constraint(expr= - 0.002*m.x377 - m.x667 + m.x668 >= 0)
m.c807 = Constraint(expr= - 0.002*m.x378 - m.x668 + m.x669 >= 0)
m.c808 = Constraint(expr= - 0.002*m.x379 - m.x669 + m.x670 >= 0)
m.c809 = Constraint(expr= - 0.002*m.x380 - m.x670 + m.x671 >= 0)
m.c810 = Constraint(expr= - 0.002*m.x381 - m.x671 + m.x672 >= 0)
m.c811 = Constraint(expr= - 0.002*m.x382 - m.x672 + m.x673 >= 0)
m.c812 = Constraint(expr= - 0.002*m.x383 - m.x673 + m.x674 >= 0)
m.c813 = Constraint(expr= - 0.002*m.x419 - m.x667 + m.x668 >= 0)
m.c814 = Constraint(expr= - 0.002*m.x420 - m.x668 + m.x669 >= 0)
m.c815 = Constraint(expr= - 0.002*m.x421 - m.x669 + m.x670 >= 0)
m.c816 = Constraint(expr= - 0.002*m.x422 - m.x670 + m.x671 >= 0)
m.c817 = Constraint(expr= - 0.002*m.x423 - m.x671 + m.x672 >= 0)
m.c818 = Constraint(expr= - 0.002*m.x424 - m.x672 + m.x673 >= 0)
m.c819 = Constraint(expr= - 0.002*m.x425 - m.x673 + m.x674 >= 0)
m.c820 = Constraint(expr= - 0.002*m.x342 - 0.002*m.x475 - m.x667 + m.x668 >= 0)
m.c821 = Constraint(expr= - 0.002*m.x343 - 0.002*m.x476 - m.x668 + m.x669 >= 0)
m.c822 = Constraint(expr= - 0.002*m.x344 - 0.002*m.x477 - m.x669 + m.x670 >= 0)
m.c823 = Constraint(expr= - 0.002*m.x345 - 0.002*m.x478 - m.x670 + m.x671 >= 0)
m.c824 = Constraint(expr= - 0.002*m.x346 - 0.002*m.x479 - m.x671 + m.x672 >= 0)
m.c825 = Constraint(expr= - 0.002*m.x347 - 0.002*m.x480 - m.x672 + m.x673 >= 0)
m.c826 = Constraint(expr= - 0.002*m.x348 - 0.002*m.x481 - m.x673 + m.x674 >= 0)
m.c827 = Constraint(expr= - 0.002*m.x349 - 0.002*m.x482 - m.x667 + m.x668 >= 0)
m.c828 = Constraint(expr= - 0.002*m.x350 - 0.002*m.x483 - m.x668 + m.x669 >= 0)
m.c829 = Constraint(expr= - 0.002*m.x351 - 0.002*m.x484 - m.x669 + m.x670 >= 0)
m.c830 = Constraint(expr= - 0.002*m.x352 - 0.002*m.x485 - m.x670 + m.x671 >= 0)
m.c831 = Constraint(expr= - 0.002*m.x353 - 0.002*m.x486 - m.x671 + m.x672 >= 0)
m.c832 = Constraint(expr= - 0.002*m.x354 - 0.002*m.x487 - m.x672 + m.x673 >= 0)
m.c833 = Constraint(expr= - 0.002*m.x355 - 0.002*m.x488 - m.x673 + m.x674 >= 0)
m.c834 = Constraint(expr= - 0.002*m.x426 - 0.002*m.x510 - m.x667 + m.x668 >= 0)
m.c835 = Constraint(expr= - 0.002*m.x427 - 0.002*m.x511 - m.x668 + m.x669 >= 0)
m.c836 = Constraint(expr= - 0.002*m.x428 - 0.002*m.x512 - m.x669 + m.x670 >= 0)
m.c837 = Constraint(expr= - 0.002*m.x429 - 0.002*m.x513 - m.x670 + m.x671 >= 0)
m.c838 = Constraint(expr= - 0.002*m.x430 - 0.002*m.x514 - m.x671 + m.x672 >= 0)
m.c839 = Constraint(expr= - 0.002*m.x431 - 0.002*m.x515 - m.x672 + m.x673 >= 0)
m.c840 = Constraint(expr= - 0.002*m.x432 - 0.002*m.x516 - m.x673 + m.x674 >= 0)
m.c841 = Constraint(expr= - 0.002*m.x433 - 0.002*m.x517 - m.x667 + m.x668 >= 0)
m.c842 = Constraint(expr= - 0.002*m.x434 - 0.002*m.x518 - m.x668 + m.x669 >= 0)
m.c843 = Constraint(expr= - 0.002*m.x435 - 0.002*m.x519 - m.x669 + m.x670 >= 0)
m.c844 = Constraint(expr= - 0.002*m.x436 - 0.002*m.x520 - m.x670 + m.x671 >= 0)
m.c845 = Constraint(expr= - 0.002*m.x437 - 0.002*m.x521 - m.x671 + m.x672 >= 0)
m.c846 = Constraint(expr= - 0.002*m.x438 - 0.002*m.x522 - m.x672 + m.x673 >= 0)
m.c847 = Constraint(expr= - 0.002*m.x439 - 0.002*m.x523 - m.x673 + m.x674 >= 0)
m.c848 = Constraint(expr= - 0.002*m.x440 - 0.002*m.x524 - m.x667 + m.x668 >= 0)
m.c849 = Constraint(expr= - 0.002*m.x441 - 0.002*m.x525 - m.x668 + m.x669 >= 0)
m.c850 = Constraint(expr= - 0.002*m.x442 - 0.002*m.x526 - m.x669 + m.x670 >= 0)
m.c851 = Constraint(expr= - 0.002*m.x443 - 0.002*m.x527 - m.x670 + m.x671 >= 0)
m.c852 = Constraint(expr= - 0.002*m.x444 - 0.002*m.x528 - m.x671 + m.x672 >= 0)
m.c853 = Constraint(expr= - 0.002*m.x445 - 0.002*m.x529 - m.x672 + m.x673 >= 0)
m.c854 = Constraint(expr= - 0.002*m.x446 - 0.002*m.x530 - m.x673 + m.x674 >= 0)
m.c855 = Constraint(expr= - 0.002*m.x384 - 0.002*m.x559 - m.x667 + m.x668 >= 0)
m.c856 = Constraint(expr= - 0.002*m.x385 - 0.002*m.x560 - m.x668 + m.x669 >= 0)
m.c857 = Constraint(expr= - 0.002*m.x386 - 0.002*m.x561 - m.x669 + m.x670 >= 0)
m.c858 = Constraint(expr= - 0.002*m.x387 - 0.002*m.x562 - m.x670 + m.x671 >= 0)
m.c859 = Constraint(expr= - 0.002*m.x388 - 0.002*m.x563 - m.x671 + m.x672 >= 0)
m.c860 = Constraint(expr= - 0.002*m.x389 - 0.002*m.x564 - m.x672 + m.x673 >= 0)
m.c861 = Constraint(expr= - 0.002*m.x390 - 0.002*m.x565 - m.x673 + m.x674 >= 0)
m.c862 = Constraint(expr= - 0.002*m.x391 - 0.002*m.x566 - m.x667 + m.x668 >= 0)
m.c863 = Constraint(expr= - 0.002*m.x392 - 0.002*m.x567 - m.x668 + m.x669 >= 0)
m.c864 = Constraint(expr= - 0.002*m.x393 - 0.002*m.x568 - m.x669 + m.x670 >= 0)
m.c865 = Constraint(expr= - 0.002*m.x394 - 0.002*m.x569 - m.x670 + m.x671 >= 0)
m.c866 = Constraint(expr= - 0.002*m.x395 - 0.002*m.x570 - m.x671 + m.x672 >= 0)
m.c867 = Constraint(expr= - 0.002*m.x396 - 0.002*m.x571 - m.x672 + m.x673 >= 0)
m.c868 = Constraint(expr= - 0.002*m.x397 - 0.002*m.x572 - m.x673 + m.x674 >= 0)
m.c869 = Constraint(expr= - 0.002*m.x363 - 0.002*m.x370 - 0.002*m.x398 - 0.002*m.x405 - 0.002*m.x454 - 0.002*m.x461
    - 0.002*m.x496 - 0.002*m.x503 - 0.002*m.x538 - 0.002*m.x545 - 0.002*m.x573 - 0.002*m.x580
    - m.x667 + m.x668 >= 0)
m.c870 = Constraint(expr= - 0.002*m.x364 - 0.002*m.x371 - 0.002*m.x399 - 0.002*m.x406 - 0.002*m.x455 - 0.002*m.x462
    - 0.002*m.x497 - 0.002*m.x504 - 0.002*m.x539 - 0.002*m.x546 - 0.002*m.x574 - 0.002*m.x581
    - m.x668 + m.x669 >= 0)
m.c871 = Constraint(expr= - 0.002*m.x365 - 0.002*m.x372 - 0.002*m.x400 - 0.002*m.x407 - 0.002*m.x456 - 0.002*m.x463
    - 0.002*m.x498 - 0.002*m.x505 - 0.002*m.x540 - 0.002*m.x547 - 0.002*m.x575 - 0.002*m.x582
    - m.x669 + m.x670 >= 0)
m.c872 = Constraint(expr= - 0.002*m.x366 - 0.002*m.x373 - 0.002*m.x401 - 0.002*m.x408 - 0.002*m.x457 - 0.002*m.x464
    - 0.002*m.x499 - 0.002*m.x506 - 0.002*m.x541 - 0.002*m.x548 - 0.002*m.x576 - 0.002*m.x583
    - m.x670 + m.x671 >= 0)
m.c873 = Constraint(expr= - 0.002*m.x367 - 0.002*m.x374 - 0.002*m.x402 - 0.002*m.x409 - 0.002*m.x458 - 0.002*m.x465
    - 0.002*m.x500 - 0.002*m.x507 - 0.002*m.x542 - 0.002*m.x549 - 0.002*m.x577 - 0.002*m.x584
    - m.x671 + m.x672 >= 0)
m.c874 = Constraint(expr= - 0.002*m.x368 - 0.002*m.x375 - 0.002*m.x403 - 0.002*m.x410 - 0.002*m.x459 - 0.002*m.x466
    - 0.002*m.x501 - 0.002*m.x508 - 0.002*m.x543 - 0.002*m.x550 - 0.002*m.x578 - 0.002*m.x585
    - m.x672 + m.x673 >= 0)
m.c875 = Constraint(expr= - 0.002*m.x369 - 0.002*m.x376 - 0.002*m.x404 - 0.002*m.x411 - 0.002*m.x460 - 0.002*m.x467
    - 0.002*m.x502 - 0.002*m.x509 - 0.002*m.x544 - 0.002*m.x551 - 0.002*m.x579 - 0.002*m.x586
    - m.x673 + m.x674 >= 0)
m.c876 = Constraint(expr= - 0.002*m.x356 - 0.002*m.x363 - 0.002*m.x398 - 0.002*m.x447 - 0.002*m.x454 - 0.002*m.x489
    - 0.002*m.x496 - 0.002*m.x531 - 0.002*m.x538 - 0.002*m.x573 - 0.002*m.x594 - m.x667 + m.x668
    >= 0)
m.c877 = Constraint(expr= - 0.002*m.x357 - 0.002*m.x364 - 0.002*m.x399 - 0.002*m.x448 - 0.002*m.x455 - 0.002*m.x490
    - 0.002*m.x497 - 0.002*m.x532 - 0.002*m.x539 - 0.002*m.x574 - 0.002*m.x595 - m.x668 + m.x669
    >= 0)
m.c878 = Constraint(expr= - 0.002*m.x358 - 0.002*m.x365 - 0.002*m.x400 - 0.002*m.x449 - 0.002*m.x456 - 0.002*m.x491
    - 0.002*m.x498 - 0.002*m.x533 - 0.002*m.x540 - 0.002*m.x575 - 0.002*m.x596 - m.x669 + m.x670
    >= 0)
m.c879 = Constraint(expr= - 0.002*m.x359 - 0.002*m.x366 - 0.002*m.x401 - 0.002*m.x450 - 0.002*m.x457 - 0.002*m.x492
    - 0.002*m.x499 - 0.002*m.x534 - 0.002*m.x541 - 0.002*m.x576 - 0.002*m.x597 - m.x670 + m.x671
    >= 0)
m.c880 = Constraint(expr= - 0.002*m.x360 - 0.002*m.x367 - 0.002*m.x402 - 0.002*m.x451 - 0.002*m.x458 - 0.002*m.x493
    - 0.002*m.x500 - 0.002*m.x535 - 0.002*m.x542 - 0.002*m.x577 - 0.002*m.x598 - m.x671 + m.x672
    >= 0)
m.c881 = Constraint(expr= - 0.002*m.x361 - 0.002*m.x368 - 0.002*m.x403 - 0.002*m.x452 - 0.002*m.x459 - 0.002*m.x494
    - 0.002*m.x501 - 0.002*m.x536 - 0.002*m.x543 - 0.002*m.x578 - 0.002*m.x599 - m.x672 + m.x673
    >= 0)
m.c882 = Constraint(expr= - 0.002*m.x362 - 0.002*m.x369 - 0.002*m.x404 - 0.002*m.x453 - 0.002*m.x460 - 0.002*m.x495
    - 0.002*m.x502 - 0.002*m.x537 - 0.002*m.x544 - 0.002*m.x579 - 0.002*m.x600 - m.x673 + m.x674
    >= 0)
m.c883 = Constraint(expr= - 0.002*m.x370 - 0.002*m.x405 - 0.002*m.x412 - 0.002*m.x461 - 0.002*m.x468 - 0.002*m.x503
    - 0.002*m.x545 - 0.002*m.x552 - 0.002*m.x580 - 0.002*m.x587 - m.x667 + m.x668 >= 0)
m.c884 = Constraint(expr= - 0.002*m.x371 - 0.002*m.x406 - 0.002*m.x413 - 0.002*m.x462 - 0.002*m.x469 - 0.002*m.x504
    - 0.002*m.x546 - 0.002*m.x553 - 0.002*m.x581 - 0.002*m.x588 - m.x668 + m.x669 >= 0)
m.c885 = Constraint(expr= - 0.002*m.x372 - 0.002*m.x407 - 0.002*m.x414 - 0.002*m.x463 - 0.002*m.x470 - 0.002*m.x505
    - 0.002*m.x547 - 0.002*m.x554 - 0.002*m.x582 - 0.002*m.x589 - m.x669 + m.x670 >= 0)
m.c886 = Constraint(expr= - 0.002*m.x373 - 0.002*m.x408 - 0.002*m.x415 - 0.002*m.x464 - 0.002*m.x471 - 0.002*m.x506
    - 0.002*m.x548 - 0.002*m.x555 - 0.002*m.x583 - 0.002*m.x590 - m.x670 + m.x671 >= 0)
m.c887 = Constraint(expr= - 0.002*m.x374 - 0.002*m.x409 - 0.002*m.x416 - 0.002*m.x465 - 0.002*m.x472 - 0.002*m.x507
    - 0.002*m.x549 - 0.002*m.x556 - 0.002*m.x584 - 0.002*m.x591 - m.x671 + m.x672 >= 0)
m.c888 = Constraint(expr= - 0.002*m.x375 - 0.002*m.x410 - 0.002*m.x417 - 0.002*m.x466 - 0.002*m.x473 - 0.002*m.x508
    - 0.002*m.x550 - 0.002*m.x557 - 0.002*m.x585 - 0.002*m.x592 - m.x672 + m.x673 >= 0)
m.c889 = Constraint(expr= - 0.002*m.x376 - 0.002*m.x411 - 0.002*m.x418 - 0.002*m.x467 - 0.002*m.x474 - 0.002*m.x509
    - 0.002*m.x551 - 0.002*m.x558 - 0.002*m.x586 - 0.002*m.x593 - m.x673 + m.x674 >= 0)
# c890-c903: upper-bound versions (coefficient 0.02, direction <=) of the c876-c889 sums.
m.c890 = Constraint(expr= - 0.02*m.x356 - 0.02*m.x363 - 0.02*m.x398 - 0.02*m.x447 - 0.02*m.x454 - 0.02*m.x489
    - 0.02*m.x496 - 0.02*m.x531 - 0.02*m.x538 - 0.02*m.x573 - 0.02*m.x594 - m.x667 + m.x668 <= 0)
m.c891 = Constraint(expr= - 0.02*m.x357 - 0.02*m.x364 - 0.02*m.x399 - 0.02*m.x448 - 0.02*m.x455 - 0.02*m.x490
    - 0.02*m.x497 - 0.02*m.x532 - 0.02*m.x539 - 0.02*m.x574 - 0.02*m.x595 - m.x668 + m.x669 <= 0)
m.c892 = Constraint(expr= - 0.02*m.x358 - 0.02*m.x365 - 0.02*m.x400 - 0.02*m.x449 - 0.02*m.x456 - 0.02*m.x491
    - 0.02*m.x498 - 0.02*m.x533 - 0.02*m.x540 - 0.02*m.x575 - 0.02*m.x596 - m.x669 + m.x670 <= 0)
m.c893 = Constraint(expr= - 0.02*m.x359 - 0.02*m.x366 - 0.02*m.x401 - 0.02*m.x450 - 0.02*m.x457 - 0.02*m.x492
    - 0.02*m.x499 - 0.02*m.x534 - 0.02*m.x541 - 0.02*m.x576 - 0.02*m.x597 - m.x670 + m.x671 <= 0)
m.c894 = Constraint(expr= - 0.02*m.x360 - 0.02*m.x367 - 0.02*m.x402 - 0.02*m.x451 - 0.02*m.x458 - 0.02*m.x493
    - 0.02*m.x500 - 0.02*m.x535 - 0.02*m.x542 - 0.02*m.x577 - 0.02*m.x598 - m.x671 + m.x672 <= 0)
m.c895 = Constraint(expr= - 0.02*m.x361 - 0.02*m.x368 - 0.02*m.x403 - 0.02*m.x452 - 0.02*m.x459 - 0.02*m.x494
    - 0.02*m.x501 - 0.02*m.x536 - 0.02*m.x543 - 0.02*m.x578 - 0.02*m.x599 - m.x672 + m.x673 <= 0)
m.c896 = Constraint(expr= - 0.02*m.x362 - 0.02*m.x369 - 0.02*m.x404 - 0.02*m.x453 - 0.02*m.x460 - 0.02*m.x495
    - 0.02*m.x502 - 0.02*m.x537 - 0.02*m.x544 - 0.02*m.x579 - 0.02*m.x600 - m.x673 + m.x674 <= 0)
m.c897 = Constraint(expr= - 0.02*m.x370 - 0.02*m.x405 - 0.02*m.x412 - 0.02*m.x461 - 0.02*m.x468 - 0.02*m.x503
    - 0.02*m.x545 - 0.02*m.x552 - 0.02*m.x580 - 0.02*m.x587 - m.x667 + m.x668 <= 0)
m.c898 = Constraint(expr= - 0.02*m.x371 - 0.02*m.x406 - 0.02*m.x413 - 0.02*m.x462 - 0.02*m.x469 - 0.02*m.x504
    - 0.02*m.x546 - 0.02*m.x553 - 0.02*m.x581 - 0.02*m.x588 - m.x668 + m.x669 <= 0)
m.c899 = Constraint(expr= - 0.02*m.x372 - 0.02*m.x407 - 0.02*m.x414 - 0.02*m.x463 - 0.02*m.x470 - 0.02*m.x505
    - 0.02*m.x547 - 0.02*m.x554 - 0.02*m.x582 - 0.02*m.x589 - m.x669 + m.x670 <= 0)
m.c900 = Constraint(expr= - 0.02*m.x373 - 0.02*m.x408 - 0.02*m.x415 - 0.02*m.x464 - 0.02*m.x471 - 0.02*m.x506
    - 0.02*m.x548 - 0.02*m.x555 - 0.02*m.x583 - 0.02*m.x590 - m.x670 + m.x671 <= 0)
m.c901 = Constraint(expr= - 0.02*m.x374 - 0.02*m.x409 - 0.02*m.x416 - 0.02*m.x465 - 0.02*m.x472 - 0.02*m.x507
    - 0.02*m.x549 - 0.02*m.x556 - 0.02*m.x584 - 0.02*m.x591 - m.x671 + m.x672 <= 0)
m.c902 = Constraint(expr= - 0.02*m.x375 - 0.02*m.x410 - 0.02*m.x417 - 0.02*m.x466 - 0.02*m.x473 - 0.02*m.x508
    - 0.02*m.x550 - 0.02*m.x557 - 0.02*m.x585 - 0.02*m.x592 - m.x672 + m.x673 <= 0)
m.c903 = Constraint(expr= - 0.02*m.x376 - 0.02*m.x411 - 0.02*m.x418 - 0.02*m.x467 - 0.02*m.x474 - 0.02*m.x509
    - 0.02*m.x551 - 0.02*m.x558 - 0.02*m.x586 - 0.02*m.x593 - m.x673 + m.x674 <= 0)
# NOTE(review): machine-generated block — do not hand-edit.
# c904-c917: lower bounds tying x667..x674 to weighted sums of binaries b99..b128
# (weights 4/8 and 1/5/9 — presumably level selection; verify against generator).
m.c904 = Constraint(expr= m.x667 >= 0)
m.c905 = Constraint(expr= - 4*m.b101 + m.x668 >= 0)
m.c906 = Constraint(expr= - 4*m.b103 - 8*m.b104 + m.x669 >= 0)
m.c907 = Constraint(expr= - 4*m.b106 - 8*m.b107 + m.x670 >= 0)
m.c908 = Constraint(expr= - 4*m.b109 - 8*m.b110 + m.x671 >= 0)
m.c909 = Constraint(expr= - 4*m.b111 - 8*m.b112 + m.x672 >= 0)
m.c910 = Constraint(expr= - 8*m.b113 + m.x673 >= 0)
m.c911 = Constraint(expr= - m.b114 + m.x668 >= 0)
m.c912 = Constraint(expr= - m.b115 - 5*m.b116 + m.x669 >= 0)
m.c913 = Constraint(expr= - m.b117 - 5*m.b118 - 9*m.b119 + m.x670 >= 0)
m.c914 = Constraint(expr= - m.b120 - 5*m.b121 - 9*m.b122 + m.x671 >= 0)
m.c915 = Constraint(expr= - m.b123 - 5*m.b124 - 9*m.b125 + m.x672 >= 0)
m.c916 = Constraint(expr= - 5*m.b126 - 9*m.b127 + m.x673 >= 0)
m.c917 = Constraint(expr= - 9*m.b128 + m.x674 >= 0)
# c918-c922: weighted precedence/ordering inequalities among the same binaries.
m.c918 = Constraint(expr= m.b99 + 2*m.b100 + 3*m.b102 + 4*m.b105 + 5*m.b108 - 2*m.b114 - 3*m.b115 - 4*m.b117
    - 5*m.b120 - 6*m.b123 <= -1)
m.c919 = Constraint(expr= 2*m.b101 + 3*m.b103 + 4*m.b106 + 5*m.b109 + 6*m.b111 - 3*m.b116 - 4*m.b118 - 5*m.b121
    - 6*m.b124 - 7*m.b126 <= -1)
m.c920 = Constraint(expr= 3*m.b104 + 4*m.b107 + 5*m.b110 + 6*m.b112 + 7*m.b113 - 4*m.b119 - 5*m.b122 - 6*m.b125
    - 7*m.b127 - 8*m.b128 <= -1)
m.c921 = Constraint(expr= - 2*m.b101 - 3*m.b103 - 4*m.b106 - 5*m.b109 - 6*m.b111 + 2*m.b114 + 3*m.b115 + 4*m.b117
    + 5*m.b120 + 6*m.b123 <= 0)
m.c922 = Constraint(expr= - 3*m.b104 - 4*m.b107 - 5*m.b110 - 6*m.b112 - 7*m.b113 + 3*m.b116 + 4*m.b118 + 5*m.b121
    + 6*m.b124 + 7*m.b126 <= 0)
# NOTE(review): machine-generated block — do not hand-edit.
# c923-c937: big-M (activated when the binary is 1) lower bounds relating x682..x684
# to individual chain values x667..x673.
m.c923 = Constraint(expr= - 12*m.b99 - m.x667 + m.x682 >= -12)
m.c924 = Constraint(expr= - 12*m.b100 - m.x668 + m.x682 >= -12)
m.c925 = Constraint(expr= - 12*m.b102 - m.x669 + m.x682 >= -12)
m.c926 = Constraint(expr= - 12*m.b105 - m.x670 + m.x682 >= -12)
m.c927 = Constraint(expr= - 12*m.b108 - m.x671 + m.x682 >= -12)
m.c928 = Constraint(expr= - 8*m.b101 - m.x668 + m.x683 >= -12)
m.c929 = Constraint(expr= - 8*m.b103 - m.x669 + m.x683 >= -12)
m.c930 = Constraint(expr= - 8*m.b106 - m.x670 + m.x683 >= -12)
m.c931 = Constraint(expr= - 8*m.b109 - m.x671 + m.x683 >= -12)
m.c932 = Constraint(expr= - 8*m.b111 - m.x672 + m.x683 >= -12)
m.c933 = Constraint(expr= - 4*m.b104 - m.x669 + m.x684 >= -12)
m.c934 = Constraint(expr= - 4*m.b107 - m.x670 + m.x684 >= -12)
m.c935 = Constraint(expr= - 4*m.b110 - m.x671 + m.x684 >= -12)
m.c936 = Constraint(expr= - 4*m.b112 - m.x672 + m.x684 >= -12)
m.c937 = Constraint(expr= - 4*m.b113 - m.x673 + m.x684 >= -12)
# c938-c982: pairwise big-M bounds on x685..x687 versus differences of chain values,
# each guarded by a pair of binaries (constraint binds only when both binaries are 1).
m.c938 = Constraint(expr= - 11*m.b99 - 11*m.b114 + m.x667 - m.x668 + m.x685 >= -22)
m.c939 = Constraint(expr= - 11*m.b99 - 11*m.b115 + m.x667 - m.x669 + m.x685 >= -22)
m.c940 = Constraint(expr= - 11*m.b99 - 11*m.b117 + m.x667 - m.x670 + m.x685 >= -22)
m.c941 = Constraint(expr= - 11*m.b99 - 11*m.b120 + m.x667 - m.x671 + m.x685 >= -22)
m.c942 = Constraint(expr= - 11*m.b99 - 11*m.b123 + m.x667 - m.x672 + m.x685 >= -22)
m.c943 = Constraint(expr= - 11*m.b100 - 11*m.b115 + m.x668 - m.x669 + m.x685 >= -22)
m.c944 = Constraint(expr= - 11*m.b100 - 11*m.b117 + m.x668 - m.x670 + m.x685 >= -22)
m.c945 = Constraint(expr= - 11*m.b100 - 11*m.b120 + m.x668 - m.x671 + m.x685 >= -22)
m.c946 = Constraint(expr= - 11*m.b100 - 11*m.b123 + m.x668 - m.x672 + m.x685 >= -22)
m.c947 = Constraint(expr= - 11*m.b102 - 11*m.b117 + m.x669 - m.x670 + m.x685 >= -22)
m.c948 = Constraint(expr= - 11*m.b102 - 11*m.b120 + m.x669 - m.x671 + m.x685 >= -22)
m.c949 = Constraint(expr= - 11*m.b102 - 11*m.b123 + m.x669 - m.x672 + m.x685 >= -22)
m.c950 = Constraint(expr= - 11*m.b105 - 11*m.b120 + m.x670 - m.x671 + m.x685 >= -22)
m.c951 = Constraint(expr= - 11*m.b105 - 11*m.b123 + m.x670 - m.x672 + m.x685 >= -22)
m.c952 = Constraint(expr= - 11*m.b108 - 11*m.b123 + m.x671 - m.x672 + m.x685 >= -22)
m.c953 = Constraint(expr= - 11*m.b101 - 11*m.b116 + m.x668 - m.x669 + m.x686 >= -22)
m.c954 = Constraint(expr= - 11*m.b101 - 11*m.b118 + m.x668 - m.x670 + m.x686 >= -22)
m.c955 = Constraint(expr= - 11*m.b101 - 11*m.b121 + m.x668 - m.x671 + m.x686 >= -22)
m.c956 = Constraint(expr= - 11*m.b101 - 11*m.b124 + m.x668 - m.x672 + m.x686 >= -22)
m.c957 = Constraint(expr= - 11*m.b101 - 11*m.b126 + m.x668 - m.x673 + m.x686 >= -22)
m.c958 = Constraint(expr= - 11*m.b103 - 11*m.b118 + m.x669 - m.x670 + m.x686 >= -22)
m.c959 = Constraint(expr= - 11*m.b103 - 11*m.b121 + m.x669 - m.x671 + m.x686 >= -22)
m.c960 = Constraint(expr= - 11*m.b103 - 11*m.b124 + m.x669 - m.x672 + m.x686 >= -22)
m.c961 = Constraint(expr= - 11*m.b103 - 11*m.b126 + m.x669 - m.x673 + m.x686 >= -22)
m.c962 = Constraint(expr= - 11*m.b106 - 11*m.b121 + m.x670 - m.x671 + m.x686 >= -22)
m.c963 = Constraint(expr= - 11*m.b106 - 11*m.b124 + m.x670 - m.x672 + m.x686 >= -22)
m.c964 = Constraint(expr= - 11*m.b106 - 11*m.b126 + m.x670 - m.x673 + m.x686 >= -22)
m.c965 = Constraint(expr= - 11*m.b109 - 11*m.b124 + m.x671 - m.x672 + m.x686 >= -22)
m.c966 = Constraint(expr= - 11*m.b109 - 11*m.b126 + m.x671 - m.x673 + m.x686 >= -22)
m.c967 = Constraint(expr= - 11*m.b111 - 11*m.b126 + m.x672 - m.x673 + m.x686 >= -22)
m.c968 = Constraint(expr= - 11*m.b104 - 11*m.b119 + m.x669 - m.x670 + m.x687 >= -22)
m.c969 = Constraint(expr= - 11*m.b104 - 11*m.b122 + m.x669 - m.x671 + m.x687 >= -22)
m.c970 = Constraint(expr= - 11*m.b104 - 11*m.b125 + m.x669 - m.x672 + m.x687 >= -22)
m.c971 = Constraint(expr= - 11*m.b104 - 11*m.b127 + m.x669 - m.x673 + m.x687 >= -22)
m.c972 = Constraint(expr= - 11*m.b104 - 11*m.b128 + m.x669 - m.x674 + m.x687 >= -22)
m.c973 = Constraint(expr= - 11*m.b107 - 11*m.b122 + m.x670 - m.x671 + m.x687 >= -22)
m.c974 = Constraint(expr= - 11*m.b107 - 11*m.b125 + m.x670 - m.x672 + m.x687 >= -22)
m.c975 = Constraint(expr= - 11*m.b107 - 11*m.b127 + m.x670 - m.x673 + m.x687 >= -22)
m.c976 = Constraint(expr= - 11*m.b107 - 11*m.b128 + m.x670 - m.x674 + m.x687 >= -22)
m.c977 = Constraint(expr= - 11*m.b110 - 11*m.b125 + m.x671 - m.x672 + m.x687 >= -22)
m.c978 = Constraint(expr= - 11*m.b110 - 11*m.b127 + m.x671 - m.x673 + m.x687 >= -22)
m.c979 = Constraint(expr= - 11*m.b110 - 11*m.b128 + m.x671 - m.x674 + m.x687 >= -22)
m.c980 = Constraint(expr= - 11*m.b112 - 11*m.b127 + m.x672 - m.x673 + m.x687 >= -22)
m.c981 = Constraint(expr= - 11*m.b112 - 11*m.b128 + m.x672 - m.x674 + m.x687 >= -22)
m.c982 = Constraint(expr= - 11*m.b113 - 11*m.b128 + m.x673 - m.x674 + m.x687 >= -22)
# c983-c989: slack definitions x675..x681 = successive differences of x667..x674.
m.c983 = Constraint(expr= m.x667 - m.x668 + m.x675 == 0)
m.c984 = Constraint(expr= m.x668 - m.x669 + m.x676 == 0)
m.c985 = Constraint(expr= m.x669 - m.x670 + m.x677 == 0)
m.c986 = Constraint(expr= m.x670 - m.x671 + m.x678 == 0)
m.c987 = Constraint(expr= m.x671 - m.x672 + m.x679 == 0)
m.c988 = Constraint(expr= m.x672 - m.x673 + m.x680 == 0)
m.c989 = Constraint(expr= m.x673 - m.x674 + m.x681 == 0)
m.c990 = Constraint(expr=-m.x601*m.x152 + m.x343 == 0)
m.c991 = Constraint(expr=-m.x602*m.x153 + m.x344 == 0)
m.c992 = Constraint(expr=-m.x603*m.x154 + m.x345 == 0)
m.c993 = Constraint(expr=-m.x604*m.x155 + m.x346 == 0)
m.c994 = Constraint(expr=-m.x605*m.x156 + m.x347 == 0)
m.c995 = Constraint(expr=-m.x606*m.x157 + m.x348 == 0)
m.c996 = Constraint(expr=-m.x607*m.x152 + m.x350 == 0)
m.c997 = Constraint(expr=-m.x608*m.x153 + m.x351 == 0)
m.c998 = Constraint(expr=-m.x609*m.x154 + m.x352 == 0)
m.c999 = Constraint(expr=-m.x610*m.x155 + m.x353 == 0)
m.c1000 = Constraint(expr=-m.x611*m.x156 + m.x354 == 0)
m.c1001 = Constraint(expr=-m.x612*m.x157 + m.x355 == 0)
m.c1002 = Constraint(expr=-m.x643*m.x160 + m.x357 == 0)
m.c1003 = Constraint(expr=-m.x644*m.x161 + m.x358 == 0)
m.c1004 = Constraint(expr=-m.x645*m.x162 + m.x359 == 0)
m.c1005 = Constraint(expr=-m.x646*m.x163 + m.x360 == 0)
m.c1006 = Constraint(expr=-m.x647*m.x164 + m.x361 == 0)
m.c1007 = Constraint(expr=-m.x648*m.x165 + m.x362 == 0)
m.c1008 = Constraint(expr=-m.x649*m.x168 + m.x364 == 0)
m.c1009 = Constraint(expr=-m.x650*m.x169 + m.x365 == 0)
m.c1010 = Constraint(expr=-m.x651*m.x170 + m.x366 == 0)
m.c1011 = Constraint(expr=-m.x652*m.x171 + m.x367 == 0)
m.c1012 = Constraint(expr=-m.x653*m.x172 + m.x368 == 0)
m.c1013 = Constraint(expr=-m.x654*m.x173 + m.x369 == 0)
m.c1014 = Constraint(expr=-m.x655*m.x168 + m.x371 == 0)
m.c1015 = Constraint(expr=-m.x656*m.x169 + m.x372 == 0)
m.c1016 = Constraint(expr=-m.x657*m.x170 + m.x373 == 0)
m.c1017 = Constraint(expr=-m.x658*m.x171 + m.x374 == 0)
m.c1018 = Constraint(expr=-m.x659*m.x172 + m.x375 == 0)
m.c1019 = Constraint(expr=-m.x660*m.x173 + m.x376 == 0)
m.c1020 = Constraint(expr=-m.x631*m.x184 + m.x385 == 0)
m.c1021 = Constraint(expr=-m.x632*m.x185 + m.x386 == 0)
m.c1022 = Constraint(expr=-m.x633*m.x186 + m.x387 == 0)
m.c1023 = Constraint(expr=-m.x634*m.x187 + m.x388 == 0)
m.c1024 = Constraint(expr=-m.x635*m.x188 + m.x389 == 0)
m.c1025 = Constraint(expr=-m.x636*m.x189 + m.x390 == 0)
m.c1026 = Constraint(expr=-m.x637*m.x184 + m.x392 == 0)
m.c1027 = Constraint(expr=-m.x638*m.x185 + m.x393 == 0)
m.c1028 = Constraint(expr=-m.x639*m.x186 + m.x394 == 0)
m.c1029 = Constraint(expr=-m.x640*m.x187 + m.x395 == 0)
m.c1030 = Constraint(expr=-m.x641*m.x188 + m.x396 == 0)
m.c1031 = Constraint(expr=-m.x642*m.x189 + m.x397 == 0)
m.c1032 = Constraint(expr=-m.x649*m.x192 + m.x399 == 0)
m.c1033 = Constraint(expr=-m.x650*m.x193 + m.x400 == 0)
m.c1034 = Constraint(expr=-m.x651*m.x194 + m.x401 == 0)
m.c1035 = Constraint(expr=-m.x652*m.x195 + m.x402 == 0)
m.c1036 = Constraint(expr=-m.x653*m.x196 + m.x403 == 0)
m.c1037 = Constraint(expr=-m.x654*m.x197 + m.x404 == 0)
m.c1038 = Constraint(expr=-m.x655*m.x192 + m.x406 == 0)
m.c1039 = Constraint(expr=-m.x656*m.x193 + m.x407 == 0)
m.c1040 = Constraint(expr=-m.x657*m.x194 + m.x408 == 0)
m.c1041 = Constraint(expr=-m.x658*m.x195 + m.x409 == 0)
m.c1042 = Constraint(expr=-m.x659*m.x196 + m.x410 == 0)
m.c1043 = Constraint(expr=-m.x660*m.x197 + m.x411 == 0)
m.c1044 = Constraint(expr=-m.x661*m.x200 + m.x413 == 0)
m.c1045 = Constraint(expr=-m.x662*m.x201 + m.x414 == 0)
m.c1046 = Constraint(expr=-m.x663*m.x202 + m.x415 == 0)
m.c1047 = Constraint(expr=-m.x664*m.x203 + m.x416 == 0)
m.c1048 = Constraint(expr=-m.x665*m.x204 + m.x417 == 0)
m.c1049 = Constraint(expr=-m.x666*m.x205 + m.x418 == 0)
m.c1050 = Constraint(expr=-m.x613*m.x216 + m.x427 == 0)
m.c1051 = Constraint(expr=-m.x614*m.x217 + m.x428 == 0)
m.c1052 = Constraint(expr=-m.x615*m.x218 + m.x429 == 0)
m.c1053 = Constraint(expr=-m.x616*m.x219 + m.x430 == 0)
m.c1054 = Constraint(expr=-m.x617*m.x220 + m.x431 == 0)
m.c1055 = Constraint(expr=-m.x618*m.x221 + m.x432 == 0)
m.c1056 = Constraint(expr=-m.x619*m.x216 + m.x434 == 0)
m.c1057 = Constraint(expr=-m.x620*m.x217 + m.x435 == 0)
m.c1058 = Constraint(expr=-m.x621*m.x218 + m.x436 == 0)
m.c1059 = Constraint(expr=-m.x622*m.x219 + m.x437 == 0)
m.c1060 = Constraint(expr=-m.x623*m.x220 + m.x438 == 0)
m.c1061 = Constraint(expr=-m.x624*m.x221 + m.x439 == 0)
m.c1062 = Constraint(expr=-m.x625*m.x216 + m.x441 == 0)
m.c1063 = Constraint(expr=-m.x626*m.x217 + m.x442 == 0)
m.c1064 = Constraint(expr=-m.x627*m.x218 + m.x443 == 0)
m.c1065 = Constraint(expr=-m.x628*m.x219 + m.x444 == 0)
m.c1066 = Constraint(expr=-m.x629*m.x220 + m.x445 == 0)
m.c1067 = Constraint(expr=-m.x630*m.x221 + m.x446 == 0)
m.c1068 = Constraint(expr=-m.x643*m.x224 + m.x448 == 0)
m.c1069 = Constraint(expr=-m.x644*m.x225 + m.x449 == 0)
m.c1070 = Constraint(expr=-m.x645*m.x226 + m.x450 == 0)
m.c1071 = Constraint(expr=-m.x646*m.x227 + m.x451 == 0)
m.c1072 = Constraint(expr=-m.x647*m.x228 + m.x452 == 0)
m.c1073 = Constraint(expr=-m.x648*m.x229 + m.x453 == 0)
m.c1074 = Constraint(expr=-m.x649*m.x232 + m.x455 == 0)
m.c1075 = Constraint(expr=-m.x650*m.x233 + m.x456 == 0)
m.c1076 = Constraint(expr=-m.x651*m.x234 + m.x457 == 0)
m.c1077 = Constraint(expr=-m.x652*m.x235 + m.x458 == 0)
m.c1078 = Constraint(expr=-m.x653*m.x236 + m.x459 == 0)
m.c1079 = Constraint(expr=-m.x654*m.x237 + m.x460 == 0)
m.c1080 = Constraint(expr=-m.x655*m.x232 + m.x462 == 0)
m.c1081 = Constraint(expr=-m.x656*m.x233 + m.x463 == 0)
m.c1082 = Constraint(expr=-m.x657*m.x234 + m.x464 == 0)
m.c1083 = Constraint(expr=-m.x658*m.x235 + m.x465 == 0)
m.c1084 = Constraint(expr=-m.x659*m.x236 + m.x466 == 0)
m.c1085 = Constraint(expr=-m.x660*m.x237 + m.x467 == 0)
m.c1086 = Constraint(expr=-m.x661*m.x240 + m.x469 == 0)
m.c1087 = Constraint(expr=-m.x662*m.x241 + m.x470 == 0)
m.c1088 = Constraint(expr=-m.x663*m.x242 + m.x471 == 0)
m.c1089 = Constraint(expr=-m.x664*m.x243 + m.x472 == 0)
m.c1090 = Constraint(expr=-m.x665*m.x244 + m.x473 == 0)
m.c1091 = Constraint(expr=-m.x666*m.x245 + m.x474 == 0)
m.c1092 = Constraint(expr=-m.x601*m.x248 + m.x476 == 0)
m.c1093 = Constraint(expr=-m.x602*m.x249 + m.x477 == 0)
m.c1094 = Constraint(expr=-m.x603*m.x250 + m.x478 == 0)
m.c1095 = Constraint(expr=-m.x604*m.x251 + m.x479 == 0)
m.c1096 = Constraint(expr=-m.x605*m.x252 + m.x480 == 0)
m.c1097 = Constraint(expr=-m.x606*m.x253 + m.x481 == 0)
m.c1098 = Constraint(expr=-m.x607*m.x248 + m.x483 == 0)
m.c1099 = Constraint(expr=-m.x608*m.x249 + m.x484 == 0)
m.c1100 = Constraint(expr=-m.x609*m.x250 + m.x485 == 0)
m.c1101 = Constraint(expr=-m.x610*m.x251 + m.x486 == 0)
m.c1102 = Constraint(expr=-m.x611*m.x252 + m.x487 == 0)
m.c1103 = Constraint(expr=-m.x612*m.x253 + m.x488 == 0)
m.c1104 = Constraint(expr=-m.x643*m.x256 + m.x490 == 0)
m.c1105 = Constraint(expr=-m.x644*m.x257 + m.x491 == 0)
m.c1106 = Constraint(expr=-m.x645*m.x258 + m.x492 == 0)
m.c1107 = Constraint(expr=-m.x646*m.x259 + m.x493 == 0)
m.c1108 = Constraint(expr=-m.x647*m.x260 + m.x494 == 0)
m.c1109 = Constraint(expr=-m.x648*m.x261 + m.x495 == 0)
m.c1110 = Constraint(expr=-m.x649*m.x264 + m.x497 == 0)
m.c1111 = Constraint(expr=-m.x650*m.x265 + m.x498 == 0)
m.c1112 = Constraint(expr=-m.x651*m.x266 + m.x499 == 0)
m.c1113 = Constraint(expr=-m.x652*m.x267 + m.x500 == 0)
m.c1114 = Constraint(expr=-m.x653*m.x268 + m.x501 == 0)
m.c1115 = Constraint(expr=-m.x654*m.x269 + m.x502 == 0)
m.c1116 = Constraint(expr=-m.x655*m.x264 + m.x504 == 0)
m.c1117 = Constraint(expr=-m.x656*m.x265 + m.x505 == 0)
m.c1118 = Constraint(expr=-m.x657*m.x266 + m.x506 == 0)
m.c1119 = Constraint(expr=-m.x658*m.x267 + m.x507 == 0)
m.c1120 = Constraint(expr=-m.x659*m.x268 + m.x508 == 0)
m.c1121 = Constraint(expr=-m.x660*m.x269 + m.x509 == 0)
m.c1122 = Constraint(expr=-m.x613*m.x272 + m.x511 == 0)
m.c1123 = Constraint(expr=-m.x614*m.x273 + m.x512 == 0)
m.c1124 = Constraint(expr=-m.x615*m.x274 + m.x513 == 0)
m.c1125 = Constraint(expr=-m.x616*m.x275 + m.x514 == 0)
m.c1126 = Constraint(expr=-m.x617*m.x276 + m.x515 == 0)
m.c1127 = Constraint(expr=-m.x618*m.x277 + m.x516 == 0)
m.c1128 = Constraint(expr=-m.x619*m.x272 + m.x518 == 0)
m.c1129 = Constraint(expr=-m.x620*m.x273 + m.x519 == 0)
m.c1130 = Constraint(expr=-m.x621*m.x274 + m.x520 == 0)
m.c1131 = Constraint(expr=-m.x622*m.x275 + m.x521 == 0)
m.c1132 = Constraint(expr=-m.x623*m.x276 + m.x522 == 0)
m.c1133 = Constraint(expr=-m.x624*m.x277 + m.x523 == 0)
m.c1134 = Constraint(expr=-m.x625*m.x272 + m.x525 == 0)
m.c1135 = Constraint(expr=-m.x626*m.x273 + m.x526 == 0)
m.c1136 = Constraint(expr=-m.x627*m.x274 + m.x527 == 0)
m.c1137 = Constraint(expr=-m.x628*m.x275 + m.x528 == 0)
m.c1138 = Constraint(expr=-m.x629*m.x276 + m.x529 == 0)
m.c1139 = Constraint(expr=-m.x630*m.x277 + m.x530 == 0)
m.c1140 = Constraint(expr=-m.x643*m.x280 + m.x532 == 0)
m.c1141 = Constraint(expr=-m.x644*m.x281 + m.x533 == 0)
m.c1142 = Constraint(expr=-m.x645*m.x282 + m.x534 == 0)
m.c1143 = Constraint(expr=-m.x646*m.x283 + m.x535 == 0)
m.c1144 = Constraint(expr=-m.x647*m.x284 + m.x536 == 0)
m.c1145 = Constraint(expr=-m.x648*m.x285 + m.x537 == 0)
m.c1146 = Constraint(expr=-m.x649*m.x288 + m.x539 == 0)
m.c1147 = Constraint(expr=-m.x650*m.x289 + m.x540 == 0)
m.c1148 = Constraint(expr=-m.x651*m.x290 + m.x541 == 0)
m.c1149 = Constraint(expr=-m.x652*m.x291 + m.x542 == 0)
m.c1150 = Constraint(expr=-m.x653*m.x292 + m.x543 == 0)
m.c1151 = Constraint(expr=-m.x654*m.x293 + m.x544 == 0)
m.c1152 = Constraint(expr=-m.x655*m.x288 + m.x546 == 0)
m.c1153 = Constraint(expr=-m.x656*m.x289 + m.x547 == 0)
m.c1154 = Constraint(expr=-m.x657*m.x290 + m.x548 == 0)
m.c1155 = Constraint(expr=-m.x658*m.x291 + m.x549 == 0)
m.c1156 = Constraint(expr=-m.x659*m.x292 + m.x550 == 0)
m.c1157 = Constraint(expr=-m.x660*m.x293 + m.x551 == 0)
m.c1158 = Constraint(expr=-m.x661*m.x296 + m.x553 == 0)
m.c1159 = Constraint(expr=-m.x662*m.x297 + m.x554 == 0)
m.c1160 = Constraint(expr=-m.x663*m.x298 + m.x555 == 0)
m.c1161 = Constraint(expr=-m.x664*m.x299 + m.x556 == 0)
m.c1162 = Constraint(expr=-m.x665*m.x300 + m.x557 == 0)
m.c1163 = Constraint(expr=-m.x666*m.x301 + m.x558 == 0)
m.c1164 = Constraint(expr=-m.x631*m.x304 + m.x560 == 0)
m.c1165 = Constraint(expr=-m.x632*m.x305 + m.x561 == 0)
m.c1166 = Constraint(expr=-m.x633*m.x306 + m.x562 == 0)
m.c1167 = Constraint(expr=-m.x634*m.x307 + m.x563 == 0)
m.c1168 = Constraint(expr=-m.x635*m.x308 + m.x564 == 0)
m.c1169 = Constraint(expr=-m.x636*m.x309 + m.x565 == 0)
m.c1170 = Constraint(expr=-m.x637*m.x304 + m.x567 == 0)
m.c1171 = Constraint(expr=-m.x638*m.x305 + m.x568 == 0)
m.c1172 = Constraint(expr=-m.x639*m.x306 + m.x569 == 0)
m.c1173 = Constraint(expr=-m.x640*m.x307 + m.x570 == 0)
m.c1174 = Constraint(expr=-m.x641*m.x308 + m.x571 == 0)
m.c1175 = Constraint(expr=-m.x642*m.x309 + m.x572 == 0)
m.c1176 = Constraint(expr=-m.x649*m.x312 + m.x574 == 0)
m.c1177 = Constraint(expr=-m.x650*m.x313 + m.x575 == 0)
m.c1178 = Constraint(expr=-m.x651*m.x314 + m.x576 == 0)
m.c1179 = Constraint(expr=-m.x652*m.x315 + m.x577 == 0)
m.c1180 = Constraint(expr=-m.x653*m.x316 + m.x578 == 0)
m.c1181 = Constraint(expr=-m.x654*m.x317 + m.x579 == 0)
m.c1182 = Constraint(expr=-m.x655*m.x312 + m.x581 == 0)
m.c1183 = Constraint(expr=-m.x656*m.x313 + m.x582 == 0)
m.c1184 = Constraint(expr=-m.x657*m.x314 + m.x583 == 0)
m.c1185 = Constraint(expr=-m.x658*m.x315 + m.x584 == 0)
m.c1186 = Constraint(expr=-m.x659*m.x316 + m.x585 == 0)
m.c1187 = Constraint(expr=-m.x660*m.x317 + m.x586 == 0)
m.c1188 = Constraint(expr=-m.x661*m.x320 + m.x588 == 0)
m.c1189 = Constraint(expr=-m.x662*m.x321 + m.x589 == 0)
m.c1190 = Constraint(expr=-m.x663*m.x322 + m.x590 == 0)
m.c1191 = Constraint(expr=-m.x664*m.x323 + m.x591 == 0)
m.c1192 = Constraint(expr=-m.x665*m.x324 + m.x592 == 0)
m.c1193 = Constraint(expr=-m.x666*m.x325 + m.x593 == 0)
m.c1194 = Constraint(expr=-m.x643*m.x328 + m.x595 == 0)
m.c1195 = Constraint(expr=-m.x644*m.x329 + m.x596 == 0)
m.c1196 = Constraint(expr=-m.x645*m.x330 + m.x597 == 0)
m.c1197 = Constraint(expr=-m.x646*m.x331 + m.x598 == 0)
m.c1198 = Constraint(expr=-m.x647*m.x332 + m.x599 == 0)
m.c1199 = Constraint(expr=-m.x648*m.x333 + m.x600 == 0)
| 40.769775 | 120 | 0.617729 |
ace54c72ea5a57fc1e0892005f3430b2a189c87d | 2,103 | py | Python | src/webapi/migrations/0004_definition_library_project.py | kumagallium/labmine-api | 074e3b9a8665ce9e176da46fdd9ad91dc0734682 | [
"MIT"
] | null | null | null | src/webapi/migrations/0004_definition_library_project.py | kumagallium/labmine-api | 074e3b9a8665ce9e176da46fdd9ad91dc0734682 | [
"MIT"
] | null | null | null | src/webapi/migrations/0004_definition_library_project.py | kumagallium/labmine-api | 074e3b9a8665ce9e176da46fdd9ad91dc0734682 | [
"MIT"
] | null | null | null | # Generated by Django 2.2.1 on 2020-02-06 05:28
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('webapi', '0003_auto_20200205_1311'),
]
operations = [
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('project_name', models.TextField()),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('editor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Library',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('experiment_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='webapi.Experiment')),
('project_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='webapi.Project')),
],
),
migrations.CreateModel(
name='Definition',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('datum_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='webapi.Datum')),
('sample_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='webapi.Sample')),
],
),
]
| 44.744681 | 122 | 0.61436 |
ace54e24d6aea7eca42489125d126c040059c691 | 6,121 | py | Python | tools/telemetry/telemetry/page/record_wpr.py | Acidburn0zzz/chromium-1 | 4c08f442d2588a2c7cfaa117a55bd87d2ac32f9a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | tools/telemetry/telemetry/page/record_wpr.py | Acidburn0zzz/chromium-1 | 4c08f442d2588a2c7cfaa117a55bd87d2ac32f9a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | tools/telemetry/telemetry/page/record_wpr.py | Acidburn0zzz/chromium-1 | 4c08f442d2588a2c7cfaa117a55bd87d2ac32f9a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import logging
import os
import sys
import tempfile
import time
import urlparse
from telemetry import test
from telemetry.core import browser_options
from telemetry.core import discover
from telemetry.core import util
from telemetry.core import wpr_modes
from telemetry.page import page_measurement
from telemetry.page import page_measurement_results
from telemetry.page import page_runner
from telemetry.page import page_set
from telemetry.page import page_test
from telemetry.page import profile_creator
from telemetry.page import test_expectations
from telemetry.page.actions import action_runner as action_runner_module
from telemetry.page.actions import interact
class RecordPage(page_test.PageTest):
def __init__(self, measurements):
# This class overwrites PageTest.Run, so that the test method name is not
# really used (except for throwing an exception if it doesn't exist).
super(RecordPage, self).__init__('Run')
self._action_names = set(
[measurement().action_name_to_run
for measurement in measurements.values()
if measurement().action_name_to_run])
self.test = None
def CanRunForPage(self, page):
return page.url.startswith('http')
def WillNavigateToPage(self, page, tab):
"""Override to ensure all resources are fetched from network."""
tab.ClearCache(force=False)
if self.test:
self.test.WillNavigateToPage(page, tab)
def DidNavigateToPage(self, page, tab):
"""Forward the call to the test."""
if self.test:
self.test.DidNavigateToPage(page, tab)
def RunPage(self, page, tab, results):
# When recording, sleep to catch any resources that load post-onload.
tab.WaitForDocumentReadyStateToBeComplete()
if self.test:
dummy_results = page_measurement_results.PageMeasurementResults()
dummy_results.WillMeasurePage(page)
self.test.MeasurePage(page, tab, dummy_results)
dummy_results.DidMeasurePage()
else:
# TODO(tonyg): This should probably monitor resource timing for activity
# and sleep until 2s since the last network event with some timeout like
# 20s. We could wrap this up as WaitForNetworkIdle() and share with the
# speed index metric.
time.sleep(3)
# Run the actions for all measurements. Reload the page between
# actions.
should_reload = False
interactive = self.options and self.options.interactive
for action_name in self._action_names:
if not hasattr(page, action_name):
continue
if should_reload:
self.RunNavigateSteps(page, tab)
action_runner = action_runner_module.ActionRunner(page, tab, self)
if interactive:
action_runner.RunAction(interact.InteractAction())
else:
self._RunMethod(page, action_name, action_runner)
should_reload = True
def _CreatePageSetForUrl(url):
ps_name = urlparse.urlparse(url).hostname + '.json'
ps_path = os.path.join(util.GetBaseDir(), 'page_sets', ps_name)
ps = {'archive_data_file': '../data/%s' % ps_name,
'pages': [
{ 'url': url }
]
}
with open(ps_path, 'w') as f:
f.write(json.dumps(ps))
print 'Created new page set %s' % ps_path
return page_set.PageSet.FromFile(ps_path)
def Main(base_dir):
measurements = {
n: cls for n, cls in discover.DiscoverClasses(
base_dir, base_dir, page_measurement.PageMeasurement).items()
# Filter out unneeded ProfileCreators (crbug.com/319573).
if not issubclass(cls, profile_creator.ProfileCreator)
}
tests = discover.DiscoverClasses(base_dir, base_dir, test.Test,
index_by_class_name=True)
options = browser_options.BrowserFinderOptions()
parser = options.CreateParser('%prog <PageSet|Test|URL>')
page_runner.AddCommandLineArgs(parser)
recorder = RecordPage(measurements)
recorder.AddCommandLineArgs(parser)
quick_args = [a for a in sys.argv[1:] if not a.startswith('-')]
if len(quick_args) != 1:
parser.print_usage()
sys.exit(1)
target = quick_args[0]
if target in tests:
recorder.test = tests[target]().test()
recorder.test.AddCommandLineArgs(parser)
recorder.test.SetArgumentDefaults(parser)
parser.parse_args()
recorder.test.ProcessCommandLineArgs(parser, options)
ps = tests[target]().CreatePageSet(options)
elif discover.IsPageSetFile(target):
parser.parse_args()
ps = page_set.PageSet.FromFile(target)
elif target.startswith('http'):
parser.parse_args()
ps = _CreatePageSetForUrl(target)
else:
parser.print_usage()
sys.exit(1)
page_runner.ProcessCommandLineArgs(parser, options)
recorder.ProcessCommandLineArgs(parser, options)
expectations = test_expectations.TestExpectations()
# Set the archive path to something temporary.
temp_target_wpr_file_path = tempfile.mkstemp()[1]
ps.wpr_archive_info.AddNewTemporaryRecording(temp_target_wpr_file_path)
# Do the actual recording.
options.browser_options.wpr_mode = wpr_modes.WPR_RECORD
options.browser_options.no_proxy_server = True
recorder.CustomizeBrowserOptions(options)
results = page_runner.Run(recorder, ps, expectations, options)
if results.errors or results.failures:
logging.warning('Some pages failed. The recording has not been updated for '
'these pages.')
logging.warning('Failed pages:\n%s',
'\n'.join(zip(*results.errors + results.failures)[0]))
if results.skipped:
logging.warning('Some pages were skipped. The recording has not been '
'updated for these pages.')
logging.warning('Skipped pages:\n%s', '\n'.join(zip(*results.skipped)[0]))
if results.successes:
# Update the metadata for the pages which were recorded.
ps.wpr_archive_info.AddRecordedPages(results.successes)
else:
os.remove(temp_target_wpr_file_path)
return min(255, len(results.failures))
| 35.381503 | 80 | 0.723248 |
ace54e92897c45a7ee7194fb96d5892bc641b362 | 6,201 | py | Python | run_scripts/dagger_exp_script.py | FineArtz/ILSwiss | 08e21b9563cf904d28d3d6e51ce5c28372d493ad | [
"MIT"
] | null | null | null | run_scripts/dagger_exp_script.py | FineArtz/ILSwiss | 08e21b9563cf904d28d3d6e51ce5c28372d493ad | [
"MIT"
] | null | null | null | run_scripts/dagger_exp_script.py | FineArtz/ILSwiss | 08e21b9563cf904d28d3d6e51ce5c28372d493ad | [
"MIT"
] | null | null | null | import yaml
import argparse
import joblib
import numpy as np
import os, sys, inspect
import random
import pickle
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
print(sys.path)
import gym
from rlkit.envs import get_env, get_envs
import rlkit.torch.utils.pytorch_util as ptu
from rlkit.launchers.launcher_util import setup_logger, set_seed
from rlkit.data_management.env_replay_buffer import EnvReplayBuffer
from rlkit.envs.wrappers import ScaledEnv, MinmaxEnv, ProxyEnv, NormalizedBoxEnv
from rlkit.torch.common.policies import (
ReparamTanhMultivariateGaussianPolicy,
MakeDeterministic,
)
from rlkit.torch.algorithms.dagger.dagger import DAgger
def experiment(variant):
with open("demos_listing.yaml", "r") as f:
listings = yaml.load(f.read())
demos_path = listings[variant["expert_name"]]["file_paths"][variant["expert_idx"]]
"""
Buffer input format
"""
# buffer_save_dict = joblib.load(expert_demos_path)
# expert_replay_buffer = buffer_save_dict['train']
# obs_mean, obs_std = buffer_save_dict['obs_mean'], buffer_save_dict['obs_std']
# acts_mean, acts_std = buffer_save_dict['acts_mean'], buffer_save_dict['acts_std']
# obs_min, obs_max = buffer_save_dict['obs_min'], buffer_save_dict['obs_max']
# if 'minmax_env_with_demo_stats' in variant.keys():
# if (variant['minmax_env_with_demo_stats']) and not (variant['scale_env_with_demo_stats']):
# assert 'norm_train' in buffer_save_dict.keys()
# expert_replay_buffer = buffer_save_dict['norm_train']
"""
PKL input format
"""
print("demos_path", demos_path)
with open(demos_path, "rb") as f:
traj_list = pickle.load(f)
traj_list = random.sample(traj_list, variant["traj_num"])
obs = np.vstack([traj_list[i]["observations"] for i in range(len(traj_list))])
acts = np.vstack([traj_list[i]["actions"] for i in range(len(traj_list))])
obs_mean, obs_std = np.mean(obs, axis=0), np.std(obs, axis=0)
# acts_mean, acts_std = np.mean(acts, axis=0), np.std(acts, axis=0)
acts_mean, acts_std = None, None
obs_min, obs_max = np.min(obs, axis=0), np.max(obs, axis=0)
# print("obs:mean:{}".format(obs_mean))
# print("obs_std:{}".format(obs_std))
# print("acts_mean:{}".format(acts_mean))
# print("acts_std:{}".format(acts_std))
env_specs = variant["env_specs"]
env = get_env(env_specs)
env.seed(env_specs["eval_env_seed"])
print("\n\nEnv: {}".format(env_specs["env_name"]))
print("kwargs: {}".format(env_specs["env_kwargs"]))
print("Obs Space: {}".format(env.observation_space))
print("Act Space: {}\n\n".format(env.action_space))
expert_replay_buffer = EnvReplayBuffer(
variant["dagger_params"]["replay_buffer_size"],
env,
random_seed=np.random.randint(10000),
)
for i in range(len(traj_list)):
expert_replay_buffer.add_path(
traj_list[i], absorbing=variant["dagger_params"]["wrap_absorbing"], env=env
)
env_wrapper = ProxyEnv # Identical wrapper
kwargs = {}
wrapper_kwargs = {}
if variant["scale_env_with_demo_stats"]:
print("\nWARNING: Using scale env wrapper")
tmp_env_wrapper = env_wrapper = ScaledEnv
wrapper_kwargs = dict(
obs_mean=obs_mean,
obs_std=obs_std,
acts_mean=acts_mean,
acts_std=acts_std,
)
elif variant["minmax_env_with_demo_stats"]:
print("\nWARNING: Using min max env wrapper")
tmp_env_wrapper = env_wrapper = MinmaxEnv
wrapper_kwargs = dict(obs_min=obs_min, obs_max=obs_max)
obs_space = env.observation_space
act_space = env.action_space
assert not isinstance(obs_space, gym.spaces.Dict)
assert len(obs_space.shape) == 1
assert len(act_space.shape) == 1
if isinstance(act_space, gym.spaces.Box) and (
(acts_mean is None) and (acts_std is None)
):
print("\nWARNING: Using Normalized Box Env wrapper")
env_wrapper = lambda *args, **kwargs: NormalizedBoxEnv(
tmp_env_wrapper(*args, **kwargs)
)
env = env_wrapper(env, **wrapper_kwargs)
training_env = get_envs(
env_specs, env_wrapper, wrapper_kwargs=wrapper_kwargs, **kwargs
)
training_env.seed(env_specs["training_env_seed"])
obs_dim = obs_space.shape[0]
action_dim = act_space.shape[0]
# build the policy models
net_size = variant["policy_net_size"]
num_hidden = variant["policy_num_hidden_layers"]
policy = ReparamTanhMultivariateGaussianPolicy(
hidden_sizes=num_hidden * [net_size],
obs_dim=obs_dim,
action_dim=action_dim,
)
# load the expert policy
expert_policy = joblib.load(variant["expert_policy_path"])["policy"]
if variant["use_deterministic_expert"]:
expert_policy = MakeDeterministic(expert_policy)
algorithm = DAgger(
env=env,
training_env=training_env,
exploration_policy=policy,
expert_policy=expert_policy,
expert_replay_buffer=expert_replay_buffer,
**variant["dagger_params"]
)
if ptu.gpu_enabled():
algorithm.to(ptu.device)
algorithm.train()
return 1
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-e", "--experiment", help="experiment specification file")
parser.add_argument("-g", "--gpu", help="gpu id", type=int, default=0)
args = parser.parse_args()
with open(args.experiment, "r") as spec_file:
spec_string = spec_file.read()
exp_specs = yaml.load(spec_string)
# make all seeds the same.
exp_specs["env_specs"]["eval_env_seed"] = exp_specs["env_specs"][
"training_env_seed"
] = exp_specs["seed"]
if exp_specs["using_gpus"] > 0:
print("\n\nUSING GPU\n\n")
ptu.set_gpu_mode(True)
exp_id = exp_specs["exp_id"]
exp_prefix = exp_specs["exp_name"]
seed = exp_specs["seed"]
set_seed(seed)
setup_logger(exp_prefix=exp_prefix, exp_id=exp_id, variant=exp_specs)
experiment(exp_specs)
| 34.45 | 100 | 0.67989 |
ace54efe4b973da185d987770d7880f36d2f2683 | 12,159 | py | Python | pybind/nos/v7_1_0/interface/hundredgigabitethernet/switchport/trunk_private_vlan_classification/private_vlan/trunk/allowed/vlan/__init__.py | shivharis/pybind | 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | [
"Apache-2.0"
] | null | null | null | pybind/nos/v7_1_0/interface/hundredgigabitethernet/switchport/trunk_private_vlan_classification/private_vlan/trunk/allowed/vlan/__init__.py | shivharis/pybind | 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | [
"Apache-2.0"
] | null | null | null | pybind/nos/v7_1_0/interface/hundredgigabitethernet/switchport/trunk_private_vlan_classification/private_vlan/trunk/allowed/vlan/__init__.py | shivharis/pybind | 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | [
"Apache-2.0"
] | 1 | 2021-11-05T22:15:42.000Z | 2021-11-05T22:15:42.000Z |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import add_
import remove_
class vlan(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-interface - based on the path /interface/hundredgigabitethernet/switchport/trunk-private-vlan-classification/private-vlan/trunk/allowed/vlan. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__add_','__remove_',)
_yang_name = 'vlan'
_rest_name = 'vlan'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__add_ = YANGDynClass(base=YANGListType("trunk_vlan_id trunk_ctag_id",add_.add_, yang_name="add", rest_name="add", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='trunk-vlan-id trunk-ctag-id', extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}), is_container='list', yang_name="add", rest_name="add", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='list', is_config=True)
self.__remove_ = YANGDynClass(base=YANGListType("trunk_vlan_id trunk_ctag_id",remove_.remove_, yang_name="remove", rest_name="remove", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='trunk-vlan-id trunk-ctag-id', extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-remove-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}), is_container='list', yang_name="remove", rest_name="remove", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-remove-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='list', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'interface', u'hundredgigabitethernet', u'switchport', u'trunk-private-vlan-classification', u'private-vlan', u'trunk', u'allowed', u'vlan']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'interface', u'HundredGigabitEthernet', u'switchport', u'private-vlan', u'trunk', u'allowed', u'vlan']
  def _get_add_(self):
    """
    Getter method for add_, mapped from YANG variable /interface/hundredgigabitethernet/switchport/trunk_private_vlan_classification/private_vlan/trunk/allowed/vlan/add (list)

    Returns the YANGDynClass list container assigned in __init__ / _set_add_.
    """
    # Name-mangled private storage; written only by __init__, _set_add_ and _unset_add_.
    return self.__add_
  def _set_add_(self, v, load=False):
    """
    Setter method for add_, mapped from YANG variable /interface/hundredgigabitethernet/switchport/trunk_private_vlan_classification/private_vlan/trunk/allowed/vlan/add (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_add_ is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_add_() directly.

    :param v: candidate value, re-wrapped in the generated YANG list type below.
    :param load: passed by pyangbind when populating from a datastore (unused here).
    :raises ValueError: structured error dict when v is not coercible to the list type.
    """
    if hasattr(v, "_utype"):
      # Types providing _utype use it to normalize the incoming value first.
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGListType("trunk_vlan_id trunk_ctag_id",add_.add_, yang_name="add", rest_name="add", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='trunk-vlan-id trunk-ctag-id', extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}), is_container='list', yang_name="add", rest_name="add", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='list', is_config=True)
    except (TypeError, ValueError):
      # Re-raise with the structured payload format expected by pyangbind callers.
      raise ValueError({
          'error-string': """add_ must be of a type compatible with list""",
          'defined-type': "list",
          'generated-type': """YANGDynClass(base=YANGListType("trunk_vlan_id trunk_ctag_id",add_.add_, yang_name="add", rest_name="add", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='trunk-vlan-id trunk-ctag-id', extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}), is_container='list', yang_name="add", rest_name="add", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='list', is_config=True)""",
        })
    self.__add_ = t
    if hasattr(self, '_set'):
      # Notify the enclosing container that a child value changed.
      self._set()
  def _unset_add_(self):
    """Reset add_ to a fresh, empty generated YANG list container (its default state)."""
    self.__add_ = YANGDynClass(base=YANGListType("trunk_vlan_id trunk_ctag_id",add_.add_, yang_name="add", rest_name="add", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='trunk-vlan-id trunk-ctag-id', extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}), is_container='list', yang_name="add", rest_name="add", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='list', is_config=True)
  def _get_remove_(self):
    """
    Getter method for remove_, mapped from YANG variable /interface/hundredgigabitethernet/switchport/trunk_private_vlan_classification/private_vlan/trunk/allowed/vlan/remove (list)

    Returns the YANGDynClass list container assigned in __init__ / _set_remove_.
    """
    # Name-mangled private storage; written only by __init__, _set_remove_ and _unset_remove_.
    return self.__remove_
  def _set_remove_(self, v, load=False):
    """
    Setter method for remove_, mapped from YANG variable /interface/hundredgigabitethernet/switchport/trunk_private_vlan_classification/private_vlan/trunk/allowed/vlan/remove (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_remove_ is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_remove_() directly.

    :param v: candidate value, re-wrapped in the generated YANG list type below.
    :param load: passed by pyangbind when populating from a datastore (unused here).
    :raises ValueError: structured error dict when v is not coercible to the list type.
    """
    if hasattr(v, "_utype"):
      # Types providing _utype use it to normalize the incoming value first.
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=YANGListType("trunk_vlan_id trunk_ctag_id",remove_.remove_, yang_name="remove", rest_name="remove", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='trunk-vlan-id trunk-ctag-id', extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-remove-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}), is_container='list', yang_name="remove", rest_name="remove", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-remove-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='list', is_config=True)
    except (TypeError, ValueError):
      # Re-raise with the structured payload format expected by pyangbind callers.
      raise ValueError({
          'error-string': """remove_ must be of a type compatible with list""",
          'defined-type': "list",
          'generated-type': """YANGDynClass(base=YANGListType("trunk_vlan_id trunk_ctag_id",remove_.remove_, yang_name="remove", rest_name="remove", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='trunk-vlan-id trunk-ctag-id', extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-remove-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}), is_container='list', yang_name="remove", rest_name="remove", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-remove-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='list', is_config=True)""",
        })
    self.__remove_ = t
    if hasattr(self, '_set'):
      # Notify the enclosing container that a child value changed.
      self._set()
  def _unset_remove_(self):
    """Reset remove_ to a fresh, empty generated YANG list container (its default state)."""
    self.__remove_ = YANGDynClass(base=YANGListType("trunk_vlan_id trunk_ctag_id",remove_.remove_, yang_name="remove", rest_name="remove", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='trunk-vlan-id trunk-ctag-id', extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-remove-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}), is_container='list', yang_name="remove", rest_name="remove", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'ctag-pvlan-classification-phy-remove-config', u'cli-no-key-completion': None, u'cli-suppress-mode': None}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='list', is_config=True)
add_ = __builtin__.property(_get_add_, _set_add_)
remove_ = __builtin__.property(_get_remove_, _set_remove_)
_pyangbind_elements = {'add_': add_, 'remove_': remove_, }
| 75.99375 | 865 | 0.73789 |
ace54f0734f3e9abe7cd9dcf6549236a723003e6 | 3,551 | py | Python | scripts/deps/generate_protocol_resources.py | bramus/devtools-frontend | 03f91c46e7920e768eba5192b7f902e916b9cac3 | [
"BSD-3-Clause"
] | null | null | null | scripts/deps/generate_protocol_resources.py | bramus/devtools-frontend | 03f91c46e7920e768eba5192b7f902e916b9cac3 | [
"BSD-3-Clause"
] | 1 | 2021-01-22T00:22:39.000Z | 2021-01-22T00:22:39.000Z | scripts/deps/generate_protocol_resources.py | emchap/devtools-frontend | 65c824b5adc918400877f7f1d65f17c901e42421 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env vpython
#
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os.path as path
import re
import os
import subprocess
import sys
_CURRENT_DIR = path.join(path.dirname(__file__))
# Temporarily extend sys.path so the repo-local devtools_paths helper (under
# scripts/) can be imported, then restore the original search path.
try:
    old_sys_path = sys.path[:]
    sys.path.append(path.join(_CURRENT_DIR, '..', '..', 'scripts'))
    import devtools_paths
finally:
    sys.path = old_sys_path
# This file lives two directory levels below the checkout root.
ROOT_DIRECTORY = path.join(path.dirname(path.abspath(__file__)), '..', '..')
V8_DIRECTORY_PATH = path.join(ROOT_DIRECTORY, 'v8')
PROTOCOL_LOCATION = path.join(ROOT_DIRECTORY, 'third_party', 'blink', 'public', 'devtools_protocol')
SCRIPTS_BUILD_PATH = path.join(ROOT_DIRECTORY, 'scripts', 'build')
# Generator scripts invoked from main().
GENERATE_ARIA_SCRIPT = path.join(SCRIPTS_BUILD_PATH, 'generate_aria.py')
GENERATE_SUPPORTED_CSS_SCRIPT = path.join(SCRIPTS_BUILD_PATH, 'generate_supported_css.py')
GENERATE_PROTOCOL_DEFINITIONS_SCRIPT = path.join(SCRIPTS_BUILD_PATH, 'code_generator_frontend.py')
CONCATENATE_PROTOCOL_SCRIPT = path.join(ROOT_DIRECTORY, 'third_party', 'inspector_protocol', 'concatenate_protocols.py')
# Tool binaries resolved via the repo's devtools_paths helper.
NODE_LOCATION = devtools_paths.node_path()
TSC_LOCATION = devtools_paths.typescript_compiler_path()
def popen(arguments, cwd=ROOT_DIRECTORY, env=None):
    """Run a Python script synchronously and exit this process on failure.

    :param arguments: argv passed to the current Python interpreter.
    :param cwd: working directory for the child (defaults to the repo root).
    :param env: environment mapping for the child; defaults to a snapshot of
        the current environment taken at call time.
    """
    if env is None:
        # Snapshot os.environ per call. The previous default of
        # `env=os.environ.copy()` was evaluated once at import time, so later
        # environment changes (and mutations of the shared dict) leaked
        # between calls.
        env = os.environ.copy()
    process = subprocess.Popen([sys.executable] + arguments, cwd=cwd, env=env)
    process.communicate()
    if process.returncode != 0:
        # Propagate the child's failure code to our caller's shell.
        sys.exit(process.returncode)
def runTsc(file_to_compile):
    """Compile *file_to_compile* with the bundled TypeScript compiler.

    Returns a ``(returncode, output)`` tuple, where output is the combined
    stdout and stderr bytes of the tsc invocation.
    """
    command = [NODE_LOCATION, TSC_LOCATION, file_to_compile]
    proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    # TypeScript does not correctly write to stderr because of
    # https://github.com/microsoft/TypeScript/issues/33849, so merge both streams.
    return proc.returncode, out + err
def runNode(file_to_execute):
    """Execute a JavaScript file with the bundled Node binary.

    Returns a ``(returncode, output)`` tuple of combined stdout/stderr bytes.
    """
    proc = subprocess.Popen([NODE_LOCATION, file_to_execute], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    return proc.returncode, out + err
def _print_failure(title, output):
    # Visually separate tool output from the surrounding build log.
    print('')
    print(title)
    print('')
    print(output)
    print('')


def generate_protocol_typescript_definitions():
    """Compile the protocol_dts_generator TypeScript script and execute it.

    Returns:
        0 on success, 1 if either the TypeScript compilation or the Node
        execution step fails (diagnostics are printed in both cases).
        The original returned an implicit None on success; 0 is equivalent
        under truthiness but explicit.
    """
    generator_script_to_compile = path.join(ROOT_DIRECTORY, 'scripts', 'protocol_typescript', 'protocol_dts_generator.ts')
    # First run TSC to convert the generator from TS to JS.
    typescript_found_errors, typescript_stderr = runTsc(generator_script_to_compile)
    if typescript_found_errors:
        _print_failure('TypeScript compilation failed on %s' % generator_script_to_compile, typescript_stderr)
        return 1
    # tsc emits the compiled JS next to the source with the extension swapped.
    # splitext is used instead of str.replace('.ts', '.js'), which would
    # rewrite the first '.ts' occurrence anywhere in the path.
    outputted_file_path = path.splitext(generator_script_to_compile)[0] + '.js'
    node_found_errors, node_stderr = runNode(outputted_file_path)
    if node_found_errors:
        _print_failure('Generating protocol typedefs failed', node_stderr)
        return 1
    return 0
# Generate the required `front_end/generated` files that are based on files living in Blink
def main():
    """Regenerate all protocol-derived resources, in dependency order."""
    popen([GENERATE_ARIA_SCRIPT])
    popen([GENERATE_SUPPORTED_CSS_SCRIPT])
    # Concatenate the Blink and V8 .pdl definitions into one JSON protocol
    # file; this must happen before the code generator below consumes it.
    popen([CONCATENATE_PROTOCOL_SCRIPT] + [
        path.join(PROTOCOL_LOCATION, 'browser_protocol.pdl'),
        path.join(V8_DIRECTORY_PATH, 'include', 'js_protocol.pdl'),
        # output_file
        path.join(PROTOCOL_LOCATION, 'browser_protocol.json'),
    ])
    popen([GENERATE_PROTOCOL_DEFINITIONS_SCRIPT])
    generate_protocol_typescript_definitions()
# Standard entry guard: run the pipeline only when executed as a script.
if __name__ == '__main__':
    main()
| 33.819048 | 126 | 0.73951 |
ace54f449c1f22055c98954b161106eaed08e5e2 | 427 | py | Python | masar_cable/custom/sales_order/sales_order.py | karamakcsc/masar_cable | 501618733c25e15c56a0a5f9d331719c51914047 | [
"MIT"
] | null | null | null | masar_cable/custom/sales_order/sales_order.py | karamakcsc/masar_cable | 501618733c25e15c56a0a5f9d331719c51914047 | [
"MIT"
] | null | null | null | masar_cable/custom/sales_order/sales_order.py | karamakcsc/masar_cable | 501618733c25e15c56a0a5f9d331719c51914047 | [
"MIT"
] | null | null | null | import frappe
import erpnext
import math
from frappe.utils import flt, get_datetime, getdate, date_diff, cint, nowdate, get_link_to_form, time_diff_in_hours
from frappe import throw, msgprint, _
@frappe.whitelist()
def apply_vfcu(doctype,name):
so_doc = frappe.get_doc(doctype, name)
for d in so_doc.get("items"):
item_doc = frappe.get_doc("Item", d.item_code)
d.rate = d.rate+ item_doc.vfcu *(so_doc.lme1-so_doc.lme2);
| 32.846154 | 115 | 0.76815 |
ace54fb2fe46264de426d246fa1576aa895757fb | 1,982 | py | Python | project/access/management/commands/importtokens.py | markus456/asylum | 45e77fc346b7e7c8514a8641412c7b4f014b76c5 | [
"MIT"
] | 1 | 2017-04-08T21:31:37.000Z | 2017-04-08T21:31:37.000Z | project/access/management/commands/importtokens.py | markus456/asylum | 45e77fc346b7e7c8514a8641412c7b4f014b76c5 | [
"MIT"
] | 9 | 2016-01-23T22:40:26.000Z | 2021-09-13T17:44:11.000Z | project/access/management/commands/importtokens.py | markus456/asylum | 45e77fc346b7e7c8514a8641412c7b4f014b76c5 | [
"MIT"
] | 1 | 2017-04-08T22:13:42.000Z | 2017-04-08T22:13:42.000Z | # -*- coding: utf-8 -*-
import csv
from access.models import AccessType, Grant, Token, TokenType
from django.core.management.base import BaseCommand, CommandError
from members.models import Member
class Command(BaseCommand):
    # Fixed: the previous help text ("Exports tokens ... to sqlite file") was a
    # copy-paste leftover from an export command; this command imports from CSV.
    help = 'Imports tokens and their access grants from a CSV file'

    def add_arguments(self, parser):
        """Register positional arguments: CSV path, token type PK, access type PKs."""
        parser.add_argument('filepath')
        parser.add_argument('tokentype')
        parser.add_argument('accessids')

    def handle(self, *args, **options):
        """Import tokens from a semicolon-delimited CSV.

        Expected columns (after a header row): token value, member email,
        revoked flag ('True'/other). Skips tokens that already exist and rows
        whose member email is unknown; grants the configured access types to
        each imported member.
        """
        ttype = TokenType.objects.get(pk=int(options['tokentype']))
        atype_ids = [int(x) for x in options['accessids'].split(',')]
        grant_atypes = AccessType.objects.filter(pk__in=atype_ids)
        verbose = options['verbosity'] > 0
        with open(options['filepath'], 'rt') as fp:
            reader = csv.reader(fp, delimiter=";")
            # Skip the header row (previously done with a manual line counter).
            next(reader, None)
            for row in reader:
                # exists() avoids the full COUNT the previous .count() issued.
                if Token.objects.filter(value=row[0]).exists():
                    if verbose:
                        print("Card %s already exists" % row[0])
                    continue
                # EAFP single query instead of filter().count() followed by get().
                try:
                    member = Member.objects.get(email=row[1])
                except Member.DoesNotExist:
                    if verbose:
                        print("Member with email %s not found" % row[1])
                    continue
                token = Token(owner=member, value=row[0], ttype=ttype)
                if row[2] == 'True':
                    token.revoked = True
                token.save()
                if verbose:
                    print("Created token %s" % token)
                for access in grant_atypes:
                    grant, created = Grant.objects.get_or_create(owner=member, atype=access)
                    if created and verbose:
                        print("Created grant %s" % (grant))
ace551546d7e46f7d2b7124907d38fd01889e92b | 21,892 | py | Python | nautobot/dcim/models/racks.py | jfach/nautobot | 8965c30ffb0c1ea64f9710eada7fb49a6f2a4b62 | [
"Apache-2.0"
] | 2 | 2021-11-01T10:17:02.000Z | 2021-11-08T08:35:44.000Z | nautobot/dcim/models/racks.py | jfach/nautobot | 8965c30ffb0c1ea64f9710eada7fb49a6f2a4b62 | [
"Apache-2.0"
] | null | null | null | nautobot/dcim/models/racks.py | jfach/nautobot | 8965c30ffb0c1ea64f9710eada7fb49a6f2a4b62 | [
"Apache-2.0"
] | 1 | 2021-02-24T23:02:28.000Z | 2021-02-24T23:02:28.000Z | from collections import OrderedDict
from django.conf import settings
from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from django.db.models import Count, Q, Sum
from django.urls import reverse
from mptt.models import MPTTModel, TreeForeignKey
from nautobot.dcim.choices import *
from nautobot.dcim.constants import *
from nautobot.dcim.elevations import RackElevationSVG
from nautobot.extras.models import ObjectChange, StatusModel
from nautobot.extras.utils import extras_features
from nautobot.core.models.generics import OrganizationalModel, PrimaryModel
from nautobot.utilities.choices import ColorChoices
from nautobot.utilities.fields import ColorField, NaturalOrderingField, JSONArrayField
from nautobot.utilities.mptt import TreeManager
from nautobot.utilities.utils import array_to_string, serialize_object, UtilizationData
from .device_components import PowerOutlet, PowerPort
from .devices import Device
from .power import PowerFeed
__all__ = (
"Rack",
"RackGroup",
"RackReservation",
"RackRole",
)
#
# Racks
#
@extras_features(
    "custom_fields",
    "custom_validators",
    "export_templates",
    "graphql",
    "relationships",
)
class RackGroup(MPTTModel, OrganizationalModel):
    """
    Racks can be grouped as subsets within a Site. The scope of a group will depend on how Sites are defined. For
    example, if a Site spans a corporate campus, a RackGroup might be defined to represent each building within that
    campus. If a Site instead represents a single building, a RackGroup might represent a single room or floor.
    """
    # name/slug are unique only per Site (see Meta.unique_together below).
    name = models.CharField(max_length=100)
    slug = models.SlugField(max_length=100)
    site = models.ForeignKey(to="dcim.Site", on_delete=models.CASCADE, related_name="rack_groups")
    # Self-referential MPTT link allowing nested groups (building -> floor -> room).
    parent = TreeForeignKey(
        to="self",
        on_delete=models.CASCADE,
        related_name="children",
        blank=True,
        null=True,
        db_index=True,
    )
    description = models.CharField(max_length=200, blank=True)
    objects = TreeManager()
    csv_headers = ["site", "parent", "name", "slug", "description"]
    class Meta:
        ordering = ["site", "name"]
        unique_together = [
            ["site", "name"],
            ["site", "slug"],
        ]
    class MPTTMeta:
        order_insertion_by = ["name"]
    def __str__(self):
        """Return the group name."""
        return self.name
    def get_absolute_url(self):
        """Return the detail-view URL for this rack group."""
        return reverse("dcim:rackgroup", args=[self.pk])
    def to_csv(self):
        """Return this group's field values in csv_headers order."""
        return (
            self.site,
            self.parent.name if self.parent else "",
            self.name,
            self.slug,
            self.description,
        )
    def to_objectchange(self, action):
        """Serialize this group for the change log, dropping MPTT bookkeeping fields."""
        # Remove MPTT-internal fields
        return ObjectChange(
            changed_object=self,
            object_repr=str(self),
            action=action,
            object_data=serialize_object(self, exclude=["level", "lft", "rght", "tree_id"]),
        )
    def clean(self):
        """Validate that a parent group, if set, belongs to the same site."""
        super().clean()
        # Parent RackGroup (if any) must belong to the same Site
        if self.parent and self.parent.site != self.site:
            raise ValidationError(f"Parent rack group ({self.parent}) must belong to the same site ({self.site})")
@extras_features(
    "custom_fields",
    "custom_validators",
    "graphql",
    "relationships",
)
class RackRole(OrganizationalModel):
    """
    Racks can be organized by functional role, similar to Devices.
    """
    # Unlike RackGroup, role name/slug are globally unique.
    name = models.CharField(max_length=100, unique=True)
    slug = models.SlugField(max_length=100, unique=True)
    # Display color used when rendering racks of this role.
    color = ColorField(default=ColorChoices.COLOR_GREY)
    description = models.CharField(
        max_length=200,
        blank=True,
    )
    csv_headers = ["name", "slug", "color", "description"]
    class Meta:
        ordering = ["name"]
    def __str__(self):
        """Return the role name."""
        return self.name
    def get_absolute_url(self):
        """Return the detail-view URL for this rack role."""
        return reverse("dcim:rackrole", args=[self.pk])
    def to_csv(self):
        """Return this role's field values in csv_headers order."""
        return (
            self.name,
            self.slug,
            self.color,
            self.description,
        )
@extras_features(
    "custom_fields",
    "custom_links",
    "custom_validators",
    "export_templates",
    "graphql",
    "relationships",
    "statuses",
    "webhooks",
)
class Rack(PrimaryModel, StatusModel):
    """
    Devices are housed within Racks. Each rack has a defined height measured in rack units, and a front and rear face.
    Each Rack is assigned to a Site and (optionally) a RackGroup.
    """
    name = models.CharField(max_length=100)
    # Shadow field maintained for natural-order sorting of `name` (see Meta.ordering).
    _name = NaturalOrderingField(target_field="name", max_length=100, blank=True)
    facility_id = models.CharField(
        max_length=50,
        blank=True,
        null=True,
        verbose_name="Facility ID",
        help_text="Locally-assigned identifier",
    )
    site = models.ForeignKey(to="dcim.Site", on_delete=models.PROTECT, related_name="racks")
    group = models.ForeignKey(
        to="dcim.RackGroup",
        on_delete=models.SET_NULL,
        related_name="racks",
        blank=True,
        null=True,
        help_text="Assigned group",
    )
    tenant = models.ForeignKey(
        to="tenancy.Tenant",
        on_delete=models.PROTECT,
        related_name="racks",
        blank=True,
        null=True,
    )
    role = models.ForeignKey(
        to="dcim.RackRole",
        on_delete=models.PROTECT,
        related_name="racks",
        blank=True,
        null=True,
        help_text="Functional role",
    )
    serial = models.CharField(max_length=50, blank=True, verbose_name="Serial number")
    asset_tag = models.CharField(
        max_length=50,
        blank=True,
        null=True,
        unique=True,
        verbose_name="Asset tag",
        help_text="A unique tag used to identify this rack",
    )
    type = models.CharField(choices=RackTypeChoices, max_length=50, blank=True, verbose_name="Type")
    width = models.PositiveSmallIntegerField(
        choices=RackWidthChoices,
        default=RackWidthChoices.WIDTH_19IN,
        verbose_name="Width",
        help_text="Rail-to-rail width",
    )
    u_height = models.PositiveSmallIntegerField(
        default=RACK_U_HEIGHT_DEFAULT,
        verbose_name="Height (U)",
        validators=[MinValueValidator(1), MaxValueValidator(100)],
        help_text="Height in rack units",
    )
    desc_units = models.BooleanField(
        default=False,
        verbose_name="Descending units",
        help_text="Units are numbered top-to-bottom",
    )
    outer_width = models.PositiveSmallIntegerField(blank=True, null=True, help_text="Outer dimension of rack (width)")
    outer_depth = models.PositiveSmallIntegerField(blank=True, null=True, help_text="Outer dimension of rack (depth)")
    outer_unit = models.CharField(
        max_length=50,
        choices=RackDimensionUnitChoices,
        blank=True,
    )
    comments = models.TextField(blank=True)
    images = GenericRelation(to="extras.ImageAttachment")
    csv_headers = [
        "site",
        "group",
        "name",
        "facility_id",
        "tenant",
        "status",
        "role",
        "type",
        "serial",
        "asset_tag",
        "width",
        "u_height",
        "desc_units",
        "outer_width",
        "outer_depth",
        "outer_unit",
        "comments",
    ]
    clone_fields = [
        "site",
        "group",
        "tenant",
        "status",
        "role",
        "type",
        "width",
        "u_height",
        "desc_units",
        "outer_width",
        "outer_depth",
        "outer_unit",
    ]
    class Meta:
        ordering = ("site", "group", "_name")  # (site, group, name) may be non-unique
        unique_together = (
            # Name and facility_id must be unique *only* within a RackGroup
            ("group", "name"),
            ("group", "facility_id"),
        )
    def __str__(self):
        """Return the display name (falls back to the default model repr)."""
        return self.display or super().__str__()
    def get_absolute_url(self):
        """Return the detail-view URL for this rack."""
        return reverse("dcim:rack", args=[self.pk])
    def clean(self):
        """Validate group/site consistency, outer dimensions, and installed-device fit."""
        super().clean()
        # Validate group/site assignment
        if self.site and self.group and self.group.site != self.site:
            raise ValidationError(f"Assigned rack group must belong to parent site ({self.site}).")
        # Validate outer dimensions and unit
        if (self.outer_width is not None or self.outer_depth is not None) and not self.outer_unit:
            raise ValidationError("Must specify a unit when setting an outer width/depth")
        elif self.outer_width is None and self.outer_depth is None:
            self.outer_unit = ""
        if self.present_in_database:
            # Validate that Rack is tall enough to house the installed Devices
            top_device = Device.objects.filter(rack=self).exclude(position__isnull=True).order_by("-position").first()
            if top_device:
                min_height = top_device.position + top_device.device_type.u_height - 1
                if self.u_height < min_height:
                    raise ValidationError(
                        {
                            "u_height": "Rack must be at least {}U tall to house currently installed devices.".format(
                                min_height
                            )
                        }
                    )
            # Validate that Rack was assigned a group of its same site, if applicable
            if self.group:
                if self.group.site != self.site:
                    raise ValidationError({"group": "Rack group must be from the same site, {}.".format(self.site)})
    def to_csv(self):
        """Return this rack's field values in csv_headers order."""
        return (
            self.site.name,
            self.group.name if self.group else None,
            self.name,
            self.facility_id,
            self.tenant.name if self.tenant else None,
            self.get_status_display(),
            self.role.name if self.role else None,
            self.get_type_display() if self.type else None,
            self.serial,
            self.asset_tag,
            self.width,
            self.u_height,
            self.desc_units,
            self.outer_width,
            self.outer_depth,
            self.outer_unit,
            self.comments,
        )
    @property
    def units(self):
        # Unit numbering runs bottom-to-top unless desc_units is set.
        if self.desc_units:
            return range(1, self.u_height + 1)
        else:
            return reversed(range(1, self.u_height + 1))
    @property
    def display(self):
        # Include the facility ID alongside the name when one is assigned.
        if self.facility_id:
            return f"{self.name} ({self.facility_id})"
        return self.name
    def get_rack_units(
        self,
        user=None,
        face=DeviceFaceChoices.FACE_FRONT,
        exclude=None,
        expand_devices=True,
    ):
        """
        Return a list of rack units as dictionaries. Example: {'device': None, 'face': 0, 'id': 48, 'name': 'U48'}
        Each key 'device' is either a Device or None. By default, multi-U devices are repeated for each U they occupy.
        :param face: Rack face (front or rear)
        :param user: User instance to be used for evaluating device view permissions. If None, all devices
            will be included.
        :param exclude: PK of a Device to exclude (optional); helpful when relocating a Device within a Rack
        :param expand_devices: When True, all units that a device occupies will be listed with each containing a
            reference to the device. When False, only the bottom most unit for a device is included and that unit
            contains a height attribute for the device
        """
        elevation = OrderedDict()
        for u in self.units:
            elevation[u] = {
                "id": u,
                "name": f"U{u}",
                "face": face,
                "device": None,
                "occupied": False,
            }
        # Add devices to rack units list
        if self.present_in_database:
            # Retrieve all devices installed within the rack
            queryset = (
                Device.objects.prefetch_related("device_type", "device_type__manufacturer", "device_role")
                .annotate(devicebay_count=Count("devicebays"))
                .exclude(pk=exclude)
                .filter(rack=self, position__gt=0, device_type__u_height__gt=0)
                .filter(Q(face=face) | Q(device_type__is_full_depth=True))
            )
            # Determine which devices the user has permission to view
            permitted_device_ids = []
            if user is not None:
                permitted_device_ids = self.devices.restrict(user, "view").values_list("pk", flat=True)
            for device in queryset:
                if expand_devices:
                    for u in range(device.position, device.position + device.device_type.u_height):
                        if user is None or device.pk in permitted_device_ids:
                            elevation[u]["device"] = device
                            elevation[u]["occupied"] = True
                else:
                    if user is None or device.pk in permitted_device_ids:
                        elevation[device.position]["device"] = device
                        elevation[device.position]["occupied"] = True
                        elevation[device.position]["height"] = device.device_type.u_height
                    # Collapse the remaining units the device spans into its bottom unit.
                    for u in range(
                        device.position + 1,
                        device.position + device.device_type.u_height,
                    ):
                        elevation.pop(u, None)
        return [u for u in elevation.values()]
    def get_available_units(self, u_height=1, rack_face=None, exclude=None):
        """
        Return a list of units within the rack available to accommodate a device of a given U height (default 1).
        Optionally exclude one or more devices when calculating empty units (needed when moving a device from one
        position to another within a rack).
        :param u_height: Minimum number of contiguous free units required
        :param rack_face: The face of the rack (front or rear) required; 'None' if device is full depth
        :param exclude: List of devices IDs to exclude (useful when moving a device within a rack)
        """
        # Gather all devices which consume U space within the rack
        devices = self.devices.prefetch_related("device_type").filter(position__gte=1)
        if exclude is not None:
            devices = devices.exclude(pk__in=exclude)
        # Initialize the rack unit skeleton
        units = list(range(1, self.u_height + 1))
        # Remove units consumed by installed devices
        for d in devices:
            if rack_face is None or d.face == rack_face or d.device_type.is_full_depth:
                for u in range(d.position, d.position + d.device_type.u_height):
                    try:
                        units.remove(u)
                    except ValueError:
                        # Found overlapping devices in the rack!
                        pass
        # Remove units without enough space above them to accommodate a device of the specified height
        available_units = []
        for u in units:
            if set(range(u, u + u_height)).issubset(units):
                available_units.append(u)
        return list(reversed(available_units))
    def get_reserved_units(self):
        """
        Return a dictionary mapping all reserved units within the rack to their reservation.
        """
        reserved_units = {}
        for r in self.reservations.all():
            for u in r.units:
                reserved_units[u] = r
        return reserved_units
    def get_elevation_svg(
        self,
        face=DeviceFaceChoices.FACE_FRONT,
        user=None,
        unit_width=settings.RACK_ELEVATION_DEFAULT_UNIT_WIDTH,
        unit_height=settings.RACK_ELEVATION_DEFAULT_UNIT_HEIGHT,
        legend_width=RACK_ELEVATION_LEGEND_WIDTH_DEFAULT,
        include_images=True,
        base_url=None,
    ):
        """
        Return an SVG of the rack elevation
        :param face: Enum of [front, rear] representing the desired side of the rack elevation to render
        :param user: User instance to be used for evaluating device view permissions. If None, all devices
            will be included.
        :param unit_width: Width in pixels for the rendered drawing
        :param unit_height: Height of each rack unit for the rendered drawing. Note this is not the total
            height of the elevation
        :param legend_width: Width of the unit legend, in pixels
        :param include_images: Embed front/rear device images where available
        :param base_url: Base URL for links and images. If none, URLs will be relative.
        """
        elevation = RackElevationSVG(self, user=user, include_images=include_images, base_url=base_url)
        return elevation.render(face, unit_width, unit_height, legend_width)
    def get_0u_devices(self):
        """Return devices mounted in this rack at position 0 (zero-U devices)."""
        return self.devices.filter(position=0)
    def get_utilization(self):
        """Gets utilization numerator and denominator for racks.
        Returns:
            UtilizationData: (numerator=Occupied Unit Count, denominator=U Height of the rack)
        """
        # Determine unoccupied units
        available_units = self.get_available_units()
        # Remove reserved units
        for u in self.get_reserved_units():
            if u in available_units:
                available_units.remove(u)
        # Return the numerator and denominator as percentage is to be calculated later where needed
        return UtilizationData(numerator=self.u_height - len(available_units), denominator=self.u_height)
    def get_power_utilization(self):
        """Determine the utilization numerator and denominator for power utilization on the rack.
        Returns:
            UtilizationData: (numerator, denominator)
        """
        powerfeeds = PowerFeed.objects.filter(rack=self)
        available_power_total = sum(pf.available_power for pf in powerfeeds)
        if not available_power_total:
            # No feeds (or zero capacity): avoid a meaningless ratio downstream.
            return UtilizationData(numerator=0, denominator=0)
        # Power ports cabled directly to this rack's feeds.
        pf_powerports = PowerPort.objects.filter(
            _cable_peer_type=ContentType.objects.get_for_model(PowerFeed),
            _cable_peer_id__in=powerfeeds.values_list("id", flat=True),
        )
        poweroutlets = PowerOutlet.objects.filter(power_port_id__in=pf_powerports)
        # Sum the allocated draw of device ports fed by those outlets.
        allocated_draw_total = (
            PowerPort.objects.filter(
                _cable_peer_type=ContentType.objects.get_for_model(PowerOutlet),
                _cable_peer_id__in=poweroutlets.values_list("id", flat=True),
            ).aggregate(Sum("allocated_draw"))["allocated_draw__sum"]
            or 0
        )
        return UtilizationData(numerator=allocated_draw_total, denominator=available_power_total)
@extras_features(
    "custom_fields",
    "custom_links",
    "custom_validators",
    "export_templates",
    "graphql",
    "relationships",
    "webhooks",
)
class RackReservation(PrimaryModel):
    """
    One or more reserved units within a Rack.
    """
    rack = models.ForeignKey(to="dcim.Rack", on_delete=models.CASCADE, related_name="reservations")
    # List of reserved unit numbers, stored as a JSON array of integers.
    units = JSONArrayField(base_field=models.PositiveSmallIntegerField())
    tenant = models.ForeignKey(
        to="tenancy.Tenant",
        on_delete=models.PROTECT,
        related_name="rackreservations",
        blank=True,
        null=True,
    )
    user = models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.PROTECT)
    description = models.CharField(max_length=200)
    csv_headers = [
        "site",
        "rack_group",
        "rack",
        "units",
        "tenant",
        "user",
        "description",
    ]
    class Meta:
        ordering = ["created"]
    def __str__(self):
        """Return a human-readable label naming the reserved rack."""
        return "Reservation for rack {}".format(self.rack)
    def get_absolute_url(self):
        """Return the detail-view URL for this reservation."""
        return reverse("dcim:rackreservation", args=[self.pk])
    def clean(self):
        """Validate that the reserved units exist in the rack and are not already reserved."""
        super().clean()
        if hasattr(self, "rack") and self.units:
            # Validate that all specified units exist in the Rack.
            invalid_units = [u for u in self.units if u not in self.rack.units]
            if invalid_units:
                raise ValidationError(
                    {
                        "units": "Invalid unit(s) for {}U rack: {}".format(
                            self.rack.u_height,
                            ", ".join([str(u) for u in invalid_units]),
                        ),
                    }
                )
            # Check that none of the units has already been reserved for this Rack.
            reserved_units = []
            for resv in self.rack.reservations.exclude(pk=self.pk):
                reserved_units += resv.units
            conflicting_units = [u for u in self.units if u in reserved_units]
            if conflicting_units:
                raise ValidationError(
                    {
                        "units": "The following units have already been reserved: {}".format(
                            ", ".join([str(u) for u in conflicting_units]),
                        )
                    }
                )
    def to_csv(self):
        """Return this reservation's field values in csv_headers order."""
        return (
            self.rack.site.name,
            self.rack.group if self.rack.group else None,
            self.rack.name,
            ",".join([str(u) for u in self.units]),
            self.tenant.name if self.tenant else None,
            self.user.username,
            self.description,
        )
    @property
    def unit_list(self):
        # Compact human-readable form of the reserved units (e.g. "1-3,5").
        return array_to_string(self.units)
| 34.91547 | 118 | 0.609903 |
ace5517924d855839b119f1a9e898184d43c8277 | 1,411 | py | Python | earthshotsoil/_nbdev.py | qAp/earthshotsoil | 29386e80f4e0188cd69334d7ddb526d923732f14 | [
"Apache-2.0"
] | null | null | null | earthshotsoil/_nbdev.py | qAp/earthshotsoil | 29386e80f4e0188cd69334d7ddb526d923732f14 | [
"Apache-2.0"
] | null | null | null | earthshotsoil/_nbdev.py | qAp/earthshotsoil | 29386e80f4e0188cd69334d7ddb526d923732f14 | [
"Apache-2.0"
] | null | null | null | # AUTOGENERATED BY NBDEV! DO NOT EDIT!
__all__ = ["index", "modules", "custom_doc_links", "git_url"]

# Maps each exported symbol to the notebook that defines it.
index = {
    "Path.ls": "00_core.ipynb",
    "DIR_DATA": "01_data.wise3.ipynb",
    "pth_csv": "01_data.env.ipynb",
    "ENCOV": "01_data.env.ipynb",
    "pth_site": "01_data.wise3.ipynb",
    "site": "01_data.wise3.ipynb",
    "site.columns": "01_data.wise3.ipynb",
    "vars_loc": "01_data.wise3.ipynb",
    "vars_time": "01_data.wise3.ipynb",
    "pth_horizon": "01_data.wise3.ipynb",
    "horizon": "01_data.wise3.ipynb",
    "horizon.columns": "01_data.wise3.ipynb",
    "vars_cont": "01_data.wise3.ipynb",
    "vars_disc": "01_data.wise3.ipynb",
    "vars_id": "01_data.wise3.ipynb",
    "depth_cols": "01_data.wise3.ipynb",
    "horizon_cont": "01_data.wise3.ipynb",
    "horizon_mean_cont": "01_data.wise3.ipynb",
    "horizon_std_cont": "01_data.wise3.ipynb",
    "imagify_profile": "01_data.wise3.ipynb",
    "plt_profile_image": "01_data.wise3.ipynb",
    "hv_depth_variable_image": "01_data.wise3.ipynb",
    "jargon": "jargon.ipynb",
}

# Python modules generated from the notebooks above.
modules = [
    "core.py",
    "data/env.py",
    "data/wise3.py",
    "jargon.py",
]

doc_url = "https://qAp.github.io/earthshotsoil/"

git_url = "https://github.com/qAp/earthshotsoil/tree/master/"


def custom_doc_links(name):
    """Return a custom documentation URL for *name*; this project defines none."""
    return None
| 36.179487 | 61 | 0.600992 |
ace551eeada96253a57fb2d844c84b1397e9d7f5 | 385 | py | Python | ScopeReaders/__init__.py | smisra87/ScopeReaders | bfce7944a9d005d6ae03923e6b5767056fb3326b | [
"MIT"
] | null | null | null | ScopeReaders/__init__.py | smisra87/ScopeReaders | bfce7944a9d005d6ae03923e6b5767056fb3326b | [
"MIT"
] | null | null | null | ScopeReaders/__init__.py | smisra87/ScopeReaders | bfce7944a9d005d6ae03923e6b5767056fb3326b | [
"MIT"
"""
The ScopeReaders package
Submodules
----------
.. autosummary::
    :toctree: _autosummary
"""
from .__version__ import version as __version__

# Traditional hierarchical approach - importing submodules
# Making things easier by surfacing all low-level modules directly:
from ScopeReaders import em, generic, ion, spm

# Public names re-exported from the package namespace.
__all__ = ['__version__', 'em', 'generic', 'ion', 'spm']
| 19.25 | 67 | 0.722078 |
ace552105ec1b67303ec2fa8e434d687d16bf1e1 | 4,887 | py | Python | model/train_classifier.py | ranjeetraj2005/Disaster_Response_System | 5b4f10c43ae99afc823807968785c4a307ade335 | [
"MIT"
] | null | null | null | model/train_classifier.py | ranjeetraj2005/Disaster_Response_System | 5b4f10c43ae99afc823807968785c4a307ade335 | [
"MIT"
] | null | null | null | model/train_classifier.py | ranjeetraj2005/Disaster_Response_System | 5b4f10c43ae99afc823807968785c4a307ade335 | [
"MIT"
# Standard library
import string
import sys

# Third-party
import joblib
import nltk
import pandas as pd
from sklearn.decomposition import TruncatedSVD
from sklearn.ensemble import RandomForestClassifier
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics import classification_report
from sklearn.model_selection import GridSearchCV, train_test_split
from sklearn.multioutput import MultiOutputClassifier
from sklearn.pipeline import Pipeline
from sqlalchemy import create_engine

# Fetch the stopword corpus once, at import time.
nltk.download('stopwords')

# Module-level text-processing helpers shared by tokenize().
STOP_WORDS = nltk.corpus.stopwords.words("english")
lemmatizer = nltk.stem.wordnet.WordNetLemmatizer()
# Translation table that deletes all ASCII punctuation from a string.
PUNCTUATION_TABLE = str.maketrans('', '', string.punctuation)
# NOTE(review): nltk.word_tokenize and WordNetLemmatizer also require the
# 'punkt' and 'wordnet' corpora, but only 'stopwords' is downloaded here —
# confirm they are provisioned elsewhere.
def load_data(database_filepath):
    """Load features, labels, and category names from the sqlite database.

    Args:
        database_filepath (str): path of the sqlite database file

    Returns:
        X (pandas Series): the raw message text
        Y (pandas DataFrame): one column per classification label
        category_names (list): names of the label columns
    """
    engine = create_engine('sqlite:///{}'.format(database_filepath))
    messages = pd.read_sql_table('labeled_messages', engine)
    engine.dispose()

    # Messages are the only feature; everything except metadata is a label.
    X = messages['message']
    Y = messages.drop(['message', 'genre', 'id', 'original'], axis=1)
    category_names = Y.columns.tolist()

    return X, Y, category_names
def tokenize(text):
    """Normalize, tokenize, and lemmatize a message.

    Punctuation is stripped, the text is lower-cased and split into word
    tokens, stopwords are dropped, and surviving words are lemmatized.

    Args:
        text (str): raw message text

    Returns:
        list: cleaned word tokens
    """
    # Strip punctuation and normalize case before tokenizing.
    normalized = text.translate(PUNCTUATION_TABLE).lower()

    cleaned = []
    for word in nltk.word_tokenize(normalized):
        if word not in STOP_WORDS:
            cleaned.append(lemmatizer.lemmatize(word))
    return cleaned
def build_model():
    """Build the GridSearchCV object to be used as the model.

    The pipeline chains TF-IDF vectorization, truncated-SVD dimensionality
    reduction, and a multi-output random-forest classifier; the grid search
    tunes vectorizer and forest hyperparameters with 3-fold CV.

    Args:
        None

    Returns:
        cv (sklearn.model_selection.GridSearchCV): unfitted grid-search model
    """
    clf = RandomForestClassifier(n_estimators=100)

    # The pipeline has tfidf, dimensionality reduction, and classifier.
    pipeline = Pipeline([
        ('tfidf', TfidfVectorizer(tokenizer=tokenize)),
        ('best', TruncatedSVD(n_components=100)),
        ('clf', MultiOutputClassifier(clf))
    ])

    # The param grid still takes ~2h to search even at this modest size.
    param_grid = {
        'tfidf__ngram_range': ((1, 1), (1, 2)),
        'tfidf__max_df': [0.8, 1.0],
        'tfidf__max_features': [None, 10000],
        'clf__estimator__n_estimators': [50, 100],
        'clf__estimator__min_samples_split': [2, 4]
    }

    # Initialize a grid-search object parallelized across all cores.
    # Fixed: the original line lacked a leading '#' on its comment and had
    # mismatched parentheses (`verbose=10), n_jobs=-1)`) that left n_jobs
    # outside the call and made the module unparseable.
    cv = GridSearchCV(pipeline, param_grid, cv=3, verbose=10, n_jobs=-1)

    return cv
def evaluate_model(model, X_test, Y_test, category_names):
    """Print the multi-output classification report for the test set.

    Args:
        model: the fitted scikit-learn estimator
        X_test (pandas Series): test-set messages
        Y_test (pandas DataFrame): true test-set labels
        category_names (list): names of the label columns

    Returns:
        None
    """
    # Predict every output column at once, then report per-category metrics.
    predictions = model.predict(X_test)
    report = classification_report(Y_test, predictions, target_names=category_names)
    print(report)
def save_model(model, model_filepath):
    """Persist the fitted model to disk with joblib.

    Args:
        model: the fitted scikit-learn estimator
        model_filepath (str): destination path for the serialized model

    Returns:
        None
    """
    joblib.dump(model, model_filepath)
def main():
    """Command-line entry point: load data, train, evaluate, and save.

    Expects two positional arguments — the sqlite database path and the
    pickle path for the trained model — and prints usage help otherwise.
    """
    if len(sys.argv) != 3:
        # Wrong argument count: show usage and bail out.
        print('Please provide the filepath of the disaster messages database '
              'as the first argument and the filepath of the pickle file to '
              'save the model to as the second argument. \n\nExample: python '
              'train_classifier.py ../data/DisasterResponse.db classifier.pkl')
        return

    database_filepath, model_filepath = sys.argv[1:]

    print('Loading data...\n DATABASE: {}'.format(database_filepath))
    X, Y, category_names = load_data(database_filepath)
    X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2)

    print('Building model...')
    model = build_model()

    print('Training model...')
    model.fit(X_train, Y_train)

    print('Evaluating model...')
    evaluate_model(model, X_test, Y_test, category_names)

    print('Saving model...\n MODEL: {}'.format(model_filepath))
    save_model(model, model_filepath)

    print('Trained model saved!')


if __name__ == '__main__':
    main()
| 31.529032 | 80 | 0.668713 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.