| Column | Type | Length / range / values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3 to 616 |
| content_id | string | length 40 |
| detected_licenses | list | 0 to 112 items |
| license_type | string | 2 classes |
| repo_name | string | length 5 to 115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64 (nullable) | 4.92k to 681M |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] (nullable) | 2012-06-04 01:52:49 to 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us] (nullable) | 2008-05-22 07:58:19 to 2023-08-21 12:35:19 |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 classes |
| content | string | length 3 to 10.2M |
| authors | list | 1 item |
| author_id | string | length 1 to 132 |

blob_id: 49e35d732d050a8de37689f7459907f5c429e2fa
directory_id: 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
path: /alipay/aop/api/request/AlipayCloudDevopsDictQueryRequest.py
content_id: 980977a75413bdc34d657ea1e8ece1c6b0ddb700
detected_licenses: ["Apache-2.0"] | license_type: permissive
repo_name: alipay/alipay-sdk-python-all
snapshot_id: 8bd20882852ffeb70a6e929038bf88ff1d1eff1c
revision_id: 1fad300587c9e7e099747305ba9077d4cd7afde9
branch_name: refs/heads/master
visit_date: 2023-08-27T21:35:01.778771 | revision_date: 2023-08-23T07:12:26 | committer_date: 2023-08-23T07:12:26
github_id: 133,338,689 | star_events_count: 247 | fork_events_count: 70
gha_license_id: Apache-2.0 | gha_event_created_at: 2023-04-25T04:54:02 | gha_created_at: 2018-05-14T09:40:54 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 3,955 | extension: py
content:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayCloudDevopsDictQueryModel import AlipayCloudDevopsDictQueryModel
class AlipayCloudDevopsDictQueryRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._biz_content = None
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def biz_content(self):
return self._biz_content
@biz_content.setter
def biz_content(self, value):
if isinstance(value, AlipayCloudDevopsDictQueryModel):
self._biz_content = value
else:
self._biz_content = AlipayCloudDevopsDictQueryModel.from_alipay_dict(value)
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._return_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'alipay.cloud.devops.dict.query'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.biz_content:
if hasattr(self.biz_content, 'to_alipay_dict'):
params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
else:
params['biz_content'] = self.biz_content
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
def get_multipart_params(self):
multipart_params = dict()
return multipart_params
authors: ["jishupei.jsp@alibaba-inc.com"] | author_id: jishupei.jsp@alibaba-inc.com
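As a usage sketch for the request class in the record above (not part of the dataset row): the class is driven with nothing but its setters and get_params(). This assumes the alipay-sdk-python-all package that ships this module is installed; the callback URL and token value below are hypothetical.

```python
# Hypothetical usage of AlipayCloudDevopsDictQueryRequest from the record above;
# requires the alipay-sdk-python-all package, parameter values are made up.
from alipay.aop.api.request.AlipayCloudDevopsDictQueryRequest import (
    AlipayCloudDevopsDictQueryRequest,
)

request = AlipayCloudDevopsDictQueryRequest()
request.notify_url = "https://example.com/notify"        # hypothetical callback URL
request.add_other_text_param("app_auth_token", "TOKEN")  # hypothetical extra parameter
print(request.get_params())  # contains the fixed method name and version "1.0"
```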

blob_id: f8597c8ce3dfbc755d8bf76575047963a0ec8beb
directory_id: 6c74c8babd2f94cbed185af75940774a2750f3e5
path: /src/georinex/base.py
content_id: ccfff852795a64c572afd92589a410550c92cf2e
detected_licenses: ["MIT"] | license_type: permissive
repo_name: geospace-code/georinex
snapshot_id: c28c8a17196bb1fa8093c818ce43bcb74ec52171
revision_id: c689a5a6bc2ffb68bc055f150f1da1b6bab12812
branch_name: refs/heads/main
visit_date: 2023-04-13T15:01:50.903458 | revision_date: 2022-12-27T19:25:58 | committer_date: 2022-12-27T19:26:15
github_id: 34,296,204 | star_events_count: 106 | fork_events_count: 40
gha_license_id: MIT | gha_event_created_at: 2023-04-10T02:54:45 | gha_created_at: 2015-04-21T01:19:29 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 7,148 | extension: py
content:
from __future__ import annotations
import typing as T
from pathlib import Path
import xarray
from datetime import datetime, timedelta
import logging
from .rio import rinexinfo
from .obs2 import rinexobs2
from .obs3 import rinexobs3
from .nav2 import rinexnav2
from .nav3 import rinexnav3
from .sp3 import load_sp3
from .utils import _tlim
# for NetCDF compression. too high slows down with little space savings.
ENC = {"zlib": True, "complevel": 1, "fletcher32": True}
def load(
rinexfn: T.TextIO | str | Path,
out: Path = None,
use: set[str] = None,
tlim: tuple[datetime, datetime] = None,
useindicators: bool = False,
meas: list[str] = None,
verbose: bool = False,
*,
overwrite: bool = False,
fast: bool = True,
interval: float | int | timedelta = None,
):
"""
Reads OBS, NAV in RINEX 2.x and 3.x
Files / StringIO input may be plain ASCII text or compressed (including Hatanaka)
"""
if verbose:
logging.basicConfig(level=logging.INFO)
if isinstance(rinexfn, (str, Path)):
rinexfn = Path(rinexfn).expanduser()
# %% determine if/where to write NetCDF4/HDF5 output
outfn = None
if out:
out = Path(out).expanduser()
if out.is_dir():
outfn = out / (
rinexfn.name + ".nc"
) # not with_suffix to keep unique RINEX 2 filenames
elif out.suffix == ".nc":
outfn = out
else:
raise ValueError(f"not sure what output is wanted: {out}")
# %% main program
if tlim is not None:
if len(tlim) != 2:
raise ValueError("time bounds are specified as start stop")
if tlim[1] < tlim[0]:
raise ValueError("stop time must be after start time")
info = rinexinfo(rinexfn)
if info["rinextype"] == "nav":
return rinexnav(rinexfn, outfn, use=use, tlim=tlim, overwrite=overwrite)
elif info["rinextype"] == "obs":
return rinexobs(
rinexfn,
outfn,
use=use,
tlim=tlim,
useindicators=useindicators,
meas=meas,
verbose=verbose,
overwrite=overwrite,
fast=fast,
interval=interval,
)
assert isinstance(rinexfn, Path)
if info["rinextype"] == "sp3":
return load_sp3(rinexfn, outfn)
elif rinexfn.suffix == ".nc":
# outfn not used here, because we already have the converted file!
try:
nav = rinexnav(rinexfn)
except LookupError:
nav = None
try:
obs = rinexobs(rinexfn)
except LookupError:
obs = None
if nav is not None and obs is not None:
return {"nav": nav, "obs": rinexobs(rinexfn)}
elif nav is not None:
return nav
elif obs is not None:
return obs
else:
raise ValueError(f"No data of known format found in {rinexfn}")
else:
raise ValueError(f"What kind of RINEX file is: {rinexfn}")
def batch_convert(
path: Path,
glob: str,
out: Path,
use: set[str] = None,
tlim: tuple[datetime, datetime] = None,
useindicators: bool = False,
meas: list[str] = None,
verbose: bool = False,
*,
fast: bool = True,
):
path = Path(path).expanduser()
flist = (f for f in path.glob(glob) if f.is_file())
for fn in flist:
try:
load(
fn,
out,
use=use,
tlim=tlim,
useindicators=useindicators,
meas=meas,
verbose=verbose,
fast=fast,
)
except ValueError as e:
logging.error(f"{fn.name}: {e}")
def rinexnav(
fn: T.TextIO | str | Path,
outfn: Path = None,
use: set[str] = None,
group: str = "NAV",
tlim: tuple[datetime, datetime] = None,
*,
overwrite: bool = False,
) -> xarray.Dataset:
"""Read RINEX 2 or 3 NAV files"""
if isinstance(fn, (str, Path)):
fn = Path(fn).expanduser()
if fn.suffix == ".nc":
try:
return xarray.open_dataset(fn, group=group)
except OSError as e:
raise LookupError(f"Group {group} not found in {fn} {e}")
tlim = _tlim(tlim)
info = rinexinfo(fn)
if int(info["version"]) == 2:
nav = rinexnav2(fn, tlim=tlim)
elif int(info["version"]) == 3:
nav = rinexnav3(fn, use=use, tlim=tlim)
else:
raise LookupError(f"unknown RINEX {info} {fn}")
# %% optional output write
if outfn:
outfn = Path(outfn).expanduser()
wmode = _groupexists(outfn, group, overwrite)
enc = {k: ENC for k in nav.data_vars}
nav.to_netcdf(outfn, group=group, mode=wmode, encoding=enc)
return nav
# %% Observation File
def rinexobs(
fn: T.TextIO | Path,
outfn: Path = None,
use: set[str] = None,
group: str = "OBS",
tlim: tuple[datetime, datetime] = None,
useindicators: bool = False,
meas: list[str] = None,
verbose: bool = False,
*,
overwrite: bool = False,
fast: bool = True,
interval: float | int | timedelta = None,
):
"""
Read RINEX 2.x and 3.x OBS files in ASCII or GZIP (or Hatanaka)
"""
if isinstance(fn, (str, Path)):
fn = Path(fn).expanduser()
# %% NetCDF4
if fn.suffix == ".nc":
try:
return xarray.open_dataset(fn, group=group)
except OSError as e:
raise LookupError(f"Group {group} not found in {fn} {e}")
tlim = _tlim(tlim)
# %% version selection
info = rinexinfo(fn)
if int(info["version"]) in (1, 2):
obs = rinexobs2(
fn,
use,
tlim=tlim,
useindicators=useindicators,
meas=meas,
verbose=verbose,
fast=fast,
interval=interval,
)
elif int(info["version"]) == 3:
obs = rinexobs3(
fn,
use,
tlim=tlim,
useindicators=useindicators,
meas=meas,
verbose=verbose,
fast=fast,
interval=interval,
)
else:
raise ValueError(f"unknown RINEX {info} {fn}")
# %% optional output write
if outfn:
outfn = Path(outfn).expanduser()
wmode = _groupexists(outfn, group, overwrite)
enc = {k: ENC for k in obs.data_vars}
# Pandas >= 0.25.0 requires this, regardless of xarray version
if obs.time.dtype != "datetime64[ns]":
obs["time"] = obs.time.astype("datetime64[ns]")
obs.to_netcdf(outfn, group=group, mode=wmode, encoding=enc)
return obs
def _groupexists(fn: Path, group: str, overwrite: bool) -> str:
print(f"saving {group}:", fn)
if overwrite or not fn.is_file():
return "w"
# be sure there isn't already NAV in it
try:
xarray.open_dataset(fn, group=group)
raise ValueError(f"{group} already in {fn}")
except OSError:
pass
return "a"
authors: ["scivision@users.noreply.github.com"] | author_id: scivision@users.noreply.github.com
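base.py above is the top-level entry point of georinex, so a brief, hedged call of its load() function may help; the file name and time window below are hypothetical.

```python
# Hypothetical call into georinex.load() as defined in base.py above.
from datetime import datetime

import georinex as gr

obs = gr.load(
    "example.20o",   # hypothetical RINEX 2 OBS file
    use={"G"},       # GPS satellites only
    tlim=(datetime(2020, 1, 1, 0, 0), datetime(2020, 1, 1, 1, 0)),
    fast=True,
)
print(obs)           # an xarray.Dataset indexed by time and satellite
```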

blob_id: ee391734bbe1d920f7349971047cc74c0c565f36
directory_id: e9ef3cd143478660d098668a10e67544a42b5878
path: /Lib/corpuscrawler/crawl_mpx.py
content_id: 71bb3a7ee49333cc9c4fc1cee863a89f398c5aa2
detected_licenses: ["Apache-2.0"] | license_type: permissive
repo_name: google/corpuscrawler
snapshot_id: a5c790c19b26e6397b768ce26cf12bbcb641eb90
revision_id: 10adaecf4ed5a7d0557c8e692c186023746eb001
branch_name: refs/heads/master
visit_date: 2023-08-26T04:15:59.036883 | revision_date: 2022-04-20T08:18:11 | committer_date: 2022-04-20T08:18:11
github_id: 102,909,145 | star_events_count: 119 | fork_events_count: 40
gha_license_id: NOASSERTION | gha_event_created_at: 2022-04-20T08:18:12 | gha_created_at: 2017-09-08T22:21:03 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 799 | extension: py
content:
# coding: utf-8
# Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, print_function, unicode_literals
import re
def crawl(crawler):
    out = crawler.get_output(language='mpx')
    crawler.crawl_pngscriptures_org(out, language='mpx')
authors: ["sascha@brawer.ch"] | author_id: sascha@brawer.ch

blob_id: e0c09849f0aec5951bf94adaa9bc3656ac75f05f
directory_id: abc72a2f2072ab7a5a338e41d81c354324943b09
path: /MC 102 (Exemplos de aula)/eliminar_repeticao.py
content_id: 55c15d25c81d25f12a60900b67da3c9af6354681
detected_licenses: [] | license_type: no_license
repo_name: gigennari/mc102
snapshot_id: a3d39fd9a942c97ef477a9b59d7955f4269b202a
revision_id: fce680d5188a8dfb0bc1832d6f430cbcaf68ef55
branch_name: refs/heads/master
visit_date: 2023-04-05T01:40:58.839889 | revision_date: 2020-07-27T20:33:56 | committer_date: 2020-07-27T20:33:56
github_id: 354,130,720 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 453 | extension: py
content:
def eliminar_repeticao(lista1, lista2):
    lista_sem_rep = []
    freq_sem_rep = []
    for i in range(len(lista1)):
        if lista1[i] not in lista_sem_rep:
            lista_sem_rep.append(lista1[i])
            freq_sem_rep.append(lista2[i])
    return lista_sem_rep, freq_sem_rep


def main():
    lista1 = [3, 3, 6, 5, 8, 8, 10]
    lista2 = [2, 2, 1, 1, 2, 2, 1]
    lista3, lista4 = eliminar_repeticao(lista1, lista2)
    print(lista3)


main()
authors: ["g198010@dac.unicamp.br"] | author_id: g198010@dac.unicamp.br
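A quick check of eliminar_repeticao() from the record above (input values chosen here, not from the original file): duplicates in the first list are dropped and the frequency list is filtered in step.

```python
# Assumes eliminar_repeticao is defined as in the file above.
valores, freqs = eliminar_repeticao([3, 3, 6, 5], [2, 2, 1, 1])
print(valores)  # [3, 6, 5]
print(freqs)    # [2, 1, 1]
```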

blob_id: f7ee63e6b92678782ec9da34b96b0addaf69997c
directory_id: b9571590d8cc83a99293d777f57e5ebeea5bcc92
path: /spiders/DoctorSpider.py
content_id: 1cc8539b8017fa62c7ea2ce5c7a731be27f7fec8
detected_licenses: [] | license_type: no_license
repo_name: LiuQL2/Crawler_xywy_doctor_communication
snapshot_id: 585a0a3230f397640e5fc54506cd6585bfd04f57
revision_id: 3374f08ea34ae8ea7e96501188a4fec247c72b5d
branch_name: refs/heads/master
visit_date: 2020-06-30T13:28:01.048195 | revision_date: 2017-08-04T07:29:19 | committer_date: 2017-08-04T07:29:19
github_id: 74,369,626 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 1,508 | extension: py
content:
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
用来获取病例和心得帖子内容的类,传入一个帖子的URL,调用不同的方法得到不同的数据。
"""
# Author: Liu Qianlong <LiuQL2@163.com>
# Date: 2016.12.08
import datetime
import json
import sys
import urllib2
from BaseSpider import BaseSpider
reload(sys)
sys.setdefaultencoding('utf-8')
class DoctorSpider(BaseSpider):
    def __init__(self, url, crawl_number, try_number=20):
        self.target_url = url
        request = urllib2.Request(url=self.target_url, headers=self.get_header())
        self.status = True
        self.try_number = try_number
        self.crawl_number = crawl_number
        self.selector = None
        self.number_url = 'http://club.xywy.com/doctorShare/index.php?type=share_operation&uid=' + self.target_url.split('/')[4] + '&stat=14'

    def get_number(self):
        doc = self.process_url_request(self.number_url, xpath_type=False)
        if doc != None:
            doc = json.loads(doc)
            crawl_time = datetime.datetime.now().strftime('%Y-%m-%d')
            return {'attention_number': str(doc['attenNum']), 'fans_number': str(doc['fansNum']), 'web_number': str(doc['wbNum']), 'doctor_url': self.target_url, 'crawl_time': crawl_time, 'crawl_number': self.crawl_number}
        else:
            return None


if __name__ == '__main__':
    doctor = DoctorSpider(url='http://club.xywy.com/doc_card/55316663/blog')
    print doctor.get_number()
authors: ["LiuQL2@sina.com"] | author_id: LiuQL2@sina.com
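The least obvious line in DoctorSpider.__init__ above is how number_url is derived from the doctor-card URL; the split below shows which path segment becomes the uid (the URL is the one from the file's own __main__ block).

```python
url = 'http://club.xywy.com/doc_card/55316663/blog'
uid = url.split('/')[4]  # -> '55316663'
number_url = ('http://club.xywy.com/doctorShare/index.php'
              '?type=share_operation&uid=' + uid + '&stat=14')
print(number_url)
```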

blob_id: d71180f0bd321d3d7193738b32581743b75440f3
directory_id: 3257372291236aac1737b057c9ac6c61da9ccca0
path: /tutorials/W0D5_Statistics/solutions/W0D5_Tutorial2_Solution_281848de.py
content_id: 65dc66a635bd53e9c76bb3d72f597aebb3c00512
detected_licenses: ["CC-BY-4.0", "BSD-3-Clause", "MIT"] | license_type: permissive
repo_name: NeuromatchAcademy/precourse
snapshot_id: 230ead0d11ae7b0dba21c8df97695a1796e9797d
revision_id: b7f2432c6a68a7984ca923ceed8e07d5cfdb77c3
branch_name: refs/heads/main
visit_date: 2023-07-26T11:18:24.493966 | revision_date: 2023-07-09T14:42:49 | committer_date: 2023-07-09T14:42:49
github_id: 256,327,558 | star_events_count: 639 | fork_events_count: 174
gha_license_id: MIT | gha_event_created_at: 2023-07-09T14:42:50 | gha_created_at: 2020-04-16T20:54:03 | gha_language: Jupyter Notebook
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 621 | extension: py
content:
""" You will learn more about "Bayesian brains" and the theory surrounding
these ideas once the course begins. Here is a brief explanation: it may
be ideal for human brains to implement Bayesian inference by integrating "prior"
information the brain has about the world (memories, prior knowledge, etc.) with
new evidence that updates its "beliefs"/prior. This process seems to parallel
the brain's method of learning about its environment, making it a compelling
theory for many neuroscience researchers. One of the Bonus exercises below examines a possible
real world model for Bayesian inference: sound localization.
""";
authors: ["noreply@github.com"] | author_id: NeuromatchAcademy.noreply@github.com

blob_id: 50c2dde48b456f93aa0260584ded425981eeb60e
directory_id: e7e029a04319afce21c43317e2cc8f3dc92091ca
path: /pex/commands/command.py
content_id: 2104e893ca0f0b603d42816941bd46692371ae1a
detected_licenses: ["LicenseRef-scancode-unknown-license-reference", "Apache-2.0"] | license_type: permissive
repo_name: asherf/pex
snapshot_id: e451b28a088968736e47285396813f6a2b4c6058
revision_id: c15508ad1a2d48bdef9fbac677dbfa32927e6625
branch_name: refs/heads/master
visit_date: 2022-07-22T23:01:48.331316 | revision_date: 2022-07-05T14:41:35 | committer_date: 2022-07-05T14:41:35
github_id: 237,337,647 | star_events_count: 0 | fork_events_count: 0
gha_license_id: Apache-2.0 | gha_event_created_at: 2020-01-31T01:12:31 | gha_created_at: 2020-01-31T01:12:30 | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 13,469 | extension: py
content:
# Copyright 2020 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, print_function
import functools
import json
import logging
import os
import subprocess
import sys
import tempfile
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, Namespace, _ActionsContainer
from contextlib import contextmanager
from pex import pex_warnings
from pex.argparse import HandleBoolAction
from pex.common import safe_mkdtemp, safe_open
from pex.result import Error, Ok, Result
from pex.typing import TYPE_CHECKING, Generic, cast
from pex.variables import ENV, Variables
from pex.version import __version__
if TYPE_CHECKING:
from typing import (
IO,
Any,
Dict,
Iterable,
Iterator,
NoReturn,
Optional,
Sequence,
Type,
TypeVar,
)
import attr # vendor:skip
else:
from pex.third_party import attr
if TYPE_CHECKING:
_T = TypeVar("_T")
def try_run_program(
program, # type: str
args, # type: Iterable[str]
url=None, # type: Optional[str]
error=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Result
try:
subprocess.check_call([program] + list(args), **kwargs)
return Ok()
except OSError as e:
msg = [error] if error else []
msg.append("Do you have `{}` installed on the $PATH?: {}".format(program, e))
if url:
msg.append(
"Find more information on `{program}` at {url}.".format(program=program, url=url)
)
return Error("\n".join(msg))
except subprocess.CalledProcessError as e:
return Error(str(e), exit_code=e.returncode)
def try_open_file(
path, # type: str
error=None, # type: Optional[str]
):
# type: (...) -> Result
opener, url = (
("xdg-open", "https://www.freedesktop.org/wiki/Software/xdg-utils/")
if "Linux" == os.uname()[0]
else ("open", None)
)
with open(os.devnull, "wb") as devnull:
return try_run_program(opener, [path], url=url, error=error, stdout=devnull)
@attr.s(frozen=True)
class Command(object):
@staticmethod
def show_help(
parser, # type: ArgumentParser
*_args, # type: Any
**_kwargs # type: Any
):
# type: (...) -> NoReturn
parser.error("a subcommand is required")
@staticmethod
def register_global_arguments(
parser, # type: _ActionsContainer
include_verbosity=True, # type: bool
):
# type: (...) -> None
register_global_arguments(parser, include_verbosity=include_verbosity)
@classmethod
def name(cls):
# type: () -> str
return cls.__name__.lower()
@classmethod
def description(cls):
# type: () -> Optional[str]
return cls.__doc__
@classmethod
def add_arguments(cls, parser):
# type: (ArgumentParser) -> None
pass
options = attr.ib() # type: Namespace
class OutputMixin(object):
@staticmethod
def add_output_option(
parser, # type: _ActionsContainer
entity, # type: str
):
# type: (...) -> None
parser.add_argument(
"-o",
"--output",
metavar="PATH",
help=(
"A file to output the {entity} to; STDOUT by default or when `-` is "
"specified.".format(entity=entity)
),
)
@staticmethod
def is_stdout(options):
# type: (Namespace) -> bool
return options.output == "-" or not options.output
@classmethod
@contextmanager
def output(
cls,
options, # type: Namespace
binary=False, # type: bool
):
# type: (...) -> Iterator[IO]
if cls.is_stdout(options):
stdout = getattr(sys.stdout, "buffer", sys.stdout) if binary else sys.stdout
yield stdout
else:
with safe_open(options.output, mode="wb" if binary else "w") as out:
yield out
class JsonMixin(object):
@staticmethod
def add_json_options(
parser, # type: _ActionsContainer
entity, # type: str
include_switch=True, # type: bool
):
flags = ("-i", "--indent") if include_switch else ("--indent",)
parser.add_argument(
*flags,
type=int,
default=None,
help="Pretty-print {entity} json with the given indent.".format(entity=entity)
)
@staticmethod
def dump_json(
options, # type: Namespace
data, # type: Dict[str, Any]
out, # type: IO
**json_dump_kwargs # type: Any
):
json.dump(data, out, indent=options.indent, **json_dump_kwargs)
def register_global_arguments(
parser, # type: _ActionsContainer
include_verbosity=True, # type: bool
):
# type: (...) -> None
"""Register Pex global environment configuration options with the given parser.
:param parser: The parser to register global options with.
:param include_verbosity: Whether to include the verbosity option `-v`.
"""
group = parser.add_argument_group(title="Global options")
if include_verbosity:
group.add_argument(
"-v",
dest="verbosity",
action="count",
default=0,
help="Turn on logging verbosity, may be specified multiple times.",
)
group.add_argument(
"--emit-warnings",
"--no-emit-warnings",
dest="emit_warnings",
action=HandleBoolAction,
default=True,
help=(
"Emit runtime UserWarnings on stderr. If false, only emit them when PEX_VERBOSE "
"is set."
),
)
group.add_argument(
"--pex-root",
dest="pex_root",
default=None,
help=(
"Specify the pex root used in this invocation of pex "
"(if unspecified, uses {}).".format(ENV.PEX_ROOT)
),
)
group.add_argument(
"--disable-cache",
dest="disable_cache",
default=False,
action="store_true",
help="Disable caching in the pex tool entirely.",
)
group.add_argument(
"--cache-dir",
dest="cache_dir",
default=None,
help=(
"DEPRECATED: Use --pex-root instead. The local cache directory to use for speeding up "
"requirement lookups."
),
)
group.add_argument(
"--tmpdir",
dest="tmpdir",
default=tempfile.gettempdir(),
help="Specify the temporary directory Pex and its subprocesses should use.",
)
group.add_argument(
"--rcfile",
dest="rc_file",
default=None,
help=(
"An additional path to a pexrc file to read during configuration parsing, in addition "
"to reading `/etc/pexrc` and `~/.pexrc`. If `PEX_IGNORE_RCFILES=true`, then all rc "
"files will be ignored."
),
)
class GlobalConfigurationError(Exception):
"""Indicates an error processing global options."""
@contextmanager
def _configured_env(options):
# type: (Namespace) -> Iterator[None]
if options.rc_file or not ENV.PEX_IGNORE_RCFILES:
with ENV.patch(**Variables(rc=options.rc_file).copy()):
yield
else:
yield
@contextmanager
def global_environment(options):
# type: (Namespace) -> Iterator[Dict[str, str]]
"""Configures the Pex global environment.
This includes configuration of basic Pex infrastructure like logging, warnings and the
`PEX_ROOT` to use.
:param options: The global options registered by `register_global_arguments`.
:yields: The configured global environment.
:raises: :class:`GlobalConfigurationError` if invalid global option values were specified.
"""
if not hasattr(options, "rc_file"):
# We don't register the global args on the root command (but do on every subcommand).
# So if the user runs just `pex` with no subcommand we must not attempt to use those
# global args, including rc_file, which we check for here as a representative of the
# global args.
# Note that we can't use command_type here because the legacy command line parser in
# pex/bin/pex.py uses this function as well, and it doesn't set command_type.
with ENV.patch() as env:
yield env
with _configured_env(options):
verbosity = Variables.PEX_VERBOSE.strip_default(ENV)
if verbosity is None:
verbosity = getattr(options, "verbosity", 0)
emit_warnings = True
if not options.emit_warnings:
emit_warnings = False
if emit_warnings and ENV.PEX_EMIT_WARNINGS is not None:
emit_warnings = ENV.PEX_EMIT_WARNINGS
with ENV.patch(PEX_VERBOSE=str(verbosity), PEX_EMIT_WARNINGS=str(emit_warnings)):
pex_warnings.configure_warnings(env=ENV)
# Ensure the TMPDIR is an absolute path (So subprocesses that change CWD can find it)
# and that it exists.
tmpdir = os.path.realpath(options.tmpdir)
if not os.path.exists(tmpdir):
raise GlobalConfigurationError(
"The specified --tmpdir does not exist: {}".format(tmpdir)
)
if not os.path.isdir(tmpdir):
raise GlobalConfigurationError(
"The specified --tmpdir is not a directory: {}".format(tmpdir)
)
tempfile.tempdir = os.environ["TMPDIR"] = tmpdir
if options.cache_dir:
pex_warnings.warn("The --cache-dir option is deprecated, use --pex-root instead.")
if options.pex_root and options.cache_dir != options.pex_root:
raise GlobalConfigurationError(
"Both --cache-dir and --pex-root were passed with conflicting values. "
"Just set --pex-root."
)
if options.disable_cache:
def warn_ignore_pex_root(set_via):
pex_warnings.warn(
"The pex root has been set via {via} but --disable-cache is also set. "
"Ignoring {via} and disabling caches.".format(via=set_via)
)
if options.cache_dir:
warn_ignore_pex_root("--cache-dir")
elif options.pex_root:
warn_ignore_pex_root("--pex-root")
elif os.environ.get("PEX_ROOT"):
warn_ignore_pex_root("PEX_ROOT")
pex_root = safe_mkdtemp()
else:
pex_root = options.cache_dir or options.pex_root or ENV.PEX_ROOT
with ENV.patch(PEX_ROOT=pex_root, TMPDIR=tmpdir) as env:
yield env
if TYPE_CHECKING:
_C = TypeVar("_C", bound=Command)
class Main(Generic["_C"]):
def __init__(
self,
command_types, # type: Iterable[Type[_C]]
description=None, # type: Optional[str]
subparsers_description=None, # type: Optional[str]
prog=None, # type: Optional[str]
):
# type: (...) -> None
self._prog = prog
self._description = description or self.__doc__
self._subparsers_description = subparsers_description
self._command_types = command_types
def add_arguments(self, parser):
# type: (ArgumentParser) -> None
pass
@contextmanager
def parsed_command(self, args=None):
# type: (Optional[Sequence[str]]) -> Iterator[_C]
logging.basicConfig(format="%(levelname)s: %(message)s", level=logging.INFO)
# By default, let argparse derive prog from sys.argv[0].
prog = self._prog
if os.path.basename(sys.argv[0]) == "__main__.py":
prog = "{python} -m {module}".format(
python=sys.executable, module=".".join(type(self).__module__.split(".")[:-1])
)
parser = ArgumentParser(
prog=prog,
formatter_class=ArgumentDefaultsHelpFormatter,
description=self._description,
)
parser.add_argument("-V", "--version", action="version", version=__version__)
parser.set_defaults(command_type=functools.partial(Command.show_help, parser))
self.add_arguments(parser)
if self._command_types:
subparsers = parser.add_subparsers(description=self._subparsers_description)
for command_type in self._command_types:
name = command_type.name()
description = command_type.description()
help_text = description.splitlines()[0] if description else None
command_parser = subparsers.add_parser(
name,
formatter_class=ArgumentDefaultsHelpFormatter,
help=help_text,
description=description,
)
command_type.add_arguments(command_parser)
command_parser.set_defaults(command_type=command_type)
options = parser.parse_args(args=args)
with global_environment(options):
command_type = cast("Type[_C]", options.command_type)
yield command_type(options)
authors: ["noreply@github.com"] | author_id: asherf.noreply@github.com
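try_run_program() above wraps a subprocess call in pex's Ok/Error result types; a hedged sketch of a typical call (the program, URL, and error text are illustrative, not taken from pex itself):

```python
# Illustrative call of try_run_program as defined in command.py above;
# assumes the surrounding pex modules are importable.
result = try_run_program(
    "git",
    ["--version"],
    url="https://git-scm.com",
    error="git is required for this subcommand.",
)
print(result)  # Ok() on success, or an Error describing the failure
```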

blob_id: f369d5667a7f0255f82296fbbee935075af34b7e
directory_id: 7b5ec17918cb2328d53bf2edd876c153af26b38d
path: /scripts/ingestors/rwis/process_idot_awos.py
content_id: c29e696ecbcafd40fb720a5612021a2b033ca115
detected_licenses: ["MIT"] | license_type: permissive
repo_name: Xawwell/iem
snapshot_id: 78e62f749661f3ba292327f82acf4ef0f0c8d55b
revision_id: 88177cc096b9a66d1bd51633fea448585b5e6573
branch_name: refs/heads/master
visit_date: 2020-09-06T09:03:54.174221 | revision_date: 2019-11-08T03:23:44 | committer_date: 2019-11-08T03:23:44
github_id: null | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 2,063 | extension: py
content:
"""Process AWOS METAR file"""
from __future__ import print_function
import re
import sys
import os
import datetime
import ftplib
import subprocess
import tempfile
from io import StringIO
from pyiem import util
INCOMING = "/mesonet/data/incoming"
def fetch_files():
"""Fetch files """
props = util.get_properties()
fn = "%s/iaawos_metar.txt" % (INCOMING,)
try:
ftp = ftplib.FTP("165.206.203.34")
except TimeoutError:
print("process_idot_awos FTP server timeout error")
sys.exit()
ftp.login("rwis", props["rwis_ftp_password"])
ftp.retrbinary("RETR METAR.txt", open(fn, "wb").write)
ftp.close()
return fn
def main():
"""Go Main"""
fn = fetch_files()
utc = datetime.datetime.utcnow().strftime("%Y%m%d%H%M")
data = {}
# Sometimes, the file gets gobbled it seems
for line in open(fn, "rb"):
line = line.decode("utf-8", "ignore")
match = re.match("METAR K(?P<id>[A-Z1-9]{3})", line)
if not match:
continue
gd = match.groupdict()
data[gd["id"]] = line
sio = StringIO()
sio.write("\001\r\r\n")
sio.write(
("SAUS00 KISU %s\r\r\n")
% (datetime.datetime.utcnow().strftime("%d%H%M"),)
)
sio.write("METAR\r\r\n")
for sid in data:
sio.write("%s=\r\r\n" % (data[sid].strip().replace("METAR ", ""),))
sio.write("\003")
sio.seek(0)
(tmpfd, tmpname) = tempfile.mkstemp()
os.write(tmpfd, sio.getvalue().encode("utf-8"))
os.close(tmpfd)
proc = subprocess.Popen(
(
"/home/ldm/bin/pqinsert -i -p 'data c %s "
"LOCDSMMETAR.dat LOCDSMMETAR.dat txt' %s"
)
% (utc, tmpname),
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
(stdout, stderr) = proc.communicate()
os.remove(tmpname)
if stdout != b"" or stderr is not None:
print("process_idot_awos\nstdout: %s\nstderr: %s" % (stdout, stderr))
if __name__ == "__main__":
main()
authors: ["akrherz@iastate.edu"] | author_id: akrherz@iastate.edu
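The regular expression in main() above keys the collected reports on a 3-character station id; a small check with a made-up METAR line:

```python
import re

line = "METAR KAMW 081153Z AUTO ..."  # hypothetical, truncated METAR report
match = re.match("METAR K(?P<id>[A-Z1-9]{3})", line)
print(match.groupdict())              # {'id': 'AMW'}
```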

blob_id: bff32a13096803e1b074e49cecebb181d4d6913f
directory_id: 785a06d576cd4f7486a8a4306481392d0b65f621
path: /0x06-python-classes/0-square.py
content_id: b295cba0972b97fb25477fe35a3935cfbf691fec
detected_licenses: [] | license_type: no_license
repo_name: LauraPeraltaV85/holbertonschool-higher_level_programming
snapshot_id: 7c3d0a99c2dbd4f2f6951999634dbc2ae9acf1c4
revision_id: 264fe99bf5fc128d2faf59057e9062c2408e6065
branch_name: refs/heads/master
visit_date: 2021-07-21T07:49:24.049890 | revision_date: 2020-08-19T00:07:20 | committer_date: 2020-08-19T00:07:20
github_id: 207,329,486 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 87 | extension: py
content:
#!/usr/bin/python3
class Square:
    """Empty class that defines a squares"""
    pass
authors: ["="] | author_id: =

blob_id: 567c5930ce4dce2b362ee9ebf34f4f2c604b528e
directory_id: 88df15f1c36960f3473caf54904cbaae5f3bab52
path: /function.py
content_id: b9deefeceb27b05ad098eda3a0941d75fe731747
detected_licenses: [] | license_type: no_license
repo_name: pigpigman8686/listen
snapshot_id: eef86b391b7399a96edfe1f8136dcd26d0ffd646
revision_id: aa3b3f7d2e49ffb557739c19c3712b9e6d823e43
branch_name: refs/heads/master
visit_date: 2020-06-19T17:46:29.283378 | revision_date: 2019-07-14T07:34:39 | committer_date: 2019-07-14T07:34:39
github_id: 196,806,426 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 897 | extension: py
content:
import random
import os,re
def getMessage(yes=True, snr=28, count=1):
    if count == 1:
        page = int(9)
    elif yes == True:
        page = int((snr-10)/2-1)
        if page < 0:
            page = 0
    else:
        page = int((snr-10)/2+1)
        if page > 18:
            page = 18
    filepath = "static/voice/" + str(page)
    #print("filepath:"+filepath)
    filelist = os.listdir(filepath)
    #print("mess len:"+str(random.randint(0,len(message))))
    #print(random.randint(0,len(message)))
    message = "voice/" + str(page) + "/" + str(filelist[random.randint(0, len(filelist)-1)])
    return message


def average(result):
    patten = re.compile('(\d{1,})-(\d{1,})')
    snr = 0
    for i in range(1, 20):
        #print("--------------------i:"+str(i))
        s = '%s' % str(result[0]['right'+str(i)])
        right = re.search(patten, s)
        snr += int(right.group(2))
    return snr/19.0
authors: ["952361195@qq.com"] | author_id: 952361195@qq.com
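The page selection in getMessage() above maps an SNR value to a directory index; the arithmetic for the default snr=28, worked out here rather than taken from the file:

```python
snr = 28
print(int((snr - 10) / 2 - 1))  # 8  -> "static/voice/8" when yes is True and count != 1
print(int((snr - 10) / 2 + 1))  # 10 -> "static/voice/10" when yes is False
```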

blob_id: 06e60323dd57f2def66299b5acfc0b773762fb62
directory_id: a3c4935537a42330758c7ac54553ae45daad069d
path: /.history/backend/src/api_20210807005753.py
content_id: d5a98b48745dbac2d9550540ef0d1b41a0699108
detected_licenses: [] | license_type: no_license
repo_name: saraalmuraytib/Coffee-Shop
snapshot_id: 219292b4e2dd9b39621f9cd42bdcb049f5bb20c8
revision_id: cec4b8df3a4c38600fb2964f8fa85d3c820ddb6c
branch_name: refs/heads/main
visit_date: 2023-07-12T19:39:26.617313 | revision_date: 2021-08-28T19:22:23 | committer_date: 2021-08-28T19:22:23
github_id: 392,365,613 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 4,532 | extension: py
content:
import os
from flask import Flask, request, jsonify, abort
from sqlalchemy import exc
import json
from flask_cors import CORS
from .database.models import db_drop_and_create_all, setup_db, Drink
from .auth.auth import AuthError, requires_auth
app = Flask(__name__)
setup_db(app)
CORS(app)
'''
@TODO uncomment the following line to initialize the database
!! NOTE THIS WILL DROP ALL RECORDS AND START YOUR DB FROM SCRATCH
!! NOTE THIS MUST BE UNCOMMENTED ON FIRST RUN
!! Running this function will add one
'''
# db_drop_and_create_all()
# ROUTES
'''
@TODO implement endpoint
GET /drinks
it should be a public endpoint
it should contain only the drink.short() data representation
returns status code 200 and json {"success": True, "drinks": drinks} where drinks is the list of drinks
or appropriate status code indicating reason for failure
'''
@app.route('/drinks')
def drinks():
try:
drinks = Drink.query.all()
return jsonify({
'success': True,
'drinks': [drink.short() for drink in drinks]
}),200
except:
abort(404)
'''
@TODO implement endpoint
GET /drinks-detail
it should require the 'get:drinks-detail' permission
it should contain the drink.long() data representation
returns status code 200 and json {"success": True, "drinks": drinks} where drinks is the list of drinks
or appropriate status code indicating reason for failure
'''
@app.route("/drinks-detail")
@requires_auth('get:drinks-detail')
def get_drink_detail(payload):
try:
drinks = Drink.query.all()
return jsonify({
'success': True,
'drinks': [drink.long() for drink in drinks]
}),200
except:
abort(404)
'''
@TODO implement endpoint
POST /drinks
it should create a new row in the drinks table
it should require the 'post:drinks' permission
it should contain the drink.long() data representation
returns status code 200 and json {"success": True, "drinks": drink} where drink an array containing only the newly created drink
or appropriate status code indicating reason for failure
'''
@app.route("/drinks", methods=['POST'])
@requires_auth('post:drinks')
def add_drink(payload):
# Fetch the request body
body = request.get_json()
# Get title and recipe to create the drink
title = body.get('title')
recipe = body.get('recipe')
try:
# json.dumps ---> Serialize obj to a JSON formatted str.
new_drink = Drink(title=title, recipe=json.dumps(recipe))
new_drink.insert()
return jsonify({
'success': True,
'drinks': [new_drink.long()],
})
except:
abort(422)
'''
@TODO implement endpoint
PATCH /drinks/<id>
where <id> is the existing model id
it should respond with a 404 error if <id> is not found
it should update the corresponding row for <id>
it should require the 'patch:drinks' permission
it should contain the drink.long() data representation
returns status code 200 and json {"success": True, "drinks": drink} where drink an array containing only the updated drink
or appropriate status code indicating reason for failure
'''
'''
@TODO implement endpoint
DELETE /drinks/<id>
where <id> is the existing model id
it should respond with a 404 error if <id> is not found
it should delete the corresponding row for <id>
it should require the 'delete:drinks' permission
returns status code 200 and json {"success": True, "delete": id} where id is the id of the deleted record
or appropriate status code indicating reason for failure
'''
# Error Handling
'''
Example error handling for unprocessable entity
'''
@app.errorhandler(422)
def unprocessable(error):
return jsonify({
"success": False,
"error": 422,
"message": "unprocessable"
}), 422
'''
@TODO implement error handlers using the @app.errorhandler(error) decorator
each error handler should return (with approprate messages):
jsonify({
"success": False,
"error": 404,
"message": "resource not found"
}), 404
'''
'''
@TODO implement error handler for 404
error handler should conform to general task above
'''
'''
@TODO implement error handler for AuthError
error handler should conform to general task above
'''
authors: ["sara.almuraytib@gmail.com"] | author_id: sara.almuraytib@gmail.com
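The PATCH and DELETE endpoints are still marked @TODO in the file above. Purely as an illustration of the contract those docstrings describe, here is a hedged sketch of the DELETE handler in the same style; it assumes the Drink model exposes a delete() method alongside the insert() used above, which this snapshot does not show.

```python
# Hypothetical sketch of the DELETE /drinks/<id> endpoint described in the @TODO block;
# Drink.delete() is assumed to exist in database.models and is not shown in this file.
@app.route('/drinks/<int:drink_id>', methods=['DELETE'])
@requires_auth('delete:drinks')
def delete_drink(payload, drink_id):
    drink = Drink.query.get(drink_id)
    if drink is None:
        abort(404)
    drink.delete()
    return jsonify({
        'success': True,
        'delete': drink_id
    }), 200
```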

blob_id: 0bb96f30a42e50bc0408cf4a6f607b6796d6546d
directory_id: edf91e2614f0bf0dbfea1c77d2f41add5a14fac1
path: /twitterino/twitterino/urls.py
content_id: 5e36063f9d99be119b11bcd0d2d3164226497241
detected_licenses: [] | license_type: no_license
repo_name: Razhelq/Twitterino
snapshot_id: c5c4a9de47566a21240f1de316c9cb980b9fe01d
revision_id: 88269625aa2306f58c197477f3d682db270ca469
branch_name: refs/heads/master
visit_date: 2020-03-26T18:23:15.149839 | revision_date: 2018-08-27T19:59:57 | committer_date: 2018-08-27T19:59:57
github_id: 145,210,513 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 1,831 | extension: py
content:
"""twitterino URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.urls import path
from t.views import BaseView, IndexView, AddTweetView, LoginView, LogoutView, UserTweetsView, TweetDetailsView
from t.views import UserMessagesView, SendMessageView, MessageDetailsView, AddCommentView
urlpatterns = [
    path('admin/', admin.site.urls),
    url(r'^$', BaseView.as_view(), name='base'),
    url(r'^index/$', IndexView.as_view(), name='index'),
    url(r'^add_tweet/$', AddTweetView.as_view(), name='add-tweet'),
    url(r'^login/$', LoginView.as_view(), name='login'),
    url(r'^logout/$', LogoutView.as_view(), name='logout'),
    url(r'^user_tweets/(?P<id>(\d)+)/$', UserTweetsView.as_view(), name='user-tweets'),
    url(r'^tweet_details/(?P<id>(\d)+)/$', TweetDetailsView.as_view(), name='tweet-details'),
    url(r'^user_messages/(?P<id>(\d)+)/$', UserMessagesView.as_view(), name='user-messages'),
    url(r'^send_message/(?P<id>(\d)+)/$', SendMessageView.as_view(), name='send-message'),
    url(r'^message_details/(?P<id>(\d)+)/$', MessageDetailsView.as_view(), name='message-details'),
    url(r'^add_comment/(?P<id>(\d)+)/$', AddCommentView.as_view(), name='add-comment')
]
authors: ["mateuszszpakowski@wp.pl"] | author_id: mateuszszpakowski@wp.pl

blob_id: 81b87030e4f49031523e25eeadd2033600229db8
directory_id: ec00584ab288267a7cf46c5cd4f76bbec1c70a6b
path: /Django/webapp/webapp/urls.py
content_id: b992ee479948a6c3cd42bf76a7571f15eb468e76
detected_licenses: [] | license_type: no_license
repo_name: rahuldbhadange/Python
snapshot_id: b4cc806ff23953389c9507f43d817b3815260e19
revision_id: 7e162117f1acc12537c7eeb36d6983d804122ff3
branch_name: refs/heads/master
visit_date: 2021-06-23T05:04:20.053777 | revision_date: 2020-01-28T10:34:28 | committer_date: 2020-01-28T10:34:28
github_id: 217,307,612 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: 2021-06-10T22:44:11 | gha_created_at: 2019-10-24T13:35:42 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 1,494 | extension: py
content:
"""webapp URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URL conf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
# from django.contrib import admin
# from django.urls import path
# from django.conf.urls import include, url
# from login.views import
urlpatterns = [
    # path('', include('login.urls')),
    # path('admin/', admin.site.urls),
    # url(r'^login/', include('login.urls')),
    # url(r'^API1/', include('API1.urls')),
    # path('', PersonListView.as_view(), name='person_list'),
    # path('add/', PersonCreateView.as_view(), name='person_add'),
    # path('<int:pk>/edit/', PersonUpdateView.as_view(), name='person_edit'),
]
'''
from django.urls import path
from login.views import PersonListView, PersonCreateView, PersonUpdateView

urlpatterns = [
    path('', PersonListView.as_view(), name='person_list'),
    path('add/', PersonCreateView.as_view(), name='person_add'),
    path('<int:pk>/edit/', PersonUpdateView.as_view(), name='person_edit'),
]'''
authors: ["46024570+rahuldbhadange@users.noreply.github.com"] | author_id: 46024570+rahuldbhadange@users.noreply.github.com

blob_id: 6e23c5de1d7411fa2a34e57a9a50d0e75aa00440
directory_id: abc422f58ad053bcbb6653ba15b66e46d220a199
path: /serial_scripts/rsyslog/mylogging.py
content_id: d5ca62ae2331a8ee83e6c2ec8c2eda766a56adab
detected_licenses: ["LicenseRef-scancode-warranty-disclaimer", "Apache-2.0"] | license_type: permissive
repo_name: tungstenfabric/tf-test
snapshot_id: d3efff59bca931b614d0008260b2c0881d1fc009
revision_id: 4b9eca7eb182e5530223131ecab09d3bdf366407
branch_name: refs/heads/master
visit_date: 2023-02-26T19:14:34.345423 | revision_date: 2023-01-11T08:45:18 | committer_date: 2023-01-11T10:37:25
github_id: 265,231,958 | star_events_count: 8 | fork_events_count: 22
gha_license_id: null | gha_event_created_at: 2023-02-08T00:53:29 | gha_created_at: 2020-05-19T11:46:12 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 3,406 | extension: py
content:
#!/usr/bin/python2.7
from __future__ import print_function
from builtins import str
from builtins import range
import syslog
import random
import time
import sys
def send_10_log_messages_with_delay():
syslog.openlog(logoption=syslog.LOG_PID, facility=syslog.LOG_MAIL)
for ind in range(10):
msg = str(ind + 1) + '. Test Syslog Messages being sent.'
syslog.syslog(syslog.LOG_EMERG, msg)
time.sleep(1)
syslog.closelog()
# end send_10_log_messages_with_delay
def send_10_log_messages():
syslog.openlog(logoption=syslog.LOG_PID, facility=syslog.LOG_MAIL)
for ind in range(10):
msg = str(ind + 1) + '. Test Syslog Messages being sent without delay.'
syslog.syslog(syslog.LOG_EMERG, msg)
syslog.closelog()
# end send_10_log_messages
def send_messages_grater_than_1024_bytes():
syslog.openlog(logoption=syslog.LOG_PID, facility=syslog.LOG_MAIL)
with open("message.txt", "r") as myfile:
msg = myfile.readlines()[0]
myfile.close()
for ind in range(100):
syslog.syslog(
syslog.LOG_EMERG, (msg[:5] + str(ind) + 'mymark' + msg[6:]))
time.sleep(1)
syslog.closelog()
# end send_messages_grater_than_1024_bytes
def send_messages_of_all_facility_and_severity():
dict_of_facility = {
'LOG_KERN': 0,
'LOG_USER': 1,
'LOG_MAIL': 2,
'LOG_DAEMON': 3,
'LOG_AUTH': 4,
'LOG_NEWS': 7,
'LOG_UUCP': 8,
'LOG_LOCAL0': 16,
'LOG_CRON': 15,
'LOG_SYSLOG': 5,
'LOG_LOCAL1': 17}
list_of_severity = ['LOG_EMERG', 'LOG_ALERT', 'LOG_CRIT', 'LOG_ERR',
'LOG_WARNING', 'LOG_NOTICE', 'LOG_INFO', 'LOG_DEBUG']
for each_facility in dict_of_facility:
log_facility = dict_of_facility[each_facility]
syslog.openlog(logoption=syslog.LOG_PID, facility=log_facility)
for each_severity in list_of_severity:
log_severity = list_of_severity.index(each_severity)
msg = 'Test Message from ' + each_facility + \
' with severity ' + each_severity + '.'
syslog.syslog(log_severity, msg)
syslog.closelog()
time.sleep(1)
# end send_messages_of_all_facility_and_severity
def send_test_log_message():
syslog.openlog(logoption=syslog.LOG_PID, facility=syslog.LOG_KERN)
for ind in range(5):
msg = str(ind + 1) + '. Test Syslog Messages from different nodes.'
syslog.syslog(syslog.LOG_EMERG, msg)
time.sleep(1)
syslog.closelog()
# end send_test_log_message
help_string = '\nusage:\n\n./mylogging.py <function-name>\n\nwhere function names are:\
\n1. send_10_log_messages\n2. send_10_log_messages_with_delay\
\n3. send_messages_grater_than_1024_bytes\n4. send_messages_of_all_facility_and_severity\
\n5. send_test_log_message\n\n'
FuncCallDict = {
'send_10_log_messages': send_10_log_messages,
'send_test_log_message': send_test_log_message,
'send_10_log_messages_with_delay': send_10_log_messages_with_delay,
'send_messages_grater_than_1024_bytes': send_messages_grater_than_1024_bytes,
'send_messages_of_all_facility_and_severity': send_messages_of_all_facility_and_severity}
NumberOfArgs = len(sys.argv)
if NumberOfArgs != 2:
print(help_string)
sys.exit(2)
FunctionName = sys.argv[1]
FuncCallDict[FunctionName]()
authors: ["andrey-mp@yandex.ru"] | author_id: andrey-mp@yandex.ru
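Every helper in mylogging.py above follows the same openlog / syslog / closelog pattern; a minimal standalone version (facility and message chosen here for illustration, Unix-only like the original):

```python
import syslog  # Unix-only standard-library module, as used above

syslog.openlog(logoption=syslog.LOG_PID, facility=syslog.LOG_USER)
syslog.syslog(syslog.LOG_INFO, "Test message sent the same way as in mylogging.py")
syslog.closelog()
```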

blob_id: eac10a806ad58329f8f11936dda99609b1065f55
directory_id: 7b5828edda7751700ca7002b40a214e39e5f48a8
path: /EA/simulation/interactions/object_retrieval_liability.py
content_id: 2f36b95318e52aa7996e4d60ba902bea61025b51
detected_licenses: [] | license_type: no_license
repo_name: daniela-venuta/Sims-4-Python-Script-Workspace
snapshot_id: 54c33dac02f84daed66f46b7307f222fede0fa62
revision_id: f408b28fb34626b2e3b2953152343d591a328d66
branch_name: refs/heads/main
visit_date: 2023-03-29T18:08:39.202803 | revision_date: 2021-03-30T19:00:42 | committer_date: 2021-03-30T19:00:42
github_id: 353,111,243 | star_events_count: 1 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 7,849 | extension: py
content:
import build_buy
import enum
import services
import sims4
from element_utils import build_element, CleanupType
from interactions import ParticipantType, ParticipantTypeSingle
from interactions.aop import AffordanceObjectPair
from interactions.base.interaction_constants import InteractionQueuePreparationStatus
from interactions.context import InteractionContext, QueueInsertStrategy, InteractionBucketType
from interactions.interaction_finisher import FinishingType
from interactions.liability import PreparationLiability
from interactions.priority import Priority
from objects.object_enums import ResetReason
from sims4.tuning.tunable import HasTunableFactory, AutoFactoryInit, OptionalTunable, TunableReference, TunableEnumEntry, TunableTuple, TunableVariant, HasTunableSingletonFactory
logger = sims4.log.Logger('ObjectRetrievalLiability', default_owner='skorman')
class ObjectRetrievalPhase(enum.Int, export=False):
PUTTING_DOWN = 1
USE_AND_RETRIEVE = ...
PUSHED_RETRIEVAL_AFFORDANCE = ...
class _ObjectRetrievalFallbackBase(HasTunableSingletonFactory):
def run_fallback(self, actor, obj):
raise NotImplementedError
class _SendToInventoryFallback(_ObjectRetrievalFallbackBase):
def run_fallback(self, actor, obj):
if actor.inventory_component.can_add(obj) and actor.inventory_component.player_try_add_object(obj):
return
if not build_buy.move_object_to_household_inventory(actor):
logger.warn('Failed to move {} to inventory. The object will be destroyed', obj)
obj.schedule_destroy_asap()
class _DestroyObjectFallback(_ObjectRetrievalFallbackBase):
def run_fallback(self, actor, obj):
obj.schedule_destroy_asap()
class ObjectRetrievalLiability(HasTunableFactory, AutoFactoryInit, PreparationLiability):
LIABILITY_TOKEN = 'ObjectRetrievalLiability'
FACTORY_TUNABLES = {'put_down_tuning': OptionalTunable(description='\n If enabled, the liability will push this tuned affordance to place\n the object before running the owning interaction. \n ', tunable=TunableTuple(affordance=TunableReference(description='\n The affordance to place the object before the owning\n interaction runs.\n ', manager=services.affordance_manager()), participant=TunableEnumEntry(description='\n The target of the put down affordance.\n ', tunable_type=ParticipantTypeSingle, default=ParticipantTypeSingle.Object))), 'object_to_retrieve': TunableEnumEntry(description='\n The target of the retrieval affordance.\n ', tunable_type=ParticipantTypeSingle, default=ParticipantTypeSingle.Object), 'retrieval_affordance': TunableReference(description="\n The affordance to push after the liability is released to pick up\n the 'object to retrieve'.\n ", manager=services.affordance_manager()), 'fallback_behavior': TunableVariant(description='\n The fallback behavior to use on the retrieval object if the \n retrieval affordance fails.\n ', send_to_inventory=_SendToInventoryFallback.TunableFactory(), destroy_object=_DestroyObjectFallback.TunableFactory(), default='send_to_inventory')}
def __init__(self, interaction, *args, **kwargs):
super().__init__(*args, **kwargs)
self._interaction = interaction
self._actor = interaction.get_participant(ParticipantType.Actor)
self._object = interaction.get_participant(self.object_to_retrieve)
self._phase = ObjectRetrievalPhase.PUTTING_DOWN
self._pushed_preparation_interactions = []
def _prepare_gen(self, timeline, *args, **kwargs):
if self.put_down_tuning is None:
self._phase = ObjectRetrievalPhase.USE_AND_RETRIEVE
if self._phase == ObjectRetrievalPhase.PUTTING_DOWN:
target = self._interaction.get_participant(self.put_down_tuning.participant)
if target is None:
cancel_reason = 'ObjectRetrievalLiability: Failed to get participant to put down.'
self._interaction.cancel(FinishingType.TRANSITION_FAILURE, cancel_reason_msg=cancel_reason)
return InteractionQueuePreparationStatus.FAILURE
result = self._push_affordance(self.put_down_tuning.affordance, target)
if not result:
cancel_reason = 'ObjectRetrievalLiability: Failed to put down object.'
self._interaction.cancel(FinishingType.TRANSITION_FAILURE, cancel_reason_msg=cancel_reason)
return InteractionQueuePreparationStatus.FAILURE
self._pushed_preparation_interactions.append(result.interaction)
self._phase = ObjectRetrievalPhase.USE_AND_RETRIEVE
return InteractionQueuePreparationStatus.NEEDS_DERAIL
self._pushed_preparation_interactions = []
return InteractionQueuePreparationStatus.SUCCESS
def release(self):
if self._object is None or self._object.is_in_inventory():
if self.put_down_tuning is not None:
for interaction in self._interaction.sim.get_all_running_and_queued_interactions():
if interaction.affordance is self.put_down_tuning.affordance:
interaction.cancel(FinishingType.FAILED_TESTS, cancel_reason_msg='ObjectRetrievalLiability released.')
return
if self._phase < ObjectRetrievalPhase.PUSHED_RETRIEVAL_AFFORDANCE:
result = self._push_affordance(self.retrieval_affordance, self._object)
if result:
retrieval_interaction = result.interaction
self.transfer(retrieval_interaction)
retrieval_interaction.add_liability(ObjectRetrievalLiability, self)
self._phase = ObjectRetrievalPhase.PUSHED_RETRIEVAL_AFFORDANCE
return
self._clean_up_object()
def should_transfer(self, continuation):
return continuation.affordance is self.retrieval_affordance and self._phase < ObjectRetrievalPhase.PUSHED_RETRIEVAL_AFFORDANCE
def transfer(self, interaction):
self._interaction = interaction
def on_reset(self):
self._clean_up_object()
def _push_affordance(self, affordance, target):
picked_item_ids = self._interaction.interaction_parameters.get('picked_item_ids')
aop = AffordanceObjectPair(affordance, target, affordance, None, route_fail_on_transition_fail=False, allow_posture_changes=True, picked_item_ids=picked_item_ids)
context = InteractionContext(self._actor, InteractionContext.SOURCE_SCRIPT, Priority.High, insert_strategy=QueueInsertStrategy.FIRST, bucket=InteractionBucketType.DEFAULT)
return aop.test_and_execute(context)
def _clean_up_object(self):
if self._object is None or self._object in self._actor.inventory_component:
return
for child_obj in tuple(self._object.children):
if child_obj.is_sim:
logger.warn('Trying to clean up object {} that sim {} is using. The sim will be reset.', self._object, child_obj)
reset_fn = lambda _: child_obj.reset(ResetReason.RESET_EXPECTED, source=self, cause='ObjectRetrievalLiability is cleaning up parent object {}.'.format(self._object))
element = build_element([reset_fn, lambda _: self._clean_up_object()], critical=CleanupType.OnCancelOrException)
services.time_service().sim_timeline.schedule(element)
return
self.fallback_behavior.run_fallback(self._actor, self._object)
def path_generation_deferred(self):
return self._pushed_preparation_interactions
authors: ["44103490+daniela-venuta@users.noreply.github.com"] | author_id: 44103490+daniela-venuta@users.noreply.github.com

blob_id: e9d169335a5f914f653a37844eac60d56eb8136e
directory_id: b96ed10d6247e22d4fa1d28bc3314bc319d3109c
path: /LessonSample/chapter12/03_进程池.py
content_id: 2ba823ca2fd1e127c6f7ef24b29591aaddf7cb6d
detected_licenses: [] | license_type: no_license
repo_name: 13555785106/PythonPPT-01
snapshot_id: ac1b22b9b1851f2b3ea6e4ab0a100e5f6896ee8c
revision_id: 40e5883f248cb342f3a7fc7ad12ba02ebde4c619
branch_name: refs/heads/master
visit_date: 2020-04-26T16:49:59.675964 | revision_date: 2019-03-04T07:16:21 | committer_date: 2019-03-04T07:16:21
github_id: 157,095,747 | star_events_count: 4 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 941 | extension: py
content:
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import os
import random
import time
from multiprocessing import Pool
def run(name):
    print("子进程%d启动--%s" % (name, os.getpid()))
    start = time.time()
    time.sleep(random.choice([5, 7, 9]))
    end = time.time()
    print("子进程%d结束--%s--耗时%.2f" % (name, os.getpid(), end - start))


print("父进程启动")
# Create several processes
# Process pool
# The pool size is the number of processes allowed to run at the same time
# Pool defaults to the number of CPU cores
pp = Pool()
for i in range(10):
    # Create a process and hand it to the pool to be managed together
    pp.apply_async(run, args=(i,))
# close must be called before join, and after close no new processes can be added
pp.close()  # once closed, the pool pp will not accept any new processes
# Calling join on the pool waits for all child processes in the pool to finish before the parent continues
pp.join()
print("父进程结束")
authors: ["312655424@qq.com"] | author_id: 312655424@qq.com
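A small variation on the Pool pattern above that also collects return values, using a fixed pool size and the context-manager form (the worker function here is illustrative):

```python
from multiprocessing import Pool


def square(x):
    return x * x


if __name__ == "__main__":
    # Fixed size instead of the CPU-count default used above.
    with Pool(processes=4) as pool:
        async_results = [pool.apply_async(square, (i,)) for i in range(10)]
        print([r.get() for r in async_results])  # [0, 1, 4, ..., 81]
```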

blob_id: 1bdc224dca8b596ac6e94359d90f12c8b5e1815f
directory_id: 631901398be90a2dda7b703e8229165e719dae7f
path: /orm/entities/Area/__init__.py
content_id: fb86137b571e9985fd20e226197292e6b653f21d
detected_licenses: [] | license_type: no_license
repo_name: hevayo/results-tabulation-api
snapshot_id: 4b22cc7cb3c535862429fef84422e97e1ca5890a
revision_id: 939c1cbd360fd908dcdf563623ee91393959bcd5
branch_name: refs/heads/master
visit_date: 2020-08-08T22:58:34.133675 | revision_date: 2019-11-02T07:54:07 | committer_date: 2019-11-02T07:54:07
github_id: 213,940,023 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: 2019-10-09T14:27:02 | gha_created_at: 2019-10-09T14:27:00 | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 15,851 | extension: py
content:
from app import db
from sqlalchemy.orm import relationship, aliased
from sqlalchemy import and_, func, or_
from orm.enums import AreaTypeEnum
from orm.entities import Election
from sqlalchemy.ext.hybrid import hybrid_property
class AreaModel(db.Model):
__tablename__ = 'area'
areaId = db.Column(db.Integer, primary_key=True, autoincrement=True)
areaName = db.Column(db.String(300), nullable=False)
areaType = db.Column(db.Enum(AreaTypeEnum), nullable=False)
electionId = db.Column(db.Integer, db.ForeignKey(Election.Model.__table__.c.electionId), nullable=False)
# parentAreaId = db.Column(db.Integer, db.ForeignKey(areaId), nullable=True)
_registeredVotersCount = db.Column(db.Integer(), nullable=True)
election = relationship(Election.Model, foreign_keys=[electionId])
# parentArea = relationship("AreaModel", remote_side=[areaId])
# childAreas = relationship("AreaModel", foreign_keys=[parentAreaId])
# pollingStations = relationship("PollingStationModel")
# this relationship is used for persistence
children = relationship("AreaModel", secondary="area_area",
primaryjoin="AreaModel.areaId==AreaAreaModel.parentAreaId",
secondaryjoin="AreaModel.areaId==AreaAreaModel.childAreaId"
)
parents = relationship("AreaModel", secondary="area_area",
primaryjoin="AreaModel.areaId==AreaAreaModel.childAreaId",
secondaryjoin="AreaModel.areaId==AreaAreaModel.parentAreaId"
)
tallySheets = relationship("TallySheetModel", secondary="submission",
primaryjoin="AreaModel.areaId==SubmissionModel.areaId",
secondaryjoin="SubmissionModel.submissionId==TallySheetModel.tallySheetId"
)
tallySheets_PRE_41 = relationship(
"TallySheetModel", secondary="submission",
primaryjoin="AreaModel.areaId==SubmissionModel.areaId",
secondaryjoin="and_(SubmissionModel.submissionId==TallySheetModel.tallySheetId, TallySheetModel.tallySheetCode=='PRE_41')"
)
def __init__(self, areaName, electionId):
super(AreaModel, self).__init__(
areaName=areaName,
electionId=electionId
)
db.session.add(self)
db.session.flush()
def add_parent(self, parentId):
parentArea = get_by_id(areaId=parentId)
parentArea.add_child(self.areaId)
return self
def add_child(self, childId):
existing_mapping = AreaAreaModel.query.filter(
AreaAreaModel.parentAreaId == self.areaId,
AreaAreaModel.childAreaId == childId
).one_or_none()
if existing_mapping is None:
areaParent = AreaAreaModel(parentAreaId=self.areaId, childAreaId=childId)
db.session.add(areaParent)
db.session.flush()
return self
def get_associated_areas_query(self, areaType, electionId=None):
return get_associated_areas_query(areas=[self], areaType=areaType, electionId=electionId)
def get_associated_areas(self, areaType, electionId=None):
return self.get_associated_areas_query(areaType, electionId).all()
def get_submissions(self, submissionType):
return [submission for submission in self.submissions if submission.submissionType is submissionType]
# def get_reports(self, reportCode):
# return [report for report in self.reports if report.reportCode is reportCode]
@hybrid_property
def pollingStations(self):
return get_associated_areas(self, AreaTypeEnum.PollingStation)
@hybrid_property
def countingCentres(self):
# return []
return get_associated_areas(self, AreaTypeEnum.CountingCentre)
@hybrid_property
def districtCentres(self):
# return []
return get_associated_areas(self, AreaTypeEnum.DistrictCentre)
@hybrid_property
def pollingDistricts(self):
# TODO review
# Sending the list of polling districts only for polling stations.
if self.areaType is AreaTypeEnum.PollingStation:
return get_associated_areas(self, AreaTypeEnum.PollingDistrict)
else:
return []
@hybrid_property
def registeredVotersCount(self):
polling_stations_subquery = get_associated_areas_query(areas=[self],
areaType=AreaTypeEnum.PollingStation).subquery()
total_registered_voters_count = db.session.query(
func.sum(polling_stations_subquery.c._registeredVotersCount)
).scalar()
return total_registered_voters_count
__mapper_args__ = {
'polymorphic_on': areaType
}
class AreaAreaModel(db.Model):
__tablename__ = 'area_area'
parentAreaId = db.Column(db.Integer, db.ForeignKey("area.areaId"), primary_key=True)
childAreaId = db.Column(db.Integer, db.ForeignKey("area.areaId"), primary_key=True)
Model = AreaModel
def get_presidential_area_map_query():
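    # Builds a flat, left-joined map linking each counting centre to every other
    # level of the presidential area hierarchy it belongs to (polling station,
    # district centre, election commission, polling district, polling division,
    # electoral district and country).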
election_commission_mapping = aliased(AreaAreaModel)
district_centre_mapping = aliased(AreaAreaModel)
counting_centre_mapping = aliased(AreaAreaModel)
postal_vote_counting_centre_mapping = aliased(AreaAreaModel)
polling_station_mapping = aliased(AreaAreaModel)
polling_district_mapping = aliased(AreaAreaModel)
polling_division_mapping = aliased(AreaAreaModel)
electoral_district_mapping = aliased(AreaAreaModel)
country_mapping = aliased(AreaAreaModel)
election_commission = aliased(AreaModel)
district_centre = aliased(AreaModel)
counting_centre = aliased(AreaModel)
polling_station = aliased(AreaModel)
polling_district = aliased(AreaModel)
polling_division = aliased(AreaModel)
electoral_district = aliased(AreaModel)
country = aliased(AreaModel)
presidential_area_map_query = db.session.query(
counting_centre.areaId.label("countingCentreId"),
polling_station.areaId.label("pollingStationId"),
district_centre.areaId.label("districtCentreId"),
election_commission.areaId.label("electionCommissionId"),
polling_district.areaId.label("pollingDistrictId"),
polling_division.areaId.label("pollingDivisionId"),
electoral_district.areaId.label("electoralDistrictId"),
country.areaId.label("countryId")
).join(
polling_station_mapping,
polling_station_mapping.parentAreaId == counting_centre.areaId,
isouter=True
).join(
polling_station,
and_(
polling_station.areaId == polling_station_mapping.childAreaId,
polling_station.areaType == AreaTypeEnum.PollingStation
),
isouter=True
).join(
district_centre_mapping,
district_centre_mapping.childAreaId == counting_centre.areaId,
isouter=True
).join(
district_centre,
and_(
district_centre.areaId == district_centre_mapping.parentAreaId,
district_centre.areaType == AreaTypeEnum.DistrictCentre
),
isouter=True
).join(
election_commission_mapping,
election_commission_mapping.childAreaId == district_centre.areaId,
isouter=True
).join(
election_commission,
and_(
election_commission.areaId == election_commission_mapping.parentAreaId,
election_commission.areaType == AreaTypeEnum.ElectionCommission
),
isouter=True
).join(
polling_district_mapping,
polling_district_mapping.childAreaId == polling_station.areaId,
isouter=True
).join(
polling_district,
and_(
polling_district.areaId == polling_district_mapping.parentAreaId,
polling_district.areaType == AreaTypeEnum.PollingDistrict
),
isouter=True
).join(
polling_division_mapping,
polling_division_mapping.childAreaId == polling_district.areaId,
isouter=True
).join(
polling_division,
and_(
polling_division.areaId == polling_division_mapping.parentAreaId,
polling_division.areaType == AreaTypeEnum.PollingDivision
),
isouter=True
).join(
electoral_district_mapping,
electoral_district_mapping.childAreaId == polling_division.areaId,
isouter=True
).join(
postal_vote_counting_centre_mapping,
postal_vote_counting_centre_mapping.childAreaId == counting_centre.areaId,
isouter=True
).join(
electoral_district,
or_(
and_(
electoral_district.areaId == electoral_district_mapping.parentAreaId,
electoral_district.areaType == AreaTypeEnum.ElectoralDistrict
),
and_(
electoral_district.areaId == postal_vote_counting_centre_mapping.parentAreaId,
electoral_district.areaType == AreaTypeEnum.ElectoralDistrict
),
),
isouter=True
).join(
country_mapping,
country_mapping.childAreaId == electoral_district.areaId,
isouter=True
).join(
country,
and_(
country.areaId == country_mapping.parentAreaId,
country.areaType == AreaTypeEnum.Country
),
isouter=True
)
return presidential_area_map_query
def get_associated_areas_query(areas, areaType, electionId=None):
presidential_area_map_sub_query = get_presidential_area_map_query().subquery()
election = Election.get_by_id(electionId=electionId)
query = db.session.query(
AreaModel
).join(
Election.Model,
Election.Model.electionId == AreaModel.electionId
)
if areaType is AreaTypeEnum.PollingStation:
query = query.join(
presidential_area_map_sub_query,
presidential_area_map_sub_query.c.pollingStationId == AreaModel.areaId
)
elif areaType is AreaTypeEnum.CountingCentre:
query = query.join(
presidential_area_map_sub_query,
presidential_area_map_sub_query.c.countingCentreId == AreaModel.areaId
)
elif areaType is AreaTypeEnum.DistrictCentre:
query = query.join(
presidential_area_map_sub_query,
presidential_area_map_sub_query.c.districtCentreId == AreaModel.areaId
)
elif areaType is AreaTypeEnum.ElectionCommission:
query = query.join(
presidential_area_map_sub_query,
presidential_area_map_sub_query.c.electionCommissionId == AreaModel.areaId
)
elif areaType is AreaTypeEnum.PollingDistrict:
query = query.join(
presidential_area_map_sub_query,
presidential_area_map_sub_query.c.pollingDistrictId == AreaModel.areaId
)
elif areaType is AreaTypeEnum.PollingDivision:
query = query.join(
presidential_area_map_sub_query,
presidential_area_map_sub_query.c.pollingDivisionId == AreaModel.areaId
)
elif areaType is AreaTypeEnum.ElectoralDistrict:
query = query.join(
presidential_area_map_sub_query,
presidential_area_map_sub_query.c.electoralDistrictId == AreaModel.areaId
)
elif areaType is AreaTypeEnum.Country:
query = query.join(
presidential_area_map_sub_query,
presidential_area_map_sub_query.c.countryId == AreaModel.areaId
)
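    # Note: get_presidential_area_map_query() does not label a
    # "postalVoteCountingCentreId" column, so the branch below would raise an
    # AttributeError if this area type were ever requested.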
elif areaType is AreaTypeEnum.PostalVoteCountingCentre:
query = query.join(
presidential_area_map_sub_query,
presidential_area_map_sub_query.c.postalVoteCountingCentreId == AreaModel.areaId
)
query = query.group_by(AreaModel.areaId)
filtered_polling_stations = [area.areaId for area in areas if area.areaType == AreaTypeEnum.PollingStation]
filtered_counting_centres = [area.areaId for area in areas if area.areaType == AreaTypeEnum.CountingCentre]
filtered_district_centres = [area.areaId for area in areas if area.areaType == AreaTypeEnum.DistrictCentre]
filtered_election_commissions = [area.areaId for area in areas if area.areaType == AreaTypeEnum.ElectionCommission]
filtered_polling_districts = [area.areaId for area in areas if area.areaType == AreaTypeEnum.PollingDistrict]
filtered_polling_divisions = [area.areaId for area in areas if area.areaType == AreaTypeEnum.PollingDivision]
filtered_electoral_districts = [area.areaId for area in areas if area.areaType == AreaTypeEnum.ElectoralDistrict]
filtered_countries = [area.areaId for area in areas if area.areaType == AreaTypeEnum.Country]
if len(filtered_polling_stations) > 0:
query = query.filter(
presidential_area_map_sub_query.c.pollingStationId.in_(filtered_polling_stations)
)
elif len(filtered_counting_centres) > 0:
query = query.filter(
presidential_area_map_sub_query.c.countingCentreId.in_(filtered_counting_centres)
)
elif len(filtered_district_centres) > 0:
query = query.filter(
presidential_area_map_sub_query.c.districtCentreId.in_(filtered_district_centres)
)
elif len(filtered_election_commissions) > 0:
query = query.filter(
presidential_area_map_sub_query.c.electionCommissionId.in_(filtered_election_commissions)
)
elif len(filtered_polling_districts) > 0:
query = query.filter(
presidential_area_map_sub_query.c.pollingDistrictId.in_(filtered_polling_districts)
)
elif len(filtered_polling_divisions) > 0:
query = query.filter(
presidential_area_map_sub_query.c.pollingDivisionId.in_(filtered_polling_divisions)
)
elif len(filtered_electoral_districts) > 0:
query = query.filter(
presidential_area_map_sub_query.c.electoralDistrictId.in_(filtered_electoral_districts)
)
elif len(filtered_countries) > 0:
query = query.filter(
presidential_area_map_sub_query.c.countryId.in_(filtered_countries)
)
if electionId is not None:
query = query.filter(
or_(
Model.electionId.in_(election.mappedElectionIds),
Model.electionId.in_(election.subElectionIds)
)
)
query = query.filter(
AreaModel.areaType == areaType
)
return query
def get_associated_areas(area, areaType, electionId=None):
result = get_associated_areas_query(areas=[area], areaType=areaType, electionId=electionId).all()
return result
def create(areaName, electionId):
area = Model(
areaName=areaName,
electionId=electionId
)
return area
def get_all(election_id=None, area_name=None, associated_area_id=None, area_type=None):
election = Election.get_by_id(electionId=election_id)
if associated_area_id is not None and area_type is not None:
associated_area = get_by_id(areaId=associated_area_id)
query = get_associated_areas_query(areas=[associated_area], areaType=area_type, electionId=election_id)
else:
query = Model.query
if area_name is not None:
query = query.filter(Model.areaName.like(area_name))
if election is not None:
query = query.filter(
or_(
Model.electionId.in_(election.mappedElectionIds),
Model.electionId.in_(election.subElectionIds)
)
)
if area_type is not None:
query = query.filter(Model.areaType == area_type)
query = query.order_by(Model.areaId)
return query
def get_by_id(areaId):
result = Model.query.filter(
Model.areaId == areaId
).one_or_none()
return result
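# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module). It assumes an
# application/session context and an already-populated area hierarchy;
# `some_district_centre_id` is a hypothetical id.
#
#   district_centre = get_by_id(areaId=some_district_centre_id)
#   counting_centres = district_centre.get_associated_areas(AreaTypeEnum.CountingCentre)
#   total_voters = district_centre.registeredVotersCount
# ---------------------------------------------------------------------------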
|
[
"l.dinukadesilva@gmail.com"
] |
l.dinukadesilva@gmail.com
|
0588499572b5db43202033b8b7e52943684e02b8
|
6811dc616a18898f565ee1e59f52a889da2b4d47
|
/clog/urls.py
|
7e24d23a5403e98aa3fd5f1454f3dabc71e0531f
|
[] |
no_license
|
rtreharne/clog
|
163e28b8e387e7f2fe926d88e28972c115294508
|
69320d929d96fd291e6da8fc11023d14da998655
|
refs/heads/master
| 2021-01-10T18:24:16.863481
| 2015-08-18T13:58:07
| 2015-08-18T13:58:07
| 40,085,671
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 563
|
py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
urlpatterns = patterns('',
url(r'^$', 'clog.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^user/', include('profiles.urls')),
url(r'^upload/', include('upload.urls')),
url(r'^invite/', include('invite.urls')),
)
if settings.DEBUG:
urlpatterns += patterns(
'django.views.static',
(r'^media/(?P<path>.*)',
'serve',
{'document_root': settings.MEDIA_ROOT}),
)
|
[
"R.Treharne@liverpool.ac.uk"
] |
R.Treharne@liverpool.ac.uk
|
3d16576478fa4173a6b4ac13bd24082a5243908a
|
507e9728307439fa3b343de015237e2f179b0032
|
/hospital/api/AConfig.py
|
9fa547be309e9abd3052ecb9319676517a13cbcb
|
[] |
no_license
|
haobin12358/hospital
|
3075f31c4ed527190292f2655350ef0bbc9ac7f3
|
9705e03eaf9514eb47f1d44d2bbe18ccf5cd5b30
|
refs/heads/master
| 2022-12-14T01:48:56.759520
| 2021-03-02T04:00:47
| 2021-03-02T04:00:47
| 245,367,609
| 2
| 1
| null | 2022-12-08T03:45:40
| 2020-03-06T08:32:44
|
Python
|
UTF-8
|
Python
| false
| false
| 1,128
|
py
|
# -*- coding: utf-8 -*-
from hospital.extensions.base_resource import Resource
from hospital.control.CConfig import CConfig
class AConfig(Resource):
def __init__(self):
self.cconfig = CConfig()
def get(self, config):
apis = {
"list_banner": self.cconfig.list_banner,
"get_csd": self.cconfig.get_csd,
"get_about_us": self.cconfig.get_about_us,
"get_vip_price": self.cconfig.get_vip_price,
"get_pointtask": self.cconfig.get_pointtask,
"get_integral": self.cconfig.get_integral
}
return apis
def post(self, config):
apis = {
"set_banner": self.cconfig.set_banner,
"set_csd": self.cconfig.set_csd,
"set_about_us": self.cconfig.set_about_us,
"set_characteristic_team": self.cconfig.set_characteristic_team,
"set_honour": self.cconfig.set_honour,
"set_vip_price": self.cconfig.set_vip_price,
"update_pointtask": self.cconfig.update_pointtask,
"get_point": self.cconfig.get_point
}
return apis
|
[
"1276121237@qq.com"
] |
1276121237@qq.com
|
bc2a86c16fdc46755d1f68ef3d15c959ac845b19
|
b2ed893d04f04eeaf7209187133de7431c476a96
|
/user_net/activity_info.py
|
94dc0c294de672dfcb333926cf41fa221e1c7235
|
[] |
no_license
|
liruikaiyao/workshop
|
4b5221259f59ad504d87d73c31f5fa0e58d4a1f0
|
6dbde74e35ef02f5e92c76dcdd1909f1d0afb89e
|
refs/heads/master
| 2021-01-17T16:09:13.248109
| 2015-08-05T09:43:21
| 2015-08-05T09:43:21
| 23,420,887
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,674
|
py
|
# coding=utf-8
__author__ = 'Carry lee'
from collections import Counter
import datetime
import urllib2
import json
from config.db import ICCv1, sh, utc
api_url = u'http://api.map.baidu.com/geocoder/v2/?ak='
parameter = u'&output=json&coordtype=wgs84ll&location='
ak = 'SIpMcORCSogM916QMOz5tx7S'
weixin = ICCv1['weixin']
detail = ICCv1['detail']
begin = datetime.datetime(year=2015, month=5, day=12, hour=0, minute=0, second=0, tzinfo=sh)
end = datetime.datetime(year=2015, month=5, day=19, hour=0, minute=0, second=0, tzinfo=sh)
begin_utc = begin.astimezone(utc)
end_utc = end.astimezone(utc)
# Gender statistics
gender = {}
male = 0
female = 0
for elem in detail.find({'__REMOVED__': False,
'__CREATE_TIME__': {'$gt': begin_utc, '$lt': end_utc}}):
if 'sex' in elem:
if elem['sex'] == 1:
male += 1
elif elem['sex'] == 2:
female += 1
else:
pass
gender['male'] = male
gender['female'] = female
# Helper that resolves the city name for a set of LBS (latitude/longitude) data via the Baidu geocoder API
def getcity(one):
location = one['Latitude'] + ',' + one['Longitude']
try:
res = urllib2.urlopen(api_url + ak + parameter + location, timeout=0.5)
address = json.loads(res.read())['result']
except Exception as e:
print e
else:
city = address['addressComponent']['city']
return city
city_list = []
for elem in weixin.find({'Event': 'LOCATION',
'__REMOVED__': False,
'__CREATE_TIME__': {'$gt': begin_utc, '$lt': end_utc}}).add_option(16):
city_name = getcity(elem)
city_list.append(city_name)
city_info = Counter(city_list)
|
[
"liruikaiyao@gmail.com"
] |
liruikaiyao@gmail.com
|
7670f28ba7aabb549461ab2ef055a921d977f465
|
19236d9e966cf5bafbe5479d613a175211e1dd37
|
/cohesity_management_sdk/controllers/certificates.py
|
419684769ccf8289a0dc95ec3fbc113bd7eed60f
|
[
"MIT"
] |
permissive
|
hemanshu-cohesity/management-sdk-python
|
236c44fbd9604809027f8ddd0ae6c36e4e727615
|
07c5adee58810979780679065250d82b4b2cdaab
|
refs/heads/master
| 2020-04-29T23:22:08.909550
| 2019-04-10T02:42:16
| 2019-04-10T02:42:16
| 176,474,523
| 0
| 0
|
NOASSERTION
| 2019-03-19T09:27:14
| 2019-03-19T09:27:12
| null |
UTF-8
|
Python
| false
| false
| 6,978
|
py
|
# -*- coding: utf-8 -*-
# Copyright 2019 Cohesity Inc.
import logging
from cohesity_management_sdk.api_helper import APIHelper
from cohesity_management_sdk.configuration import Configuration
from cohesity_management_sdk.controllers.base_controller import BaseController
from cohesity_management_sdk.http.auth.auth_manager import AuthManager
from cohesity_management_sdk.models.ssl_certificate_configuration import SSLCertificateConfiguration
from cohesity_management_sdk.exceptions.error_error_exception import ErrorErrorException
class Certificates(BaseController):
"""A Controller to access Endpoints in the cohesity_management_sdk API."""
def __init__(self, client=None, call_back=None):
super(Certificates, self).__init__(client, call_back)
self.logger = logging.getLogger(__name__)
def delete_web_server_certificate(self):
"""Does a DELETE request to /public/certificates/webServer.
Returns delete status upon completion.
Returns:
void: Response from the API. No Content
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
try:
self.logger.info('delete_web_server_certificate called.')
# Prepare query URL
self.logger.info('Preparing query URL for delete_web_server_certificate.')
_url_path = '/public/certificates/webServer'
_query_builder = Configuration.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare and execute request
self.logger.info('Preparing and executing request for delete_web_server_certificate.')
_request = self.http_client.delete(_query_url)
AuthManager.apply(_request)
_context = self.execute_request(_request, name = 'delete_web_server_certificate')
# Endpoint and global error handling using HTTP status codes.
self.logger.info('Validating response for delete_web_server_certificate.')
if _context.response.status_code == 0:
raise ErrorErrorException('Error', _context)
self.validate_response(_context)
except Exception as e:
self.logger.error(e, exc_info = True)
raise
def get_web_server_certificate(self):
"""Does a GET request to /public/certificates/webServer.
Returns the Server Certificate configured on the cluster.
Returns:
SSLCertificateConfiguration: Response from the API. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
try:
self.logger.info('get_web_server_certificate called.')
# Prepare query URL
self.logger.info('Preparing query URL for get_web_server_certificate.')
_url_path = '/public/certificates/webServer'
_query_builder = Configuration.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
self.logger.info('Preparing headers for get_web_server_certificate.')
_headers = {
'accept': 'application/json'
}
# Prepare and execute request
self.logger.info('Preparing and executing request for get_web_server_certificate.')
_request = self.http_client.get(_query_url, headers=_headers)
AuthManager.apply(_request)
_context = self.execute_request(_request, name = 'get_web_server_certificate')
# Endpoint and global error handling using HTTP status codes.
self.logger.info('Validating response for get_web_server_certificate.')
if _context.response.status_code == 0:
raise ErrorErrorException('Error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, SSLCertificateConfiguration.from_dictionary)
except Exception as e:
self.logger.error(e, exc_info = True)
raise
def update_web_server_certificate(self,
body=None):
"""Does a PUT request to /public/certificates/webServer.
Returns the updated Web Server Certificate on the cluster.
Args:
body (SSLCertificateConfiguration, optional): TODO: type
description here. Example:
Returns:
SSLCertificateConfiguration: Response from the API. Success
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
try:
self.logger.info('update_web_server_certificate called.')
# Prepare query URL
self.logger.info('Preparing query URL for update_web_server_certificate.')
_url_path = '/public/certificates/webServer'
_query_builder = Configuration.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
self.logger.info('Preparing headers for update_web_server_certificate.')
_headers = {
'accept': 'application/json',
'content-type': 'application/json; charset=utf-8'
}
# Prepare and execute request
self.logger.info('Preparing and executing request for update_web_server_certificate.')
_request = self.http_client.put(_query_url, headers=_headers, parameters=APIHelper.json_serialize(body))
AuthManager.apply(_request)
_context = self.execute_request(_request, name = 'update_web_server_certificate')
# Endpoint and global error handling using HTTP status codes.
self.logger.info('Validating response for update_web_server_certificate.')
if _context.response.status_code == 0:
raise ErrorErrorException('Error', _context)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, SSLCertificateConfiguration.from_dictionary)
except Exception as e:
self.logger.error(e, exc_info = True)
raise
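# Usage note (illustrative, not taken from this file): these controller methods
# are normally reached through the SDK's configured, authenticated client object
# rather than by instantiating Certificates directly, e.g. something like
# `client.certificates.get_web_server_certificate()`.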
|
[
"ashish@cohesity.com"
] |
ashish@cohesity.com
|
a907b10a10fa739cebb6a2d9d28ef3ddc01bc2f9
|
e770533cc7d8517134d6f9159f5f9e52747c7153
|
/python/04selenium/selenium014.py
|
88e3348deb7fa6521253c9474ea652722cafae13
|
[] |
no_license
|
code1990/bootPython
|
5d878f7fac8aaa09a2b9e4a6d50a3c0f86c6dea5
|
e5debd59b07a2c713f3e692aa4f44a9d2e5baeae
|
refs/heads/master
| 2022-07-27T04:31:00.292692
| 2020-08-07T07:07:15
| 2020-08-07T07:07:23
| 206,805,170
| 0
| 0
| null | 2020-10-13T15:51:34
| 2019-09-06T13:56:39
|
Python
|
UTF-8
|
Python
| false
| false
| 634
|
py
|
# (14) Selecting from a drop-down list
from selenium import webdriver
from selenium.webdriver.support.select import Select
from time import sleep
driver = webdriver.Chrome(executable_path=r'C:\driver\chromedriver.exe')
driver.implicitly_wait(10)
driver.get('http://www.baidu.com')
# Hover the mouse over the "设置" (Settings) link
driver.find_element_by_link_text('设置').click()
sleep(1)
# Open "搜索设置" (search settings)
driver.find_element_by_link_text("搜索设置").click()
sleep(2)
# Number of search results to show per page
sel = driver.find_element_by_xpath("//select[@id='nr']")
Select(sel).select_by_value('50') # show 50 results per page
# ……
driver.quit()
|
[
"s1332177151@sina.com"
] |
s1332177151@sina.com
|
a9f4fef1e4376bfc10ef1a75e7fe509d20b30fac
|
1e0e610166b36e5c73e7ff82c4c0b8b1288990bf
|
/mail/mail02.py
|
dbc58ae4fa61f846897bc4421ee3a39ac60cfbbc
|
[] |
no_license
|
PythonOpen/PyhonProjects
|
4ef1e70a971b9ebd0eb6a09e63e22581ad302534
|
ede93314009564c31aa586d2f89ed8b1e4751c1b
|
refs/heads/master
| 2022-05-20T23:21:03.536846
| 2020-04-27T00:59:32
| 2020-04-27T00:59:32
| 250,142,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,665
|
py
|
from email.mime.text import MIMEText
import smtplib
# MIMEText takes three main arguments:
# 1. the message body
# 2. the MIME subtype ("html" here, since the body is an HTML document)
# 3. the character encoding
main_content="""
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Title</title>
</head>
<body>
<h1>这是一封HTML格式邮件</h1>
</body>
</html>
"""
msg=MIMEText(main_content,"html","utf-8")
# Sender address; a personal QQ mailbox is used here. The password would normally be entered at runtime, but is hard-coded below for convenience.
from_addr="1083138609@qq.com"
# This is the SMTP authorization code issued in the QQ Mail settings, NOT the mailbox login password.
from_pwd="ofgxddnrkxkqbaaf"
# Recipient information
# A QQ mailbox is used here as well; the message is sent to myself.
to_addr="1083138609@qq.com"
# SMTP server address
# The value differs between mail providers.
# Most providers require third-party access (an authorization code) to be enabled first.
# Tencent QQ Mail's SMTP host is smtp.qq.com
smtp_srv="smtp.qq.com"
try:
    # Two arguments:
    # 1. the SMTP server host name (a plain string)
    # 2. the port the server listens on; SMTP over SSL uses port 465
    #    (port 25 is plain SMTP and will not complete an SMTP_SSL handshake)
    srv = smtplib.SMTP_SSL(smtp_srv, 465)
    # Log in to the sender's mailbox
srv.login(from_addr, from_pwd)
    # Send the mail. Three arguments:
    # 1. the sender address
    # 2. the recipient addresses, which must be a list
    # 3. the message content, passed as a string
srv.sendmail(from_addr,[to_addr],msg.as_string())
srv.quit()
except Exception as e:
print(e)
|
[
"1083138609@qq.com"
] |
1083138609@qq.com
|
5704a6e8200a1842c1da7c558ef26fbc91662ce3
|
c071eb46184635818e8349ce9c2a78d6c6e460fc
|
/system/python_stubs/-745935208/_ast/Eq.py
|
65f93d36a7d2abe45c41f6fe9cdfc0b757574bc7
|
[] |
no_license
|
sidbmw/PyCharm-Settings
|
a71bc594c83829a1522e215155686381b8ac5c6e
|
083f9fe945ee5358346e5d86b17130d521d1b954
|
refs/heads/master
| 2020-04-05T14:24:03.216082
| 2018-12-28T02:29:29
| 2018-12-28T02:29:29
| 156,927,399
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 347
|
py
|
# encoding: utf-8
# module _ast
# from C:\Users\siddh\AppData\Local\Programs\Python\Python37\lib\site-packages\numpy\core\_dummy.cp37-win_amd64.pyd
# by generator 1.146
# no doc
# no imports
from .cmpop import cmpop
class Eq(cmpop):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
_fields = ()
|
[
"siddharthnatamai@gmail.com"
] |
siddharthnatamai@gmail.com
|
95513d0222466b05775626ce21dbbcff1a0f2158
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/data/p4VQE/R1/benchmark/startQiskit_noisy68.py
|
2abe4bba4276c4ad5fc8fe827f963e48f6c2e57f
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998
| 2021-09-19T02:56:16
| 2021-09-19T02:56:16
| 405,159,939
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,423
|
py
|
# qubit number=3
# total number=9
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
import networkx as nx
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def make_circuit(n:int) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
prog = QuantumCircuit(input_qubit)
prog.h(input_qubit[0]) # number=1
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=3
prog.h(input_qubit[3]) # number=4
for edge in E:
k = edge[0]
l = edge[1]
prog.cp(-2 * gamma, input_qubit[k-1], input_qubit[l-1])
prog.p(gamma, k)
prog.p(gamma, l)
prog.rx(2 * beta, range(len(V)))
prog.cx(input_qubit[1],input_qubit[0]) # number=5
prog.cx(input_qubit[1],input_qubit[0]) # number=6
prog.swap(input_qubit[3],input_qubit[0]) # number=7
prog.swap(input_qubit[3],input_qubit[0]) # number=8
# circuit end
return prog
if __name__ == '__main__':
n = 4
V = np.arange(0, n, 1)
E = [(0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 2, 1.0), (3, 1, 1.0)]
G = nx.Graph()
G.add_nodes_from(V)
G.add_weighted_edges_from(E)
step_size = 0.1
a_gamma = np.arange(0, np.pi, step_size)
a_beta = np.arange(0, np.pi, step_size)
a_gamma, a_beta = np.meshgrid(a_gamma, a_beta)
F1 = 3 - (np.sin(2 * a_beta) ** 2 * np.sin(2 * a_gamma) ** 2 - 0.5 * np.sin(4 * a_beta) * np.sin(4 * a_gamma)) * (
1 + np.cos(4 * a_gamma) ** 2)
result = np.where(F1 == np.amax(F1))
a = list(zip(result[0], result[1]))[0]
gamma = a[0] * step_size
beta = a[1] * step_size
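    # The grid point maximising F1 above fixes (gamma, beta); make_circuit()
    # reads these module-level values when building the parameterised circuit.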
prog = make_circuit(4)
sample_shot =5200
writefile = open("../data/startQiskit_noisy68.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
backend = FakeYorktown()
circuit1 = transpile(prog, FakeYorktown())
circuit1.measure_all()
prog = circuit1
info = execute(prog,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
|
[
"wangjiyuan123@yeah.net"
] |
wangjiyuan123@yeah.net
|
8d76957311696f5d8031a0f6af07845ca6839a63
|
6223dc2e5de7921696cb34fb62142fd4a4efe361
|
/.metadata/.plugins/org.eclipse.core.resources/.history/0/10c54487a564001418adf2b9b78fa3c6
|
9c5974137244fcb2386c19b2e81426b669f02921
|
[] |
no_license
|
Mushirahmed/python_workspace
|
5ef477b2688e8c25b1372f546752501ee53d93e5
|
46e2ed783b17450aba29e4e2df7b656522b2b03b
|
refs/heads/master
| 2021-03-12T19:24:50.598982
| 2015-05-25T10:23:54
| 2015-05-25T10:23:54
| 24,671,376
| 0
| 1
| null | 2015-02-06T09:27:40
| 2014-10-01T08:40:33
|
Python
|
UTF-8
|
Python
| false
| false
| 2,411
|
#!/usr/bin/env python
#
# Copyright 2014 <+YOU OR YOUR COMPANY+>.
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import numpy as np
from gnuradio import gr
import gras
class ztransform(gras.Block):
"""
    Apply a rational transfer function num(z)/den(z) to the input stream,
    sample by sample (a direct-form IIR difference equation).
"""
def __init__(self):
gras.Block.__init__(self,
name="ztransform",
in_sig=[np.float32],
out_sig=[np.float32])
def set_parameters(self,num,den,window):
self.num = list(map(float,num.split(" ")))
self.den = list(map(float,den.split(" ")))
        print(self.num)
        print(self.den)
self.n = window
self.num = np.poly1d(self.num)
self.den = np.poly1d(self.den)
self.den_coeff = self.den.c
nm_coeff = self.num.c
#print self.den_coeff
self.den_ord = self.den.order
self.num_ord = self.num.order
for i in range(0,self.den_ord-self.num_ord):
nm_coeff = np.insert(nm_coeff,0,0)
self.num_coeff = nm_coeff
#print self.num_coeff
self.in_q = [0]*(self.den_ord + 1)
self.out_q = [0]*(self.den_ord + 1)
self.final_q = []
def work(self, input_items, output_items):
in0 = input_items[0]
out = output_items[0]
#print "i am in work function"
# <+signal processing here+>
ans1 = 0
ans2 = 0
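        # Direct-form IIR update: ans1 accumulates the feedback (denominator)
        # taps from index 1, ans2 the feed-forward (numerator) taps, and the
        # output is (ans2 - ans1) normalised by the leading denominator
        # coefficient.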
for i in range(1,self.den_ord + 1):
ans1 += self.den_coeff[i]*self.out_q[len(self.out_q)-i]
self.in_q.append(float(in0[0]))
#print self.in_q
for i in range(0,self.den_ord + 1):
ans2 += self.num_coeff[i]*self.in_q[len(self.in_q)-i-1]
#print ans2
ans = ans2 - ans1
ans = ans/self.den_coeff[0]
self.out_q.append(ans)
self.out_q.pop(0)
self.in_q.pop(0)
out[0] = ans
print "OUTPUT:",out[0]
#self.final_q.append(ans)
self.consume(0,1)
self.produce(0,1)
|
[
"imushir@gmail.com"
] |
imushir@gmail.com
|
|
c79f5defc4669af504b01120e06bf5dba6eb51f4
|
2bdedcda705f6dcf45a1e9a090377f892bcb58bb
|
/src/main/output/community/way/lot_world/month/car_level/president_program.py
|
09d22c760a6b771bc4aee758f82f7a48327f5762
|
[] |
no_license
|
matkosoric/GenericNameTesting
|
860a22af1098dda9ea9e24a1fc681bb728aa2d69
|
03f4a38229c28bc6d83258e5a84fce4b189d5f00
|
refs/heads/master
| 2021-01-08T22:35:20.022350
| 2020-02-21T11:28:21
| 2020-02-21T11:28:21
| 242,123,053
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,135
|
py
|
## Get supported languages for translation
The following code gets a list of language codes representing languages supported for translation, using the GetLanguagesForTranslate method.
1. Create a new Node.js project in your favorite IDE.
2. Add the code provided below.
3. Replace the `subscriptionKey` value with an access key valid for your subscription.
4. Run the program.
```nodejs
'use strict';
let https = require ('https');
// **********************************************
// *** Update or verify the following values. ***
// **********************************************
// Replace the subscriptionKey string value with your valid subscription key.
let subscriptionKey = '970028e5525647c21ea1b2b2ab045fbf';
let host = 'api.microsofttranslator.com';
let path = '/V2/Http.svc/GetLanguagesForTranslate';
let params = '';
let response_handler = function (response) {
let body = '';
response.on ('data', function (d) {
body += d;
});
response.on ('end', function () {
console.log (body);
});
response.on ('error', function (e) {
console.log ('Error: ' + e.message);
});
};
let GetLanguagesForTranslate = function () {
let request_params = {
method : 'GET',
hostname : host,
path : path + params,
headers : {
'ed56e4d6e94b67527cced64771b62d28' : subscriptionKey,
}
};
let req = https.request (request_params, response_handler);
req.end ();
}
GetLanguagesForTranslate ();
```
**Get supported languages for translation response**
A successful response is returned in XML, as shown in the following example:
```xml
<ArrayOfstring xmlns="http://schemas.microsoft.com/2003/10/Serialization/Arrays" xmlns:i="http://www.w3.org/2001/XMLSchema-instance">
<string>af</string>
<string>ar</string>
<string>bn</string>
<string>bs-Latn</string>
<string>bg</string>
<string>ca</string>
<string>zh-CHS</string>
<string>zh-CHT</string>
<string>yue</string>
<string>hr</string>
<string>cs</string>
<string>da</string>
<string>nl</string>
<string>en</string>
<string>et</string>
<string>fj</string>
<string>fil</string>
<string>fi</string>
<string>fr</string>
<string>de</string>
<string>el</string>
<string>ht</string>
<string>he</string>
<string>hi</string>
<string>mww</string>
<string>hu</string>
<string>id</string>
<string>it</string>
<string>ja</string>
<string>sw</string>
<string>tlh</string>
<string>tlh-Qaak</string>
<string>ko</string>
<string>lv</string>
<string>lt</string>
<string>mg</string>
<string>ms</string>
<string>mt</string>
<string>yua</string>
<string>no</string>
<string>otq</string>
<string>fa</string>
<string>pl</string>
<string>pt</string>
<string>ro</string>
<string>ru</string>
<string>sm</string>
<string>sr-Cyrl</string>
<string>sr-Latn</string>
<string>sk</string>
<string>sl</string>
<string>es</string>
<string>sv</string>
<string>ty</string>
<string>ta</string>
<string>th</string>
<string>to</string>
<string>tr</string>
<string>uk</string>
<string>ur</string>
<string>vi</string>
<string>cy</string>
</ArrayOfstring>
```
|
[
"soric.matko@gmail.com"
] |
soric.matko@gmail.com
|
8e81c83eca27fafe4374430fd062b2cda78d9877
|
ca045f22bc2660a24d44ecacdb41f7a646df3d19
|
/src/exemplos/2_Operadores/9_exponenciacao.py
|
4888c9e422a96a463e3625ae6a742f61a4d519ba
|
[] |
no_license
|
gabriel1997castro/CIC-APC
|
db7e4114bfa0925e976b64638cac9e94845d8376
|
217f66ab04b0529886d2ef22ce1de15103440ba3
|
refs/heads/master
| 2020-12-25T11:32:06.276710
| 2016-01-27T02:57:24
| 2016-01-27T02:57:24
| 50,466,976
| 0
| 0
| null | 2016-01-26T23:32:49
| 2016-01-26T23:32:49
| null |
UTF-8
|
Python
| false
| false
| 2,414
|
py
|
# -*- coding: utf-8 -*-
# @file: 9_exponenciacao.py
# @author: Guilherme N. Ramos (gnramos@unb.br)
# @disciplina: Algoritmos e Programação de Computadores (Algorithms and Computer Programming)
#
# Examples of using the exponentiation operator.
print 'Exponenciação:'
base = 0
expoente = 0
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
expoente = 1
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
expoente = 2
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
expoente = 3
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
base = 1
expoente = 0
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
expoente = 1
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
expoente = 2
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
expoente = 3
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
base = 2
expoente = 0
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
expoente = 1
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
expoente = 2
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
expoente = 3
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
base = 3
expoente = 0
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
expoente = 1
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
expoente = 2
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
expoente = 3
resultado = base ** expoente
print ' (%d)**(%d) = %d' % (base, expoente, resultado)
base = 2
expoente = -2
resultado = base ** expoente
print ' (%d)**(%d) = %f' % (base, expoente, resultado)
expoente = -1
resultado = base ** expoente
print ' (%d)**(%d) = %f' % (base, expoente, resultado)
expoente = 0.5
resultado = base ** expoente
print ' (%d)**(%f) = %f' % (base, expoente, resultado)
base = 4
expoente = 0.5
resultado = base ** expoente
print ' (%d)**(%f) = %f' % (base, expoente, resultado)
base = 8
expoente = 1.0/3.0
resultado = base ** expoente
print ' (%d)**(%f) = %f' % (base, expoente, resultado)
|
[
"ramos@gnramos.com"
] |
ramos@gnramos.com
|
dc55ed13f4103e66e4e5edcf55079267753cb476
|
ee4db47ccecd23559b3b6f3fce1822c9e5982a56
|
/Build Chatbots/Tokenization.py
|
db3b63f6326833b215d84cf8c42b27248d31c56d
|
[] |
no_license
|
meoclark/Data-Science-DropBox
|
d51e5da75569626affc89fdcca1975bed15422fd
|
5f365cedc8d0a780abeb4e595cd0d90113a75d9d
|
refs/heads/master
| 2022-10-30T08:43:22.502408
| 2020-06-16T19:45:05
| 2020-06-16T19:45:05
| 265,558,242
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 586
|
py
|
from nltk.tokenize import word_tokenize,sent_tokenize
ecg_text = 'An electrocardiogram is used to record the electrical conduction through a person\'s heart. The readings can be used to diagnose cardiac arrhythmias.'
tokenized_by_word = word_tokenize(ecg_text)
tokenized_by_sentence = sent_tokenize(ecg_text)
try:
print('Word Tokenization:')
print(tokenized_by_word)
except:
print('Expected a variable called `tokenized_by_word`')
try:
print('Sentence Tokenization:')
print(tokenized_by_sentence)
except:
print('Expected a variable called `tokenized_by_sentence`')
|
[
"oluchukwuegbo@gmail.com"
] |
oluchukwuegbo@gmail.com
|
2d78ec6bc312d47fc94f57d817cd005695b414fe
|
1b87d5f7cba7e068f7b2ea902bba494599d20a78
|
/contrib/wydget/wydget/__init__.py
|
fe326d70a56e2ec6b127a655b859e95144cddc65
|
[
"BSD-3-Clause"
] |
permissive
|
jpaalasm/pyglet
|
906d03fe53160885665beaed20314b5909903cc9
|
bf1d1f209ca3e702fd4b6611377257f0e2767282
|
refs/heads/master
| 2021-01-25T03:27:08.941964
| 2014-01-25T17:50:57
| 2014-01-25T17:50:57
| 16,236,090
| 2
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,005
|
py
|
'''wydget is a graphical user interface (GUI) toolkit for pyglet.
This module allows applications to create a user interface comprised of
widgets and attach event handling to those widgets.
GUIs are managed by the top-level GUI class::
from pyglet.window import Window
from wydget import GUI
window = Window(100, 100)
gui = GUI(window)
window.push_handlers(gui)
You may then add components to the GUI by importing them from the
`wydget.widgets` package::
from wydget.widgets import TextButton
b = TextButton(gui, 'Press me!')
To handle click events on the button, create a handler::
@gui.select(b)
def on_click(button, *args):
print 'I was pressed!'
Finally, use a standard pyglet event loop to have the GUI run, and invoke
``gui.draw()`` to render the GUI. The GUI will render to an area the
dimensions of the window and at z = 0::
while not window.has_exit:
window.dispatch_events()
window.clear()
gui.draw()
window.flip()
'''
import sys
import collections
from xml.etree import ElementTree
from pyglet.gl import *
from pyglet import media
import style
import event
import loadxml
import widgets
import util
class GUI(event.GUIEventDispatcher):
    '''GUI organisation and event handling.
'''
id = '-gui'
name = 'gui'
classes = ()
parent = None
def __init__(self, window, x=0, y=0, z=0, width=None, height=None):
super(GUI, self).__init__()
self.window = window
self.style = style.Style()
# element.Element stuff
self.x, self.y, self.z = x, y, z
self.width = self.inner_width = width or window.width
self.height = self.inner_height = height or window.height
self.children = []
# map Element id, class and name to Element
self._by_id = {}
self._by_class = collections.defaultdict(set)
self._by_name = collections.defaultdict(set)
# list Element.ids in the order they're registered for tabbing
self._focus_order = []
self.debug_display = None
self.debug = '--debug' in sys.argv
if self.debug:
self.debug_display = widgets.Label(self, 'dummy',
bgcolor="white", padding=1, width=self.width)
def __repr__(self):
return '<%s at (%s, %s, %s) (%sx%s)>'%(self.__class__.__name__,
self.x, self.y, self.z, self.width, self.height)
def dump(self, s=''):
print s + str(self)
for child in self.children: child.dump(s+' ')
# clipboard support
clipboard_element = None
def setSelection(self, element):
'''The element has some data that may interact with the clipboard.
'''
if self.clipboard_element not in (element, None):
self.clipboard_element.clearSelection()
self.clipboard_element = element
def clearSelection(self, element):
'''The element doesn't want to interact with the clipboard any
longer.
'''
# might already have been bumped for another
if self.clipboard_element is element:
            self.clipboard_element = None
# Registration of elements
# XXX I suspect that this is duplicating functionality in layout
def register(self, element):
'''Register the element with the gui.
IDs must be unique.
'''
if element.id in self._by_id:
raise KeyError, 'ID %r already exists as %r (trying to add %r)'%(
element.id, self._by_id[element.id], element)
self._by_id[element.id] = element
self._by_name[element.name].add(element.id)
for klass in element.classes:
self._by_class[klass].add(element.id)
if element.is_focusable:
self._focus_order.append(element.id)
self.setDirty()
self._layout_needed = True
def unregister(self, element):
del self._by_id[element.id]
self._by_name[element.name].remove(element.id)
for klass in element.classes:
self._by_class[klass].remove(element.id)
if self.focused_element is element:
self.focused_element = None
if element.is_focusable:
self._focus_order.remove(element.id)
self.setDirty()
self._layout_needed = True
def has(self, spec):
if spec[0] == '#':
return spec[1:] in self._by_id
elif spec[0] == '.':
return spec[1:] in self._by_class
else:
return spec in self._by_name
def get(self, spec):
if spec[0] == '#':
return self._by_id[spec[1:]]
elif spec[0] == '.':
return (self._by_id[id] for id in self._by_class[spec[1:]])
else:
return (self._by_id[id] for id in self._by_name[spec])
# rendering / hit detection
_rects = None
def setDirty(self):
'''Indicate that one or more of the gui's children have changed
geometry and a new set of child rects is needed.
'''
self._rects = None
_layout_needed = True
def layout(self):
'''Layout the entire GUI in response to its dimensions changing or
the contents changing (in a way that would alter internal layout).
'''
#print '>'*75
#self.dump()
# resize all elements
while True:
for element in self.children:
element.resetGeometry()
ok = False
try:
while not ok:
ok = True
for element in self.children:
ok = ok and element.resize()
except util.RestartLayout:
pass
else:
break
# position top-level elements
for c in self.children:
if c.x is None or c.x_spec.percentage:
c.x = c.x_spec.calculate()
if c.y is None or c.y_spec.percentage:
c.y = c.y_spec.calculate()
self._rects = None
self._layout_needed = False
#print '-'*75
#self.dump()
#print '<'*75
def layoutNeeded(self):
self._layout_needed = True
def getRects(self, exclude=None):
'''Get the rects for all the children to draw & interact with.
Prune the tree at "exclude" if provided.
'''
if self._layout_needed:
try:
self.layout()
except:
print '*'*75
self.dump()
print '*'*75
raise
if self._rects is not None and exclude is None:
return self._rects
# now get their rects
rects = []
clip = self.rect
for element in self.children:
if element is exclude: continue
rects.extend(element.getRects(clip, exclude))
rects.sort(lambda a,b: cmp(a[1][2], b[1][2]))
if exclude is None:
self._rects = rects
return rects
def determineHit(self, x, y, exclude=None):
'''Determine which element is at the absolute (x, y) position.
"exclude" allows us to ignore a single element (eg. an element
under the cursor being dragged - we wish to know which element is
under *that)
'''
for o, (ox, oy, oz, clip) in reversed(self.getRects(exclude)):
ox += clip.x
oy += clip.y
if x < ox or y < oy: continue
if x > ox + clip.width: continue
if y > oy + clip.height: continue
return o
return None
def draw(self):
'''Render all the elements on display.'''
glPushAttrib(GL_ENABLE_BIT)
glDisable(GL_DEPTH_TEST)
# get the rects and sort by Z (yay for stable sort!)
rects = self.getRects()
# draw
oz = 0
for element, (x, y, z, c) in rects:
if element is self.debug_display:
continue
element.draw(x, y, z, c)
if self.debug:
# render the debugging displays
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glEnable(GL_BLEND)
for o, (x, y, z, c) in rects:
w, h = c.width, c.height
x += c.x
y += c.y
glColor4f(1, 0, 0, .1)
glRectf(x, y, x+w, y+h)
glColor4f(1, 1, 1, .1)
glBegin(GL_LINE_LOOP)
glVertex2f(x, y)
glVertex2f(x+w, y)
glVertex2f(x+w, y+h)
glVertex2f(x, y+h)
glEnd()
if o.view_clip:
v = o.view_clip
glColor4f(0, 0, 1, .1)
glRectf(x+v.x, y+v.y, x+v.x+v.width, y+v.y+v.height)
glDisable(GL_BLEND)
self.debug_display.draw(0, 0, 0, util.Rect(0, 0,
self.width, self.debug_display.height))
glPopAttrib()
# Element API (mostly terminators)
def getStyle(self): return self.style
def getGUI(self): return self
def isEnabled(self): return True
def isVisible(self): return True
def getParent(self, selector):
if isinstance(selector, str):
selector = [s.strip() for s in selector.split(',')]
if self.name in selector: return self
return None
def calculateAbsoluteCoords(self, x, y):
return (x + self.x, y + self.y)
def calculateRelativeCoords(self, x, y):
return (x - self.x, y - self.y)
def layoutDimensionsChanged(self, layout):
pass
padding = 0
def get_rect(self):
return util.Rect(0, 0, self.width, self.height)
rect = property(get_rect)
inner_rect = property(get_rect)
def addChild(self, child):
self.children.append(child)
self.register(child)
def delete(self):
for child in self.children: child.delete()
self.children = []
|
[
"joonas.paalasmaa@gmail.com"
] |
joonas.paalasmaa@gmail.com
|
39ccfa1bfe82238f935dda6943bcfeabd47426bd
|
f200651e624d5e5cd2f2262359a5932216d2d443
|
/demo-effects-html5-canvas/fire_controls/conv.py
|
2431b8a237fa8e6630f04a1d1e18c70d1a4332e7
|
[] |
no_license
|
lwerdna/lwerdna.github.io
|
fbea38c62029884930ebfac70c9d455979c43fde
|
f80c7cb173359e13b2894d64fb735c0396278b7e
|
refs/heads/master
| 2023-07-19T17:07:20.169897
| 2023-07-07T18:39:02
| 2023-07-07T18:39:02
| 38,472,392
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,112
|
py
|
palette = [
[0,0,0], [0,1,1], [0,4,5], [0,7,9], [0,8,11], [0,9,12], [15,6,8], [25,4,4],
[33,3,3], [40,2,2], [48,2,2], [55,1,1], [63,0,0], [63,0,0], [63,3,0], [63,7,0],
[63,10,0], [63,13,0], [63,16,0], [63,20,0], [63,23,0], [63,26,0], [63,29,0],
[63,33,0], [63,36,0], [63,39,0], [63,39,0], [63,40,0], [63,40,0], [63,41,0],
[63,42,0], [63,42,0], [63,43,0], [63,44,0], [63,44,0], [63,45,0], [63,45,0],
[63,46,0], [63,47,0], [63,47,0], [63,48,0], [63,49,0], [63,49,0], [63,50,0],
[63,51,0], [63,51,0], [63,52,0], [63,53,0], [63,53,0], [63,54,0], [63,55,0],
[63,55,0], [63,56,0], [63,57,0], [63,57,0], [63,58,0], [63,58,0], [63,59,0],
[63,60,0], [63,60,0], [63,61,0], [63,62,0], [63,62,0], [63,63,0],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63],
[63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63], [63,63,63]
]
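# The entries above are 6-bit VGA DAC values (0-63); scale them to 8-bit RGB by
# multiplying by 4 and emit rows suitable for pasting into a JavaScript-style
# array literal.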
for p in palette:
print '[%d,%d,%d],' % (p[0]*4, p[1]*4, p[2]*4)
|
[
"andrew@vector35.com"
] |
andrew@vector35.com
|
142c92391f03f9036f5df23bd5d855af23e4e0ac
|
f0d713996eb095bcdc701f3fab0a8110b8541cbb
|
/YXjx9G5uQ4CdYPuB4_20.py
|
f0a10dccbee452d6af543673b232e8f6d5b7d4b0
|
[] |
no_license
|
daniel-reich/turbo-robot
|
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
|
a7a25c63097674c0a81675eed7e6b763785f1c41
|
refs/heads/main
| 2023-03-26T01:55:14.210264
| 2021-03-23T16:08:01
| 2021-03-23T16:08:01
| 350,773,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,110
|
py
|
"""
**Mubashir** needs your help to compare two lists.
First list `lst1` contains some numbers and second list `lst2` contains
**squared values of numbers given in the first list**.
Create a function which takes these two lists and returns `True` if all square
values are available, `False` otherwise.
lst1 = [121, 144, 19, 161, 19, 144, 19, 11]
lst2 = [121, 14641, 20736, 361, 25921, 361, 20736, 361]
Returns `True` because **121 is square of 11, 14641 is square of 121, 20736 is
square of 144, 361 is square of 19, 25921 the square of 161, and so on...**
lst1 = [121, 144, 19, 161, 19, 144, 19, 11]
lst2 = [11*11, 121*121, 144*144, 19*19, 161*161, 19*19, 144*144, 19*19]
### Examples
simple_comp([121, 144, 19, 161, 19, 144, 19, 11], [121, 14641, 20736, 361, 25921, 361, 20736, 361]) ➞ True
simple_comp([4, 4], [1, 31]) ➞ False
simple_comp([2, 2, 3], [4, 4, 9]) ➞ True
### Notes
Numbers can be in any order.
"""
def simple_comp(lst1, lst2):
if lst1==None or lst2==None:
return lst1==lst2
return sorted(x**2 for x in lst1)==sorted(lst2)
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
37d5fa2a6f6b3325e6960b512dbb88914fa86b99
|
b2ba670818623f8ab18162382f7394baed97b7cb
|
/test-data/AndroidSlicer/Mitzuli/DD/10.py
|
d2f993240a11bbf9562d22402b0c95323cee8d8d
|
[
"MIT"
] |
permissive
|
hsumyatwin/ESDroid-artifact
|
012c26c40537a79b255da033e7b36d78086b743a
|
bff082c4daeeed62ceda3d715c07643203a0b44b
|
refs/heads/main
| 2023-04-11T19:17:33.711133
| 2022-09-30T13:40:23
| 2022-09-30T13:40:23
| 303,378,286
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,184
|
py
|
#start monkey test seedNo 0
import os;
from subprocess import Popen
from subprocess import PIPE
from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice, MonkeyImage
from com.android.monkeyrunner.MonkeyDevice import takeSnapshot
from com.android.monkeyrunner.easy import EasyMonkeyDevice
from com.android.monkeyrunner.easy import By
from com.android.chimpchat.hierarchyviewer import HierarchyViewer
from com.android.monkeyrunner import MonkeyView
import random
import sys
import subprocess
from sys import exit
from random import randint
device = MonkeyRunner.waitForConnection()
package = 'com.mitzuli'
activity ='com.mitzuli.MainActivity'
runComponent = package+'/'+activity
device.startActivity(component=runComponent)
MonkeyRunner.sleep(0.5)
MonkeyRunner.sleep(0.5)
device.touch(1300,113, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(1020,121, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(1001,127, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(863,125, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(355,1601, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(247,1839, 'DOWN_AND_UP')
MonkeyRunner.sleep(0.5)
device.touch(80,154, 'DOWN_AND_UP')
|
[
"hsumyatwin@gmail.com"
] |
hsumyatwin@gmail.com
|
8f0dd18ff0e2846a87a5f2ca82b2163c648938b6
|
2479345dafbf0ac1118f34fbd3471871a3ac5c11
|
/demo/libdemo/list_countries.py
|
9292611d6422dfbe06ee3e2c9b7058f6e10a215d
|
[] |
no_license
|
srikanthpragada/PYTHON_06_MAY_2021
|
e2fc4d32a38f085658f87d35f31df65ee837a440
|
f30a3c4541e0fc15d157446721b514f791602919
|
refs/heads/master
| 2023-06-02T23:13:53.786444
| 2021-06-16T03:00:38
| 2021-06-16T03:00:38
| 365,402,518
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 253
|
py
|
import requests
resp = requests.get("https://restcountries.eu/rest/v2/all")
if resp.status_code != 200:
print('Sorry! Could not get details!')
exit(1)
countries = resp.json()
for c in countries:
print(f"{c['name']:50} - {c['capital']}")
|
[
"srikanthpragada@gmail.com"
] |
srikanthpragada@gmail.com
|
bb363c5ddd3739e93a04900c1353f55c9f17c3ab
|
923f9270a12be35fdd297d8f27e522c601e94eab
|
/src/decay/test/test_dc_nose.py
|
00a9741044a433b8333c1da2f59dfc64f2536274
|
[] |
no_license
|
t-bltg/INF5620
|
a06b6e06b6aba3bc35e933abd19c58cd78584c1f
|
d3e000462302839b49693cfe06a2f2df924c5027
|
refs/heads/master
| 2021-05-31T00:41:41.624838
| 2016-03-22T09:29:00
| 2016-03-22T09:29:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,328
|
py
|
import nose.tools as nt
import sys, os
sys.path.insert(0, os.pardir)
import dc_mod_unittest as dc_mod
import numpy as np
def exact_discrete_solution(n, I, a, theta, dt):
"""Return exact discrete solution of the theta scheme."""
dt = float(dt) # avoid integer division
factor = (1 - (1-theta)*a*dt)/(1 + theta*dt*a)
return I*factor**n
def test_against_discrete_solution():
"""
Compare result from solver against
formula for the discrete solution.
"""
theta = 0.8; a = 2; I = 0.1; dt = 0.8
N = int(8/dt) # no of steps
u, t = dc_mod.solver(I=I, a=a, T=N*dt, dt=dt, theta=theta)
u_de = np.array([exact_discrete_solution(n, I, a, theta, dt)
for n in range(N+1)])
diff = np.abs(u_de - u).max()
nt.assert_almost_equal(diff, 0, delta=1E-14)
def test_solver():
"""
Compare result from solver against
precomputed arrays for theta=0, 0.5, 1.
"""
I=0.8; a=1.2; T=4; dt=0.5 # fixed parameters
precomputed = {
't': np.array([ 0. , 0.5, 1. , 1.5, 2. , 2.5,
3. , 3.5, 4. ]),
0.5: np.array(
[ 0.8 , 0.43076923, 0.23195266, 0.12489759,
0.06725255, 0.03621291, 0.01949926, 0.0104996 ,
0.00565363]),
0: np.array(
[ 8.00000000e-01, 3.20000000e-01,
1.28000000e-01, 5.12000000e-02,
2.04800000e-02, 8.19200000e-03,
3.27680000e-03, 1.31072000e-03,
5.24288000e-04]),
1: np.array(
[ 0.8 , 0.5 , 0.3125 , 0.1953125 ,
0.12207031, 0.07629395, 0.04768372, 0.02980232,
0.01862645]),
}
for theta in 0, 0.5, 1:
u, t = dc_mod.solver(I, a, T, dt, theta=theta)
diff = np.abs(u - precomputed[theta]).max()
# Precomputed numbers are known to 8 decimal places
nt.assert_almost_equal(diff, 0, places=8,
msg='theta=%s' % theta)
def test_potential_integer_division():
"""Choose variables that can trigger integer division."""
theta = 1; a = 1; I = 1; dt = 2
N = 4
u, t = dc_mod.solver(I=I, a=a, T=N*dt, dt=dt, theta=theta)
u_de = np.array([exact_discrete_solution(n, I, a, theta, dt)
for n in range(N+1)])
diff = np.abs(u_de - u).max()
nt.assert_almost_equal(diff, 0, delta=1E-14)
def test_convergence_rates():
"""Compare empirical convergence rates to exact ones."""
# Set command-line arguments directly in sys.argv
sys.argv[1:] = '--I 0.8 --a 2.1 --T 5 '\
'--dt 0.4 0.2 0.1 0.05 0.025'.split()
# Suppress output from dc_mod.main()
stdout = sys.stdout # save standard output for later use
scratchfile = open('.tmp', 'w') # fake standard output
sys.stdout = scratchfile
r = dc_mod.main()
for theta in r:
nt.assert_true(r[theta]) # check for non-empty list
scratchfile.close()
sys.stdout = stdout # restore standard output
expected_rates = {0: 1, 1: 1, 0.5: 2}
for theta in r:
r_final = r[theta][-1]
# Compare to 1 decimal place
nt.assert_almost_equal(expected_rates[theta], r_final,
places=1, msg='theta=%s' % theta)
# no need for any main
|
[
"hpl@simula.no"
] |
hpl@simula.no
|
34e0d339fa61eb2fba8a107ea109b6b0c56efc1e
|
743d4545702532c967efee2c12015d91853b6b80
|
/orders/migrations/0001_initial.py
|
50adf5b21efe66d7cf544e46d52e15ce62c1faa2
|
[] |
no_license
|
SOAD-Group-36/server
|
81a7ced2149174fe4d9c1644ee2afd78054d7d29
|
5a5a1e2cd4a361cff8fff008600d65d6dc8edaab
|
refs/heads/main
| 2023-02-03T06:44:36.041311
| 2020-12-12T10:45:21
| 2020-12-12T10:45:21
| 305,055,627
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,344
|
py
|
# Generated by Django 3.1.2 on 2020-11-11 15:24
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('products', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Order',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quantity', models.IntegerField(default=1)),
('price', models.DecimalField(decimal_places=2, max_digits=7)),
('placed_on', models.DateTimeField(auto_now_add=True)),
('status', models.CharField(choices=[('Pl', 'Placed'), ('Pr', 'Processed'), ('Pk', 'Packed'), ('Sh', 'Shipped'), ('Dl', 'Delivered'), ('Rj', 'Rejected'), ('Rt', 'Returned'), ('Rc', 'Received')], default='Pl', max_length=2)),
('product', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='orders', to='products.product')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='orders', to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"masterashu@live.in"
] |
masterashu@live.in
|
8b57c9efa4983527dbd55908cbb5b5acbd4edbeb
|
20e3ee6642d20578e48756963798acfe307ac6b5
|
/Miscellaneous/Python XML Parser/Example.py
|
ef7e6dc6952d02a5cb41a0c433b4bb1594c14bce
|
[] |
no_license
|
sirinenisaikiran/Python
|
538f64276767435de3233b720f547aac0bf4d511
|
bdfef0d1c04c7f3b9fc91a164b5fd1789828176c
|
refs/heads/master
| 2023-01-31T00:53:01.650916
| 2021-06-06T10:39:20
| 2021-06-06T10:39:20
| 237,744,104
| 0
| 0
| null | 2023-01-26T03:38:47
| 2020-02-02T08:58:49
|
Python
|
UTF-8
|
Python
| false
| false
| 455
|
py
|
import xml.etree.ElementTree as ET
mytree = ET.parse('Sample.xml')
myroot = mytree.getroot()
# print(myroot)
# print(myroot.tag)
# print(myroot[0].tag)
# print(myroot[0].attrib)
#
# for x in myroot[0]:
# print(x.tag, x.attrib)
# for x in myroot[0]:
# print(x.text)
# for x in myroot[0]:
# print(x.tag, x.attrib, x.text)
for x in myroot.findall('food'):
item = x.find('item').text
price = x.find('price').text
print(item,price)
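# For reference (added note): the code above assumes a Sample.xml shaped
# roughly like the classic food-menu example, e.g.
#
#   <breakfast_menu>
#     <food>
#       <item>Belgian Waffles</item>
#       <price>$5.95</price>
#     </food>
#     ...
#   </breakfast_menu>
#
# myroot.findall('food') then yields each <food> element, and find('item') /
# find('price') pull out the child texts printed above.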
|
[
"saikiran.sirneni@gmail.com"
] |
saikiran.sirneni@gmail.com
|
a199a85117918b1c8fe6769bfdcbff3be408262e
|
5186cc912502f9f32948c3810b5adc2cd0f015d8
|
/soybean/reactor.py
|
b9e91523fe64d36b907749d9656b9625adbdbb63
|
[
"Apache-2.0"
] |
permissive
|
lcgong/soybean
|
c0ef4f1a88191a653bfd1f70881a2f1e470943fd
|
43fd891113b05c79419d7c0850145c8284e51206
|
refs/heads/main
| 2023-02-27T08:47:47.198713
| 2021-02-03T04:00:52
| 2021-02-03T04:00:52
| 334,369,214
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,575
|
py
|
import inspect
import asyncio
import logging
from rocketmq.client import PushConsumer, ConsumeStatus
from .utils import make_group_id, json_loads
from .event import OccupiedEvent
from .typing import HandlerType
from .exceptions import UnkownArgumentError
logger = logging.getLogger("soybean.reactor")
class Reactor:
def __init__(self, channel, topic: str, expression: str,
handler: HandlerType, depth: int):
self._channel = channel
self._topic = topic
self._expression = expression
self._handler = handler
self._reactor_id = make_group_id(channel.name, handler, depth)
self._consumer = None
argvals_getter = build_argvals_getter(handler)
self._handler_argvals_getter = argvals_getter
self._busy_event = None
@property
def reactor_id(self):
return self._reactor_id
async def start(self):
import threading
        print(
            f"reactor-start thread: {threading.get_ident()}, loop: {id(asyncio.get_event_loop())}")
consumer = PushConsumer(group_id=self._reactor_id)
consumer.set_thread_count(1)
consumer.set_name_server_address(self._channel.namesrv_addr)
self._busy_event = OccupiedEvent()
loop = asyncio.get_running_loop()
        def run_coroutine(coroutine):
            # Execute the coroutine on the event loop from another thread in a
            # thread-safe way and block until the result is available
            future = asyncio.run_coroutine_threadsafe(coroutine, loop)
            return future.result()
def _callback(msg):
run_coroutine(self._busy_event.acquire())
try:
arg_values = self._handler_argvals_getter(msg)
run_coroutine( self._handler(*arg_values))
return ConsumeStatus.CONSUME_SUCCESS
except Exception as exc:
logger.error((f"caught an error in reactor "
f"'{self._reactor_id}': {exc}"),
exc_info=exc)
return ConsumeStatus.RECONSUME_LATER
finally:
run_coroutine(self._busy_event.release())
consumer.subscribe(self._topic, _callback, expression=self._expression)
consumer.start()
self._consumer = consumer
async def stop(self):
await self._busy_event.wait_idle()
        # Issue: the current rocket-client-cpp implementation does not guarantee
        # that its worker thread has finished before shutdown, which can deadlock
        # the worker thread against asyncio. So once the task in the callback
        # thread has finished, wait a little longer for rocket-client-cpp to wrap
        # up the consumer worker thread before shutting the consumer down.
await asyncio.sleep(0.5)
if self._consumer:
self._consumer.shutdown()
self._consumer = None
def build_argvals_getter(handler):
arguments = inspect.signature(handler).parameters
getters = []
unknowns = []
for arg_name, arg_spec in arguments.items():
getter_factory = _getter_factories.get(arg_name)
if getter_factory is not None:
getters.append(getter_factory(arg_spec))
continue
unknowns.append((arg_name, arg_spec))
if unknowns:
mod = handler.__module__
func = handler.__qualname__
args = ", ".join([f"'{name}'" for name, spec in unknowns])
errmsg = f"Unknown arguments: {args} of '{func}' in '{mod}'"
raise UnkownArgumentError(errmsg)
def _getter(msgobj):
return (arg_getter(msgobj) for arg_getter in getters)
return _getter
def getter_message(arg_spec):
if arg_spec.annotation == str:
return lambda msgobj: msgobj.body.decode("utf-8")
elif arg_spec.annotation == bytes:
return lambda msgobj: msgobj.body
else:
return lambda msgobj: json_loads(msgobj.body.decode("utf-8"))
def getter_msg_id(arg_spec):
return lambda msgobj: getattr(msgobj, "id")
def getter_msg_topic(arg_spec):
return lambda msgobj: getattr(msgobj, "tpoic").decode("utf-8")
def getter_msg_keys(arg_spec):
return lambda msgobj: getattr(msgobj, "keys").decode("utf-8")
def getter_msg_tags(arg_spec):
return lambda msgobj: getattr(msgobj, "tags").decode("utf-8")
_getter_factories = {
"message": getter_message,
"message_id": getter_msg_id,
"message_topic": getter_msg_topic,
"message_keys": getter_msg_keys,
"message_tags": getter_msg_tags,
"msg_id": getter_msg_id,
"msg_topic": getter_msg_topic,
"msg_keys": getter_msg_keys,
"msg_tags": getter_msg_tags,
}
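# Illustrative sketch (added; the handler below is hypothetical, not part of
# this module): a subscribed handler can declare any of the parameter names in
# _getter_factories and have the values injected from the consumed message.
#
# async def on_order_created(message: dict, msg_id, msg_tags):
#     # `message` is JSON-decoded (non-str/bytes annotation), while `msg_id`
#     # and `msg_tags` are read from the corresponding message attributes.
#     print(msg_id, msg_tags, message)
#
# getter = build_argvals_getter(on_order_created)
# # getter(msg) then yields the argument values in declaration order, which is
# # what Reactor uses inside its consumer callback.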
|
[
"lcgong@gmail.com"
] |
lcgong@gmail.com
|
6c3f8ad91c11294558986e5612928dcb59119e90
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/303/usersdata/281/81893/submittedfiles/testes.py
|
9d5ad8d30fc63ed816896c55f3d77b98a8e9722a
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 157
|
py
|
# -*- coding: utf-8 -*-
# START HERE BELOW
x=int(input('Digite um número:'))
while x>0 and x<=13:
print('Ok')
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
067a7abea5aa8ea89d7339cdb1ac2cad200418bb
|
5fbf2adec8d7647b9aeefa51695aa3f13ee57810
|
/server/load_backup_locally.py
|
076c18cbae05647fcf9c789b079ff13e403dc7b7
|
[] |
no_license
|
angelacantfly/dancedeets-monorepo
|
8bb6579f6f5d30e88c8d4c0e239c6c8fed678094
|
6b7a48d91d0737010acd9e08a89d99c2c982205a
|
refs/heads/master
| 2021-01-20T09:14:22.613044
| 2017-08-26T21:48:14
| 2017-08-26T21:48:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,381
|
py
|
#!/usr/bin/python
"""
# App Engine import data from Datastore Backup to localhost
You can use this script to import large(ish) App Engine Datastore backups to your localhost dev server.
## Getting backup files
Follow instructions from Greg Bayer's awesome article to fetch the App Engine backups:
http://gbayer.com/big-data/app-engine-datastore-how-to-efficiently-export-your-data/
Basically, download and configure gsutil and run:
```
gsutil -m cp -R gs://your_bucket_name/your_path /local_target
```
## Reading data to your local (dev_appserver) application
Copy-paste this gist to your Interactive Console, set correct paths and press `Execute`.
(default: http://localhost:8000/console)
"""
import sys
sys.path.insert(0, '/usr/local/google_appengine')
print sys.path
from google.appengine.api.files import records
from google.appengine.datastore import entity_pb
from google.net.proto.ProtocolBuffer import ProtocolBufferDecodeError
from google.appengine.ext import ndb
from os.path import isfile
from os.path import join
from os import listdir
from events.eventdata import DBEvent
def run():
# Set your downloaded folder's path here (must be readable by dev_appserver)
mypath = '/Users/lambert/Dropbox/dancedeets/data/datastore_backup_datastore_backup_2016_11_19_DBEvent/15700286559371541387849311E815D'
    # Set the class of the objects here
cls = DBEvent
# Set your app's name here
appname = "dev~None"
    # Iterate over every backup file in the folder and load its records
onlyfiles = [f for f in listdir(mypath) if isfile(join(mypath, f))]
for file in onlyfiles:
i = 0
try:
raw = open(mypath + "/" + file, 'r')
reader = records.RecordsReader(raw)
to_put = list()
for record in reader:
entity_proto = entity_pb.EntityProto(contents=record)
entity_proto.key_.app_ = appname
obj = cls._from_pb(entity_proto)
to_put.append(obj)
i += 1
if i % 100 == 0:
print "Saved %d %ss" % (i, '') #entity.kind())
ndb.put_multi(to_put) # use_memcache=False)
to_put = list()
ndb.put_multi(to_put) # use_memcache=False)
to_put = list()
print "Saved %d" % i
except ProtocolBufferDecodeError:
""" All good """
run()
|
[
"mlambert@gmail.com"
] |
mlambert@gmail.com
|
3a6f927241b180e157f7756d4833dee91440dfa9
|
7c8bd2e26fdabf1555e0150272ecf035f6c21bbd
|
/삼성기출/새로운 게임2.py
|
3f7cacad987e8780f64a22bcecc01d30ec281fc1
|
[] |
no_license
|
hyeokjinson/algorithm
|
44090c2895763a0c53d48ff4084a96bdfc77f953
|
46c04e0f583d4c6ec4f51a24f19a373b173b3d5c
|
refs/heads/master
| 2021-07-21T10:18:43.918149
| 2021-03-27T12:27:56
| 2021-03-27T12:27:56
| 245,392,582
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,461
|
py
|
from _collections import deque
# Number of pieces on the board: k (numbered 1..k)
# Movement directions: up, down, left, right
# White cell: move onto it; if pieces already occupy it, stack on top of them
# Red cell: move onto it, then reverse the order of the moved stack
# Blue cell: reverse the piece's direction and move one cell back; if that cell is also blue, do not move
dx=[0,0,-1,1]
dy=[1,-1,0,0]
rev_direction={0:1,1:0,2:3,3:2}
def check():
for i in range(n):
for j in range(n):
if len(start[i][j])>=4:
return True
return False
def solve():
turn=0
p=0
while True:
turn+=1
if turn>1000:
return -1
for number in range(1,k+1):
x,y,d=horse[number]
nx,ny=x+dx[d],y+dy[d]
if nx<0 or nx>=n or ny<0 or ny>=n or arr[nx][ny]==2:
nd=rev_direction[d]
nx,ny=x+dx[nd],y+dy[nd]
if nx<0 or nx>=n or ny<0 or ny>=n or arr[nx][ny]==2:
horse[number][2]=nd
continue
p=1
if arr[nx][ny]==0:
left=start[x][y][:start[x][y].index(number)]
right=start[x][y][start[x][y].index(number):]
start[x][y]=left
start[nx][ny].extend(right)
if len(start[nx][ny])>=4:
return turn
for i in right:
horse[i][0],horse[i][1]=nx,ny
if p==1:
horse[number][2]=nd
p=0
elif arr[nx][ny]==1:
left = start[x][y][:start[x][y].index(number)]
right = start[x][y][start[x][y].index(number):]
start[x][y] = left
right.reverse()
start[nx][ny].extend(right)
if len(start[nx][ny]) >= 4:
return turn
for i in right:
horse[i][0], horse[i][1] = nx, ny
if p == 1:
horse[number][2] = nd
p = 0
if __name__ == '__main__':
n,k=map(int,input().split())
    # 0: white, 1: red, 2: blue
arr=[list(map(int,input().split()))for _ in range(n)]
start=[[[]*n for _ in range(n)] for _ in range(n)]
horse=dict()
for i in range(1,k+1):
x,y,v=map(int,input().split())
start[x-1][y-1].append(i)
horse[i]=[x-1,y-1,v-1]
print(solve())
|
[
"hjson817@gmail.com"
] |
hjson817@gmail.com
|
845db2f47f763ae4e09097e253320bf541736141
|
53eee7eb899cb518983008532257037fb89def13
|
/343.integer-break.py
|
e226facec72a5754c30be689c04e5eec6a509a9c
|
[] |
no_license
|
chenxu0602/LeetCode
|
0deb3041a66cb15e12ed4585bbe0fefce5dc6b26
|
3dc5af2bc870fcc8f2142130fcd2b7cab8733151
|
refs/heads/master
| 2023-07-05T19:26:21.608123
| 2023-07-02T08:35:35
| 2023-07-02T08:35:35
| 233,351,978
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,425
|
py
|
#
# @lc app=leetcode id=343 lang=python3
#
# [343] Integer Break
#
# https://leetcode.com/problems/integer-break/description/
#
# algorithms
# Medium (50.19%)
# Likes: 1086
# Dislikes: 227
# Total Accepted: 110.4K
# Total Submissions: 219.2K
# Testcase Example: '2'
#
# Given a positive integer n, break it into the sum of at least two positive
# integers and maximize the product of those integers. Return the maximum
# product you can get.
#
# Example 1:
#
#
#
# Input: 2
# Output: 1
# Explanation: 2 = 1 + 1, 1 × 1 = 1.
#
#
# Example 2:
#
#
# Input: 10
# Output: 36
# Explanation: 10 = 3 + 3 + 4, 3 × 3 × 4 = 36.
#
# Note: You may assume that n is not less than 2 and not larger than 58.
#
#
#
# @lc code=start
import math
class Solution:
def integerBreak(self, n: int) -> int:
# if n == 2:
# return 1
# if n == 3:
# return 2
# dp = [0] * (n + 1)
# dp[2] = 2
# dp[3] = 3
# for i in range(4, n + 1):
# dp[i] = max(dp[i-2] * 2, dp[i-3] * 3)
# return dp[n]
# O(logN)
if n == 2:
return 1
elif n == 3:
return 2
elif n % 3 == 0:
return int(math.pow(3, n // 3))
elif n % 3 == 1:
return 2 * 2 * int(math.pow(3, (n - 4) // 3))
else:
return 2 * int(math.pow(3, n // 3))
# @lc code=end
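# Quick sanity check (added; illustrative only, not part of the submission):
# the closed form above agrees with the commented-out DP solution.
if __name__ == '__main__':
    s = Solution()
    assert s.integerBreak(2) == 1    # 2 = 1 + 1       -> 1 * 1     = 1
    assert s.integerBreak(8) == 18   # 8 = 3 + 3 + 2   -> 3 * 3 * 2 = 18
    assert s.integerBreak(10) == 36  # 10 = 3 + 3 + 4  -> 3 * 3 * 4 = 36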
|
[
"chenxu@Chens-iMac.local"
] |
chenxu@Chens-iMac.local
|
1fad6fbeeeb619735e591e2a715bef13c07b1e3b
|
bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d
|
/lib/googlecloudsdk/generated_clients/apis/gkehub/v1alpha1/gkehub_v1alpha1_client.py
|
45d59b19c56748c72896a2a2c8b5b7fce532c530
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
05fbb473d629195f25887fc5bfaa712f2cbc0a24
|
392abf004b16203030e6efd2f0af24db7c8d669e
|
refs/heads/master
| 2023-08-31T05:40:41.317697
| 2023-08-23T18:23:16
| 2023-08-23T18:23:16
| 335,182,594
| 9
| 2
|
NOASSERTION
| 2022-10-29T20:49:13
| 2021-02-02T05:47:30
|
Python
|
UTF-8
|
Python
| false
| false
| 20,475
|
py
|
"""Generated client library for gkehub version v1alpha1."""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.py import base_api
from googlecloudsdk.generated_clients.apis.gkehub.v1alpha1 import gkehub_v1alpha1_messages as messages
class GkehubV1alpha1(base_api.BaseApiClient):
"""Generated client library for service gkehub version v1alpha1."""
MESSAGES_MODULE = messages
BASE_URL = 'https://gkehub.googleapis.com/'
MTLS_BASE_URL = 'https://gkehub.mtls.googleapis.com/'
_PACKAGE = 'gkehub'
_SCOPES = ['https://www.googleapis.com/auth/cloud-platform']
_VERSION = 'v1alpha1'
_CLIENT_ID = 'CLIENT_ID'
_CLIENT_SECRET = 'CLIENT_SECRET'
_USER_AGENT = 'google-cloud-sdk'
_CLIENT_CLASS_NAME = 'GkehubV1alpha1'
_URL_VERSION = 'v1alpha1'
_API_KEY = None
def __init__(self, url='', credentials=None,
get_credentials=True, http=None, model=None,
log_request=False, log_response=False,
credentials_args=None, default_global_params=None,
additional_http_headers=None, response_encoding=None):
"""Create a new gkehub handle."""
url = url or self.BASE_URL
super(GkehubV1alpha1, self).__init__(
url, credentials=credentials,
get_credentials=get_credentials, http=http, model=model,
log_request=log_request, log_response=log_response,
credentials_args=credentials_args,
default_global_params=default_global_params,
additional_http_headers=additional_http_headers,
response_encoding=response_encoding)
self.projects_locations_features = self.ProjectsLocationsFeaturesService(self)
self.projects_locations_global_features = self.ProjectsLocationsGlobalFeaturesService(self)
self.projects_locations_global = self.ProjectsLocationsGlobalService(self)
self.projects_locations_operations = self.ProjectsLocationsOperationsService(self)
self.projects_locations = self.ProjectsLocationsService(self)
self.projects = self.ProjectsService(self)
class ProjectsLocationsFeaturesService(base_api.BaseApiService):
"""Service class for the projects_locations_features resource."""
_NAME = 'projects_locations_features'
def __init__(self, client):
super(GkehubV1alpha1.ProjectsLocationsFeaturesService, self).__init__(client)
self._upload_configs = {
}
def GetIamPolicy(self, request, global_params=None):
r"""Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set.
Args:
request: (GkehubProjectsLocationsFeaturesGetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('GetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/features/{featuresId}:getIamPolicy',
http_method='GET',
method_id='gkehub.projects.locations.features.getIamPolicy',
ordered_params=['resource'],
path_params=['resource'],
query_params=['options_requestedPolicyVersion'],
relative_path='v1alpha1/{+resource}:getIamPolicy',
request_field='',
request_type_name='GkehubProjectsLocationsFeaturesGetIamPolicyRequest',
response_type_name='Policy',
supports_download=False,
)
def SetIamPolicy(self, request, global_params=None):
r"""Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.
Args:
request: (GkehubProjectsLocationsFeaturesSetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('SetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/features/{featuresId}:setIamPolicy',
http_method='POST',
method_id='gkehub.projects.locations.features.setIamPolicy',
ordered_params=['resource'],
path_params=['resource'],
query_params=[],
relative_path='v1alpha1/{+resource}:setIamPolicy',
request_field='setIamPolicyRequest',
request_type_name='GkehubProjectsLocationsFeaturesSetIamPolicyRequest',
response_type_name='Policy',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
r"""Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning.
Args:
request: (GkehubProjectsLocationsFeaturesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestIamPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/features/{featuresId}:testIamPermissions',
http_method='POST',
method_id='gkehub.projects.locations.features.testIamPermissions',
ordered_params=['resource'],
path_params=['resource'],
query_params=[],
relative_path='v1alpha1/{+resource}:testIamPermissions',
request_field='testIamPermissionsRequest',
request_type_name='GkehubProjectsLocationsFeaturesTestIamPermissionsRequest',
response_type_name='TestIamPermissionsResponse',
supports_download=False,
)
class ProjectsLocationsGlobalFeaturesService(base_api.BaseApiService):
"""Service class for the projects_locations_global_features resource."""
_NAME = 'projects_locations_global_features'
def __init__(self, client):
super(GkehubV1alpha1.ProjectsLocationsGlobalFeaturesService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Adds a new Feature.
Args:
request: (GkehubProjectsLocationsGlobalFeaturesCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha1/projects/{projectsId}/locations/global/features',
http_method='POST',
method_id='gkehub.projects.locations.global.features.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=['featureId'],
relative_path='v1alpha1/{+parent}/features',
request_field='feature',
request_type_name='GkehubProjectsLocationsGlobalFeaturesCreateRequest',
response_type_name='Operation',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Removes a Feature.
Args:
request: (GkehubProjectsLocationsGlobalFeaturesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha1/projects/{projectsId}/locations/global/features/{featuresId}',
http_method='DELETE',
method_id='gkehub.projects.locations.global.features.delete',
ordered_params=['name'],
path_params=['name'],
query_params=['force'],
relative_path='v1alpha1/{+name}',
request_field='',
request_type_name='GkehubProjectsLocationsGlobalFeaturesDeleteRequest',
response_type_name='Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets details of a single Feature.
Args:
request: (GkehubProjectsLocationsGlobalFeaturesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Feature) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha1/projects/{projectsId}/locations/global/features/{featuresId}',
http_method='GET',
method_id='gkehub.projects.locations.global.features.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha1/{+name}',
request_field='',
request_type_name='GkehubProjectsLocationsGlobalFeaturesGetRequest',
response_type_name='Feature',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists Features in a given project and location.
Args:
request: (GkehubProjectsLocationsGlobalFeaturesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListFeaturesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha1/projects/{projectsId}/locations/global/features',
http_method='GET',
method_id='gkehub.projects.locations.global.features.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'orderBy', 'pageSize', 'pageToken'],
relative_path='v1alpha1/{+parent}/features',
request_field='',
request_type_name='GkehubProjectsLocationsGlobalFeaturesListRequest',
response_type_name='ListFeaturesResponse',
supports_download=False,
)
def Patch(self, request, global_params=None):
r"""Updates an existing Feature.
Args:
request: (GkehubProjectsLocationsGlobalFeaturesPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha1/projects/{projectsId}/locations/global/features/{featuresId}',
http_method='PATCH',
method_id='gkehub.projects.locations.global.features.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['updateMask'],
relative_path='v1alpha1/{+name}',
request_field='feature',
request_type_name='GkehubProjectsLocationsGlobalFeaturesPatchRequest',
response_type_name='Operation',
supports_download=False,
)
class ProjectsLocationsGlobalService(base_api.BaseApiService):
"""Service class for the projects_locations_global resource."""
_NAME = 'projects_locations_global'
def __init__(self, client):
super(GkehubV1alpha1.ProjectsLocationsGlobalService, self).__init__(client)
self._upload_configs = {
}
class ProjectsLocationsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_operations resource."""
_NAME = 'projects_locations_operations'
def __init__(self, client):
super(GkehubV1alpha1.ProjectsLocationsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (GkehubProjectsLocationsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Empty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='gkehub.projects.locations.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha1/{+name}:cancel',
request_field='cancelOperationRequest',
request_type_name='GkehubProjectsLocationsOperationsCancelRequest',
response_type_name='Empty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (GkehubProjectsLocationsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Empty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}',
http_method='DELETE',
method_id='gkehub.projects.locations.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha1/{+name}',
request_field='',
request_type_name='GkehubProjectsLocationsOperationsDeleteRequest',
response_type_name='Empty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (GkehubProjectsLocationsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}',
http_method='GET',
method_id='gkehub.projects.locations.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha1/{+name}',
request_field='',
request_type_name='GkehubProjectsLocationsOperationsGetRequest',
response_type_name='Operation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`.
Args:
request: (GkehubProjectsLocationsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}/operations',
http_method='GET',
method_id='gkehub.projects.locations.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1alpha1/{+name}/operations',
request_field='',
request_type_name='GkehubProjectsLocationsOperationsListRequest',
response_type_name='ListOperationsResponse',
supports_download=False,
)
class ProjectsLocationsService(base_api.BaseApiService):
"""Service class for the projects_locations resource."""
_NAME = 'projects_locations'
def __init__(self, client):
super(GkehubV1alpha1.ProjectsLocationsService, self).__init__(client)
self._upload_configs = {
}
def Get(self, request, global_params=None):
r"""Gets information about a location.
Args:
request: (GkehubProjectsLocationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Location) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha1/projects/{projectsId}/locations/{locationsId}',
http_method='GET',
method_id='gkehub.projects.locations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1alpha1/{+name}',
request_field='',
request_type_name='GkehubProjectsLocationsGetRequest',
response_type_name='Location',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists information about the supported locations for this service.
Args:
request: (GkehubProjectsLocationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListLocationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1alpha1/projects/{projectsId}/locations',
http_method='GET',
method_id='gkehub.projects.locations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'includeUnrevealedLocations', 'pageSize', 'pageToken'],
relative_path='v1alpha1/{+name}/locations',
request_field='',
request_type_name='GkehubProjectsLocationsListRequest',
response_type_name='ListLocationsResponse',
supports_download=False,
)
class ProjectsService(base_api.BaseApiService):
"""Service class for the projects resource."""
_NAME = 'projects'
def __init__(self, client):
super(GkehubV1alpha1.ProjectsService, self).__init__(client)
self._upload_configs = {
}
|
[
"cloudsdk.mirror@gmail.com"
] |
cloudsdk.mirror@gmail.com
|
09c2e1bc21335613f5e925b52bd82f0b8f9d9309
|
741c5c70bf4a0adb05db6b0777c8d07e28eb9cf6
|
/lib/python3.4/site-packages/IPython/core/profileapp.py
|
2a412589ca0dcc1cdc77a98a58967352a4566bca
|
[] |
no_license
|
andybp85/hyLittleSchemer
|
e686d2dc0f9067562367ea1173f275e8e2d2cb85
|
af5cb6adf6a196cc346aa7d14d7f9509e084c414
|
refs/heads/master
| 2021-01-19T07:48:31.309949
| 2015-01-04T00:57:30
| 2015-01-04T00:57:30
| 28,496,304
| 6
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,967
|
py
|
# encoding: utf-8
"""
An application for managing IPython profiles.
To be invoked as the `ipython profile` subcommand.
Authors:
* Min RK
"""
from __future__ import print_function
#-----------------------------------------------------------------------------
# Copyright (C) 2008 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import os
from IPython.config.application import Application
from IPython.core.application import (
BaseIPythonApplication, base_flags
)
from IPython.core.profiledir import ProfileDir
from IPython.utils.importstring import import_item
from IPython.utils.path import get_ipython_dir, get_ipython_package_dir
from IPython.utils import py3compat
from IPython.utils.traitlets import Unicode, Bool, Dict
#-----------------------------------------------------------------------------
# Constants
#-----------------------------------------------------------------------------
create_help = """Create an IPython profile by name
Create an ipython profile directory by its name or
profile directory path. Profile directories contain
configuration, log and security related files and are named
using the convention 'profile_<name>'. By default they are
located in your ipython directory. Once created, you can
edit the configuration files in the profile
directory to configure IPython. Most users will create a
profile directory by name,
`ipython profile create myprofile`, which will put the directory
in `<ipython_dir>/profile_myprofile`.
"""
list_help = """List available IPython profiles
List all available profiles, by profile location, that can
be found in the current working directory or in the ipython
directory. Profile directories are named using the convention
'profile_<profile>'.
"""
profile_help = """Manage IPython profiles
Profile directories contain
configuration, log and security related files and are named
using the convention 'profile_<name>'. By default they are
located in your ipython directory. You can create profiles
with `ipython profile create <name>`, or see the profiles you
already have with `ipython profile list`
To get started configuring IPython, simply do:
$> ipython profile create
and IPython will create the default profile in <ipython_dir>/profile_default,
where you can edit ipython_config.py to start configuring IPython.
"""
_list_examples = "ipython profile list # list all profiles"
_create_examples = """
ipython profile create foo # create profile foo w/ default config files
ipython profile create foo --reset # restage default config files over current
ipython profile create foo --parallel # also stage parallel config files
"""
_main_examples = """
ipython profile create -h # show the help string for the create subcommand
ipython profile list -h # show the help string for the list subcommand
ipython locate profile foo # print the path to the directory for profile 'foo'
"""
#-----------------------------------------------------------------------------
# Profile Application Class (for `ipython profile` subcommand)
#-----------------------------------------------------------------------------
def list_profiles_in(path):
"""list profiles in a given root directory"""
files = os.listdir(path)
profiles = []
for f in files:
try:
full_path = os.path.join(path, f)
except UnicodeError:
continue
if os.path.isdir(full_path) and f.startswith('profile_'):
profiles.append(f.split('_',1)[-1])
return profiles
def list_bundled_profiles():
"""list profiles that are bundled with IPython."""
path = os.path.join(get_ipython_package_dir(), u'config', u'profile')
files = os.listdir(path)
profiles = []
for profile in files:
full_path = os.path.join(path, profile)
if os.path.isdir(full_path) and profile != "__pycache__":
profiles.append(profile)
return profiles
class ProfileLocate(BaseIPythonApplication):
description = """print the path to an IPython profile dir"""
def parse_command_line(self, argv=None):
super(ProfileLocate, self).parse_command_line(argv)
if self.extra_args:
self.profile = self.extra_args[0]
def start(self):
print(self.profile_dir.location)
class ProfileList(Application):
name = u'ipython-profile'
description = list_help
examples = _list_examples
aliases = Dict({
'ipython-dir' : 'ProfileList.ipython_dir',
'log-level' : 'Application.log_level',
})
flags = Dict(dict(
debug = ({'Application' : {'log_level' : 0}},
"Set Application.log_level to 0, maximizing log output."
)
))
ipython_dir = Unicode(get_ipython_dir(), config=True,
help="""
The name of the IPython directory. This directory is used for logging
configuration (through profiles), history storage, etc. The default
        is usually $HOME/.ipython. This option can also be specified through
the environment variable IPYTHONDIR.
"""
)
def _print_profiles(self, profiles):
"""print list of profiles, indented."""
for profile in profiles:
print(' %s' % profile)
def list_profile_dirs(self):
profiles = list_bundled_profiles()
if profiles:
print()
print("Available profiles in IPython:")
self._print_profiles(profiles)
print()
print(" The first request for a bundled profile will copy it")
print(" into your IPython directory (%s)," % self.ipython_dir)
print(" where you can customize it.")
profiles = list_profiles_in(self.ipython_dir)
if profiles:
print()
print("Available profiles in %s:" % self.ipython_dir)
self._print_profiles(profiles)
profiles = list_profiles_in(py3compat.getcwd())
if profiles:
print()
print("Available profiles in current directory (%s):" % py3compat.getcwd())
self._print_profiles(profiles)
print()
print("To use any of the above profiles, start IPython with:")
print(" ipython --profile=<name>")
print()
def start(self):
self.list_profile_dirs()
create_flags = {}
create_flags.update(base_flags)
# don't include '--init' flag, which implies running profile create in other apps
create_flags.pop('init')
create_flags['reset'] = ({'ProfileCreate': {'overwrite' : True}},
"reset config files in this profile to the defaults.")
create_flags['parallel'] = ({'ProfileCreate': {'parallel' : True}},
"Include the config files for parallel "
"computing apps (ipengine, ipcontroller, etc.)")
class ProfileCreate(BaseIPythonApplication):
name = u'ipython-profile'
description = create_help
examples = _create_examples
auto_create = Bool(True, config=False)
def _log_format_default(self):
return "[%(name)s] %(message)s"
def _copy_config_files_default(self):
return True
parallel = Bool(False, config=True,
help="whether to include parallel computing config files")
def _parallel_changed(self, name, old, new):
parallel_files = [ 'ipcontroller_config.py',
'ipengine_config.py',
'ipcluster_config.py'
]
if new:
for cf in parallel_files:
self.config_files.append(cf)
else:
for cf in parallel_files:
if cf in self.config_files:
self.config_files.remove(cf)
def parse_command_line(self, argv):
super(ProfileCreate, self).parse_command_line(argv)
# accept positional arg as profile name
if self.extra_args:
self.profile = self.extra_args[0]
flags = Dict(create_flags)
classes = [ProfileDir]
def _import_app(self, app_path):
"""import an app class"""
app = None
name = app_path.rsplit('.', 1)[-1]
try:
app = import_item(app_path)
except ImportError:
self.log.info("Couldn't import %s, config file will be excluded", name)
except Exception:
self.log.warn('Unexpected error importing %s', name, exc_info=True)
return app
def init_config_files(self):
super(ProfileCreate, self).init_config_files()
# use local imports, since these classes may import from here
from IPython.terminal.ipapp import TerminalIPythonApp
apps = [TerminalIPythonApp]
for app_path in (
'IPython.qt.console.qtconsoleapp.IPythonQtConsoleApp',
'IPython.html.notebookapp.NotebookApp',
'IPython.nbconvert.nbconvertapp.NbConvertApp',
):
app = self._import_app(app_path)
if app is not None:
apps.append(app)
if self.parallel:
from IPython.parallel.apps.ipcontrollerapp import IPControllerApp
from IPython.parallel.apps.ipengineapp import IPEngineApp
from IPython.parallel.apps.ipclusterapp import IPClusterStart
from IPython.parallel.apps.iploggerapp import IPLoggerApp
apps.extend([
IPControllerApp,
IPEngineApp,
IPClusterStart,
IPLoggerApp,
])
for App in apps:
app = App()
app.config.update(self.config)
app.log = self.log
app.overwrite = self.overwrite
app.copy_config_files=True
app.ipython_dir=self.ipython_dir
app.profile_dir=self.profile_dir
app.init_config_files()
def stage_default_config_file(self):
pass
class ProfileApp(Application):
name = u'ipython-profile'
description = profile_help
examples = _main_examples
subcommands = Dict(dict(
create = (ProfileCreate, ProfileCreate.description.splitlines()[0]),
list = (ProfileList, ProfileList.description.splitlines()[0]),
locate = (ProfileLocate, ProfileLocate.description.splitlines()[0]),
))
def start(self):
if self.subapp is None:
print("No subcommand specified. Must specify one of: %s"%(self.subcommands.keys()))
print()
self.print_description()
self.print_subcommands()
self.exit(1)
else:
return self.subapp.start()
|
[
"andy@youshallthrive.com"
] |
andy@youshallthrive.com
|
2c190be799017c52cc5a83639396080f5ef20ae9
|
82c54cab8e0c5b73e1fdb9615296613cc43929a0
|
/authentication/forms.py
|
d3f7b622935250beef47f85ac1ec6f9ee9435405
|
[] |
no_license
|
creechcorbin/twitter_clone
|
e4146657bd13043544f846c48b34fe83e90e91da
|
bd075bd53fd9e5558cda85ade86ed9995f72118c
|
refs/heads/master
| 2022-12-10T09:23:37.036180
| 2020-09-05T03:23:32
| 2020-09-05T03:23:32
| 292,993,852
| 0
| 0
| null | 2020-09-09T01:08:27
| 2020-09-05T03:22:43
|
Python
|
UTF-8
|
Python
| false
| false
| 345
|
py
|
from django import forms
class LoginForm(forms.Form):
username = forms.CharField(max_length=80)
password = forms.CharField(widget=forms.PasswordInput)
class SignupForm(forms.Form):
username = forms.CharField(max_length=80)
displayname = forms.CharField(max_length=80)
password = forms.CharField(widget=forms.PasswordInput)
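# Illustrative usage sketch (added; the view below is hypothetical and not part
# of this app): a view typically binds the form to request.POST and validates
# it before authenticating the user.
#
# from django.contrib.auth import authenticate, login
#
# def login_view(request):
#     form = LoginForm(request.POST or None)
#     if request.method == 'POST' and form.is_valid():
#         user = authenticate(request,
#                             username=form.cleaned_data['username'],
#                             password=form.cleaned_data['password'])
#         if user is not None:
#             login(request, user)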
|
[
"creechcorbin@gmail.com"
] |
creechcorbin@gmail.com
|
855c082aa1c28384a3ca3f6688c7cd52583b2287
|
47e93b916a6b55871997bfa95bb2f69676416b00
|
/landerdb.py
|
0486a4742f580c46200c8342d154cb857fb29434
|
[] |
no_license
|
Inqre/Melody
|
dcc88acb83b23a3c0786ab5b9529b1dcd71f6ece
|
84f298e5446f53c5f3fededd9f2920552db74c87
|
refs/heads/master
| 2020-05-15T22:32:28.959905
| 2013-11-08T02:45:06
| 2013-11-08T02:45:06
| 14,127,017
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,807
|
py
|
import json
import os
__version__ = "1.0.0"
class Connect:
def __init__(self, db_file):
self.db = db_file
self.json_data = {}
# allows find to be called multiple times, without
        # re-reading from disk unless a change has occurred
self.stale = True
if not os.path.exists(self.db):
self._save()
def _load(self):
if self.stale:
with open(self.db, 'rb') as fp:
try:
                    self.json_data = json.load(fp)
                    self.stale = False  # cache is fresh until the next save
except:
with open(self.db, 'wb') as file:
file.write(json.dumps(self.json_data))
self._load()
def _save(self):
with open(self.db, 'wb') as fp:
json.dump(self.json_data, fp)
self.stale = True
def insert(self, collection, data):
self._load()
if collection not in self.json_data:
self.json_data[collection] = []
self.json_data[collection].append(data)
self._save()
def remove(self, collection, data):
self._load()
if collection not in self.json_data:
return False
self.json_data[collection].remove(data) #Will only delete one entry
self._save()
def find(self, collection, data):
self._load()
if collection not in self.json_data:
return False
output = []
for x in self.json_data[collection]:
if data != "all":
for y in data:
try:
if data[y] == x[y]:
output.append(x)
except KeyError:
continue
else:
output.append(x)
return output
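# Illustrative usage sketch (added; file name and records are arbitrary):
#
# db = Connect('people.db')
# db.insert('users', {'name': 'alice', 'age': 30})
# db.insert('users', {'name': 'bob', 'age': 25})
# db.find('users', {'name': 'alice'})              # -> [{'name': 'alice', 'age': 30}]
# db.find('users', 'all')                          # -> both records
# db.remove('users', {'name': 'bob', 'age': 25})   # removes one matching entry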
|
[
"max00355@gmail.com"
] |
max00355@gmail.com
|
8074d9f48b99a19a25b95da45d02787fb65ed44d
|
771247a4498d50745c5fbff09e7446ea9213ab19
|
/Py8/export_openweather.py
|
a80a7c5c48213f7a13b051fcbfb593a6a75dd25e
|
[] |
no_license
|
ostrowsky/Parcer
|
42697f9a98f42c8220675d540e8dc2a95855783e
|
f953b7cbb6b948df894950ee7ed804fcd6b8e811
|
refs/heads/master
| 2021-01-21T06:39:46.184872
| 2017-06-23T16:07:15
| 2017-06-23T16:07:15
| 91,581,143
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,845
|
py
|
""" OpenWeatherMap (экспорт)
Сделать скрипт, экспортирующий данные из базы данных погоды,
созданной скриптом openweather.py. Экспорт происходит в формате CSV или JSON.
Скрипт запускается из командной строки и получает на входе:
export_openweather.py --csv filename [<город>]
export_openweather.py --json filename [<город>]
export_openweather.py --html filename [<город>]
При выгрузке в html можно по коду погоды (weather.id) подтянуть
соответствующие картинки отсюда: http://openweathermap.org/weather-conditions
Экспорт происходит в файл filename.
Опционально можно задать в командной строке город. В этом случае
экспортируются только данные по указанному городу. Если города нет в базе -
выводится соответствующее сообщение.
"""
import sys
import sqlite3
db_filename = 'db_weather.sqlite'
#sys.argv = ['export_openweather.py', 'weather.html', 'MX']
try:
filename = sys.argv[1]
country = sys.argv[2]
except IndexError:
print("Задан неверный параметр. Файл должен быть запущен с указанием параметров: export_openweather.py filename [<город>]")
print(sys.argv)
html_string = '''
<!DOCTYPE html>
<html>
<head>
<title>Weather</title>
</head>
<body>
<h1>Погода на момент актуализации базы данных</h1>
<table border = "1">
<tbody>
<tr>
<th align="center" width="auto">id_города</th>
<th align="center" width="auto">Город</th>
<th align="center" width="auto">Страна</th>
<th align="center" width="auto">Дата</th>
<th align="center" width="auto">Температура</th>
<th align="center" width="auto">id_погоды</th>
<th align="center" width="auto">Значок</th>
</tr>
'''
if len(sys.argv) == 3:
with sqlite3.connect(db_filename) as conn:
conn.row_factory = sqlite3.Row
cur = conn.cursor()
cur.execute('''
select distinct id_города, Город, Страна, Дата, Температура, id_погоды, Значок
from weather
where Страна = ?''', (country,))
db_rows = cur.fetchall()
cities = list(db_rows)
for city in cities:
#print(list(city))
if city:
#print(city)
#print(list(city))
html_string += '\t<tr>\n'
for k in list(city):
if k == list(city)[-1]:
path = "http://openweathermap.org/img/w/" + str(k) + ".png"
html_string += '\t\t<td align="center" width="auto"><img src=' + path + '></td>\n'
else:
html_string += '\t\t<td align="center" width="auto">' + str(k) + '</td>\n'
html_string += '\t</tr>\n'
else:
print("Города указанной страны отсутствуют в базе")
html_string += '''
</tbody>
</table>
</body>
</html>'''
elif len(sys.argv) == 4:
city = sys.argv[3]
with sqlite3.connect(db_filename) as conn:
conn.row_factory = sqlite3.Row
cur = conn.cursor()
cur.execute('''
select distinct id_города, Город, Страна, Дата, Температура, id_погоды, Значок
from weather
where Город = ? and Страна = ?''', (city, country,))
db_rows = cur.fetchall()
cities = list(db_rows)
for city in cities:
# print(list(city))
if city:
# print(city)
# print(list(city))
html_string += '\t<tr>\n'
for k in list(city):
if k == list(city)[-1]:
path = "http://openweathermap.org/img/w/" + str(k) + ".png"
html_string += '\t\t<td align="center" width="auto"><img src=' + path + '></td>\n'
else:
html_string += '\t\t<td align="center" width="auto">' + str(k) + '</td>\n'
html_string += '\t</tr>\n'
else:
print("Город отсутствует в базе")
html_string += '''
</tbody>
</table>
</body>
</html>'''
encoded_str = html_string.encode(encoding='UTF-8')
with open(filename, 'w', encoding='UTF-8') as f:
f.write(html_string)
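# Illustrative invocation (added; mirrors the commented-out sys.argv example
# above -- the city name is hypothetical):
#   python export_openweather.py weather.html MX           # all cities for country MX
#   python export_openweather.py weather.html MX Cancun    # a single city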
|
[
"ostrowskyi@gmail.com"
] |
ostrowskyi@gmail.com
|
7f12cf4f8c2a9dbbd0be88734b98d0c8b28eca87
|
e9bc070d1d9257c4a213bc1f33ca6269bbc37b43
|
/tests/roots/test-ext-autosummary/conf.py
|
f4d696cc912bb3108db71ca0fb841c3d904f7427
|
[
"BSD-3-Clause",
"Python-2.0",
"LicenseRef-scancode-secret-labs-2011",
"MIT",
"BSD-2-Clause"
] |
permissive
|
GoodRx/sphinx
|
99b33454afa06cf6a66d080c3c4019cc7ddde2f0
|
c310c73baffa4892cf35fd74918193824c86309a
|
refs/heads/1.6.x-py-type-xref
| 2021-01-01T06:02:33.415993
| 2017-07-16T03:12:58
| 2017-07-16T03:12:58
| 97,339,105
| 1
| 1
| null | 2017-07-16T03:12:58
| 2017-07-15T19:57:45
|
Python
|
UTF-8
|
Python
| false
| false
| 184
|
py
|
import sys, os
sys.path.insert(0, os.path.abspath('.'))
extensions = ['sphinx.ext.autosummary']
autosummary_generate = True
# The suffix of source filenames.
source_suffix = '.rst'
|
[
"i.tkomiya@gmail.com"
] |
i.tkomiya@gmail.com
|
c1b3876aae1a898188d4da189bd9db75e5afc8c6
|
41249d7d4ca9950b9c6fee89bf7e2c1929629767
|
/results/lz_optimizations_20200507/script_lz_crab4freq_powell_bound10_constantFreqAndInitAmps_tf0-1.py
|
d14345a8c9437a041da7e650381b2b1114829de0
|
[
"MIT"
] |
permissive
|
lucainnocenti/ultrafast-critical-ground-state-preparation-2007.07381
|
f739b3baad1d2aadda576303bb0bbe9d48ec204a
|
29f80dcf914096555cee9bc2e18249a2c95d6a50
|
refs/heads/master
| 2022-11-22T00:44:09.998199
| 2020-07-21T08:35:28
| 2020-07-21T08:35:28
| 281,237,037
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,458
|
py
|
import os
import sys
import numpy as np
import pandas as pd
import logging
if '../../' not in sys.path:
sys.path.append('../../')
import src.optimization as optimization
import src.protocol_ansatz as protocol_ansatz
from src.utils import autonumber_filename, basic_logger_configuration
output_file_name = os.path.basename(__file__)[7:-3] + '.csv'
output_file_name = autonumber_filename(output_file_name)
basic_logger_configuration(filename=output_file_name[:-3] + 'log')
logging.info('Output file name will be "{}"'.format(output_file_name))
# ------ start optimization
num_frequencies = 4
protocol = protocol_ansatz.CRABProtocolAnsatz(num_frequencies=num_frequencies)
protocol.generate_rnd_frequencies_each_tf = False
for idx in range(num_frequencies):
protocol.hyperpars['nuk' + str(idx + 1)] = 0
protocol.fill_hyperpar_value(y0=-5, y1=0)
results = optimization.find_best_protocol(
problem_specification=dict(
model='lz',
model_parameters=dict(omega_0=1),
task=dict(initial_intensity=-5, final_intensity=0)
),
optimization_specs=dict(
protocol=protocol,
protocol_options=dict(num_frequencies=num_frequencies),
optimization_method='powell',
parameters_constraints=[-10, 10],
initial_parameters=[0] * (2 * num_frequencies)
),
other_options=dict(
scan_times=np.linspace(0.01, 1, 200)
)
)
# ------ save results to file
results.to_csv(output_file_name)
|
[
"lukeinnocenti@gmail.com"
] |
lukeinnocenti@gmail.com
|
ced0baa0e9192cab080e7e0c0c749c9c7e56e9a1
|
1da91735d1a4d19e62b2d19826d9a1e85d88d690
|
/dxpy/dxpy/task/model/tests/test_task.py
|
32e1f9139b28e9e0836aef2a1a5c31a6253ebbf0
|
[] |
no_license
|
Hong-Xiang/dxl
|
94229e4c20f0c97dfe21f8563889c991330df9c3
|
29aed778d1c699cc57d09666a20b4ca60196392f
|
refs/heads/master
| 2021-01-02T22:49:20.298893
| 2018-05-22T13:42:20
| 2018-05-22T13:42:20
| 99,401,725
| 1
| 1
| null | 2018-05-22T13:42:21
| 2017-08-05T05:34:35
|
Python
|
UTF-8
|
Python
| false
| false
| 3,063
|
py
|
import json
import unittest
from dxpy.task.model import task
from dxpy.time.timestamps import TaskStamp
from dxpy.time.utils import strp
class TestTask(unittest.TestCase):
def test_to_json(self):
t = task.Task(tid=10, desc='test', workdir='/tmp/test',
worker=task.Worker.MultiThreading,
ttype=task.Type.Regular,
dependency=[1, 2, 3],
time_stamp=TaskStamp(create=strp(
"2017-09-22 12:57:44.036185")),
data={'sample': 42},
is_root=True)
s = t.to_json()
dct = json.loads(s)
self.assertEqual(dct['id'], 10)
self.assertEqual(dct['desc'], 'test')
self.assertEqual(dct['dependency'], [1, 2, 3])
self.assertEqual(dct['data'], {'sample': 42})
self.assertEqual(dct['type'], 'Regular')
self.assertEqual(dct['workdir'], '/tmp/test')
self.assertEqual(dct['worker'], 'MultiThreading')
self.assertEqual(dct['is_root'], True)
self.assertEqual(dct['time_stamp'], {
'create': "2017-09-22 12:57:44.036185", 'start': None, 'end': None})
self.assertEqual(dct['state'], 'BeforeSubmit')
def test_from_json(self):
dct = {
'__task__': True,
'id': 10,
'desc': 'test',
'workdir': '/tmp/test',
'worker': 'Slurm',
'type': 'Script',
'dependency': [1, 2, 3],
'data': {'sample': 42},
'is_root': True,
'time_stamp': {
'create': "2017-09-22 12:57:44.036185",
'start': None,
'end': None
},
'state': 'BeforeSubmit'
}
t = task.Task.from_json(json.dumps(dct))
self.assertEqual(t.id, 10)
self.assertEqual(t.desc, 'test')
self.assertEqual(t.workdir, '/tmp/test')
self.assertEqual(t.worker, task.Worker.Slurm)
self.assertEqual(t.type, task.Type.Script)
self.assertEqual(t.dependency, [1, 2, 3])
self.assertEqual(t.data, {'sample': 42})
self.assertEqual(t.is_root, True)
self.assertEqual(t.time_stamp.create, strp(
"2017-09-22 12:57:44.036185"))
self.assertEqual(t.state, task.State.BeforeSubmit)
def test_submit(self):
t = task.Task(10, 'test', state=task.State.BeforeSubmit)
self.assertEqual(t.state, task.State.BeforeSubmit)
t = task.submit(t)
self.assertEqual(t.state, task.State.Pending)
def test_start(self):
t = task.Task(10, 'test', state=task.State.BeforeSubmit)
self.assertEqual(t.state, task.State.BeforeSubmit)
t = task.start(t)
self.assertEqual(t.state, task.State.Runing)
def test_complete(self):
t = task.Task(10, 'test', state=task.State.BeforeSubmit)
self.assertEqual(t.state, task.State.BeforeSubmit)
t = task.complete(t)
self.assertEqual(t.state, task.State.Complete)
|
[
"hx.hongxiang@gmail.com"
] |
hx.hongxiang@gmail.com
|
9479f066756090388c2092129ef0059b3ebf32ea
|
cf14b6ee602bff94d3fc2d7e712b06458540eed7
|
/gs105/gs105/settings.py
|
422043aee64a923a3033927c1f8cb6ac0230c445
|
[] |
no_license
|
ManishShah120/Learning-Django
|
8b0d7bfe7e7c13dcb71bb3d0dcdf3ebe7c36db27
|
8fe70723d18884e103359c745fb0de5498b8d594
|
refs/heads/master
| 2023-03-29T09:49:47.694123
| 2021-03-28T16:04:34
| 2021-03-28T16:04:34
| 328,925,596
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,073
|
py
|
"""
Django settings for gs105 project.
Generated by 'django-admin startproject' using Django 3.1.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ytgu6b45d)u!-fh@a_v#1d*#010=aih7p8o5juvr(v$ubumwn='
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'school',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'gs105.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'gs105.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
|
[
"mkshah141@gmail.com"
] |
mkshah141@gmail.com
|
eee180705f38d0e11b8a5778069d77230bafec5f
|
481452cd3b904af7a42bbeb71190a59c29e4775b
|
/python_batch_4/class2/typecasting2.py
|
deb323944f44ee751f0fd3988dc54191fb1697f1
|
[] |
no_license
|
rahusriv/python_tutorial
|
b09b54044f9df86ac603634ac1dd8d4ea6705e4a
|
7de9b62a8e1e8ca1df5f2679ebf17d655f6b1b8e
|
refs/heads/master
| 2020-03-28T11:24:16.468977
| 2019-05-12T06:51:32
| 2019-05-12T06:51:32
| 148,209,075
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 83
|
py
|
a = "20.99"
b = "30.89"
c = int(float(a)) + int(float(b))
print(type(c))
print(c)
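# Note (added): int("20.99") would raise ValueError because the string is not an
# integer literal; the float() round-trip above is what makes the cast work, and
# int(float("20.99")) truncates toward zero, so c = 20 + 30 = 50 with type int.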
|
[
"rahusr@gmail.com"
] |
rahusr@gmail.com
|
aa6af7048c44cea9653dd669212be652afc07c82
|
960b3a17a4011264a001304e64bfb76d669b8ac5
|
/mstrio/api/authentication.py
|
ee18f3ec2d1622d62b49c9697d82696d49d54468
|
[
"Apache-2.0"
] |
permissive
|
MicroStrategy/mstrio-py
|
012d55df782a56dab3a32e0217b9cbfd0b59b8dd
|
c6cea33b15bcd876ded4de25138b3f5e5165cd6d
|
refs/heads/master
| 2023-08-08T17:12:07.714614
| 2023-08-03T12:30:11
| 2023-08-03T12:30:11
| 138,627,591
| 84
| 60
|
Apache-2.0
| 2023-07-31T06:43:33
| 2018-06-25T17:23:55
|
Python
|
UTF-8
|
Python
| false
| false
| 5,218
|
py
|
from mstrio.utils.error_handlers import ErrorHandler
@ErrorHandler(
err_msg='Authentication error. Check user credentials or REST API URL and try again'
)
def login(connection):
"""Authenticate a user and create an HTTP session on the web server where
the user's MicroStrategy sessions are stored.
This request returns an authorization token (X-MSTR-AuthToken) which will be
submitted with subsequent requests. The body of the request contains
the information needed to create the session. The loginMode parameter in
the body specifies the authentication mode to use. You can authenticate with
one of the following authentication modes: Standard (1), Anonymous (8),
or LDAP (16). Authentication modes can be enabled through the System
Administration REST APIs, if they are supported by the deployment.
Args:
connection: MicroStrategy REST API connection object
Returns:
Complete HTTP response object.
"""
return connection.post(
skip_expiration_check=True,
url=f'{connection.base_url}/api/auth/login',
data={
'username': connection.username,
'password': connection._Connection__password,
'loginMode': connection.login_mode,
'applicationType': 35,
},
)
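# Illustrative sketch (added, not part of the original module): how login() above
# might be exercised. The Connection import path and constructor arguments are
# assumptions for illustration only; check the mstrio documentation for the exact API.
#
#   from mstrio.connection import Connection   # assumed import path
#   conn = Connection(base_url="https://env.example.com/MicroStrategyLibrary/api",
#                     username="user", password="secret")   # assumed signature
#   resp = login(conn)                          # raw HTTP response
#   token = resp.headers.get("X-MSTR-AuthToken")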
@ErrorHandler(err_msg="Failed to logout.")
def logout(connection, error_msg=None, whitelist=None):
"""Close all existing sessions for the authenticated user.
Args:
connection: MicroStrategy REST API connection object
Returns:
Complete HTTP response object.
"""
return connection.post(
skip_expiration_check=True,
url=f'{connection.base_url}/api/auth/logout',
headers={'X-MSTR-ProjectID': None},
)
def session_renew(connection):
"""Extends the HTTP and Intelligence Server sessions by resetting the
timeouts.
Args:
connection: MicroStrategy REST API connection object
Returns:
Complete HTTP response object.
"""
return connection.put(
skip_expiration_check=True,
url=f'{connection.base_url}/api/sessions',
headers={'X-MSTR-ProjectID': None},
timeout=2.0,
)
def session_status(connection):
"""Checks Intelligence Server session status.
Args:
connection: MicroStrategy REST API connection object
Returns:
Complete HTTP response object.
"""
return connection.get(
skip_expiration_check=True,
url=f'{connection.base_url}/api/sessions',
headers={'X-MSTR-ProjectID': None},
)
@ErrorHandler(err_msg='Could not get identity token.')
def identity_token(connection):
"""Create a new identity token.
An identity token is used to share an existing session with another
project, based on the authorization token for the existing
session.
Args:
connection: MicroStrategy REST API connection object
Returns:
Complete HTTP response object.
"""
return connection.post(
url=f'{connection.base_url}/api/auth/identityToken',
)
def validate_identity_token(connection, identity_token):
"""Validate an identity token.
Args:
connection: MicroStrategy REST API connection object
identity_token: Identity token
Returns:
Complete HTTP response object.
"""
return connection.get(
url=f'{connection.base_url}/api/auth/identityToken',
headers={'X-MSTR-IdentityToken': identity_token},
)
@ErrorHandler(
err_msg='Error creating a new Web server session that shares an existing IServer '
'session.'
)
def delegate(connection, identity_token, whitelist=None):
"""Returns authentication token and cookies from given X-MSTR-
IdentityToken.
Args:
connection: MicroStrategy REST API connection object
identity_token: Identity token
whitelist: list of errors for which we skip printing error messages
Returns:
Complete HTTP response object.
"""
return connection.post(
skip_expiration_check=True,
url=f'{connection.base_url}/api/auth/delegate',
json={'loginMode': "-1", 'identityToken': identity_token},
)
@ErrorHandler(err_msg='Error getting privileges list.')
def user_privileges(connection):
"""Get the list of privileges for the authenticated user.
The response includes the name, ID, and description of each
privilege and specifies which projects the privileges are valid for.
Args:
connection: MicroStrategy REST API connection object
Returns:
Complete HTTP response object.
"""
return connection.get(url=f"{connection.base_url}/api/sessions/privileges")
@ErrorHandler(err_msg='Error getting info for authenticated user.')
def get_info_for_authenticated_user(connection, error_msg=None):
"""Get information for the authenticated user.
Args:
connection: MicroStrategy REST API connection object
error_msg (string, optional): Custom Error Message for Error Handling
Returns:
Complete HTTP response object.
"""
url = f'{connection.base_url}/api/sessions/userInfo'
return connection.get(url=url)
|
[
"noreply@github.com"
] |
MicroStrategy.noreply@github.com
|
2f62066c180ecaec7d3c36b4eb514313cea1f73a
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03605/s666410011.py
|
ee1b3b217136d88dfc453fa50b5f4c38f78ab5b2
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 60
|
py
|
N=input()
if N.count("9"):
print("Yes")
else:
print("No")
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
ec9c0cd180f50fb23acae69744788f81a9bfa036
|
8ccf7e6a93256fd83fed2bb7bd4f8bbe13dc1f40
|
/Assignment 3. Paxos/Simulation/Agents/Proposer.py
|
c35f8b2ea5e2ba44032b554a298ca176490310d9
|
[
"MIT"
] |
permissive
|
WailAbou/Distributed-Processing
|
5e2b84edc86b6d709c2599d82434731c6fd64dd6
|
46a36f1fd51d6f8b35cc639eb8002d81d7e09f2b
|
refs/heads/main
| 2023-05-28T05:52:39.790190
| 2021-06-14T00:57:08
| 2021-06-14T00:57:08
| 367,988,336
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,059
|
py
|
from Simulation.Agents import Agent
from Simulation.Message import Message, MessageTypes
class Proposer(Agent):
max_id = 0
def __init__(self, name, agent_id, value=None):
super().__init__(name, agent_id, value)
self.votes = 0
self.majority = False
self.suggested_value = None
self.consensus = False
Proposer.max_id = max(Proposer.max_id, agent_id + 1)
def recieve_promise(self, message, majority):
if message.source.value:
self.value = max(self.value, message.source.value)
self.votes += 1
if self.votes >= majority and not self.majority:
self.majority = True
return lambda acceptor: Message(message.destination, acceptor, MessageTypes.ACCEPT)
def recieve_accepted(self, message):
self.consensus = True
def init_value(self, value):
self.value = value
self.suggested_value = value
def reset(self):
self.votes = 0
self.majority = False
self.agent_id = Proposer.max_id
|
[
"abou.w@hotmail.com"
] |
abou.w@hotmail.com
|
b12c14f2d187174e8f714e4790ec36839780011f
|
ac5d55e43eb2f1fb8c47d5d2a68336eda181d222
|
/Reservoir Sampling/382. Linked List Random Node.py
|
535508fa3eecbcc13bfe833e95712b6200c347d5
|
[] |
no_license
|
tinkle1129/Leetcode_Solution
|
7a68b86faa37a3a8019626e947d86582549374b3
|
1520e1e9bb0c428797a3e5234e5b328110472c20
|
refs/heads/master
| 2021-01-11T22:06:45.260616
| 2018-05-28T03:10:50
| 2018-05-28T03:10:50
| 78,925,011
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,831
|
py
|
# - * - coding:utf8 - * - -
###########################################
# Author: Tinkle
# E-mail: shutingnjupt@gmail.com
# Name: Linked List Random Node.py
# Creation Time: 2017/9/24
###########################################
'''
Given a singly linked list, return a random node's value from the linked list. Each node must have the same probability of being chosen.
Follow up:
What if the linked list is extremely large and its length is unknown to you? Could you solve this efficiently without using extra space?
Example:
// Init a singly linked list [1,2,3].
ListNode head = new ListNode(1);
head.next = new ListNode(2);
head.next.next = new ListNode(3);
Solution solution = new Solution(head);
// getRandom() should return either 1, 2, or 3 randomly. Each element should have equal probability of returning.
solution.getRandom();
'''
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
import random
class Solution(object):
def __init__(self, head):
"""
@param head The linked list's head.
Note that the head is guaranteed to be not null, so it contains at least one node.
:type head: ListNode
"""
self.head = head
self.length = 0
ans = head
while (ans):
self.length += 1
ans = ans.next
def getRandom(self):
"""
Returns a random node's value.
:rtype: int
"""
index = random.randint(1, self.length) - 1
idx = 0
ans = self.head
while (idx < index):
ans = ans.next
idx += 1
return ans.val
# Your Solution object will be instantiated and called as such:
# obj = Solution(head)
# param_1 = obj.getRandom()
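# Added sketch addressing the follow-up above: when the list length is unknown and
# no extra space may be used, reservoir sampling with k = 1 gives every node an equal
# 1/n probability in a single pass. This variant is illustrative, not part of the
# original submission.
class ReservoirSolution(object):
    def __init__(self, head):
        self.head = head

    def getRandom(self):
        chosen = self.head.val
        node = self.head.next
        i = 2
        while node:
            # keep the i-th value with probability 1/i
            if random.randint(1, i) == 1:
                chosen = node.val
            node = node.next
            i += 1
        return chosen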
|
[
"496047829@qq.com"
] |
496047829@qq.com
|
68e09501a51d712d45387f738b12c0239a752984
|
b4777bf27a6d10d0e5b1c51351f9ad14a049b5e7
|
/results_discrete_paradigm_acc.py
|
1f08f50c522ed31784d9ff4e831821666ace9b7e
|
[] |
no_license
|
bioelectric-interfaces/cfir
|
1216ba1b62935f99f8821ccce2577be9cf71c6b8
|
6034b5216352e5d933405bccbe9a67b9e89c4735
|
refs/heads/master
| 2022-07-12T10:45:17.758669
| 2020-03-10T13:34:10
| 2020-03-10T13:34:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,356
|
py
|
"""
Figure 5: Discrete paradigm accuracy for one subject with median SNR
"""
import pandas as pd
import pylab as plt
import numpy as np
import seaborn as sns
from filters import CFIRBandEnvelopeDetector, RectEnvDetector
from utils import magnitude_spectrum
from constants import FS, DELAY_RANGE
from sklearn.metrics import roc_auc_score, average_precision_score, balanced_accuracy_score
def get_classes(y, alpha, n_states=3):
y_pred = np.zeros(len(y))
if n_states == 3:
y_pred[y > np.percentile(y, alpha)] = 1
y_pred[y > np.percentile(y, 100 - alpha)] = 2
if n_states == 2:
y_pred[y > np.percentile(y, 100 - alpha)] = 1
return y_pred
dataset = 8
eeg_df = pd.read_pickle('data/train_test_data.pkl').query('subj_id=={}'.format(dataset))
envelope = eeg_df['an_signal'].abs().values
band = eeg_df[['band_left', 'band_right']].values[0]
magnitude_spectrum_train = {}
_, weights = magnitude_spectrum(eeg_df['eeg'].values, FS)
stats_df = pd.read_pickle('results/stats.pkl').query('subj_id=={}'.format(dataset))
flatui = {'cfir':'#0099d8', 'acfir': '#84BCDA', 'wcfir':'#FE4A49', 'rect':'#A2A79E'}
alpha=5
#DELAY_RANGE = np.linspace(-50, 100, 51, dtype=int)
acc = np.zeros(len(DELAY_RANGE))
acc_rand = np.zeros(len(DELAY_RANGE))
fig, axes = plt.subplots(2, 2, sharey='col', figsize=(6,6))
plt.subplots_adjust(hspace=0.4, wspace=0.4)
for j_n_states, n_states in enumerate([2, 3]):
y_true = get_classes(envelope, alpha, n_states)
for method_name, method_class in zip(
['cfir', 'rect', 'wcfir'],
[CFIRBandEnvelopeDetector, RectEnvDetector, CFIRBandEnvelopeDetector]):
acc = np.zeros(len(DELAY_RANGE))*np.nan
for d, DELAY in enumerate(DELAY_RANGE):
if method_name == 'rect' and DELAY <0: continue
params = stats_df.query('method=="{}" & metric=="corr" & delay=="{}"'.format(method_name, DELAY*2))['params'].values[0]
params['weights'] = weights if method_name == 'wcfir' else None
env_det = method_class(band=band, fs=FS, delay=DELAY, **params)
envelope_pred = np.abs(env_det.apply(eeg_df['eeg'].values))
# params = stats_df.query('method=="rect" & metric=="corr"')['params'].values[0]
# env_det = WHilbertFilter(band=band, fs=FS, delay=DELAY, **params)
# envelope_pred = np.abs(env_det.apply(eeg_df['eeg'].values))
#
# params = stats_df.query('method=="whilbert" & metric=="corr"')['params'].values[0]
# env_det = WHilbertFilter(band=band, fs=FS, **params)
# envelope_pred = np.abs(env_det.apply(eeg_df['eeg'].values))
#
# params = stats_df.query('method=="ffiltar" & metric=="corr"')['params'].values[0]
# env_det = RectEnvDetector(band, FS, params['n_taps'], DELAY)
# env_det = WHilbertFilter(band=band, fs=FS, **params)
y_pred = get_classes(envelope_pred, alpha, n_states)
acc[d] = balanced_accuracy_score(y_true, y_pred) if (method_name in ['cfir', 'wcfir'] or DELAY>=0) else np.nan
axes[j_n_states, 1].plot(DELAY_RANGE*2, acc*100, '.-', label=method_name, color=flatui[method_name])
axes[j_n_states, 1].plot(DELAY_RANGE*2, DELAY_RANGE*0 + balanced_accuracy_score(y_true, y_true*0)*100, '.-', color='k', label='all-high')
# [ax.set_xlabel('Delay, ms') for ax in axes[:, 1]]
axes[1, 1].set_xlabel('Delay, ms')
axes[1, 1].legend()
axes[0, 1].set_ylabel('Balanced accuracy score, %')
axes[1, 1].set_ylabel('Balanced accuracy score, %')
axes[0, 0].set_title('A. High/Other\n', x = 0)
axes[1, 0].set_title('B. High/Middle/Low\n', ha='right')
[ax.axvline(0, color='k', linestyle='--', alpha=0.5, zorder=-1000) for ax in axes[:, 1]]
# plt.plot(envelope0ms)
# plt.plot(envelope)
#
# sns.kdeplot(envelope, envelope0ms)
# plt.savefig('results/viz/res-classification.png', dpi=500)
ax = axes
# fig, ax = plt.subplots(2, figsize=(6, 6))
up = np.percentile(envelope*1e6, 100-alpha)
low = np.percentile(envelope*1e6, alpha)
t = np.arange(len(envelope))/500
ax[0, 0].plot(t-58, envelope*1e6, color='k')
ax[0, 0].axhline(np.percentile(envelope*1e6, 100-alpha), color='k', linestyle='--')
ax[0, 0].text(8.5, up+4, 'High', ha='center')
ax[0, 0].text(8.5, up-3, 'Other', ha='center')
# plt.axhspan(np.percentile(envelope*1e6, alpha), np.percentile(envelope*1e6, 100-alpha), color=flatui['cfir'], alpha=0.5)
# plt.axhspan(np.percentile(envelope*1e6, alpha), -1000, color=flatui['wcfir'], alpha=0.5)
ax[0, 0].set_ylim(-7, 20)
ax[0, 0].set_xlim(0, 10)
ax[0, 0].set_ylabel('Envelope, $uV$')
ax[1, 0].plot(t-58, envelope*1e6, color='k')
ax[1, 0].axhline(np.percentile(envelope*1e6, 100-alpha), color='k', linestyle='--')
ax[1, 0].axhline(np.percentile(envelope*1e6, alpha), color='k', linestyle='--')
ax[1, 0].text(8.5, up+4, 'High', ha='center')
ax[1, 0].text(8.5, up-3, 'Middle', ha='center')
ax[1, 0].text(8.5, low-5, 'Low', ha='center')
# plt.axhspan(np.percentile(envelope*1e6, alpha), np.percentile(envelope*1e6, 100-alpha), color=flatui['cfir'], alpha=0.5)
# plt.axhspan(np.percentile(envelope*1e6, alpha), -1000, color=flatui['wcfir'], alpha=0.5)
ax[1, 0].set_ylim(-7, 20)
ax[1, 0].set_xlim(0, 10)
ax[1, 0].set_ylabel('Envelope, $uV$')
ax[1, 0].set_xlabel('Time, s')
# plt.savefig('results/viz/res-classification-explained.png', dpi=500)
|
[
"n.m.smetanin@gmail.com"
] |
n.m.smetanin@gmail.com
|
7aa41765cd6860e2540b6f799c4551cd82d47f48
|
34148545a20f0b9fe07860d1107e6aab2ec1f75d
|
/info_spider/Scrapy_History_Hanchao_V1_01/build/lib/Scrapy_History_Hanchao_V1_01/spiders/Zhuixue_01.py
|
139bef56439c9928931b6c7045a6f1948b1c9a0b
|
[] |
no_license
|
tangzhutao/chf
|
9bb9fa9b6ad75f1b587364e1005922c5bdddb4ca
|
4b249aee9689d3669306bbf020ad7fbb7e6b92bc
|
refs/heads/master
| 2022-12-03T03:55:17.308231
| 2020-08-21T09:57:47
| 2020-08-21T09:57:47
| 288,969,437
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,641
|
py
|
# -*- coding: utf-8 -*-
import scrapy, time, re
from scrapy.utils import request
from Scrapy_History_Hanchao_V1_01.items import InfoItem
import requests
from urllib3 import encode_multipart_formdata
from Scrapy_History_Hanchao_V1_01.ApolloConfig import IMAGES_STORE, SPIDER_NAME, UPLOADURL
class Zhuixue01Spider(scrapy.Spider):
name = 'Zhuixue_01'
base_url = 'http://lishi.zhuixue.net'
url_name = '追学网'
def start_requests(self):
for i in range(3):
url = f'http://lishi.zhuixue.net/hanchao/list_43_{i + 1}.html'
req = scrapy.Request(url=url, callback=self.parse, dont_filter=True)
yield req
def parse(self, response):
get_info = response.xpath('//div[@class="list1"]/li/a/@href').extract()
for info in get_info:
url = self.base_url + info
req = scrapy.Request(url=url, callback=self.detail_parse, dont_filter=True)
news_id = request.request_fingerprint(req)
req.meta.update({'news_id': news_id})
yield req
def detail_parse(self, response):
headers = {}
for k, v in response.request.headers.items():
headers[k.decode()] = v[0].decode()
title = response.xpath('//ul[@class="lisbt"]/li[1]/span/h1/text()').extract_first()
try:
issue_time = re.findall(r'\d+-\d+-\d+ \d+:\d+', response.text)[0].split(' ')[0]
except IndexError:
issue_time = None
content = response.xpath('//ul[@class="lisnr"]').extract_first()
images_url = response.xpath('//ul[@class="lisnr"]//img/@src').extract()
item = InfoItem()
images = []
if images_url:
for image_url in images_url:
if 'http' in image_url:
link = image_url
else:
link = self.base_url + image_url
res = self.download_img(link, headers)
if res['success']:
self.logger.info({'图片下载完成': link})
images.append(res['data']['url'])
else:
self.logger.info({'图片下载失败': link})
item['images'] = ','.join(images) if images else None
item['category'] = '汉朝'
item['content_url'] = response.url
item['title'] = title
item['issue_time'] = issue_time if issue_time else None
item['information_source'] = '历史追学网'
item['sign'] = '19'
item['news_id'] = response.meta['news_id']
item['content'] = content
item['author'] = None
item['title_image'] = None
item['attachments'] = None
item['area'] = None
item['address'] = None
item['tags'] = None
item['update_time'] = str(int(time.time() * 1000))
item['source'] = None
if content:
yield item
self.logger.info({'title': title, 'issue_time': issue_time})
def download_img(self, url, headers):
resp = requests.get(url, headers=headers)
file_name = url.split('/')[-1]
file = {
'file': (file_name, resp.content)
}
send_url = UPLOADURL + SPIDER_NAME
encode_data = encode_multipart_formdata(file)
file_data = encode_data[0]
headers_from_data = {
"Content-Type": encode_data[1]
}
response = requests.post(url=send_url, headers=headers_from_data, data=file_data).json()
return response
if __name__ == '__main__':
from scrapy import cmdline
cmdline.execute(['scrapy', 'crawl', 'Zhuixue_01'])
|
[
"18819492919@163.com"
] |
18819492919@163.com
|
9f38297ffcb415afd27671f80d18b3c3ccc487db
|
cb57a9ea4622b94207d12ea90eab9dd5b13e9e29
|
/lc/python/1768_merge_strings_alternately.py
|
32222174bc34d1567b034641491b8b2e157d8c7a
|
[] |
no_license
|
boknowswiki/mytraning
|
b59585e1e255a7a47c2b28bf2e591aef4af2f09a
|
5e2f6ceacf5dec8260ce87e9a5f4e28e86ceba7a
|
refs/heads/master
| 2023-08-16T03:28:51.881848
| 2023-08-10T04:28:54
| 2023-08-10T04:28:54
| 124,834,433
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,459
|
py
|
# string and two pointers
# time O(max(m,n))
# space O(1)
class Solution:
def mergeAlternately(self, word1: str, word2: str) -> str:
ret = []
n = len(word1)
m = len(word2)
if n == 0:
return word2
if m == 0:
return word1
i, j = 0, 0
idx = 0
while i < n and j < m:
if idx % 2 == 0:
ret.append(word1[i])
i += 1
else:
ret.append(word2[j])
j += 1
idx += 1
if i == n:
ret.extend(list(word2[j:]))
if j == m:
ret.extend(list(word1[i:]))
return "".join(ret)
class Solution(object):
def mergeAlternately(self, word1, word2):
m = len(word1)
n = len(word2)
i = 0
j = 0
result = []
while i < m or j < n:
if i < m:
result += word1[i]
i += 1
if j < n:
result += word2[j]
j += 1
return "".join(result)
class Solution(object):
def mergeAlternately(self, word1, word2):
result = []
n = max(len(word1), len(word2))
for i in range(n):
if i < len(word1):
result += word1[i]
if i < len(word2):
result += word2[i]
return "".join(result)
|
[
"noreply@github.com"
] |
boknowswiki.noreply@github.com
|
ca674d56b645b5721ff9210287a3026a3c86b84d
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2801/58758/256072.py
|
829cc7621c561a24efea43b99bb9b2ba608d94f2
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011
| 2020-07-28T16:21:24
| 2020-07-28T16:21:24
| 259,576,640
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 237
|
py
|
n = int(input())
nums = [int(x) for x in input().split()]
nums.sort()
flag = False
for i in range(0, len(nums)-2):
if nums[i] + nums[i+1] > nums[i+2]:
flag = True
break
if flag:
print('YES')
else:
print('NO')
|
[
"1069583789@qq.com"
] |
1069583789@qq.com
|
754f3df17792c7911d0f110efed7a7832bb5de48
|
f4b2d9a0de1f7a26a8fd5afe25446e62dfa0fdb5
|
/Python/base_algorithm/base_sum.py
|
b3db43265b69011967ccd5ef53c5613268a1b43e
|
[] |
no_license
|
Alexanderklau/LeetCode
|
e675425cca0b4e2e6f94d8c1ce6df92bbec32ac7
|
6090fa602ab29aef40d41661e473058eaaec490d
|
refs/heads/master
| 2021-06-23T17:41:53.309882
| 2020-12-01T14:36:00
| 2020-12-01T14:36:00
| 148,267,114
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 197
|
py
|
# coding: utf-8
__author__ = "lau.wenbo"
"""
高斯解法
"""
def sum_of_n(n):
the_sum = 0
for i in range(1, n+1):
the_sum = the_sum + i
return the_sum
print(sum_of_n(100))
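# Added sketch: the closed form usually attributed to Gauss, for comparison with the
# iterative loop above: sum(1..n) = n*(n+1)/2.
def sum_of_n_gauss(n):
    return n * (n + 1) // 2

print(sum_of_n_gauss(100))  # 5050, same result as sum_of_n(100)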
|
[
"429095816@qq.com"
] |
429095816@qq.com
|
234615d0dfa6ec1b4bb50bbc470a76d507001e80
|
58be8fc8996b98b624fb9784527b2dc588d4587c
|
/pybamm/models/submodels/active_material/stress_driven_active_material.py
|
61fbe41ec0392883bec8138e4988b5b026f60706
|
[
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] |
permissive
|
gwhite09/PyBaMM
|
b9f7b6b06bb37b6819e306356f5b8e90df8affff
|
033ad6384582a3e5d29ad48eeaa7fe92b98e2a29
|
refs/heads/main
| 2023-08-22T19:49:26.112089
| 2021-09-17T17:02:34
| 2021-09-17T17:02:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,401
|
py
|
#
# Class for varying active material volume fraction, driven by stress
#
import pybamm
from .base_active_material import BaseModel
class StressDriven(BaseModel):
"""Submodel for varying active material volume fraction, driven by stress, from
[1]_ and [2]_.
Parameters
----------
param : parameter class
The parameters to use for this submodel
domain : str
The domain of the model either 'Negative' or 'Positive'
options : dict
Additional options to pass to the model
x_average : bool
Whether to use x-averaged variables (SPM, SPMe, etc) or full variables (DFN)
**Extends:** :class:`pybamm.active_material.BaseModel`
References
----------
.. [1] Ai, W., Kraft, L., Sturm, J., Jossen, A., & Wu, B. (2019). Electrochemical
Thermal-Mechanical Modelling of Stress Inhomogeneity in Lithium-Ion Pouch
Cells. Journal of The Electrochemical Society, 167(1), 013512.
.. [2] Reniers, J. M., Mulder, G., & Howey, D. A. (2019). Review and performance
comparison of mechanical-chemical degradation models for lithium-ion
batteries. Journal of The Electrochemical Society, 166(14), A3189.
"""
def __init__(self, param, domain, options, x_average):
super().__init__(param, domain, options=options)
pybamm.citations.register("Reniers2019")
self.x_average = x_average
def get_fundamental_variables(self):
domain = self.domain.lower() + " electrode"
if self.x_average is True:
eps_solid_xav = pybamm.Variable(
"X-averaged " + domain + " active material volume fraction",
domain="current collector",
)
eps_solid = pybamm.PrimaryBroadcast(eps_solid_xav, domain)
else:
eps_solid = pybamm.Variable(
self.domain + " electrode active material volume fraction",
domain=domain,
auxiliary_domains={"secondary": "current collector"},
)
variables = self._get_standard_active_material_variables(eps_solid)
return variables
def get_coupled_variables(self, variables):
# obtain the rate of loss of active materials (LAM) by stress
# This is loss of active material model by mechanical effects
if self.x_average is True:
stress_t_surf = variables[
"X-averaged "
+ self.domain.lower()
+ " particle surface tangential stress"
]
stress_r_surf = variables[
"X-averaged " + self.domain.lower() + " particle surface radial stress"
]
else:
stress_t_surf = variables[
self.domain + " particle surface tangential stress"
]
stress_r_surf = variables[self.domain + " particle surface radial stress"]
if self.domain == "Negative":
beta_LAM = self.param.beta_LAM_n
stress_critical = self.param.stress_critical_n
m_LAM = self.param.m_LAM_n
else:
beta_LAM = self.param.beta_LAM_p
stress_critical = self.param.stress_critical_p
m_LAM = self.param.m_LAM_p
stress_h_surf = (stress_r_surf + 2 * stress_t_surf) / 3
# compressive stress make no contribution
stress_h_surf *= stress_h_surf > 0
# assuming the minimum hydrostatic stress is zero for full cycles
stress_h_surf_min = stress_h_surf * 0
j_stress_LAM = (
-(beta_LAM / self.param.t0_cr)
* ((stress_h_surf - stress_h_surf_min) / stress_critical) ** m_LAM
)
deps_solid_dt = j_stress_LAM
variables.update(
self._get_standard_active_material_change_variables(deps_solid_dt)
)
return variables
def set_rhs(self, variables):
Domain = self.domain + " electrode"
if self.x_average is True:
eps_solid = variables[
"X-averaged " + Domain.lower() + " active material volume fraction"
]
deps_solid_dt = variables[
"X-averaged "
+ Domain.lower()
+ " active material volume fraction change"
]
else:
eps_solid = variables[Domain + " active material volume fraction"]
deps_solid_dt = variables[
Domain + " active material volume fraction change"
]
self.rhs = {eps_solid: deps_solid_dt}
def set_initial_conditions(self, variables):
if self.domain == "Negative":
x_n = pybamm.standard_spatial_vars.x_n
eps_solid_init = self.param.epsilon_s_n(x_n)
elif self.domain == "Positive":
x_p = pybamm.standard_spatial_vars.x_p
eps_solid_init = self.param.epsilon_s_p(x_p)
if self.x_average is True:
eps_solid_xav = variables[
"X-averaged "
+ self.domain.lower()
+ " electrode active material volume fraction"
]
self.initial_conditions = {eps_solid_xav: pybamm.x_average(eps_solid_init)}
else:
eps_solid = variables[
self.domain + " electrode active material volume fraction"
]
self.initial_conditions = {eps_solid: eps_solid_init}
|
[
"valentinsulzer@hotmail.com"
] |
valentinsulzer@hotmail.com
|
f14308e3fd66781d5cbdd827da378221a727e027
|
bccbb5244947574c63992dc812b5ef44519ec161
|
/tests/test_command_runner.py
|
fcb536ca809e16f5103fd66573f5e2e7dd3eeea3
|
[] |
no_license
|
hal1932/pysvn
|
d4fab12dbb07838d947292146ca49e9a31119deb
|
a579744543765b574655377a2e1ada5be961e8d8
|
refs/heads/master
| 2020-03-14T06:35:46.835307
| 2018-05-01T16:17:10
| 2018-05-01T16:17:10
| 131,487,301
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 939
|
py
|
# coding: utf-8
from __future__ import print_function, unicode_literals
import unittest as ut
import xml.etree.ElementTree as et
from svn.command_runner import CommandRunner
class TestCommandRunner(ut.TestCase):
def setUp(self):
self.__runner = CommandRunner()
self.__runner.current_directory = 'C:/Users/yuta/Desktop/subversion/trunk'
def tearDown(self):
pass
@ut.skip
def test_run(self):
result, out, err = self.__runner.run('info', ['--xml'])
self.assertEqual(result, 0)
self.assertEqual(err, '')
root = et.fromstring(out)
self.assertEqual(root.tag, 'info')
entry = root.find('entry')
self.assertEqual(entry.find('url').text, 'https://svn.apache.org/repos/asf/subversion/trunk')
self.assertEqual(entry.find('wc-info/wcroot-abspath').text, 'C:/Users/yuta/Desktop/subversion/trunk')
if __name__ == '__main__':
ut.main()
|
[
"yu.arai.19@gmail.com"
] |
yu.arai.19@gmail.com
|
89bfad9927cab9ec96b3795aa8887564a390caf1
|
6234d711a6352c694bb69946ff673e4829ab6916
|
/feelings/groups/views/company.py
|
91f38b17afdc2b90f2c6a890c1149c612963a773
|
[
"MIT"
] |
permissive
|
treehouse/livestream-django-feelings
|
c816beb4557d52d5aafb5f11a40f5e6a0c0f6ba5
|
a246e456bb28f736cfb670486a1534e2d18efc78
|
refs/heads/master
| 2021-01-13T12:51:04.730505
| 2019-02-21T15:25:38
| 2019-02-21T15:25:38
| 78,469,589
| 32
| 24
| null | 2017-02-23T22:10:10
| 2017-01-09T21:14:02
|
Python
|
UTF-8
|
Python
| false
| false
| 3,596
|
py
|
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.urls import reverse, reverse_lazy
from django.views import generic
from braces.views import SetHeadlineMixin
from .. import forms
from .. import models
class Create(LoginRequiredMixin, SetHeadlineMixin, generic.CreateView):
form_class = forms.CompanyForm
headline = 'Create Company'
success_url = reverse_lazy('users:dashboard')
template_name = 'companies/form.html'
def form_valid(self, form):
form.instance.created_by = self.request.user
response = super().form_valid(form)
self.object.members.add(self.request.user)
return response
class Update(LoginRequiredMixin, SetHeadlineMixin, generic.UpdateView):
form_class = forms.CompanyForm
template_name = 'companies/form.html'
def get_queryset(self):
return self.request.user.companies.all()
def get_headline(self):
return f'Edit {self.object.name}'
def get_success_url(self):
return reverse('groups:companies:detail', kwargs={
'slug': self.object.slug})
class Detail(LoginRequiredMixin, generic.FormView):
form_class = forms.CompanyInviteForm
template_name = 'companies/detail.html'
def get_success_url(self):
self.get_object()
return reverse('groups:companies:detail', kwargs={
'slug': self.object.slug})
def get_queryset(self):
return self.request.user.companies.all()
def get_object(self):
self.object = self.request.user.companies.get(
slug=self.kwargs.get('slug')
)
return self.object
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['object'] = self.get_object()
return context
def form_valid(self, form):
response = super().form_valid(form)
models.CompanyInvite.objects.create(
from_user=self.request.user,
to_user=form.invitee,
company=self.get_object()
)
return response
class Leave(LoginRequiredMixin, SetHeadlineMixin, generic.FormView):
form_class = forms.LeaveForm
template_name = 'companies/form.html'
success_url = reverse_lazy('users:dashboard')
def get_object(self):
try:
self.object = self.request.user.companies.filter(
slug=self.kwargs.get('slug'),
).exclude(created_by=self.request.user).get()
except models.Company.DoesNotExist:
raise Http404
def get_headline(self):
self.get_object()
return f'Leave {self.object}?'
def form_valid(self, form):
self.get_object()
self.object.members.remove(self.request.user)
return super().form_valid(form)
class Invites(LoginRequiredMixin, generic.ListView):
model = models.CompanyInvite
template_name = 'companies/invites.html'
def get_queryset(self):
return self.request.user.companyinvite_received.filter(status=0)
class InviteResponse(LoginRequiredMixin, generic.RedirectView):
url = reverse_lazy('groups:companies:invites')
def get(self, request, *args, **kwargs):
invite = get_object_or_404(
models.CompanyInvite,
to_user=request.user,
uuid=kwargs.get('code'),
status=0
)
if kwargs.get('response') == 'accept':
invite.status = 1
else:
invite.status = 2
invite.save()
return super().get(request, *args, **kwargs)
|
[
"kenneth@gigantuan.net"
] |
kenneth@gigantuan.net
|
e95fae2b71d041eff7090fe472700f65339ffa56
|
3b7474148c07df7f4755106a3d0ada9b2de5efdc
|
/training/c31_pattern_design/e04_callback.py
|
b608b4cea7865efac231c64f8fa6e7dd59efcde1
|
[] |
no_license
|
juancsosap/pythontraining
|
7f67466846138f32d55361d64de81e74a946b484
|
1441d6fc9544042bc404d5c7efffd119fce33aa7
|
refs/heads/master
| 2021-08-26T05:37:15.851025
| 2021-08-11T22:35:23
| 2021-08-11T22:35:23
| 129,974,006
| 1
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 922
|
py
|
class Provider:
def get(self, path, cbf):
with open(path) as file:
text = file.read()
result = self.analyze(text)
cbf(result)
def word_count(self, text):
split_text = text.lower().split(' ')
words = len(set(split_text))
count = len(split_text)
return (words, count)
def char_count(self, text):
characters = len(set(text))
count = len(text)
return (characters, count)
def analyze(self, text):
word_info = self.word_count(text)
char_info = self.char_count(text)
return (word_info, char_info)
class Requester:
def make(self, path):
p = Provider()
p.get(path, self.done)
def done(self, result):
print(result)
if __name__ == "__main__":
basedir = __file__[:__file__.rfind('/')+1]
r = Requester()
r.make(basedir + 'data.txt')
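# Note (added): this file illustrates the callback pattern. Requester.make passes its
# own done() method to Provider.get, and Provider invokes that callable (cbf) with the
# ((unique_words, word_count), (unique_chars, char_count)) analysis result.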
|
[
"juan.c.sosa.p@gmail.com"
] |
juan.c.sosa.p@gmail.com
|
0457440b4e3f996aaa557313efda6c7f2d6e1a76
|
069ce71ee1ca85988ebf5bc179bcafbbd3d04f7f
|
/golib/views.py
|
70f41659f6d03852008364558b13e70346ea68e7
|
[] |
no_license
|
9gix/golib
|
21a1376b553a83b743c68f418f82a488c9964c1a
|
fbcfe0a9c5e0523c7b2e85f46cb0d18a4ac85db5
|
refs/heads/master
| 2021-03-12T19:57:59.971214
| 2012-11-04T17:22:59
| 2012-11-04T17:22:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 251
|
py
|
from django.shortcuts import redirect, render
from django.core.urlresolvers import reverse
def index(request):
if request.user.is_authenticated():
return redirect(reverse('catalog:book_list'))
return render(request, 'index.html', {})
|
[
"yeo.eugene.oey@gmail.com"
] |
yeo.eugene.oey@gmail.com
|
1ee42e0fa0fd0e830473f4079c9058dd6869c849
|
7ab85ba79a6553659f0b324ecebb4bb39f8a8a1c
|
/shallow copy.py
|
827e08006334256c38c0ceb955c5a8fd2ff5b596
|
[] |
no_license
|
subinmun1997/my_python
|
b75db77a035fa8f531d9872bf33a1818a002206a
|
634acc948e7758f5d26084536c506e7da45cd53c
|
refs/heads/master
| 2022-12-28T21:11:40.173378
| 2020-10-16T08:02:18
| 2020-10-16T08:02:18
| 292,875,851
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 128
|
py
|
r1=['John',('man','USA'),[175,23]]
r2=list(r1)
print(r1 is r2)
print(r1[0] is r2[0])
print(r1[1] is r2[1])
print(r1[2] is r2[2])
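# Note (added): list(r1) builds a shallow copy, so the outer list is a new object
# (r1 is r2 prints False) while all three elements are shared, which is why the
# element-level identity checks all print True.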
|
[
"qzxy812@gmail.com"
] |
qzxy812@gmail.com
|
7f64617c1f9ffa09fcadfbe29ce329539eae983a
|
0f074e5adef64fa16e88dc2499e76f08b4c33c02
|
/matplotlib/ipython and pylab/03 改变线条颜色和粗细.py
|
5536a6d7b75055db76aeaa13f86025196647c11b
|
[] |
no_license
|
guozhenjiang/Python
|
0ac39adaf72df0bfee51795fabcfd959a69b1862
|
44b07bd767f3f2a947331111ab920200ac2412c6
|
refs/heads/master
| 2021-05-19T16:54:40.725132
| 2020-11-19T16:26:26
| 2020-11-19T16:27:11
| 252,035,380
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,154
|
py
|
# Import everything from matplotlib (numpy is available under the name np)
from pylab import *
# Create a figure of 8 x 6 points with a resolution of 80 dpi
# figure(figsize=(8,6), dpi=80)
figure(figsize=(10,6), dpi=80)
# Create a new 1 x 1 grid of subplots and draw in its first (and only) cell
subplot(1,1,1)
X = np.linspace(-np.pi, np.pi, 256,endpoint=True)
C,S = np.cos(X), np.sin(X)
# Plot the cosine curve with a blue, solid, 1-pixel-wide line
# plot(X, C, color="blue", linewidth=1.0, linestyle="-")
plot(X, C, color="blue", linewidth=2.5, linestyle="-")
# Plot the sine curve with a green, solid, 1-pixel-wide line
# plot(X, S, color="green", linewidth=1.0, linestyle="-")
plot(X, S, color="red", linewidth=2.5, linestyle="-")
# Set the x-axis limits
xlim(-4.0,4.0)
# Set the x-axis ticks
xticks(np.linspace(-4,4,9,endpoint=True))
# Set the y-axis limits
ylim(-1.0,1.0)
# Set the y-axis ticks
yticks(np.linspace(-1,1,5,endpoint=True))
# Save the figure at 72 dpi
# savefig("exercice_2.png",dpi=72)
# Display on screen
show()
|
[
"guo_zhen_jiang@163.com"
] |
guo_zhen_jiang@163.com
|
6ff8cf46f9afbcf4558f4fc7c0f57921fcc8d9d4
|
68577bb693fe01cddce56da36a43702c6bdedc07
|
/Programming/python/threads/events.001.py
|
a7ef176267b372f3242e604881eb1b4acfb8801b
|
[] |
no_license
|
ceccopierangiolieugenio/scripts
|
480ab9b94c135d47c4d7c916e35df537cfabbed3
|
fe0eca7d76733e204c1c702e03b9ccc11ee421fd
|
refs/heads/master
| 2023-03-31T16:57:37.064553
| 2023-03-26T13:21:36
| 2023-03-26T13:21:36
| 99,695,368
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,212
|
py
|
# Example from:
# https://www.bogotobogo.com/python/Multithread/python_multithreading_Event_Objects_between_Threads.php
import threading
import time
import logging
logging.basicConfig(level=logging.DEBUG,
format='(%(threadName)-9s) %(message)s',)
def wait_for_event(e):
logging.debug('wait_for_event starting')
event_is_set = e.wait()
logging.debug('event set: %s', event_is_set)
def wait_for_event_timeout(e, t):
while not e.isSet():
logging.debug('wait_for_event_timeout starting')
event_is_set = e.wait(t)
logging.debug('event set: %s', event_is_set)
if event_is_set:
logging.debug('processing event')
else:
logging.debug('doing other things')
if __name__ == '__main__':
e = threading.Event()
t1 = threading.Thread(name='blocking',
target=wait_for_event,
args=(e,))
t1.start()
t2 = threading.Thread(name='non-blocking',
target=wait_for_event_timeout,
args=(e, 2))
t2.start()
logging.debug('Waiting before calling Event.set()')
time.sleep(3)
e.set()
logging.debug('Event is set')
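# Note (added): with the 3-second sleep above, the expected interleaving is that the
# 'non-blocking' thread logs "doing other things" once after its 2-second wait times
# out, and both threads report the event as set shortly after e.set() is called.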
|
[
"ceccopierangiolieugenio@googlemail.com"
] |
ceccopierangiolieugenio@googlemail.com
|
b4c0472ccadd94cd2d5b8635aa3af2ec2da7fb48
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/476/usersdata/321/110683/submittedfiles/Av2_Parte3.py
|
871c02d88b4454cc7f87fe1b0a0f024a5aa1caa1
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 339
|
py
|
# -*- coding: utf-8 -*-
qa= int(input('Quantidade de elementos de a: '))
qb= int(input('Quantidade de elementos de b: '))
a= []
b= []
for i in range(qa):
a.append(int(input('Digite o valor%d de a: ' % i)))
for i in range(qb):
b.append(int(input('Digite o valor%d de b: ' % i)))
soma= 0
# count the positions where a and b hold the same value, over the common length
for i in range(min(qa, qb)):
    if a[i] == b[i]:
        soma+=1
print(soma)
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
9d617e8e56b480d3f2c9796faf890e935037a64c
|
90419da201cd4948a27d3612f0b482c68026c96f
|
/sdk/python/pulumi_azure_nextgen/network/v20180701/get_network_watcher.py
|
a1c1900c872cb4134a105e35685ba9cbea6c876c
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
test-wiz-sec/pulumi-azure-nextgen
|
cd4bee5d70cb0d332c04f16bb54e17d016d2adaf
|
20a695af0d020b34b0f1c336e1b69702755174cc
|
refs/heads/master
| 2023-06-08T02:35:52.639773
| 2020-11-06T22:39:06
| 2020-11-06T22:39:06
| 312,993,761
| 0
| 0
|
Apache-2.0
| 2023-06-02T06:47:28
| 2020-11-15T09:04:00
| null |
UTF-8
|
Python
| false
| false
| 4,324
|
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'GetNetworkWatcherResult',
'AwaitableGetNetworkWatcherResult',
'get_network_watcher',
]
@pulumi.output_type
class GetNetworkWatcherResult:
"""
Network watcher in a resource group.
"""
def __init__(__self__, etag=None, location=None, name=None, provisioning_state=None, tags=None, type=None):
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The provisioning state of the resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
class AwaitableGetNetworkWatcherResult(GetNetworkWatcherResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetNetworkWatcherResult(
etag=self.etag,
location=self.location,
name=self.name,
provisioning_state=self.provisioning_state,
tags=self.tags,
type=self.type)
def get_network_watcher(network_watcher_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNetworkWatcherResult:
"""
Use this data source to access information about an existing resource.
:param str network_watcher_name: The name of the network watcher.
:param str resource_group_name: The name of the resource group.
"""
__args__ = dict()
__args__['networkWatcherName'] = network_watcher_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:network/v20180701:getNetworkWatcher', __args__, opts=opts, typ=GetNetworkWatcherResult).value
return AwaitableGetNetworkWatcherResult(
etag=__ret__.etag,
location=__ret__.location,
name=__ret__.name,
provisioning_state=__ret__.provisioning_state,
tags=__ret__.tags,
type=__ret__.type)
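# Illustrative usage sketch (added): the resource names below are placeholders, not
# values from any real deployment.
#
#   watcher = get_network_watcher(network_watcher_name="example-watcher",
#                                 resource_group_name="example-rg")
#   pulumi.export("watcherLocation", watcher.location)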
|
[
"public@paulstack.co.uk"
] |
public@paulstack.co.uk
|
306efc4d66d57b80b9a10c625c04f08557d7f834
|
8adcfe7485ea04bc1f83cac7d92bb51b97582f64
|
/ALGORITHM/210531/프로그래머스 타겟 넘버.py
|
c31258c8edf6e3e7fa3b78f33fb5b9e9aed6108b
|
[] |
no_license
|
NoJeong/TIL
|
fdceb6efc5d2d56f8dd2e27271ea0faacfe336ae
|
c79c34b84f025aa40cd3a8e28fd0898bcb40b608
|
refs/heads/master
| 2023-06-24T22:18:50.665917
| 2021-07-23T06:21:21
| 2021-07-23T06:21:21
| 280,307,738
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 543
|
py
|
import sys
sys.stdin = open('프로그래머스 타겟 넘버.txt')
def solution(numbers, target):
answer = 0
def dfs(numbers, target, index):
nonlocal answer
if index < len(numbers):
numbers[index] *= 1
dfs(numbers, target, index + 1)
numbers[index] *= -1
dfs(numbers, target, index + 1)
elif sum(numbers) == target:
answer += 1
dfs(numbers, target, 0)
return answer
a = list(map(int,input().split()))
b = int(input())
solution(a,b)
|
[
"op032@naver.com"
] |
op032@naver.com
|
8527a82984c2cd8a19d450dc69773a45da4c0b51
|
79bc9a420df5c706b2ae06f4b75bf2bd2ba9646e
|
/emission/net/ext_service/push/query/trip_metrics.py
|
bad51c6afb3acaf63183dd918f4250efd1da085d
|
[
"BSD-3-Clause"
] |
permissive
|
Andrew-Tan/e-mission-server
|
7022786a13b4be87be62cfc2cc6d82543d063e5d
|
91d59bee86e63d803e401f10f4b6a2502effedda
|
refs/heads/master
| 2021-01-16T18:25:17.860723
| 2017-11-21T19:24:40
| 2017-11-21T19:24:40
| 100,073,534
| 0
| 0
|
BSD-3-Clause
| 2018-05-05T18:26:36
| 2017-08-11T22:13:44
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 4,279
|
py
|
# Input spec sample at
# emission/net/ext_service/push/sample.specs/trip_metrics.query.sample sample
# finds all users who have at least one day in Feb 2017 with no more than 10
# walk sections and a walk distance of at least 1km during the evening commute
# hours
# Input: query spec
# Output: list of uuids
#
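# A hypothetical spec illustrating the fields consumed below (added for clarity; the
# real sample lives at the path mentioned above, and the mode name 'WALKING' and the
# freq value 'DAILY' are assumptions, not values taken from this repository):
#
#   spec = {
#       "time_type": "local_date",
#       "from_local_date": {"year": 2017, "month": 2},
#       "to_local_date": {"year": 2017, "month": 3},
#       "freq": "DAILY",
#       "checks": [
#           {"metric": "count", "modes": ["WALKING"], "threshold": {"$lte": 10}},
#           {"metric": "distance", "modes": ["WALKING"], "threshold": {"$gte": 1000}},
#       ],
#   }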
import logging
import time
import numpy as np
import emission.core.wrapper.motionactivity as ecwm
import emission.net.api.metrics as enam
import emission.storage.decorations.local_date_queries as esdl
import emission.storage.decorations.location_queries as esdlq
import emission.storage.decorations.user_queries as esdu
import emission.storage.timeseries.geoquery as estg
import emission.storage.timeseries.timequery as estt
import emission.storage.timeseries.tcquery as esttc
import emission.storage.decorations.analysis_timeseries_queries as esda
def get_metric_list(checks):
metric_list = [e["metric"] for e in checks]
logging.debug("Returning %s" % metric_list)
return metric_list
def compare_value(threshold, summed_value):
if '$gt' in threshold:
return summed_value > threshold['$gt']
if '$gte' in threshold:
return summed_value >= threshold['$gte']
if '$lt' in threshold:
return summed_value < threshold['$lt']
if '$lte' in threshold:
return summed_value <= threshold['$lte']
return False
def matches_check(check, msts):
# We know that the metric in the check matches the result because that's the
# way that the metrics API works. So we just need to check mode versus threshold
# entry looks like this (for count)
# ModeStatTimeSummary({'fmt_time': '2017-01-20T00:00:00+00:00',
# 'nUsers': 1,
# 'UNKNOWN': 1,
# 'ts': 1484870400,
# 'AIR_OR_HSR': 2,
# 'local_dt': LocalDate(...)})
mode_list = check['modes']
summed_value = 0
for mode in mode_list:
summed_value = summed_value + msts.get(mode, 0)
return compare_value(check["threshold"], summed_value)
def is_matched_user(user_id, spec):
metric_list = get_metric_list(spec["checks"])
time_type = spec['time_type']
if 'from_local_date' in spec and 'to_local_date' in spec:
freq_metrics = enam.summarize_by_local_date(user_id,
spec["from_local_date"], spec["to_local_date"],
spec["freq"], metric_list, include_aggregate=False)
elif 'start_time' in spec and 'end_time' in spec:
freq_metrics = enam.summarize_by_timestamp(user_id,
spec["start_time"], spec["end_time"],
spec["freq"], metric_list, include_aggregate=False)
else:
# If no start and end times are specified, we assume that this is a
# timestamp query because we can come up with a reasonable start and end
# time for timestamps but not for local_dates, which are basically a filter.
# so if we run this on the first of a month, for example, we won't find
# anything, which seems bogus and not what people would expect
assert time_type == "timestamp", "time_type = %s, expected timestamp" % time_type
freq_metrics = enam.summarize_by_timestamp(user_id,
0, time.time(), spec["freq"], metric_list, include_aggregate=False)
assert(freq_metrics is not None)
assert('user_metrics' in freq_metrics)
curr_user_metrics = freq_metrics['user_metrics']
checks = spec['checks']
check_results = np.zeros(len(checks))
for i, check in enumerate(checks):
curr_metric_result = curr_user_metrics[i]
# curr_freq_result is a list of ModeStatTimeSummary objects, one for each
# grouped time interval in the range
# e.g. for daily, 2017-01-19, 2017-01-20, 2017-01-21, 2017-01-22, 2017-01-23, ....
for msts in curr_metric_result:
# We defined our check as being true if it is true for _any_ grouped time
# period in the range. So as long as we find a match for that check, we are
# good!
if matches_check(check, msts):
check_results[i] = True
logging.info("For user_id %s, check result array = %s, all? %s" % (user_id, check_results, np.all(check_results)))
return np.all(check_results)
def query(spec):
sel_uuids = esdu.get_all_uuids()
matched_uuid_list = [uuid for uuid in sel_uuids if is_matched_user(uuid, spec)]
logging.info("matched matched_uuid_list of length = %s = %s" %
(len(matched_uuid_list), matched_uuid_list))
return matched_uuid_list
|
[
"shankari@eecs.berkeley.edu"
] |
shankari@eecs.berkeley.edu
|
df7859b3968e2e07fe6d573c3c0175bb0d06485b
|
72dbf8366cf17b6a81ab37e72af667726e3f2661
|
/store/migrations/0016_auto_20201104_1719.py
|
31c0e9e8bf2783c9b201a665dd614b048aa7b44d
|
[] |
no_license
|
Rayhun/Django_E-Commerce_website
|
3aef732ffa0a41509be95ced3c33b845233903a7
|
1a5f7e31f942914256e49ba7da1f7367a799f097
|
refs/heads/main
| 2023-05-23T18:18:27.875328
| 2021-04-30T19:29:06
| 2021-04-30T19:29:06
| 306,414,778
| 3
| 1
| null | 2021-04-30T19:28:58
| 2020-10-22T17:41:57
|
CSS
|
UTF-8
|
Python
| false
| false
| 505
|
py
|
# Generated by Django 3.1.1 on 2020-11-04 11:19
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('store', '0015_remove_product_product_name'),
]
operations = [
migrations.AlterField(
model_name='customer',
name='gender',
field=models.CharField(blank=True, choices=[('MALE', 'Male'), ('FEMALE', 'Female'), ('OTHERS', 'Others')], default='MALE', max_length=6, null=True),
),
]
|
[
"rayhunkhan27@gmail.com"
] |
rayhunkhan27@gmail.com
|
936baa9a603ebaf11d6c5adc98fecc3cf562f6cc
|
952abfc855d0fca89200f1e428aac9a87f1d3295
|
/tf114/tf09_mv2.py
|
6e8e7a3f054efe4bf5163d9aaf09c665b95a2f75
|
[] |
no_license
|
TaeYeon-kim-ai/STUDY_1.py
|
7570b4510bf8d9791447efe3a97a9668a1cabe06
|
e14392c706b7e51e40f1ac68555e26558e25b38f
|
refs/heads/master
| 2023-06-03T09:04:13.498591
| 2021-06-21T17:10:47
| 2021-06-21T17:10:47
| 329,834,933
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,710
|
py
|
# Assignment
# dimension / form / example
# scalar: 1
# vector: [1,2,3]
# matrix: [[1,2],[2,3]]
# tensor: [[[1,2],[1,2,3]]]
# for x * W the two shapes must be compatible
# x = (5, 3)
# W = (3, 1) (the 1 could just as well be 2, 3, 4, ...); the result must have the same shape as the b it is added to.
# (5, 3) x (3, 1) = (5, 1)  # matrix multiplication works as long as the first matrix's columns match the second's rows
# (3, 2) x (2, 3) = (3, 3)
# [Exercise] build it yourself
# verbose output shows step, cost and hypothesis // epochs = 2001, in steps of 10
import tensorflow as tf
tf.set_random_seed(66)
x_data = [[73, 51, 65],
[92, 98, 40],
[89, 31, 33],
[99, 33, 100],
          [17, 66, 79]] #(5,3) matrix
y_data = [[152],
[185],
[180],
[205],
          [142]] #matrix(5,1)
x = tf.placeholder(tf.float32, shape = [None, 3])
y = tf.placeholder(tf.float32, shape = [None, 1])
# keep the rows aligned; the columns follow y, so the shape of x*w must equal the shape of y
w = tf.Variable(tf.random_normal([3, 1]), name = 'weight')
b = tf.Variable(tf.random_normal([1]), name = 'bias') # a single bias value
#hypothesis = x * w + b
hypothesis = tf.matmul(x, w) + b # matmul is matrix multiplication
cost = tf.reduce_mean(tf.square(hypothesis - y))
optimizer = tf.train.GradientDescentOptimizer(learning_rate = 71e-6) # cost : 294.25824
#optimizer = tf.train.AdamOptimizer(learning_rate = 0.1) #cost : 176.789 [experiment]
train = optimizer.minimize(cost)
sess = tf.compat.v1.Session()
sess.run(tf.global_variables_initializer())
for step in range(2001) :
cost_val, hy_val, _ = sess.run([cost, hypothesis, train],
feed_dict = {x : x_data, y : y_data})
if step % 20 == 0 :
print("step : ", step, "\n", "cost : ", cost_val, "\n", hy_val)
sess.close()
'''
import matplotlib.pyplot as plt
w_history = []
cost_history = []
with tf.compat.v1.Session() as sess :
    #sess.run(tf.compat.v1.global_variables_initializer()) # runs even without this
    # since tf.Variable was never actually applied to w in TensorFlow here, it seems to be treated as a plain Python variable
    for i in range(-30, 50) : #-30 ~ 50
        curr_w = i * 0.1 # increases in steps of 0.1, starting at -3
curr_cost = sess.run(cost, feed_dict={w : curr_w})
w_history.append(curr_w)
cost_history.append(curr_cost)
print("=========================================")
print("W : ", w_history)
print("=========================================")
print("cost : ", cost_history)
print("=========================================")
plt.plot(w_history, cost_history)
plt.show()
'''
|
[
"noreply@github.com"
] |
TaeYeon-kim-ai.noreply@github.com
|
d407e6efe97070be75014a8bc45c966906c9cd14
|
e707164df1aa8edb5d276179538bd1eb1805f759
|
/CODE/fedora_application/env/lib/python2.7/site-packages/fedmsg/config.py
|
bcb6258cc6c1c0570c5a144728cc39fb87d6c893
|
[] |
no_license
|
beckastar/cleaner_markov
|
af5816c14c94a8cb7924728179470e7db9ed2bc0
|
a6de3fd87db77c0d80789cbce0ff409c222b4e67
|
refs/heads/master
| 2021-01-02T22:52:08.989862
| 2013-11-10T04:51:04
| 2013-11-10T04:51:04
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,936
|
py
|
# This file is part of fedmsg.
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
""" :mod:`fedmsg.config` handles loading, processing and validation of
all configuration.
The configuration values used at runtime are determined by checking in
the following order
- Built-in defaults
- Config file (/etc/fedmsg-config.py)
- Command line arguments
For example, if a config value does not appear in either the config file or on
the command line, then the built-in default is used. If a value appears in
both the config file and as a command line argument, then the command line
value is used.
You can print the runtime configuration to the terminal by using the
``fedmsg-config`` command implemented by
:func:`fedmsg.commands.config.config`.
"""
import argparse
import collections
import copy
import os
import sys
import textwrap
import warnings
from fedmsg.encoding import pretty_dumps
VALID_ENVIRONMENTS = ['dev', 'stg', 'prod']
defaults = dict(
topic_prefix="org.fedoraproject",
environment="dev",
io_threads=1,
post_init_sleep=0.5,
timeout=2,
print_config=False,
high_water_mark=0, # zero means no limit
zmq_linger=1000, # Wait one second before timing out on fedmsg-relay
active=False, # generally only true for fedmsg-logger
persistent_store=None, # an object. See the fedmsg.replay module.
logging=dict(
version=1,
formatters=dict(
bare={
"datefmt": "%Y-%m-%d %H:%M:%S",
"format": "[%(asctime)s][%(name)10s %(levelname)7s] %(message)s"
},
),
handlers=dict(
console={
"class": "logging.StreamHandler",
"formatter": "bare",
"level": "INFO",
"stream": "ext://sys.stdout",
}
),
loggers=dict(
fedmsg={
"level": "INFO",
"propagate": False,
"handlers": ["console"],
},
moksha={
"level": "INFO",
"propagate": False,
"handlers": ["console"],
},
),
),
)
__cache = {}
def load_config(extra_args=None,
doc=None,
filenames=None,
invalidate_cache=False,
fedmsg_command=False):
""" Setup a runtime config dict by integrating the following sources
(ordered by precedence):
- defaults
- config file
- command line arguments
If the ``fedmsg_command`` argument is False, no command line arguments are
checked.
"""
global __cache
if invalidate_cache:
__cache = {}
if __cache:
return __cache
# Coerce defaults if arguments are not supplied.
extra_args = extra_args or []
doc = doc or ""
config = copy.deepcopy(defaults)
config.update(_process_config_file(filenames=filenames))
# This is optional (and defaults to false) so that only 'fedmsg-*' commands
# are required to provide these arguments.
# For instance, the moksha-hub command takes a '-v' argument and internally
# makes calls to fedmsg. We don't want to impose all of fedmsg's CLI
# option constraints on programs that use fedmsg, so we make it optional.
if fedmsg_command:
config.update(_process_arguments(extra_args, doc, config))
# If the user specified a config file on the command line, then start over
# but read in that file instead.
if not filenames and config.get('config_filename', None):
return load_config(extra_args, doc,
filenames=[config['config_filename']])
# Just a little debug option. :)
if config['print_config']:
print pretty_dumps(config)
sys.exit(0)
if config['environment'] not in VALID_ENVIRONMENTS:
raise ValueError("%r not one of %r" % (
config['environment'], VALID_ENVIRONMENTS))
if 'endpoints' not in config:
raise ValueError("No config value 'endpoints' found.")
if not isinstance(config['endpoints'], dict):
raise ValueError("The 'endpoint' config value must be a dict.")
if 'srv_endpoints' in config and len(config['srv_endpoints']) > 0:
from dns.resolver import query, NXDOMAIN, Timeout, NoNameservers
for e in config['srv_endpoints']:
urls = []
try:
records = query('_fedmsg._tcp.{0}'.format(e), 'SRV')
except NXDOMAIN:
warnings.warn("There is no appropriate SRV records " +
"for {0}".format(e))
continue
except Timeout:
warnings.warn("The DNS query for the SRV records of" +
" {0} timed out.".format(e))
continue
except NoNameservers:
warnings.warn("No name server is available, please " +
"check the configuration")
break
for rec in records:
urls.append('tcp://{hostname}:{port}'.format(
hostname=rec.target.to_text(),
port=rec.port
))
config['endpoints'][e] = urls
if 'topic_prefix_re' not in config:
# Turn "org.fedoraproject" into "org\.fedoraproject\.(dev|stg|prod)"
config['topic_prefix_re'] = config['topic_prefix'].replace('.', '\.')\
+ '\.(%s)' % '|'.join(VALID_ENVIRONMENTS)
__cache = config
return config
def build_parser(declared_args, doc, config=None, prog=None):
""" Return the global :class:`argparse.ArgumentParser` used by all fedmsg
commands.
Extra arguments can be supplied with the `declared_args` argument.
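    A rough sketch of how a caller might use it (the ``--count`` option is
    purely illustrative)::

        parser = build_parser(
            declared_args=[(['--count'], dict(dest='count', type=int, default=1))],
            doc="Some fedmsg-based command.",
        )
        args = parser.parse_args(['--count', '5'])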
"""
config = config or copy.deepcopy(defaults)
prog = prog or sys.argv[0]
parser = argparse.ArgumentParser(
description=textwrap.dedent(doc),
formatter_class=argparse.RawDescriptionHelpFormatter,
prog=prog,
)
parser.add_argument(
'--io-threads',
dest='io_threads',
type=int,
help="Number of io threads for 0mq to use",
default=config['io_threads'],
)
parser.add_argument(
'--topic-prefix',
dest='topic_prefix',
type=str,
help="Prefix for the topic of each message sent.",
default=config['topic_prefix'],
)
parser.add_argument(
'--post-init-sleep',
dest='post_init_sleep',
type=float,
help="Number of seconds to sleep after initializing.",
default=config['post_init_sleep'],
)
parser.add_argument(
'--config-filename',
dest='config_filename',
help="Config file to use.",
default=None,
)
parser.add_argument(
'--print-config',
dest='print_config',
help='Simply print out the configuration and exit. No action taken.',
default=False,
action='store_true',
)
parser.add_argument(
'--timeout',
dest='timeout',
help="Timeout in seconds for any blocking zmq operations.",
type=float,
default=config['timeout'],
)
parser.add_argument(
'--high-water-mark',
dest='high_water_mark',
help="Limit on the number of messages in the queue before blocking.",
type=int,
default=config['high_water_mark'],
)
parser.add_argument(
'--linger',
dest='zmq_linger',
help="Number of milliseconds to wait before timing out connections.",
type=int,
default=config['zmq_linger'],
)
for args, kwargs in declared_args:
# Replace the hard-coded extra_args default with the config file value
# (if it exists)
if all([k in kwargs for k in ['dest', 'default']]):
kwargs['default'] = config.get(
kwargs['dest'], kwargs['default'])
# Having slurped smart defaults from the config file, add the CLI arg.
parser.add_argument(*args, **kwargs)
return parser
def _process_arguments(declared_args, doc, config):
parser = build_parser(declared_args, doc, config)
args = parser.parse_args()
return dict(args._get_kwargs())
def _gather_configs_in(directory):
""" Return list of fully qualified python filenames in the given dir """
try:
return [
os.path.join(directory, fname)
for fname in os.listdir(directory)
if fname.endswith('.py')
]
except OSError:
return []
def _recursive_update(d1, d2):
""" Little helper function that does what d1.update(d2) does,
but works nice and recursively with dicts of dicts of dicts.
It's not necessarily very efficient.
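    For example, ``_recursive_update({'a': {'x': 1}}, {'a': {'y': 2}})``
    returns ``{'a': {'x': 1, 'y': 2}}``, whereas a plain ``d1.update(d2)``
    would have replaced the nested dict entirely.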
"""
for k in set(d1).intersection(d2):
if isinstance(d1[k], dict) and isinstance(d2[k], dict):
d1[k] = _recursive_update(d1[k], d2[k])
else:
d1[k] = d2[k]
for k in set(d2).difference(d1):
d1[k] = d2[k]
return d1
def _process_config_file(filenames=None):
filenames = filenames or []
# Validate that these files are really files
for fname in filenames:
if not os.path.isfile(fname):
raise ValueError("%r is not a file." % fname)
# If nothing specified, look in the default locations
if not filenames:
filenames = [
'/etc/fedmsg-config.py',
os.path.expanduser('~/.fedmsg-config.py'),
os.getcwd() + '/fedmsg-config.py',
]
filenames = sum(map(_gather_configs_in, [
"/etc/fedmsg.d/",
os.path.expanduser('~/.fedmsg.d/'),
os.getcwd() + '/fedmsg.d/',
]), []) + filenames
    # Each config file should really be a python module that
    # builds a config dict.
config = {}
for fname in filenames:
if os.path.isfile(fname):
variables = {}
try:
execfile(fname, variables)
config = _recursive_update(config, variables['config'])
except IOError as e:
warnings.warn(str(e))
return config
# --- vedo/pyplot.py (from the sariths/vtkPlotter repository, permissive/MIT licensed) ---
import vtk
import numpy as np
import vedo
import vedo.settings as settings
import vedo.utils as utils
import vedo.colors as colors
import vedo.shapes as shapes
import vedo.addons as addons
from vedo.assembly import Assembly
from vedo.mesh import Mesh, merge
from vedo.plotter import show # not used, but useful to import this
__doc__ = """Plotting utility functions.""" + vedo.docs._defs
__all__ = [
"plot",
"histogram",
"donut",
"quiver",
"violin",
"whisker",
"streamplot",
"matrix",
"DirectedGraph",
"show",
]
##########################################################################
class Plot(Assembly):
"""
Derived class of ``Assembly`` to manipulate plots.
"""
def __init__(self, *objs):
Assembly.__init__(self, *objs)
self.yscale = 1
self.aspect = 4 / 3.0
self.cut = True # todo
self.xlim = None
self.ylim = None
self.pad = 0.05
self._x0lim = None
self._y0lim = None
self._x1lim = None
self._y1lim = None
self.zmax = 0 # z-order
self.fixed_scale = 1
self.bins = []
self.freqs = []
def ybounds(self, scaled=True):
if scaled:
return (self._y0lim/self.yscale, self._y1lim/self.yscale)
else:
return (self._y0lim, self._y1lim)
def __iadd__(self, *objs):
"""
        Add an object to the plot, automatically taking into account the correct aspect ratio.
"""
# these types will scale proportionally to keep their native shape aspect ratio intact
typs = (
shapes.Text3D,
shapes.Polygon,
shapes.Star,
shapes.Disc,
shapes.Ellipsoid,
shapes.Latex,
shapes.Sphere,
# shapes.Arrow2D,
Assembly,
vedo.Picture,
)
self.fixed_scale = np.min([1, self.yscale])
        objs = objs[0]  # unpack the *objs tuple; a list is built below if needed
if not utils.isSequence(objs):
objs = [objs]
if not utils.isSequence(objs[0]) and isinstance(objs[0], Plot):
# is adding another whole Plot # TO BE REVISED
plot2 = objs[0]
plot_z = plot2.z() + (plot2._x1lim - plot2._x0lim)/1000 # add a small shift in z
# print(plot2.yscale, self.yscale)
elems = plot2.unpack()
objs2 = []
for e in elems:
if e.name == "axes":
continue
ec = e.clone()
# remove plot2.yscale and apply self.yscale:
ec.SetScale(1, self.yscale/plot2.yscale, 1)
self.AddPart(ec.z(plot_z))
objs2.append(ec)
objs = objs2
else:
# print('adding individual objects', len(objs))
for a in objs:
if isinstance(a, typs):
# special scaling to preserve the aspect ratio
# print('adding', a.name, 'fixed scale', self.fixed_scale)
a.scale(self.fixed_scale)
else:
# print('adding', a.name, 'yscale', self.yscale)
a.scale([1, self.yscale, 1])
py = a.y()
a.y(py * self.yscale)
self.AddPart(a)
if self.cut: # todo
for a in objs:
if not a or a.name == "axes":
continue
if self._y0lim is not None and hasattr(a, "cutWithPlane"):
a.cutWithPlane([0, self._y0lim, 0], [0, 1, 0])
if self._y1lim is not None and hasattr(a, "cutWithPlane"):
a.cutWithPlane([0, self._y1lim, 0], [0, -1, 0])
if self._x0lim is not None and hasattr(a, "cutWithPlane"):
a.cutWithPlane([self._x0lim, 0, 0], [1, 0, 0])
if self._x1lim is not None and hasattr(a, "cutWithPlane"):
a.cutWithPlane([self._x1lim, 0, 0], [-1, 0, 0])
return self
def overlayPlot(self, *args, **kwargs):
"""Plot on top of an already existing plot."""
kwargs['format'] = self
plt = plot(*args, **kwargs)
plt.format = self
for a in plt.unpack():
self.AddPart(a)
return self
def overlayHistogram(self, *args, **kwargs):
"""Plot histogram on top of an already existing plot."""
kwargs['format'] = self
h = histogram(*args, **kwargs)
h.format = self
for a in h.unpack():
self.AddPart(a)
return self
def plot(*args, **kwargs):
"""
Draw a 2D line plot, or scatter plot, of variable x vs variable y.
Input format can be either [allx], [allx, ally] or [(x1,y1), (x2,y2), ...]
:param list xerrors: set uncertainties for the x variable, shown as error bars.
:param list yerrors: set uncertainties for the y variable, shown as error bars.
:param bool errorBand: represent errors on y as a filled error band.
Use ``ec`` keyword to modify its color.
:param list xlim: set limits to the range for the x variable
:param list ylim: set limits to the range for the y variable
    :param float aspect: desired aspect ratio.
If None, it is automatically calculated to get a reasonable aspect ratio.
Scaling factor is saved in ``Plot.yscale``.
:param str c: color of frame and text.
:param float alpha: opacity of frame and text.
:param str xtitle: title label along x-axis.
:param str ytitle: title label along y-axis.
:param str title: histogram title on top.
:param float titleSize: size of title
:param str ec: color of error bar, by default the same as marker color
:param str lc: color of line
:param float la: transparency of line
:param float lw: width of line
:param bool dashed: use a dashed line style
    :param bool splined: spline the line joining the points as a continuous curve
:param str,int marker: use a marker shape for the data points
:param float ms: marker size.
:param str mc: color of marker
:param float ma: opacity of marker
:Example:
.. code-block:: python
from vedo.pyplot import plot
import numpy as np
x = np.linspace(0, 6.28, num=50)
plot(np.sin(x), 'r').plot(np.cos(x), 'bo-').show()
|simpleplot|
More examples:
|plot_errbars| |plot_errbars.py|_
|plot_errband| |plot_errband.py|_
|plot_pip| |plot_pip.py|_
|scatter1| |scatter1.py|_
|scatter2| |scatter2.py|_
    If input is an external function or a formula, draw the surface
representing the function :math:`f(x,y)`.
:param float x: x range of values.
:param float y: y range of values.
    :param float zlimits: limit the z range of the dependent variable.
:param int zlevels: will draw the specified number of z-levels contour lines.
:param bool showNan: show where the function does not exist as red points.
:param list bins: number of bins in x and y.
|plot_fxy| |plot_fxy.py|_
Function is: :math:`f(x,y)=\sin(3x) \cdot \log(x-y)/3` in range :math:`x=[0,3], y=[0,3]`.
If ``mode='complex'`` draw the real value of the function and color map the imaginary part.
:param str cmap: diverging color map (white means imag(z)=0).
    :param float lw: line width of the binning
:param list bins: binning in x and y
|fcomplex| |plot_fxy.py|_
If ``mode='polar'`` input arrays are interpreted as a list of polar angles and radii.
Build a polar (radar) plot by joining the set of points in polar coordinates.
:param str title: plot title
:param float tsize: title size
:param int bins: number of bins in phi
:param float r1: inner radius
:param float r2: outer radius
:param float lsize: label size
:param c: color of the line
:param bc: color of the frame and labels
:param alpha: alpha of the frame
:param int ps: point size in pixels, if ps=0 no point is drawn
:param int lw: line width in pixels, if lw=0 no line is drawn
:param bool deg: input array is in degrees
:param float vmax: normalize radius to this maximum value
:param bool fill: fill convex area with solid color
:param bool spline: interpolate the set of input points
:param bool showDisc: draw the outer ring axis
:param int nrays: draw this number of axis rays (continuous and dashed)
:param bool showLines: draw lines to the origin
:param bool showAngles: draw angle values
|histo_polar| |histo_polar.py|_
    If ``mode='spheric'`` the input is an external function rho(theta, phi).
A surface is created in spherical coordinates.
    Return a ``Plot(Assembly)`` of 2 objects, the unit grid
sphere (in wireframe representation) and the surface `rho(theta, phi)`.
:param function rfunc: handle to a user defined function.
:param bool normalize: scale surface to fit inside the unit sphere
:param int res: grid resolution
:param bool scalarbar: add a 3D scalarbar to the plot for radius
:param c: color of the unit grid
:param alpha: transparency of the unit grid
:param str cmap: color map of the surface
|plot_spheric| |plot_spheric.py|_
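    A minimal sketch for this mode (the radial function below is purely
    illustrative):

    .. code-block:: python

        from vedo.pyplot import plot
        import numpy as np
        def rho(theta, phi):
            return np.sin(3*phi)**2 + 0.2
        plot(rho, mode='spheric').show()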
"""
mode = kwargs.pop("mode", "")
if "spher" in mode:
return _plotSpheric(args[0], **kwargs)
if "bar" in mode:
return _barplot(args[0], **kwargs)
if isinstance(args[0], str) or "function" in str(type(args[0])):
if "complex" in mode:
return _plotFz(args[0], **kwargs)
return _plotFxy(args[0], **kwargs)
# grab the matplotlib-like options
optidx = None
for i, a in enumerate(args):
if i > 0 and isinstance(a, str):
optidx = i
break
if optidx:
opts = args[optidx].replace(" ", "")
if "--" in opts:
opts = opts.replace("--", "")
kwargs["dashed"] = True
elif "-" in opts:
opts = opts.replace("-", "")
else:
kwargs["lw"] = 0
symbs = [".", "p", "*", "h", "D", "d", "o", "v", "^", ">", "<", "s", "x", "+", "a"]
for ss in symbs:
if ss in opts:
opts = opts.replace(ss, "", 1)
kwargs["marker"] = ss
break
allcols = list(colors.color_nicks.keys()) + list(colors.colors.keys())
for cc in allcols:
if cc in opts:
opts = opts.replace(cc, "")
kwargs["lc"] = cc
kwargs["mc"] = cc
break
if opts:
colors.printc("Could not understand option(s):", opts, c="y")
if optidx == 1 or optidx is None:
if utils.isSequence(args[0][0]):
# print('case 1', 'plot([(x,y),..])')
data = np.array(args[0])
x = np.array(data[:, 0])
y = np.array(data[:, 1])
elif len(args) == 1 or optidx == 1:
# print('case 2', 'plot(x)')
x = np.linspace(0, len(args[0]), num=len(args[0]))
y = np.array(args[0])
elif utils.isSequence(args[1]):
# print('case 3', 'plot(allx,ally)')
x = np.array(args[0])
y = np.array(args[1])
elif utils.isSequence(args[0]) and utils.isSequence(args[0][0]):
# print('case 4', 'plot([allx,ally])')
x = np.array(args[0][0])
y = np.array(args[0][1])
elif optidx == 2:
# print('case 5', 'plot(x,y)')
x = np.array(args[0])
y = np.array(args[1])
else:
print("plot(): Could not understand input arguments", args)
return None
if "polar" in mode:
return _plotPolar(np.c_[x, y], **kwargs)
return _plotxy(np.c_[x, y], **kwargs)
def histogram(*args, **kwargs):
"""
Histogramming for 1D and 2D data arrays.
For 1D arrays:
:param int bins: number of bins.
:param list vrange: restrict the range of the histogram.
:param bool density: normalize the area to 1 by dividing by the nr of entries and bin size.
:param bool logscale: use logscale on y-axis.
    :param bool fill: fill bars with solid color `c`.
:param float gap: leave a small space btw bars.
:param bool outline: show outline of the bins.
:param bool errors: show error bars.
|histo_1D| |histo_1D.py|_
If ``mode='polar'`` assume input is polar coordinate system (rho, theta):
:param list weights: array of weights, of the same shape as the input.
Each value only contributes its associated weight towards the bin count (instead of 1).
:param str title: histogram title
:param float tsize: title size
:param int bins: number of bins in phi
:param float r1: inner radius
:param float r2: outer radius
:param float phigap: gap angle btw 2 radial bars, in degrees
:param float rgap: gap factor along radius of numeric angle labels
:param float lpos: label gap factor along radius
:param float lsize: label size
:param c: color of the histogram bars, can be a list of length `bins`.
:param bc: color of the frame and labels
:param alpha: alpha of the frame
:param str cmap: color map name
:param bool deg: input array is in degrees
:param float vmin: minimum value of the radial axis
:param float vmax: maximum value of the radial axis
:param list labels: list of labels, must be of length `bins`
:param bool showDisc: show the outer ring axis
:param int nrays: draw this number of axis rays (continuous and dashed)
:param bool showLines: show lines to the origin
:param bool showAngles: show angular values
:param bool showErrors: show error bars
|histo_polar| |histo_polar.py|_
For 2D arrays:
Input data formats [(x1,x2,..), (y1,y2,..)] or [(x1,y1), (x2,y2),..] are both valid.
:param str xtitle: x axis title
:param str ytitle: y axis title
:param list bins: binning as (nx, ny)
:param list vrange: range in x and y in format [(xmin,xmax), (ymin,ymax)]
:param str cmap: color map name
:param float lw: line width of the binning
:param bool scalarbar: add a scalarbar
|histo_2D| |histo_2D.py|_
If ``mode='hexbin'``, build a hexagonal histogram from a list of x and y values.
:param str xtitle: x axis title
:param str ytitle: y axis title
    :param int bins: nr of bins for the smaller range in x or y.
:param list vrange: range in x and y in format [(xmin,xmax), (ymin,ymax)]
:param float norm: sets a scaling factor for the z axis (freq. axis).
:param bool fill: draw solid hexagons.
:param str cmap: color map name for elevation.
|histo_hexagonal| |histo_hexagonal.py|_
If ``mode='spheric'``, build a histogram from list of theta and phi values.
:param float rmax: maximum radial elevation of bin
:param int res: sphere resolution
:param cmap: color map name
:param float lw: line width of the bin edges
:param bool scalarbar: add a scalarbar to plot
|histo_spheric| |histo_spheric.py|_
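    A minimal 1D sketch (the data below is randomly generated, for
    illustration only):

    .. code-block:: python

        from vedo.pyplot import histogram
        import numpy as np
        data = np.random.randn(1000)
        histogram(data, bins=20, title='gaussian sample').show()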
"""
mode = kwargs.pop("mode", "")
if len(args) == 2: # x, y
if "spher" in mode:
return _histogramSpheric(args[0], args[1], **kwargs)
if "hex" in mode:
return _histogramHexBin(args[0], args[1], **kwargs)
return _histogram2D(args[0], args[1], **kwargs)
elif len(args) == 1:
if isinstance(args[0], vedo.Volume):
data = args[0].pointdata[0]
elif isinstance(args[0], vedo.Points):
pd0 = args[0].pointdata[0]
if pd0:
data = pd0.ravel()
else:
data = args[0].celldata[0].ravel()
else:
data = np.array(args[0])
if "spher" in mode:
return _histogramSpheric(args[0][:, 0], args[0][:, 1], **kwargs)
if len(data.shape) == 1:
if "polar" in mode:
return _histogramPolar(data, **kwargs)
return _histogram1D(data, **kwargs)
else:
if "hex" in mode:
return _histogramHexBin(args[0][:, 0], args[0][:, 1], **kwargs)
return _histogram2D(args[0], **kwargs)
print("histogram(): Could not understand input", args[0])
return None
def fit(points,
deg=1,
niter=0,
nstd=3,
xerrors=None,
yerrors=None,
vrange=None,
res=250,
lw=3,
c='red4',
):
"""
Polynomial fitting in 2D with parameter error and error bands calculation.
Errors bars in both x and y are supported.
Additional information about the fitting output can be accessed. E.g.:
``fit = fitPolynomial(pts)``
- ``fit.coefficients``: contains the coefficient of the polynomial fit
- ``fit.coefficientErrors``: errors on the fitting coefficients,
these numbers only make sense if parameters are not correlated
- ``fit.MonteCarloCoefficients``: fitting coefficient set from MC generation
- ``fit.covarianceMatrix``: covariance matrix as a numpy array
- ``fit.reducedChi2``: reduced chi-square of the fitting
- ``fit.ndof``: number of degrees of freedom
- ``fit.dataSigma``: mean data dispersion from the central fit assuming Chi2=1
- ``fit.errorLines``: a ``vedo.Line`` object for the upper and lower error band
- ``fit.errorBand``: the ``vedo.Mesh`` object representing the error band
Errors on x and y can be specified. If left `None` an estimate is made from
the statistical spread of the dataset itself. Errors are always assumed gaussian.
:param int deg: degree of the polynomial to be fitted
:param int niter: number of monte-carlo iterations to compute error bands.
If set to 0, return the simple least-squares fit with naive error estimation
on coefficients only. A reasonable non-zero value to set is about 500, in
this case ``errorLines``, ``errorBand`` and the other class attributes are filled
:param int nstd: nr. of standard deviation to use for error calculation
:param list xerrors: array of the same length of points with the errors on x
:param list yerrors: array of the same length of points with the errors on y
:param list vrange: specify the domain range of the fitting line
(only affects visualization, but can be used to extrapolate the fit
outside the data range)
:param int res: resolution of the output fitted line and error lines
|fitPolynomial1| |fitPolynomial1.py|_
|fitPolynomial2| |fitPolynomial2.py|_
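    A minimal sketch (data, degree and number of iterations are illustrative):

    .. code-block:: python

        from vedo.pyplot import fit
        import numpy as np
        x = np.linspace(0, 2, 25)
        y = 3*x**2 - x + 1 + np.random.randn(25)*0.5
        fitline = fit(np.c_[x, y], deg=2, niter=500)
        print(fitline.coefficients, fitline.reducedChi2)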
"""
if isinstance(points, vedo.pointcloud.Points):
points = points.points()
points = np.asarray(points)
if len(points) == 2: # assume user is passing [x,y]
points = np.c_[points[0],points[1]]
x = points[:,0]
y = points[:,1] # ignore z
n = len(x)
ndof = n - deg - 1
if vrange is not None:
x0, x1 = vrange
else:
x0, x1 = np.min(x), np.max(x)
if xerrors is not None:
x0 -= xerrors[0]/2
x1 += xerrors[-1]/2
tol = (x1-x0)/1000
xr = np.linspace(x0,x1, res)
# project x errs on y
if xerrors is not None:
xerrors = np.asarray(xerrors)
if yerrors is not None:
yerrors = np.asarray(yerrors)
w = 1.0/yerrors
coeffs = np.polyfit(x, y, deg, w=w, rcond=None)
        else:
            coeffs = np.polyfit(x, y, deg, rcond=None)
            yerrors = np.zeros_like(x)  # no y errors supplied: propagate only the x errors
# update yerrors, 1 bootstrap iteration is enough
p1d = np.poly1d(coeffs)
der = (p1d(x+tol)-p1d(x))/tol
yerrors = np.sqrt(yerrors*yerrors + np.power(der*xerrors,2))
if yerrors is not None:
yerrors = np.asarray(yerrors)
w = 1.0/yerrors
coeffs, V = np.polyfit(x, y, deg, w=w, rcond=None, cov=True)
else:
w = 1
coeffs, V = np.polyfit(x, y, deg, rcond=None, cov=True)
p1d = np.poly1d(coeffs)
theor = p1d(xr)
l = shapes.Line(xr, theor, lw=lw, c=c).z(tol*2)
l.coefficients = coeffs
l.covarianceMatrix = V
residuals2_sum = np.sum(np.power(p1d(x)-y, 2))/ndof
sigma = np.sqrt(residuals2_sum)
l.reducedChi2 = np.sum(np.power((p1d(x)-y)*w, 2))/ndof
l.ndof = ndof
l.dataSigma = sigma # worked out from data using chi2=1 hypo
l.name = "LinePolynomialFit"
if not niter:
l.coefficientErrors = np.sqrt(np.diag(V))
return l ################################
if yerrors is not None:
sigma = yerrors
else:
w = None
l.reducedChi2 = 1
Theors, all_coeffs = [], []
for i in range(niter):
noise = np.random.randn(n)*sigma
Coeffs = np.polyfit(x, y + noise, deg, w=w, rcond=None)
all_coeffs.append(Coeffs)
P1d = np.poly1d(Coeffs)
Theor = P1d(xr)
Theors.append(Theor)
all_coeffs = np.array(all_coeffs)
l.MonteCarloCoefficients = all_coeffs
stds = np.std(Theors, axis=0)
l.coefficientErrors = np.std(all_coeffs, axis=0)
# check distributions on the fly
# for i in range(deg+1):
# vedo.pyplot.histogram(all_coeffs[:,i],title='par'+str(i)).show(new=1)
# vedo.pyplot.histogram(all_coeffs[:,0], all_coeffs[:,1],
# xtitle='param0', ytitle='param1',scalarbar=1).show(new=1)
# vedo.pyplot.histogram(all_coeffs[:,1], all_coeffs[:,2],
# xtitle='param1', ytitle='param2').show(new=1)
# vedo.pyplot.histogram(all_coeffs[:,0], all_coeffs[:,2],
# xtitle='param0', ytitle='param2').show(new=1)
error_lines = []
for i in [nstd, -nstd]:
el = shapes.Line(xr, theor+stds*i, lw=1, alpha=0.2, c='k').z(tol)
error_lines.append(el)
el.name = "ErrorLine for sigma="+str(i)
l.errorLines = error_lines
l1 = error_lines[0].points().tolist()
cband = l1 + list(reversed(error_lines[1].points().tolist())) + [l1[0]]
l.errorBand = shapes.Line(cband).triangulate().lw(0).c('k', 0.15)
l.errorBand.name = "PolynomialFitErrorBand"
return l
#########################################################################################
def _plotxy(
data,
format=None,
aspect=4/3,
xlim=None,
ylim=None,
xerrors=None,
yerrors=None,
title="",
xtitle="x",
ytitle="y",
titleSize=None,
c="k",
alpha=1,
ec=None,
lc="k",
la=1,
lw=3,
dashed=False,
spline=False,
errorBand=False,
marker="",
ms=None,
mc=None,
ma=None,
pad=0.05,
axes={},
):
line=False
if lw>0:
line=True
if marker == "" and not line and not spline:
line = True
# purge NaN from data
validIds = np.all(np.logical_not(np.isnan(data)), axis=1)
data = data[validIds]
offs = 0 # z offset
if format is not None: # reset to allow meaningful overlap
xlim = format.xlim
ylim = format.ylim
aspect = format.aspect
pad = format.pad
title = ""
xtitle = ""
ytitle = ""
offs = format.zmax
x0, y0 = np.min(data, axis=0)
x1, y1 = np.max(data, axis=0)
x0lim, x1lim = x0 - pad * (x1 - x0), x1 + pad * (x1 - x0)
y0lim, y1lim = y0 - pad * (y1 - y0), y1 + pad * (y1 - y0)
if y0lim == y1lim: # in case y is constant
y0lim = y0lim - (x1lim - x0lim) / 2
y1lim = y1lim + (x1lim - x0lim) / 2
elif x0lim == x1lim: # in case x is constant
x0lim = x0lim - (y1lim - y0lim) / 2
x1lim = x1lim + (y1lim - y0lim) / 2
if xlim is not None and xlim[0] is not None:
x0lim = xlim[0]
if xlim is not None and xlim[1] is not None:
x1lim = xlim[1]
if ylim is not None and ylim[0] is not None:
y0lim = ylim[0]
if ylim is not None and ylim[1] is not None:
y1lim = ylim[1]
dx = x1lim - x0lim
dy = y1lim - y0lim
if dx == 0 and dy == 0: # in case x and y are all constant
x0lim = x0lim - 1
x1lim = x1lim + 1
y0lim = y0lim - 1
y1lim = y1lim + 1
dx, dy = 1, 1
yscale = dx / dy / aspect
y0lim, y1lim = y0lim * yscale, y1lim * yscale
if format is not None:
x0lim = format._x0lim
y0lim = format._y0lim
x1lim = format._x1lim
y1lim = format._y1lim
yscale = format.yscale
dx = x1lim - x0lim
dy = y1lim - y0lim
offs += np.sqrt(dx * dx + dy * dy) / 10000
scale = np.array([[1, yscale]])
data = np.multiply(data, scale)
acts = []
# the line or spline
if dashed:
l = shapes.DashedLine(data, c=lc, alpha=la, lw=lw)
acts.append(l)
elif spline:
l = shapes.KSpline(data).lw(lw).c(lc).alpha(la)
acts.append(l)
elif line:
l = shapes.Line(data, c=lc, alpha=la).lw(lw)
acts.append(l)
if marker:
pts = shapes.Points(data)
if mc is None:
mc = lc
if ma is None:
ma = la
if utils.isSequence(ms): ### variable point size
mk = shapes.Marker(marker, s=1)
msv = np.zeros_like(pts.points())
msv[:, 0] = ms
marked = shapes.Glyph(
pts, glyphObj=mk, c=mc, orientationArray=msv, scaleByVectorSize=True
)
else: ### fixed point size
if ms is None:
ms = dx / 100.0
# print('automatic ms =', ms)
if utils.isSequence(mc):
# print('mc is sequence')
mk = shapes.Marker(marker, s=ms).triangulate()
msv = np.zeros_like(pts.points())
msv[:, 0] = 1
marked = shapes.Glyph(
pts, glyphObj=mk, c=mc, orientationArray=msv, scaleByVectorSize=True
)
else:
# print('mc is fixed color')
mk = shapes.Marker(marker, s=ms).triangulate()
marked = shapes.Glyph(pts, glyphObj=mk, c=mc)
marked.alpha(ma).z(offs)
acts.append(marked)
if ec is None:
if mc is not None:
ec = mc
else:
ec = lc
if xerrors is not None and not errorBand:
if len(xerrors) != len(data):
colors.printc("Error in plotxy(xerrors=...): mismatched array length.", c='r')
return None
errs = []
for i, dta in enumerate(data):
xval, yval = dta
xerr = xerrors[i] / 2
el = shapes.Line((xval - xerr, yval, offs), (xval + xerr, yval, offs))
errs.append(el)
mxerrs = merge(errs).c(ec).lw(lw).alpha(alpha).z(2 * offs)
acts.append(mxerrs)
if yerrors is not None and not errorBand:
if len(yerrors) != len(data):
colors.printc("Error in plotxy(yerrors=...): mismatched array length.", c='r')
return None
errs = []
for i in range(len(data)):
xval, yval = data[i]
yerr = yerrors[i] * yscale
el = shapes.Line((xval, yval - yerr, offs), (xval, yval + yerr, offs))
errs.append(el)
myerrs = merge(errs).c(ec).lw(lw).alpha(alpha).z(3 * offs)
acts.append(myerrs)
if errorBand:
epsy = np.zeros_like(data)
epsy[:, 1] = yerrors * yscale
data3dup = data + epsy
data3dup = np.c_[data3dup, np.zeros_like(yerrors)]
data3d_down = data - epsy
data3d_down = np.c_[data3d_down, np.zeros_like(yerrors)]
band = shapes.Ribbon(data3dup, data3d_down).z(-offs)
if ec is None:
band.c(lc)
else:
band.c(ec)
band.alpha(la).z(2 * offs)
acts.append(band)
for a in acts:
a.cutWithPlane([0, y0lim, 0], [0, 1, 0])
a.cutWithPlane([0, y1lim, 0], [0, -1, 0])
a.cutWithPlane([x0lim, 0, 0], [1, 0, 0])
a.cutWithPlane([x1lim, 0, 0], [-1, 0, 0])
a.lighting('off')
if title:
if titleSize is None:
titleSize = dx / 40.0
tit = shapes.Text3D(
title,
s=titleSize,
c=c,
depth=0,
alpha=alpha,
pos=((x0lim + x1lim) / 2, y1lim + (y1lim-y0lim) / 80, 0),
justify="bottom-center",
)
tit.pickable(False).z(3 * offs)
acts.append(tit)
if axes == 1 or axes == True:
axes = {}
if isinstance(axes, dict): #####################
ndiv = 6
if "numberOfDivisions" in axes.keys():
ndiv = axes["numberOfDivisions"]
tp, ts = utils.makeTicks(y0lim / yscale, y1lim / yscale, ndiv / aspect)
labs = []
for i in range(1, len(tp) - 1):
ynew = utils.linInterpolate(tp[i], [0, 1], [y0lim, y1lim])
# print(i, tp[i], ynew, ts[i])
labs.append([ynew, ts[i]])
if "xtitle" not in axes: axes["xtitle"] = xtitle
if "ytitle" not in axes: axes["ytitle"] = ytitle
axes["yValuesAndLabels"] = labs
axes["xrange"] = (x0lim, x1lim)
axes["yrange"] = (y0lim, y1lim)
axes["zrange"] = (0, 0)
# axes["c"] = "k"
axes["yUseBounds"] = True
axs = addons.Axes(**axes)
axs.name = "axes"
asse = Plot(acts, axs)
asse.axes = axs
asse.SetOrigin(x0lim, y0lim, 0)
else:
# settings.xtitle = xtitle
# settings.ytitle = ytitle
asse = Plot(acts)
asse.yscale = yscale
asse.xlim = xlim
asse.ylim = ylim
asse.aspect = aspect
asse.pad = pad
asse.title = title
asse.xtitle = xtitle
asse.ytitle = ytitle
asse._x0lim = x0lim
asse._y0lim = y0lim
asse._x1lim = x1lim
asse._y1lim = y1lim
asse.zmax = offs * 3 # z-order
asse.name = "plotxy"
return asse
def _plotFxy(
z,
xlim=(0, 3),
ylim=(0, 3),
zlim=(None, None),
showNan=True,
zlevels=10,
c=None,
bc="aqua",
alpha=1,
texture="paper4",
bins=(100, 100),
axes=True,
):
if isinstance(z, str):
try:
z = z.replace("math.", "").replace("np.", "")
namespace = locals()
code = "from math import*\ndef zfunc(x,y): return " + z
exec(code, namespace)
z = namespace["zfunc"]
except:
colors.printc("Syntax Error in _plotFxy()", c='r')
return None
if c is not None:
texture = None # disable
ps = vtk.vtkPlaneSource()
ps.SetResolution(bins[0], bins[1])
ps.SetNormal([0, 0, 1])
ps.Update()
poly = ps.GetOutput()
dx = xlim[1] - xlim[0]
dy = ylim[1] - ylim[0]
todel, nans = [], []
for i in range(poly.GetNumberOfPoints()):
px, py, _ = poly.GetPoint(i)
xv = (px + 0.5) * dx + xlim[0]
yv = (py + 0.5) * dy + ylim[0]
try:
zv = z(xv, yv)
except:
zv = 0
todel.append(i)
nans.append([xv, yv, 0])
poly.GetPoints().SetPoint(i, [xv, yv, zv])
if len(todel):
cellIds = vtk.vtkIdList()
poly.BuildLinks()
for i in todel:
poly.GetPointCells(i, cellIds)
for j in range(cellIds.GetNumberOfIds()):
poly.DeleteCell(cellIds.GetId(j)) # flag cell
poly.RemoveDeletedCells()
cl = vtk.vtkCleanPolyData()
cl.SetInputData(poly)
cl.Update()
poly = cl.GetOutput()
if not poly.GetNumberOfPoints():
colors.printc("Function is not real in the domain", c='r')
return None
if zlim[0]:
tmpact1 = Mesh(poly)
a = tmpact1.cutWithPlane((0, 0, zlim[0]), (0, 0, 1))
poly = a.polydata()
if zlim[1]:
tmpact2 = Mesh(poly)
a = tmpact2.cutWithPlane((0, 0, zlim[1]), (0, 0, -1))
poly = a.polydata()
cmap=''
if c in colors.cmaps_names:
cmap = c
c = None
bc= None
mesh = Mesh(poly, c, alpha).computeNormals().lighting("plastic")
if cmap:
mesh.addElevationScalars().cmap(cmap)
if bc:
mesh.bc(bc)
if texture:
mesh.texture(texture)
acts = [mesh]
if zlevels:
elevation = vtk.vtkElevationFilter()
elevation.SetInputData(poly)
bounds = poly.GetBounds()
elevation.SetLowPoint(0, 0, bounds[4])
elevation.SetHighPoint(0, 0, bounds[5])
elevation.Update()
bcf = vtk.vtkBandedPolyDataContourFilter()
bcf.SetInputData(elevation.GetOutput())
bcf.SetScalarModeToValue()
bcf.GenerateContourEdgesOn()
bcf.GenerateValues(zlevels, elevation.GetScalarRange())
bcf.Update()
zpoly = bcf.GetContourEdgesOutput()
zbandsact = Mesh(zpoly, "k", alpha).lw(1).lighting('off')
zbandsact._mapper.SetResolveCoincidentTopologyToPolygonOffset()
acts.append(zbandsact)
if showNan and len(todel):
bb = mesh.GetBounds()
if bb[4] <= 0 and bb[5] >= 0:
zm = 0.0
else:
zm = (bb[4] + bb[5]) / 2
nans = np.array(nans) + [0, 0, zm]
nansact = shapes.Points(nans, r=2, c="red", alpha=alpha)
nansact.GetProperty().RenderPointsAsSpheresOff()
acts.append(nansact)
if axes:
axs = addons.Axes(mesh)
acts.append(axs)
asse = Assembly(acts)
asse.name = "plotFxy"
if isinstance(z, str):
asse.name += " " + z
return asse
def _plotFz(
z,
x=(-1, 1),
y=(-1, 1),
zlimits=(None, None),
cmap="PiYG",
alpha=1,
lw=0.1,
bins=(75, 75),
axes=True,
):
if isinstance(z, str):
try:
z = z.replace("np.", "")
namespace = locals()
code = "from math import*\ndef zfunc(x,y): return " + z
exec(code, namespace)
z = namespace["zfunc"]
except:
colors.printc("Syntax Error in complex plotFz()", c='r')
return None
ps = vtk.vtkPlaneSource()
ps.SetResolution(bins[0], bins[1])
ps.SetNormal([0, 0, 1])
ps.Update()
poly = ps.GetOutput()
dx = x[1] - x[0]
dy = y[1] - y[0]
arrImg = []
for i in range(poly.GetNumberOfPoints()):
px, py, _ = poly.GetPoint(i)
xv = (px + 0.5) * dx + x[0]
yv = (py + 0.5) * dy + y[0]
try:
            zv = z(complex(xv), complex(yv))
except:
zv = 0
poly.GetPoints().SetPoint(i, [xv, yv, np.real(zv)])
arrImg.append(np.imag(zv))
mesh = Mesh(poly, alpha).lighting("plastic")
v = max(abs(np.min(arrImg)), abs(np.max(arrImg)))
mesh.cmap(cmap, arrImg, vmin=-v, vmax=v)
mesh.computeNormals().lw(lw)
if zlimits[0]:
mesh.cutWithPlane((0, 0, zlimits[0]), (0, 0, 1))
if zlimits[1]:
mesh.cutWithPlane((0, 0, zlimits[1]), (0, 0, -1))
acts = [mesh]
if axes:
axs = addons.Axes(mesh, ztitle="Real part")
acts.append(axs)
asse = Assembly(acts)
asse.name = "plotFz"
if isinstance(z, str):
asse.name += " " + z
return asse
def _plotPolar(
rphi,
title="",
tsize=0.1,
lsize=0.05,
r1=0,
r2=1,
c="blue",
bc="k",
alpha=1,
ps=5,
lw=3,
deg=False,
vmax=None,
fill=False,
spline=False,
smooth=0,
showDisc=True,
nrays=8,
showLines=True,
showAngles=True,
):
if len(rphi) == 2:
rphi = np.stack((rphi[0], rphi[1]), axis=1)
rphi = np.array(rphi)
thetas = rphi[:, 0]
radii = rphi[:, 1]
k = 180 / np.pi
if deg:
thetas = np.array(thetas) / k
vals = []
for v in thetas: # normalize range
t = np.arctan2(np.sin(v), np.cos(v))
if t < 0:
t += 2 * np.pi
vals.append(t)
thetas = np.array(vals)
if vmax is None:
vmax = np.max(radii)
angles = []
points = []
for i in range(len(thetas)):
t = thetas[i]
r = (radii[i]) / vmax * r2 + r1
ct, st = np.cos(t), np.sin(t)
points.append([r * ct, r * st, 0])
p0 = points[0]
points.append(p0)
r2e = r1 + r2
lines = None
if spline:
lines = shapes.KSpline(points, closed=True)
lines.c(c).lw(lw).alpha(alpha)
elif lw:
lines = shapes.Line(points)
lines.c(c).lw(lw).alpha(alpha)
points.pop()
ptsact = None
if ps:
ptsact = shapes.Points(points, r=ps, c=c, alpha=alpha)
filling = None
if fill and lw:
faces = []
coords = [[0, 0, 0]] + lines.points().tolist()
for i in range(1, lines.N()):
faces.append([0, i, i + 1])
filling = Mesh([coords, faces]).c(c).alpha(alpha)
back = None
back2 = None
if showDisc:
back = shapes.Disc(r1=r2e, r2=r2e * 1.01, c=bc, res=(1,360))
back.z(-0.01).lighting('off').alpha(alpha)
back2 = shapes.Disc(r1=r2e/2, r2=r2e/2 * 1.005, c=bc, res=(1,360))
back2.z(-0.01).lighting('off').alpha(alpha)
ti = None
if title:
ti = shapes.Text3D(title, (0, 0, 0), s=tsize, depth=0, justify="top-center")
ti.pos(0, -r2e * 1.15, 0.01)
rays = []
if showDisc:
rgap = 0.05
for t in np.linspace(0, 2 * np.pi, num=nrays, endpoint=False):
ct, st = np.cos(t), np.sin(t)
if showLines:
l = shapes.Line((0, 0, -0.01), (r2e * ct * 1.03, r2e * st * 1.03, -0.01))
rays.append(l)
ct2, st2 = np.cos(t+np.pi/nrays), np.sin(t+np.pi/nrays)
lm = shapes.DashedLine((0, 0, -0.01),
(r2e * ct2, r2e * st2, -0.01),
spacing=0.25)
rays.append(lm)
elif showAngles: # just the ticks
l = shapes.Line(
(r2e * ct * 0.98, r2e * st * 0.98, -0.01),
(r2e * ct * 1.03, r2e * st * 1.03, -0.01),
)
if showAngles:
if 0 <= t < np.pi / 2:
ju = "bottom-left"
elif t == np.pi / 2:
ju = "bottom-center"
elif np.pi / 2 < t <= np.pi:
ju = "bottom-right"
elif np.pi < t < np.pi * 3 / 2:
ju = "top-right"
elif t == np.pi * 3 / 2:
ju = "top-center"
else:
ju = "top-left"
a = shapes.Text3D(int(t * k), pos=(0, 0, 0), s=lsize, depth=0, justify=ju)
a.pos(r2e * ct * (1 + rgap), r2e * st * (1 + rgap), -0.01)
angles.append(a)
mrg = merge(back, back2, angles, rays, ti)
if mrg:
mrg.color(bc).alpha(alpha).lighting('off')
rh = Assembly([lines, ptsact, filling] + [mrg])
rh.base = np.array([0, 0, 0])
rh.top = np.array([0, 0, 1])
rh.name = "plotPolar"
return rh
def _plotSpheric(rfunc, normalize=True, res=33, scalarbar=True, c="grey", alpha=0.05, cmap="jet"):
sg = shapes.Sphere(res=res, quads=True)
sg.alpha(alpha).c(c).wireframe()
cgpts = sg.points()
r, theta, phi = utils.cart2spher(*cgpts.T)
newr, inans = [], []
for i in range(len(r)):
try:
ri = rfunc(theta[i], phi[i])
if np.isnan(ri):
inans.append(i)
newr.append(1)
else:
newr.append(ri)
except:
inans.append(i)
newr.append(1)
newr = np.array(newr)
if normalize:
newr = newr / np.max(newr)
newr[inans] = 1
nanpts = []
if len(inans):
redpts = utils.spher2cart(newr[inans], theta[inans], phi[inans])
nanpts.append(shapes.Points(redpts, r=4, c="r"))
pts = utils.spher2cart(newr, theta, phi)
ssurf = sg.clone().points(pts)
if len(inans):
ssurf.deletePoints(inans)
ssurf.alpha(1).wireframe(0).lw(0.1)
ssurf.cmap(cmap, newr)
ssurf.computeNormals()
if scalarbar:
xm = np.max([np.max(pts[0]), 1])
ym = np.max([np.abs(np.max(pts[1])), 1])
ssurf.mapper().SetScalarRange(np.min(newr), np.max(newr))
sb3d = ssurf.addScalarBar3D(sx=xm * 0.07, sy=ym, c='k').scalarbar
sb3d.rotateX(90).pos(xm * 1.1, 0, -0.5)
else:
sb3d = None
sg.pickable(False)
asse = Assembly([ssurf, sg] + nanpts + [sb3d])
asse.name = "plotSpheric"
return asse
#########################################################################################
def _barplot(
data,
format=None,
errors=False,
aspect=4/3,
xlim=None,
ylim=(0,None),
xtitle=" ",
ytitle="counts",
title="",
titleSize=None,
titleColor=None,
logscale=False,
fill=True,
c="olivedrab",
gap=0.02,
alpha=1,
outline=False,
lw=2,
lc="k",
pad=0.05,
axes={},
bc="k",
):
offs = 0 # z offset
if len(data) == 4:
counts, xlabs, cols, edges = data
elif len(data) == 3:
counts, xlabs, cols = data
edges = np.array(range(len(counts)+1))+0.5
elif len(data) == 2:
counts, xlabs = data
edges = np.array(range(len(counts)+1))+0.5
cols = [c] * len(counts)
else:
m = "barplot error: data must be given as [counts, labels, colors, edges] not\n"
colors.printc(m, data, c='r')
colors.printc(" bin edges and colors are optional. Abort.", c='r')
raise RuntimeError()
counts = np.asarray(counts)
edges = np.asarray(edges)
# sanity checks
assert len(counts) == len(xlabs)
assert len(counts) == len(cols)
assert len(counts) == len(edges)-1
if format is not None: # reset to allow meaningful overlap
xlim = format.xlim
ylim = format.ylim
aspect = format.aspect
pad = format.pad
axes = 0
title = ""
xtitle = ""
ytitle = ""
offs = format.zmax
if logscale:
counts = np.log10(counts + 1)
if ytitle=='counts':
ytitle='log_10 (counts+1)'
x0, x1 = np.min(edges), np.max(edges)
y0, y1 = 0, np.max(counts)
binsize = edges[1] - edges[0]
x0lim, x1lim = x0 - pad * (x1 - x0), x1 + pad * (x1 - x0)
y0lim, y1lim = y0 - pad * (y1 - y0) / 100, y1 + pad * (y1 - y0)
if errors:
y1lim += np.sqrt(y1) / 2
if y0lim == y1lim: # in case y is constant
y0lim = y0lim - (x1lim - x0lim) / 2
y1lim = y1lim + (x1lim - x0lim) / 2
elif x0lim == x1lim: # in case x is constant
x0lim = x0lim - (y1lim - y0lim) / 2
x1lim = x1lim + (y1lim - y0lim) / 2
if xlim is not None and xlim[0] is not None:
x0lim = xlim[0]
if xlim is not None and xlim[1] is not None:
x1lim = xlim[1]
if ylim is not None and ylim[0] is not None:
y0lim = ylim[0]
if ylim is not None and ylim[1] is not None:
y1lim = ylim[1]
dx = x1lim - x0lim
dy = y1lim - y0lim
if dx == 0 and dy == 0: # in case x and y are all constant
x0lim = x0lim - 1
x1lim = x1lim + 1
y0lim = y0lim - 1
y1lim = y1lim + 1
dx, dy = 1, 1
yscale = dx / dy / aspect
y0lim, y1lim = y0lim * yscale, y1lim * yscale
if format is not None:
x0lim = format._x0lim
y0lim = format._y0lim
x1lim = format._x1lim
y1lim = format._y1lim
yscale = format.yscale
dx = x1lim - x0lim
dy = y1lim - y0lim
offs += np.sqrt(dx * dx + dy * dy) / 10000
counts = counts * yscale
centers = (edges[0:-1] + edges[1:]) / 2
rs = []
maxheigth = 0
if fill: #####################
if outline:
gap = 0
for i in range(len(centers)):
p0 = (edges[i] + gap * binsize, 0, 0)
p1 = (edges[i + 1] - gap * binsize, counts[i], 0)
r = shapes.Rectangle(p0, p1)
r.origin(p0).PickableOff()
maxheigth = max(maxheigth, p1[1])
if c in colors.cmaps_names:
col = colors.colorMap((p0[0]+p1[0])/2, c, edges[0], edges[-1])
else:
col = cols[i]
r.color(col).alpha(alpha).lighting('off').z(offs)
r.name = f'bar_{i}'
rs.append(r)
if outline or not fill: #####################
lns = [[edges[0], 0, 0]]
for i in range(len(centers)):
lns.append([edges[i], counts[i], 0])
lns.append([edges[i + 1], counts[i], 0])
maxheigth = max(maxheigth, counts[i])
lns.append([edges[-1], 0, 0])
outl = shapes.Line(lns, c=lc, alpha=alpha, lw=lw).z(offs)
outl.name = f'bar_outline_{i}'
rs.append(outl)
bin_centers_pos = []
for i in range(len(centers)):
if counts[i]:
bin_centers_pos.append([centers[i], counts[i], 0])
if errors: #####################
for bcp in bin_centers_pos:
x = bcp[0]
f = bcp[1]
err = np.sqrt(f / yscale) * yscale
el = shapes.Line([x, f-err/2, 0], [x, f+err/2, 0], c=lc, alpha=alpha, lw=lw)
el.z(offs * 1.9)
rs.append(el)
# print('errors', el.z())
for a in rs: #####################
a.cutWithPlane([0, y0lim, 0], [0, 1, 0])
a.cutWithPlane([0, y1lim, 0], [0, -1, 0])
a.cutWithPlane([x0lim, 0, 0], [1, 0, 0])
a.cutWithPlane([x1lim, 0, 0], [-1, 0, 0])
a.lighting('off')
if title: #####################
if titleColor is None:
titleColor = bc
if titleSize is None:
titleSize = dx / 40.0
tit = shapes.Text3D(
title,
s=titleSize,
c=titleColor,
depth=0,
alpha=alpha,
pos=((x0lim + x1lim) / 2, y1lim + (y1lim-y0lim) / 80, 0),
justify="bottom-center",
)
tit.pickable(False).z(2.5 * offs)
rs.append(tit)
if axes == 1 or axes == True: #####################
axes = {}
if isinstance(axes, dict):
ndiv = 6
if "numberOfDivisions" in axes:
ndiv = axes["numberOfDivisions"]
tp, ts = utils.makeTicks(y0lim / yscale, y1lim / yscale, ndiv / aspect)
ylabs = []
for i in range(1, len(tp) - 1):
ynew = utils.linInterpolate(tp[i], [0, 1], [y0lim, y1lim])
ylabs.append([ynew, ts[i]])
axes["yValuesAndLabels"] = ylabs
_xlabs = []
for i in range(len(centers)):
_xlabs.append([centers[i], str(xlabs[i])])
axes["xValuesAndLabels"] = _xlabs
if "xtitle" not in axes: axes["xtitle"] = xtitle
if "ytitle" not in axes: axes["ytitle"] = ytitle
axes["xrange"] = (x0lim, x1lim)
axes["yrange"] = (y0lim, y1lim)
axes["zrange"] = (0, 0)
axes["c"] = bc
axs = addons.Axes(**axes)
axs.name = "axes"
asse = Plot(rs, axs)
asse.axes = axs
asse.SetOrigin(x0lim, y0lim, 0)
else:
# settings.xtitle = xtitle
# settings.ytitle = ytitle
asse = Plot(rs)
asse.yscale = yscale
asse.xlim = xlim
asse.ylim = ylim
asse.aspect = aspect
asse.pad = pad
asse.title = title
asse.xtitle = xtitle
asse.ytitle = ytitle
asse._x0lim = x0lim
asse._y0lim = y0lim
asse._x1lim = x1lim
asse._y1lim = y1lim
asse.zmax = offs * 3 # z-order
asse.bins = edges
asse.centers = centers
asse.freqs = counts / yscale
asse.name = "BarPlot"
return asse
#########################################################################################
def _histogram1D(
data,
format=None,
bins=25,
aspect=4/3,
xlim=None,
ylim=(0,None),
errors=False,
title="",
xtitle=" ",
ytitle="counts",
titleSize=None,
titleColor=None,
density=False,
logscale=False,
fill=True,
c="olivedrab",
gap=0.02,
alpha=1,
outline=False,
lw=2,
lc="k",
marker="",
ms=None,
mc=None,
ma=None,
pad=0.05,
axes={},
bc="k",
):
# purge NaN from data
    data = np.asarray(data)
    validIds = np.logical_not(np.isnan(data))
    data = data[validIds]
offs = 0 # z offset
if format is not None: # reset to allow meaningful overlap
xlim = format.xlim
ylim = format.ylim
aspect = format.aspect
pad = format.pad
bins = format.bins
axes = 0
title = ""
xtitle = ""
ytitle = ""
offs = format.zmax
fs, edges = np.histogram(data, bins=bins, range=xlim)
# print('frequencies', fs)
# print('edges', edges)
if density:
ntot = len(data.ravel())
binsize = edges[1]-edges[0]
fs = fs/(ntot*binsize)
if ytitle=='counts':
ytitle=f"counts/({ntot}~\dot~{utils.precision(binsize,3)})"
elif logscale:
fs = np.log10(fs + 1)
if ytitle=='counts':
ytitle='log_10 (counts+1)'
x0, x1 = np.min(edges), np.max(edges)
y0, y1 = 0, np.max(fs)
binsize = edges[1] - edges[0]
x0lim, x1lim = x0 - pad * (x1 - x0), x1 + pad * (x1 - x0)
y0lim, y1lim = y0 - pad * (y1 - y0) / 100, y1 + pad * (y1 - y0)
if errors:
y1lim += np.sqrt(y1) / 2
if y0lim == y1lim: # in case y is constant
y0lim = y0lim - (x1lim - x0lim) / 2
y1lim = y1lim + (x1lim - x0lim) / 2
elif x0lim == x1lim: # in case x is constant
x0lim = x0lim - (y1lim - y0lim) / 2
x1lim = x1lim + (y1lim - y0lim) / 2
if xlim is not None and xlim[0] is not None:
x0lim = xlim[0]
if xlim is not None and xlim[1] is not None:
x1lim = xlim[1]
if ylim is not None and ylim[0] is not None:
y0lim = ylim[0]
if ylim is not None and ylim[1] is not None:
y1lim = ylim[1]
dx = x1lim - x0lim
dy = y1lim - y0lim
if dx == 0 and dy == 0: # in case x and y are all constant
x0lim = x0lim - 1
x1lim = x1lim + 1
y0lim = y0lim - 1
y1lim = y1lim + 1
dx, dy = 1, 1
yscale = dx / dy / aspect
y0lim, y1lim = y0lim * yscale, y1lim * yscale
if format is not None:
x0lim = format._x0lim
y0lim = format._y0lim
x1lim = format._x1lim
y1lim = format._y1lim
yscale = format.yscale
dx = x1lim - x0lim
dy = y1lim - y0lim
offs += np.sqrt(dx * dx + dy * dy) / 10000
fs = fs * yscale
if utils.isSequence(bins):
myedges = np.array(bins)
bins = len(bins) - 1
else:
myedges = edges
rs = []
maxheigth = 0
if fill: #####################
if outline:
gap = 0
for i in range(bins):
p0 = (myedges[i] + gap * binsize, 0, 0)
p1 = (myedges[i + 1] - gap * binsize, fs[i], 0)
r = shapes.Rectangle(p0, p1)
r.origin(p0).PickableOff()
maxheigth = max(maxheigth, p1[1])
if c in colors.cmaps_names:
col = colors.colorMap((p0[0]+p1[0])/2, c, myedges[0], myedges[-1])
else:
col = c
r.color(col).alpha(alpha).lighting('off').z(offs)
rs.append(r)
# print('rectangles', r.z())
if outline: #####################
lns = [[myedges[0], 0, 0]]
for i in range(bins):
lns.append([myedges[i], fs[i], 0])
lns.append([myedges[i + 1], fs[i], 0])
maxheigth = max(maxheigth, fs[i])
lns.append([myedges[-1], 0, 0])
outl = shapes.Line(lns, c=lc, alpha=alpha, lw=lw).z(offs)
rs.append(outl)
# print('histo outline', outl.z())
bin_centers_pos = []
for i in range(bins):
x = (myedges[i] + myedges[i + 1]) / 2
if fs[i]:
bin_centers_pos.append([x, fs[i], 0])
if marker: #####################
pts = shapes.Points(bin_centers_pos)
if mc is None:
mc = lc
if ma is None:
ma = alpha
if utils.isSequence(ms): ### variable point size
mk = shapes.Marker(marker, s=1)
msv = np.zeros_like(pts.points())
msv[:, 0] = ms
marked = shapes.Glyph(
pts, glyphObj=mk, c=mc, orientationArray=msv, scaleByVectorSize=True
)
else: ### fixed point size
if ms is None:
ms = dx / 100.0
if utils.isSequence(mc):
mk = shapes.Marker(marker, s=ms)
msv = np.zeros_like(pts.points())
msv[:, 0] = 1
marked = shapes.Glyph(
pts, glyphObj=mk, c=mc, orientationArray=msv, scaleByVectorSize=True
)
else:
mk = shapes.Marker(marker, s=ms)
marked = shapes.Glyph(pts, glyphObj=mk, c=mc)
marked.alpha(ma).z(offs * 2)
# print('marker', marked.z())
rs.append(marked)
if errors: #####################
for bcp in bin_centers_pos:
x = bcp[0]
f = bcp[1]
err = np.sqrt(f / yscale) * yscale
el = shapes.Line([x, f-err/2, 0], [x, f+err/2, 0], c=lc, alpha=alpha, lw=lw)
el.z(offs * 1.9)
rs.append(el)
# print('errors', el.z())
for a in rs: #####################
a.cutWithPlane([0, y0lim, 0], [0, 1, 0])
a.cutWithPlane([0, y1lim, 0], [0, -1, 0])
a.cutWithPlane([x0lim, 0, 0], [1, 0, 0])
a.cutWithPlane([x1lim, 0, 0], [-1, 0, 0])
a.lighting('off').phong()
if title: #####################
if titleColor is None:
titleColor = bc
if titleSize is None:
titleSize = dx / 40.0
tit = shapes.Text3D(
title,
s=titleSize,
c=titleColor,
depth=0,
alpha=alpha,
pos=((x0lim + x1lim) / 2, y1lim + (y1lim-y0lim) / 80, 0),
justify="bottom-center",
)
tit.pickable(False).z(2.5 * offs)
rs.append(tit)
if axes == 1 or axes == True:
axes = {}
if isinstance(axes, dict): #####################
ndiv = 6
if "numberOfDivisions" in axes.keys():
ndiv = axes["numberOfDivisions"]
tp, ts = utils.makeTicks(y0lim / yscale, y1lim / yscale, ndiv / aspect)
labs = []
for i in range(1, len(tp) - 1):
ynew = utils.linInterpolate(tp[i], [0, 1], [y0lim, y1lim])
labs.append([ynew, ts[i]])
if "xtitle" not in axes: axes["xtitle"] = xtitle
if "ytitle" not in axes: axes["ytitle"] = ytitle
axes["yValuesAndLabels"] = labs
axes["xrange"] = (x0lim, x1lim)
axes["yrange"] = (y0lim, y1lim)
axes["zrange"] = (0, 0)
axes["c"] = bc
axs = addons.Axes(**axes)
axs.name = "axes"
asse = Plot(rs, axs)
asse.axes = axs
asse.SetOrigin(x0lim, y0lim, 0)
else:
# settings.xtitle = xtitle
# settings.ytitle = ytitle
asse = Plot(rs)
asse.yscale = yscale
asse.xlim = xlim
asse.ylim = ylim
asse.aspect = aspect
asse.pad = pad
asse.title = title
asse.xtitle = xtitle
asse.ytitle = ytitle
asse._x0lim = x0lim
asse._y0lim = y0lim
asse._x1lim = x1lim
asse._y1lim = y1lim
asse.zmax = offs * 3 # z-order
asse.bins = edges
asse.centers = (edges[0:-1] + edges[1:]) / 2
asse.freqs = fs / yscale
asse.name = "histogram1D"
return asse
def _histogram2D(
xvalues,
yvalues=None,
format=None,
bins=25,
aspect=1,
xlim=None,
ylim=None,
weights=None,
cmap="cividis",
alpha=1,
title="",
xtitle="x",
ytitle="y",
ztitle="z",
titleSize=None,
titleColor=None,
# logscale=False,
lw=0,
scalarbar=True,
axes=True,
bc="k",
):
offs = 0 # z offset
if format is not None: # reset to allow meaningful overlap
xlim = format.xlim
ylim = format.ylim
aspect = format.aspect
bins = format.bins
axes = 0
title = ""
xtitle = ""
ytitle = ""
ztitle = ""
offs = format.zmax
if yvalues is None:
# assume [(x1,y1), (x2,y2) ...] format
yvalues = xvalues[:, 1]
xvalues = xvalues[:, 0]
if isinstance(bins, int):
bins = (bins, bins)
H, xedges, yedges = np.histogram2d(xvalues, yvalues, weights=weights,
bins=bins, range=(xlim, ylim))
x0lim, x1lim = np.min(xedges), np.max(xedges)
y0lim, y1lim = np.min(yedges), np.max(yedges)
dx, dy = x1lim - x0lim, y1lim - y0lim
if dx == 0 and dy == 0: # in case x and y are all constant
x0lim = x0lim - 1
x1lim = x1lim + 1
y0lim = y0lim - 1
y1lim = y1lim + 1
dx, dy = 1, 1
yscale = dx / dy / aspect
y0lim, y1lim = y0lim * yscale, y1lim * yscale
acts = []
#####################
g = shapes.Grid(
pos=[(x0lim + x1lim) / 2, (y0lim + y1lim) / 2, 0],
sx=dx,
sy=dy * yscale,
resx=bins[0],
resy=bins[1],
)
g.alpha(alpha).lw(lw).wireframe(0).flat().lighting('off')
g.cmap(cmap, np.ravel(H.T), on='cells')
g.SetOrigin(x0lim, y0lim, 0)
if scalarbar:
sc = g.addScalarBar3D(c=bc).scalarbar
scy0, scy1 = sc.ybounds()
sc_scale = (y1lim-y0lim)/(scy1-scy0)
sc.scale(sc_scale)
acts.append(sc)
g.base = np.array([0, 0, 0])
g.top = np.array([0, 0, 1])
acts.append(g)
if title: #####################
if titleColor is None:
titleColor = bc
if titleSize is None:
titleSize = dx / 40.0
tit = shapes.Text3D(
title,
s=titleSize,
c=titleColor,
depth=0,
alpha=alpha,
pos=((x0lim + x1lim) / 2, y1lim + (y1lim-y0lim) / 80, 0),
justify="bottom-center",
)
tit.pickable(False).z(2.5 * offs)
acts.append(tit)
if axes == 1 or axes == True: #####################
axes = {"xyGridTransparent": True, "xyAlpha": 0}
if isinstance(axes, dict):
ndiv = 6
if "numberOfDivisions" in axes.keys():
ndiv = axes["numberOfDivisions"]
tp, ts = utils.makeTicks(y0lim / yscale, y1lim / yscale, ndiv / aspect)
labs = []
for i in range(1, len(tp) - 1):
ynew = utils.linInterpolate(tp[i], [0, 1], [y0lim, y1lim])
labs.append([ynew, ts[i]])
if "xtitle" not in axes: axes["xtitle"] = xtitle
if "ytitle" not in axes: axes["ytitle"] = ytitle
if "ztitle" not in axes: axes["ztitle"] = ztitle
axes["yValuesAndLabels"] = labs
axes["xrange"] = (x0lim, x1lim)
axes["yrange"] = (y0lim, y1lim)
axes["zrange"] = (0, 0) # todo
axes["c"] = bc
axs = addons.Axes(**axes)
axs.name = "axes"
asse = Plot(acts, axs)
asse.axes = axs
asse.SetOrigin(x0lim, y0lim, 0)
else:
# settings.xtitle = xtitle
# settings.ytitle = ytitle
# settings.ytitle = ztitle
asse = Plot(acts)
asse.yscale = yscale
asse.xlim = xlim
asse.ylim = ylim
asse.aspect = aspect
asse.title = title
asse.xtitle = xtitle
asse.ytitle = ytitle
asse._x0lim = x0lim
asse._y0lim = y0lim
asse._x1lim = x1lim
asse._y1lim = y1lim
asse.freqs = H
asse.bins = (xedges, yedges)
asse.zmax = offs * 3 # z-order
asse.name = "histogram2D"
return asse
def _histogramHexBin(
xvalues,
yvalues,
xtitle="",
ytitle="",
ztitle="",
bins=12,
vrange=None,
norm=1,
fill=True,
c=None,
cmap="terrain_r",
alpha=1,
):
# if xtitle:
# settings.xtitle = xtitle
# if ytitle:
# settings.ytitle = ytitle
# if ztitle:
# settings.ztitle = ztitle
xmin, xmax = np.min(xvalues), np.max(xvalues)
ymin, ymax = np.min(yvalues), np.max(yvalues)
dx, dy = xmax - xmin, ymax - ymin
if utils.isSequence(bins):
n,m = bins
else:
if xmax - xmin < ymax - ymin:
n = bins
m = np.rint(dy / dx * n / 1.2 + 0.5).astype(int)
else:
m = bins
n = np.rint(dx / dy * m * 1.2 + 0.5).astype(int)
src = vtk.vtkPointSource()
src.SetNumberOfPoints(len(xvalues))
src.Update()
pointsPolydata = src.GetOutput()
# values = list(zip(xvalues, yvalues))
values = np.stack((xvalues, yvalues), axis=1)
zs = [[0.0]] * len(values)
values = np.append(values, zs, axis=1)
pointsPolydata.GetPoints().SetData(utils.numpy2vtk(values, dtype=float))
cloud = Mesh(pointsPolydata)
col = None
if c is not None:
col = colors.getColor(c)
hexs, binmax = [], 0
ki, kj = 1.33, 1.12
r = 0.47 / n * 1.2 * dx
for i in range(n + 3):
for j in range(m + 2):
cyl = vtk.vtkCylinderSource()
cyl.SetResolution(6)
cyl.CappingOn()
cyl.SetRadius(0.5)
cyl.SetHeight(0.1)
cyl.Update()
t = vtk.vtkTransform()
if not i % 2:
p = (i / ki, j / kj, 0)
else:
p = (i / ki, j / kj + 0.45, 0)
q = (p[0] / n * 1.2 * dx + xmin, p[1] / m * dy + ymin, 0)
ids = cloud.closestPoint(q, radius=r, returnCellId=True)
ne = len(ids)
if fill:
t.Translate(p[0], p[1], ne / 2)
t.Scale(1, 1, ne * 10)
else:
t.Translate(p[0], p[1], ne)
t.RotateX(90) # put it along Z
tf = vtk.vtkTransformPolyDataFilter()
tf.SetInputData(cyl.GetOutput())
tf.SetTransform(t)
tf.Update()
if c is None:
col = i
h = Mesh(tf.GetOutput(), c=col, alpha=alpha).flat()
h.lighting('plastic')
h.PickableOff()
hexs.append(h)
if ne > binmax:
binmax = ne
if cmap is not None:
for h in hexs:
z = h.GetBounds()[5]
col = colors.colorMap(z, cmap, 0, binmax)
h.color(col)
asse = Assembly(hexs)
asse.SetScale(1.2 / n * dx, 1 / m * dy, norm / binmax * (dx + dy) / 4)
asse.SetPosition(xmin, ymin, 0)
asse.base = np.array([0, 0, 0])
asse.top = np.array([0, 0, 1])
asse.name = "histogramHexBin"
return asse
def _histogramPolar(
values,
weights=None,
title="",
tsize=0.1,
bins=16,
r1=0.25,
r2=1,
phigap=0.5,
rgap=0.05,
lpos=1,
lsize=0.04,
c='grey',
bc="k",
alpha=1,
cmap=None,
deg=False,
vmin=None,
vmax=None,
labels=(),
showDisc=True,
nrays=8,
showLines=True,
showAngles=True,
showErrors=False,
):
k = 180 / np.pi
if deg:
values = np.array(values) / k
else:
values = np.array(values)
vals = []
for v in values: # normalize range
t = np.arctan2(np.sin(v), np.cos(v))
if t < 0:
t += 2 * np.pi
vals.append(t+0.00001)
histodata, edges = np.histogram(vals, weights=weights,
bins=bins, range=(0, 2*np.pi))
thetas = []
for i in range(bins):
thetas.append((edges[i] + edges[i + 1]) / 2)
if vmin is None:
vmin = np.min(histodata)
if vmax is None:
vmax = np.max(histodata)
errors = np.sqrt(histodata)
r2e = r1 + r2
if showErrors:
r2e += np.max(errors) / vmax * 1.5
back = None
if showDisc:
back = shapes.Disc(r1=r2e, r2=r2e * 1.01, c=bc, res=(1,360))
back.z(-0.01)
slices = []
lines = []
angles = []
errbars = []
for i, t in enumerate(thetas):
r = histodata[i] / vmax * r2
d = shapes.Disc((0, 0, 0), r1, r1+r, res=(1,360))
delta = np.pi/bins - np.pi/2 - phigap/k
d.cutWithPlane(normal=(np.cos(t + delta), np.sin(t + delta), 0))
d.cutWithPlane(normal=(np.cos(t - delta), np.sin(t - delta), 0))
if cmap is not None:
cslice = colors.colorMap(histodata[i], cmap, vmin, vmax)
d.color(cslice)
else:
if c is None:
d.color(i)
elif utils.isSequence(c) and len(c) == bins:
d.color(c[i])
else:
d.color(c)
d.alpha(alpha).lighting('off')
slices.append(d)
ct, st = np.cos(t), np.sin(t)
if showErrors:
showLines = False
err = np.sqrt(histodata[i]) / vmax * r2
errl = shapes.Line(
((r1 + r - err) * ct, (r1 + r - err) * st, 0.01),
((r1 + r + err) * ct, (r1 + r + err) * st, 0.01),
)
errl.alpha(alpha).lw(3).color(bc)
errbars.append(errl)
labs=[]
rays = []
if showDisc:
outerdisc = shapes.Disc(r1=r2e, r2=r2e * 1.01, c=bc, res=(1,360))
outerdisc.z(-0.01)
innerdisc = shapes.Disc(r1=r2e/2, r2=r2e/2 * 1.005, c=bc, res=(1, 360))
innerdisc.z(-0.01)
rays.append(outerdisc)
rays.append(innerdisc)
rgap = 0.05
for t in np.linspace(0, 2 * np.pi, num=nrays, endpoint=False):
ct, st = np.cos(t), np.sin(t)
if showLines:
l = shapes.Line((0, 0, -0.01), (r2e * ct * 1.03, r2e * st * 1.03, -0.01))
rays.append(l)
ct2, st2 = np.cos(t+np.pi/nrays), np.sin(t+np.pi/nrays)
lm = shapes.DashedLine((0, 0, -0.01),
(r2e * ct2, r2e * st2, -0.01),
spacing=0.25)
rays.append(lm)
elif showAngles: # just the ticks
l = shapes.Line(
(r2e * ct * 0.98, r2e * st * 0.98, -0.01),
(r2e * ct * 1.03, r2e * st * 1.03, -0.01),
)
if showAngles:
if 0 <= t < np.pi / 2:
ju = "bottom-left"
elif t == np.pi / 2:
ju = "bottom-center"
elif np.pi / 2 < t <= np.pi:
ju = "bottom-right"
elif np.pi < t < np.pi * 3 / 2:
ju = "top-right"
elif t == np.pi * 3 / 2:
ju = "top-center"
else:
ju = "top-left"
a = shapes.Text3D(int(t * k), pos=(0, 0, 0), s=lsize, depth=0, justify=ju)
a.pos(r2e * ct * (1 + rgap), r2e * st * (1 + rgap), -0.01)
angles.append(a)
ti = None
if title:
ti = shapes.Text3D(title, (0, 0, 0), s=tsize, depth=0, justify="top-center")
ti.pos(0, -r2e * 1.15, 0.01)
for i,t in enumerate(thetas):
if i < len(labels):
lab = shapes.Text3D(labels[i], (0, 0, 0), #font="VTK",
s=lsize, depth=0, justify="center")
lab.pos(r2e *np.cos(t) * (1 + rgap) * lpos / 2,
r2e *np.sin(t) * (1 + rgap) * lpos / 2, 0.01)
labs.append(lab)
mrg = merge(lines, angles, rays, ti, labs)
if mrg:
mrg.color(bc).lighting('off')
rh = Plot(slices + errbars + [mrg])
rh.freqs = histodata
rh.bins = edges
rh.base = np.array([0, 0, 0])
rh.top = np.array([0, 0, 1])
rh.name = "histogramPolar"
return rh
def _histogramSpheric(
thetavalues, phivalues, rmax=1.2, res=8, cmap="rainbow", lw=0.1, scalarbar=True,
):
x, y, z = utils.spher2cart(np.ones_like(thetavalues) * 1.1, thetavalues, phivalues)
ptsvals = np.c_[x, y, z]
sg = shapes.Sphere(res=res, quads=True).shrink(0.999).computeNormals().lw(0.1)
sgfaces = sg.faces()
sgpts = sg.points()
# sgpts = np.vstack((sgpts, [0,0,0]))
# idx = sgpts.shape[0]-1
# newfaces = []
# for fc in sgfaces:
# f1,f2,f3,f4 = fc
# newfaces.append([idx,f1,f2, idx])
# newfaces.append([idx,f2,f3, idx])
# newfaces.append([idx,f3,f4, idx])
# newfaces.append([idx,f4,f1, idx])
newsg = sg # Mesh((sgpts, sgfaces)).computeNormals().phong()
newsgpts = newsg.points()
cntrs = sg.cellCenters()
counts = np.zeros(len(cntrs))
for p in ptsvals:
cell = sg.closestPoint(p, returnCellId=True)
counts[cell] += 1
acounts = np.array(counts)
counts *= (rmax - 1) / np.max(counts)
for cell, cn in enumerate(counts):
if not cn:
continue
fs = sgfaces[cell]
pts = sgpts[fs]
_, t1, p1 = utils.cart2spher(pts[:, 0], pts[:, 1], pts[:, 2])
x, y, z = utils.spher2cart(1 + cn, t1, p1)
newsgpts[fs] = np.c_[x, y, z]
newsg.points(newsgpts)
newsg.cmap(cmap, acounts, on='cells')
if scalarbar:
newsg.addScalarBar()
newsg.name = "histogramSpheric"
return newsg
def donut(
fractions,
title="",
tsize=0.3,
r1=1.7,
r2=1,
phigap=0,
lpos=0.8,
lsize=0.15,
c=None,
bc="k",
alpha=1,
labels=(),
showDisc=False,
):
"""
Donut plot or pie chart.
:param str title: plot title
:param float tsize: title size
:param float r1: inner radius
:param float r2: outer radius, starting from r1
    :param float phigap: gap angle between two radial bars, in degrees
:param float lpos: label gap factor along radius
:param float lsize: label size
:param c: color of the plot slices
:param bc: color of the disc frame
:param alpha: alpha of the disc frame
:param list labels: list of labels
:param bool showDisc: show the outer ring axis
|donut| |donut.py|_
"""
fractions = np.array(fractions)
angles = np.add.accumulate(2 * np.pi * fractions)
angles[-1] = 2 * np.pi
if angles[-2] > 2 * np.pi:
print("Error in donut(): fractions must sum to 1.")
raise RuntimeError
cols = []
for i, th in enumerate(np.linspace(0, 2 * np.pi, 360, endpoint=False)):
for ia, a in enumerate(angles):
if th < a:
cols.append(c[ia])
break
labs = ()
if len(labels):
angles = np.concatenate([[0], angles])
labs = [""] * 360
for i in range(len(labels)):
a = (angles[i + 1] + angles[i]) / 2
j = int(a / np.pi * 180)
labs[j] = labels[i]
data = np.linspace(0, 2 * np.pi, 360, endpoint=False) + 0.005
dn = _histogramPolar(
data,
title=title,
bins=360,
r1=r1,
r2=r2,
phigap=phigap,
lpos=lpos,
lsize=lsize,
tsize=tsize,
c=cols,
bc=bc,
alpha=alpha,
vmin=0,
vmax=1,
labels=labs,
showDisc=showDisc,
showLines=0,
showAngles=0,
showErrors=0,
)
dn.name = "donut"
return dn
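
# Illustrative usage sketch for donut() above (a minimal, hedged example: the
# fractions, colors and labels are made-up data, and the show() call is left
# commented out because it needs an interactive rendering window).
if __name__ == "__main__":
    _fracs = [0.1, 0.2, 0.3, 0.4]                      # must sum to 1
    _dn = donut(_fracs, c=["red", "green", "blue", "gold"],
                labels=["a", "b", "c", "d"])
    # vedo.show(_dn)                                   # uncomment to render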
def quiver(
points,
vectors,
c="k",
alpha=1,
shaftLength=0.8,
shaftWidth=0.05,
headLength=0.25,
headWidth=0.2,
fill=True,
):
"""
Quiver Plot, display `vectors` at `points` locations.
    Color can be specified as a colormap, in which case it is mapped to the size of the arrows.
:param float shaftLength: fractional shaft length
:param float shaftWidth: fractional shaft width
:param float headLength: fractional head length
:param float headWidth: fractional head width
:param bool fill: if False only generate the outline
|quiver| |quiver.py|_
"""
if isinstance(points, vedo.Points):
points = points.points()
else:
points = np.array(points)
vectors = np.array(vectors) / 2
spts = points - vectors
epts = points + vectors
arrs2d = shapes.Arrows2D(
spts,
epts,
c=c,
shaftLength=shaftLength,
shaftWidth=shaftWidth,
headLength=headLength,
headWidth=headWidth,
fill=fill,
alpha=alpha,
)
arrs2d.pickable(False)
arrs2d.name = "quiver"
return arrs2d
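
# Illustrative usage sketch for quiver() above (hedged: the random points and
# vectors are made-up data; rendering is left commented out).
if __name__ == "__main__":
    _pts = np.c_[np.random.rand(60), np.random.rand(60), np.zeros(60)]
    _vecs = np.c_[np.random.rand(60) - 0.5, np.random.rand(60) - 0.5, np.zeros(60)] * 0.2
    _qv = quiver(_pts, _vecs, c="k")
    # vedo.show(_qv, axes=1)                           # uncomment to render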
def violin(
values,
bins=10,
vlim=None,
x=0,
width=3,
spline=True,
fill=True,
c="violet",
alpha=1,
outline=True,
centerline=True,
lc="darkorchid",
lw=3,
):
"""
Violin style histogram.
:param int bins: number of bins
:param list vlim: input value limits. Crop values outside range.
:param list x: x-position of the violin axis
:param float width: width factor of the normalized distribution
:param bool spline: spline points
:param bool fill: fill violin with solid color
:param bool outline: add the distribution outline
:param bool centerline: add the vertical centerline at x
:param lc: line color
|histo_violin| |histo_violin.py|_
"""
fs, edges = np.histogram(values, bins=bins, range=vlim)
mine, maxe = np.min(edges), np.max(edges)
fs = fs.astype(float) / len(values) * width
rs = []
if spline:
lnl, lnr = [(0, edges[0], 0)], [(0, edges[0], 0)]
for i in range(bins):
xc = (edges[i] + edges[i + 1]) / 2
yc = fs[i]
lnl.append([-yc, xc, 0])
lnr.append([yc, xc, 0])
lnl.append((0, edges[-1], 0))
lnr.append((0, edges[-1], 0))
spl = shapes.KSpline(lnl).x(x)
spr = shapes.KSpline(lnr).x(x)
spl.color(lc).alpha(alpha).lw(lw)
spr.color(lc).alpha(alpha).lw(lw)
if outline:
rs.append(spl)
rs.append(spr)
if fill:
rb = shapes.Ribbon(spl, spr, c=c, alpha=alpha).lighting('off')
rs.append(rb)
else:
lns1 = [[0, mine, 0]]
for i in range(bins):
lns1.append([fs[i], edges[i], 0])
lns1.append([fs[i], edges[i + 1], 0])
lns1.append([0, maxe, 0])
lns2 = [[0, mine, 0]]
for i in range(bins):
lns2.append([-fs[i], edges[i], 0])
lns2.append([-fs[i], edges[i + 1], 0])
lns2.append([0, maxe, 0])
if outline:
rs.append(shapes.Line(lns1, c=lc, alpha=alpha, lw=lw).x(x))
rs.append(shapes.Line(lns2, c=lc, alpha=alpha, lw=lw).x(x))
if fill:
for i in range(bins):
p0 = (-fs[i], edges[i], 0)
p1 = (fs[i], edges[i + 1], 0)
r = shapes.Rectangle(p0, p1).x(p0[0] + x)
r.color(c).alpha(alpha).lighting('off')
rs.append(r)
if centerline:
cl = shapes.Line([0, mine, 0.01], [0, maxe, 0.01], c=lc, alpha=alpha, lw=2).x(x)
rs.append(cl)
asse = Assembly(rs)
asse.base = np.array([0, 0, 0])
asse.top = np.array([0, 1, 0])
asse.name = "violin"
return asse
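
# Illustrative usage sketch for violin() above (hedged: the sampled values are
# made-up data; rendering is left commented out).
if __name__ == "__main__":
    _vals = np.random.randn(500) * 2 + 5
    _vio = violin(_vals, bins=12, c="violet", x=0, spline=True)
    # vedo.show(_vio, axes=1)                          # uncomment to render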
def whisker(data,
s=0.25,
c='k',
lw=2,
bc='blue',
alpha=0.25,
r=5,
jitter=True,
horizontal=False,
):
"""
Generate a "whisker" bar from a 1-dimensional dataset.
:param float s: size of the box
:param c: color of the lines
:param float lw: line width
:param bc: color of the box
:param float alpha: transparency of the box
:param float r: point radius in pixels (use value 0 to disable)
:param bool jitter: add some randomness to points to avoid overlap
:param bool horizontal: set horizontal layout
|whiskers| |whiskers.py|_
"""
xvals = np.zeros_like(np.array(data))
if jitter:
xjit = np.random.randn(len(xvals))*s/9
xjit = np.clip(xjit, -s/2.1, s/2.1)
xvals += xjit
dmean = np.mean(data)
dq05 = np.quantile(data, 0.05)
dq25 = np.quantile(data, 0.25)
dq75 = np.quantile(data, 0.75)
dq95 = np.quantile(data, 0.95)
pts = None
if r: pts = shapes.Points([xvals, data], c=c, r=r)
rec = shapes.Rectangle([-s/2, dq25],[s/2, dq75], c=bc, alpha=alpha)
rec.GetProperty().LightingOff()
rl = shapes.Line([[-s/2, dq25],[s/2, dq25],[s/2, dq75],[-s/2, dq75]], closed=True)
l1 = shapes.Line([0,dq05,0], [0,dq25,0], c=c, lw=lw)
l2 = shapes.Line([0,dq75,0], [0,dq95,0], c=c, lw=lw)
lm = shapes.Line([-s/2, dmean], [s/2, dmean])
lns = merge(l1, l2, lm, rl)
asse = Assembly([lns, rec, pts])
if horizontal:
asse.rotateZ(-90)
asse.name = "Whisker"
asse.info['mean'] = dmean
asse.info['quantile_05'] = dq05
asse.info['quantile_25'] = dq25
asse.info['quantile_75'] = dq75
asse.info['quantile_95'] = dq95
return asse
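
# Illustrative usage sketch for whisker() above (hedged: made-up data; note
# that the computed quantiles are stored in the returned Assembly's .info).
if __name__ == "__main__":
    _data = np.random.normal(loc=5, scale=2, size=200)
    _wsk = whisker(_data, bc="blue", jitter=True)
    print(_wsk.info["mean"], _wsk.info["quantile_25"], _wsk.info["quantile_75"])
    # vedo.show(_wsk, axes=1)                          # uncomment to render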
def streamplot(X, Y, U, V, direction="both",
maxPropagation=None, mode=1, lw=0.001, c=None, probes=()):
"""
Generate a streamline plot of a vectorial field (U,V) defined at positions (X,Y).
Returns a ``Mesh`` object.
:param str direction: either "forward", "backward" or "both"
:param float maxPropagation: maximum physical length of the streamline
:param float lw: line width in absolute units
:param int mode: vary line width
- 0 - do not vary line width
- 1 - vary line width by first vector component
- 2 - vary line width vector magnitude
- 3 - vary line width by absolute value of first vector component
|plot_stream| |plot_stream.py|_
"""
n = len(X)
m = len(Y[0])
if n != m:
print("Limitation in streamplot(): only square grids are allowed.", n, m)
raise RuntimeError()
xmin, xmax = X[0][0], X[-1][-1]
ymin, ymax = Y[0][0], Y[-1][-1]
field = np.sqrt(U * U + V * V)
vol = vedo.Volume(field, dims=(n, n, 1))
uf = np.ravel(U, order="F")
vf = np.ravel(V, order="F")
vects = np.c_[uf, vf, np.zeros_like(uf)]
vol.addPointArray(vects, "vects")
if len(probes) == 0:
probe = shapes.Grid(pos=((n-1)/2,(n-1)/2,0), sx=n-1, sy=n-1, resx=n-1, resy=n-1)
else:
if isinstance(probes, vedo.Points):
probes = probes.points()
else:
probes = np.array(probes)
if len(probes[0]) == 2:
probes = np.c_[probes[:, 0], probes[:, 1], np.zeros(len(probes))]
sv = [(n - 1) / (xmax - xmin), (n - 1) / (ymax - ymin), 1]
probes = probes - [xmin, ymin, 0]
probes = np.multiply(probes, sv)
probe = vedo.Points(probes)
stream = vedo.base.streamLines( vol.imagedata(),
probe,
tubes={"radius": lw, "varyRadius": mode,},
lw=lw,
maxPropagation=maxPropagation,
direction=direction,
)
if c is not None:
stream.color(c)
else:
stream.addScalarBar()
stream.lighting('off')
stream.scale([1 / (n - 1) * (xmax - xmin), 1 / (n - 1) * (ymax - ymin), 1])
stream.shift(xmin, ymin)
return stream
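
# Illustrative usage sketch for streamplot() above (hedged: a simple rotational
# field sampled on the square grid that the function requires; made-up data).
if __name__ == "__main__":
    _xg, _yg = np.meshgrid(np.linspace(-1, 1, 25), np.linspace(-1, 1, 25))
    _ug, _vg = -_yg, _xg
    _stream = streamplot(_xg, _yg, _ug, _vg, direction="both", lw=0.001)
    # vedo.show(_stream, axes=1)                       # uncomment to render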
def matrix(M,
title='Matrix',
xtitle='',
ytitle='',
xlabels=[],
ylabels=[],
xrotation=0,
cmap='Reds',
vmin=None,
vmax=None,
precision=2,
font='Theemim',
scale=0,
scalarbar=True,
lc='white',
lw=0,
c='black',
alpha=1,
):
"""
Generate a matrix, or a 2D color-coded plot with bin labels.
Returns an ``Assembly`` object.
Parameters
----------
M : list or numpy array
the input array to visualize.
title : str, optional
title of the plot. The default is 'Matrix'.
xtitle : str, optional
        title of the horizontal columns. The default is ''.
ytitle : str, optional
title of the vertical rows. The default is ''.
xlabels : list, optional
individual string labels for each column. Must be of length m. The default is [].
ylabels : list, optional
individual string labels for each row. Must be of length n. The default is [].
xrotation : float, optional
rotation of the horizontal labels. The default is 0.
cmap : str, optional
color map name. The default is 'Reds'.
vmin : float, optional
minimum value of the colormap range. The default is None.
vmax : float, optional
maximum value of the colormap range. The default is None.
precision : int, optional
number of digits for the matrix entries or bins. The default is 2.
font : str, optional
        font name. The default is 'Theemim'.
scale : float, optional
size of the numeric entries or bin values. The default is 0.
scalarbar : bool, optional
add a scalar bar to the right of the plot. The default is True.
lc : str, optional
color of the line separating the bins. The default is 'white'.
lw : float, optional
Width of the line separating the bins. The default is 0.
c : str, optional
        text color. The default is 'black'.
alpha : float, optional
plot transparency. The default is 1.
"""
M = np.asarray(M)
n,m = M.shape
gr = shapes.Grid(resx=m, resy=n, sx=m/(m+n)*2, sy=n/(m+n)*2, c=c, alpha=alpha)
gr.wireframe(False).lc(lc).lw(lw)
matr = np.flip( np.flip(M), axis=1).ravel(order='C')
gr.cmap(cmap, matr, on='cells', vmin=vmin, vmax=vmax)
sbar=None
if scalarbar:
gr.addScalarBar3D(titleFont=font, labelFont=font)
sbar = gr.scalarbar
labs=None
if scale !=0:
labs = gr.labels(cells=True, scale=scale/max(m,n),
precision=precision, font=font, justify='center', c=c)
labs.z(0.001)
t = None
if title:
if title == 'Matrix':
title += ' '+str(n)+'x'+str(m)
t = shapes.Text3D(title, font=font, s=0.04,
justify='bottom-center', c=c)
t.shift(0, n/(m+n)*1.05)
xlabs=None
if len(xlabels)==m:
xlabs=[]
jus = 'top-center'
if xrotation>44:
jus = 'right-center'
for i in range(m):
xl = shapes.Text3D(xlabels[i], font=font, s=0.02,
justify=jus, c=c).rotateZ(xrotation)
xl.shift((2*i-m+1)/(m+n), -n/(m+n)*1.05)
xlabs.append(xl)
ylabs=None
if len(ylabels)==n:
ylabs=[]
for i in range(n):
yl = shapes.Text3D(ylabels[i], font=font, s=.02,
justify='right-center', c=c)
yl.shift(-m/(m+n)*1.05, (2*i-n+1)/(m+n))
ylabs.append(yl)
xt=None
if xtitle:
xt = shapes.Text3D(xtitle, font=font, s=0.035,
justify='top-center', c=c)
xt.shift(0, -n/(m+n)*1.05)
if xlabs is not None:
y0,y1 = xlabs[0].ybounds()
xt.shift(0, -(y1-y0)-0.55/(m+n))
yt=None
if ytitle:
yt = shapes.Text3D(ytitle, font=font, s=0.035,
justify='bottom-center', c=c).rotateZ(90)
yt.shift(-m/(m+n)*1.05, 0)
if ylabs is not None:
x0,x1 = ylabs[0].xbounds()
yt.shift(-(x1-x0)-0.55/(m+n),0)
asse = Assembly(gr, sbar, labs, t, xt, yt, xlabs, ylabs)
asse.name = "Matrix"
return asse
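
# Illustrative usage sketch for matrix() above (hedged: a small random 4x5
# array with made-up row/column labels; scale>0 draws the numeric entries).
if __name__ == "__main__":
    _M = np.random.rand(4, 5)
    _mx = matrix(_M, title="Matrix", xlabels=list("abcde"),
                 ylabels=list("ABCD"), scale=0.15, precision=2)
    # vedo.show(_mx, axes=0)                           # uncomment to render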
def cornerPlot(points, pos=1, s=0.2, title="", c="b", bg="k", lines=True, dots=True):
"""
Return a ``vtkXYPlotActor`` that is a plot of `x` versus `y`,
where `points` is a list of `(x,y)` points.
:param int pos: assign position:
- 1, topleft,
- 2, topright,
- 3, bottomleft,
- 4, bottomright.
"""
if len(points) == 2: # passing [allx, ally]
points = np.stack((points[0], points[1]), axis=1)
c = colors.getColor(c) # allow different codings
array_x = vtk.vtkFloatArray()
array_y = vtk.vtkFloatArray()
array_x.SetNumberOfTuples(len(points))
array_y.SetNumberOfTuples(len(points))
for i, p in enumerate(points):
array_x.InsertValue(i, p[0])
array_y.InsertValue(i, p[1])
field = vtk.vtkFieldData()
field.AddArray(array_x)
field.AddArray(array_y)
data = vtk.vtkDataObject()
data.SetFieldData(field)
plot = vtk.vtkXYPlotActor()
plot.AddDataObjectInput(data)
plot.SetDataObjectXComponent(0, 0)
plot.SetDataObjectYComponent(0, 1)
plot.SetXValuesToValue()
plot.SetAdjustXLabels(0)
plot.SetAdjustYLabels(0)
plot.SetNumberOfXLabels(3)
plot.GetProperty().SetPointSize(5)
plot.GetProperty().SetLineWidth(2)
plot.GetProperty().SetColor(colors.getColor(bg))
plot.SetPlotColor(0, c[0], c[1], c[2])
plot.SetXTitle(title)
plot.SetYTitle("")
plot.ExchangeAxesOff()
plot.SetPlotPoints(dots)
if not lines:
plot.PlotLinesOff()
if isinstance(pos, str):
spos = 2
if "top" in pos:
if "left" in pos: spos=1
elif "right" in pos: spos=2
elif "bottom" in pos:
if "left" in pos: spos=3
elif "right" in pos: spos=4
pos = spos
if pos == 1:
plot.GetPositionCoordinate().SetValue(0.0, 0.8, 0)
elif pos == 2:
plot.GetPositionCoordinate().SetValue(0.76, 0.8, 0)
elif pos == 3:
plot.GetPositionCoordinate().SetValue(0.0, 0.0, 0)
elif pos == 4:
plot.GetPositionCoordinate().SetValue(0.76, 0.0, 0)
else:
plot.GetPositionCoordinate().SetValue(pos[0], pos[1], 0)
plot.GetPosition2Coordinate().SetValue(s, s, 0)
return plot
def cornerHistogram(
values,
bins=20,
vrange=None,
minbin=0,
logscale=False,
title="",
c="g",
bg="k",
alpha=1,
pos="bottom-left",
s=0.175,
lines=True,
dots=False,
nmax=None,
):
"""
Build a histogram from a list of values in n bins.
The resulting object is a 2D actor.
Use *vrange* to restrict the range of the histogram.
    :param int nmax: limit the sampling to this maximum number of entries
Use `pos` to assign its position:
- 1, topleft,
- 2, topright,
- 3, bottomleft,
- 4, bottomright,
- (x, y), as fraction of the rendering window
"""
if hasattr(values, '_data'):
values = utils.vtk2numpy(values._data.GetPointData().GetScalars())
n = values.shape[0]
if nmax and nmax < n:
# subsample:
idxs = np.linspace(0, n, num=int(nmax), endpoint=False).astype(int)
values = values[idxs]
fs, edges = np.histogram(values, bins=bins, range=vrange)
if minbin:
fs = fs[minbin:-1]
if logscale:
fs = np.log10(fs + 1)
pts = []
for i in range(len(fs)):
pts.append([(edges[i] + edges[i + 1]) / 2, fs[i]])
plot = cornerPlot(pts, pos, s, title, c, bg, lines, dots)
plot.SetNumberOfYLabels(2)
plot.SetNumberOfXLabels(3)
tprop = vtk.vtkTextProperty()
tprop.SetColor(colors.getColor(bg))
tprop.SetFontFamily(vtk.VTK_FONT_FILE)
tprop.SetFontFile(utils.getFontPath(settings.defaultFont))
tprop.SetOpacity(alpha)
plot.SetAxisTitleTextProperty(tprop)
plot.GetProperty().SetOpacity(alpha)
plot.GetXAxisActor2D().SetLabelTextProperty(tprop)
plot.GetXAxisActor2D().SetTitleTextProperty(tprop)
plot.GetXAxisActor2D().SetFontFactor(0.55)
plot.GetYAxisActor2D().SetLabelFactor(0.0)
plot.GetYAxisActor2D().LabelVisibilityOff()
return plot
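
# Illustrative usage sketch for cornerHistogram() above (hedged: made-up data;
# the returned object is a 2D vtk actor meant to be added to a vedo Plotter).
if __name__ == "__main__":
    _vals = np.random.randn(1000)
    _ch = cornerHistogram(_vals, bins=25, pos="bottom-left", c="g")
    # vedo.show(shapes.Sphere(), _ch)                  # uncomment to render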
class DirectedGraph(Assembly):
"""A graph consists of a collection of nodes (without postional information)
and a collection of edges connecting pairs of nodes.
The task is to determine the node positions only based on their connections.
This class is derived from class ``Assembly``, and it assembles 4 Mesh objects
representing the graph, the node labels, edge labels and edge arrows.
:param c: color of the Graph
:param int n: number of the initial set of nodes
:param int,str layout: layout in ['2d', 'fast2d', 'clustering2d', 'circular',
'circular3d', 'cone', 'force', 'tree']
    Each of these layouts has different available options.
Options for layouts '2d', 'fast2d' and 'clustering2d':
:param int seed: seed of the random number generator used to jitter point positions
:param float restDistance: manually set the resting distance
:param int maxNumberOfIterations: the maximum number of iterations to be used
:param float zrange: expand 2d graph along z axis.
Options for layouts 'circular', and 'circular3d':
:param float radius: set the radius of the circles.
:param float height: set the vertical (local z) distance between the circles
:param float zrange: expand 2d graph along z axis.
Options for layout 'cone':
:param float compactness: ratio between the average width of a cone in the tree,
and the height of the cone. The default setting is 0.75.
:param bool compression: put children closer together, possibly allowing sub-trees to overlap.
This is useful if the tree is actually the spanning tree of a graph.
:param float spacing: space between layers of the tree
Options for layout 'force':
:param int seed: seed the random number generator used to jitter point positions
:param list bounds: set the region in space in which to place the final graph
:param int maxNumberOfIterations: the maximum number of iterations to be used
:param bool threeDimensional: allow optimization in the 3rd dimension too
:param bool randomInitialPoints: use random positions within the graph bounds as initial points
Example:
|lineage_graph| |lineage_graph.py|_
|graph_network| |graph_network.py|_
"""
def __init__(self, **kargs):
vedo.base.BaseActor.__init__(self)
self.nodes = []
self.edges = []
self._nodeLabels = [] # holds strings
self._edgeLabels = []
self.edgeOrientations = []
self.edgeGlyphPosition = 0.6
self.zrange = 0.0
self.rotX = 0
self.rotY = 0
self.rotZ = 0
self.arrowScale = 0.15
self.nodeLabelScale = None
self.nodeLabelJustify = "bottom-left"
self.edgeLabelScale = None
self.mdg = vtk.vtkMutableDirectedGraph()
n = kargs.pop('n', 0)
for i in range(n): self.addNode()
self._c = kargs.pop('c', (0.3,0.3,0.3))
self.gl = vtk.vtkGraphLayout()
self.font = kargs.pop('font', '')
s = kargs.pop('layout', '2d')
if isinstance(s, int):
ss = ['2d', 'fast2d', 'clustering2d', 'circular', 'circular3d',
'cone', 'force', 'tree']
s = ss[s]
self.layout = s
if '2d' in s:
if 'clustering' in s:
self.strategy = vtk.vtkClustering2DLayoutStrategy()
elif 'fast' in s:
self.strategy = vtk.vtkFast2DLayoutStrategy()
else:
self.strategy = vtk.vtkSimple2DLayoutStrategy()
self.rotX = 180
opt = kargs.pop('restDistance', None)
if opt is not None: self.strategy.SetRestDistance(opt)
opt = kargs.pop('seed', None)
if opt is not None: self.strategy.SetRandomSeed(opt)
opt = kargs.pop('maxNumberOfIterations', None)
if opt is not None: self.strategy.SetMaxNumberOfIterations(opt)
self.zrange = kargs.pop('zrange', 0)
elif 'circ' in s:
if '3d' in s:
self.strategy = vtk.vtkSimple3DCirclesStrategy()
self.strategy.SetDirection(0,0,-1)
self.strategy.SetAutoHeight(True)
self.strategy.SetMethod(1)
self.rotX = -90
opt = kargs.pop('radius', None) # float
if opt is not None:
self.strategy.SetMethod(0)
self.strategy.SetRadius(opt) # float
opt = kargs.pop('height', None)
if opt is not None:
self.strategy.SetAutoHeight(False)
self.strategy.SetHeight(opt) # float
else:
self.strategy = vtk.vtkCircularLayoutStrategy()
self.zrange = kargs.pop('zrange', 0)
elif 'cone' in s:
self.strategy = vtk.vtkConeLayoutStrategy()
self.rotX = 180
opt = kargs.pop('compactness', None)
if opt is not None: self.strategy.SetCompactness(opt)
opt = kargs.pop('compression', None)
if opt is not None: self.strategy.SetCompression(opt)
opt = kargs.pop('spacing', None)
if opt is not None: self.strategy.SetSpacing(opt)
elif 'force' in s:
self.strategy = vtk.vtkForceDirectedLayoutStrategy()
opt = kargs.pop('seed', None)
if opt is not None: self.strategy.SetRandomSeed(opt)
opt = kargs.pop('bounds', None)
if opt is not None:
self.strategy.SetAutomaticBoundsComputation(False)
self.strategy.SetGraphBounds(opt) # list
opt = kargs.pop('maxNumberOfIterations', None)
if opt is not None: self.strategy.SetMaxNumberOfIterations(opt) # int
opt = kargs.pop('threeDimensional', True)
if opt is not None: self.strategy.SetThreeDimensionalLayout(opt) # bool
opt = kargs.pop('randomInitialPoints', None)
if opt is not None: self.strategy.SetRandomInitialPoints(opt) # bool
elif 'tree' in s:
self.strategy = vtk.vtkSpanTreeLayoutStrategy()
self.rotX = 180
else:
colors.printc("Cannot understand layout:", s, c='r')
colors.printc("Available layouts:", c='r')
colors.printc("[2d,fast2d,clustering2d,circular,circular3d,cone,force,tree]", c='r')
raise RuntimeError()
self.gl.SetLayoutStrategy(self.strategy)
if len(kargs):
colors.printc("Cannot understand options:", kargs, c='r')
return
def addNode(self, label="id"):
"""Add a new node to the Graph."""
v = self.mdg.AddVertex() # vtk calls it vertex..
self.nodes.append(v)
if label == 'id': label=int(v)
self._nodeLabels.append(str(label))
return v
def addEdge(self, v1, v2, label=""):
"""Add a new edge between to nodes.
An extra node is created automatically if needed."""
nv = len(self.nodes)
if v1>=nv:
for i in range(nv, v1+1):
self.addNode()
nv = len(self.nodes)
if v2>=nv:
for i in range(nv, v2+1):
self.addNode()
e = self.mdg.AddEdge(v1,v2)
self.edges.append(e)
self._edgeLabels.append(str(label))
return e
def addChild(self, v, nodeLabel="id", edgeLabel=""):
"""Add a new edge to a new node as its child.
The extra node is created automatically if needed."""
nv = len(self.nodes)
if v>=nv:
for i in range(nv, v+1):
self.addNode()
child = self.mdg.AddChild(v)
self.edges.append((v,child))
self.nodes.append(child)
if nodeLabel == 'id': nodeLabel=int(child)
self._nodeLabels.append(str(nodeLabel))
self._edgeLabels.append(str(edgeLabel))
return child
def build(self):
"""
Build the DirectedGraph(Assembly).
Accessory objects are also created for labels and arrows.
"""
self.gl.SetZRange(self.zrange)
self.gl.SetInputData(self.mdg)
self.gl.Update()
graphToPolyData = vtk.vtkGraphToPolyData()
graphToPolyData.EdgeGlyphOutputOn()
graphToPolyData.SetEdgeGlyphPosition(self.edgeGlyphPosition)
graphToPolyData.SetInputData(self.gl.GetOutput())
graphToPolyData.Update()
dgraph = Mesh(graphToPolyData.GetOutput(0))
# dgraph.clean() # WRONG!!! dont uncomment
dgraph.flat().color(self._c).lw(2)
dgraph.name = "DirectedGraph"
diagsz = self.diagonalSize()/1.42
if not diagsz:
return None
dgraph.SetScale(1/diagsz)
if self.rotX:
dgraph.rotateX(self.rotX)
if self.rotY:
dgraph.rotateY(self.rotY)
if self.rotZ:
dgraph.rotateZ(self.rotZ)
vecs = graphToPolyData.GetOutput(1).GetPointData().GetVectors()
self.edgeOrientations = utils.vtk2numpy(vecs)
# Use Glyph3D to repeat the glyph on all edges.
arrows=None
if self.arrowScale:
arrowSource = vtk.vtkGlyphSource2D()
arrowSource.SetGlyphTypeToEdgeArrow()
arrowSource.SetScale(self.arrowScale)
arrowSource.Update()
arrowGlyph = vtk.vtkGlyph3D()
arrowGlyph.SetInputData(0, graphToPolyData.GetOutput(1))
arrowGlyph.SetInputData(1, arrowSource.GetOutput())
arrowGlyph.Update()
arrows = Mesh(arrowGlyph.GetOutput())
arrows.SetScale(1/diagsz)
arrows.lighting('off').color(self._c)
if self.rotX:
arrows.rotateX(self.rotX)
if self.rotY:
arrows.rotateY(self.rotY)
if self.rotZ:
arrows.rotateZ(self.rotZ)
arrows.name = "DirectedGraphArrows"
nodeLabels = dgraph.labels(self._nodeLabels,
scale=self.nodeLabelScale,
precision=0,
font=self.font,
justify=self.nodeLabelJustify,
)
nodeLabels.color(self._c).pickable(True)
nodeLabels.name = "DirectedGraphNodeLabels"
edgeLabels = dgraph.labels(self._edgeLabels,
cells=True,
scale=self.edgeLabelScale,
precision=0,
font=self.font,
)
edgeLabels.color(self._c).pickable(True)
edgeLabels.name = "DirectedGraphEdgeLabels"
Assembly.__init__(self, [dgraph,
nodeLabels,
edgeLabels,
arrows])
self.name = "DirectedGraphAssembly"
return self
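
# Illustrative usage sketch for DirectedGraph above, modelled on the library's
# own graph examples (hedged: the edge list below is made up; rendering is
# left commented out).
if __name__ == "__main__":
    _dg = DirectedGraph(layout="fast2d")
    for _va, _vb in [(0, 1), (0, 2), (1, 3), (2, 3), (3, 4)]:
        _dg.addEdge(_va, _vb)
    _dg.build()
    # vedo.show(_dg)                                   # uncomment to render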
|
[
"marco.musy@gmail.com"
] |
marco.musy@gmail.com
|
75af37c7035fa42e49638ffc2f8b9d925f49ea7e
|
ee00ebe5e71c36b05fbff993b19e9723b963313f
|
/35_inserted_position.py
|
f5142c9a3ab2c24b65c81f2721f1dd7ad04a16e3
|
[] |
no_license
|
26XINXIN/leetcode
|
f365560d93604a28abf399707b333f3c11f924ec
|
78ed11f34fd03e9a188c9c6cb352e883016d05d9
|
refs/heads/master
| 2021-06-28T16:31:45.103879
| 2020-09-19T20:33:55
| 2020-09-19T20:33:55
| 144,975,903
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 661
|
py
|
class Solution:
def searchInsert(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
ans = self.binary_search(nums, 0, len(nums)-1, target)
if ans == len(nums)-1 and target > nums[-1]:
ans += 1
return ans
def binary_search(self, nums, l, r, target):
if l == r:
return l
mid = (l + r) // 2
if nums[mid] == target:
return mid
elif nums[mid] < target:
return self.binary_search(nums, mid + 1, r, target)
else:
return self.binary_search(nums, l, mid, target)
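
# Quick sanity checks (illustrative only; expected values follow LeetCode 35).
if __name__ == "__main__":
    s = Solution()
    print(s.searchInsert([1, 3, 5, 6], 5))   # 2 -> target already present
    print(s.searchInsert([1, 3, 5, 6], 2))   # 1 -> insert before the 3
    print(s.searchInsert([1, 3, 5, 6], 7))   # 4 -> append past the end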
|
[
"yangxin.nlp@bytedance.com"
] |
yangxin.nlp@bytedance.com
|
0961a55413c0854c2148a4c91bfb17bbb9891d86
|
3122ac39f1ce0a882b48293a77195476299c2a3b
|
/clients/python-flask/generated/openapi_server/models/pipeline_run_node.py
|
f205f70d61ad88141b09f4ad9bdc2cfd5a55b15f
|
[
"MIT"
] |
permissive
|
miao1007/swaggy-jenkins
|
4e6fe28470eda2428cbc584dcd365a21caa606ef
|
af79438c120dd47702b50d51c42548b4db7fd109
|
refs/heads/master
| 2020-08-30T16:50:27.474383
| 2019-04-10T13:47:17
| 2019-04-10T13:47:17
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,765
|
py
|
# coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from openapi_server.models.base_model_ import Model
from openapi_server.models.pipeline_run_nodeedges import PipelineRunNodeedges # noqa: F401,E501
from openapi_server import util
class PipelineRunNode(Model):
"""NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
Do not edit the class manually.
"""
def __init__(self, _class: str=None, display_name: str=None, duration_in_millis: int=None, edges: List[PipelineRunNodeedges]=None, id: str=None, result: str=None, start_time: str=None, state: str=None): # noqa: E501
"""PipelineRunNode - a model defined in OpenAPI
:param _class: The _class of this PipelineRunNode. # noqa: E501
:type _class: str
:param display_name: The display_name of this PipelineRunNode. # noqa: E501
:type display_name: str
:param duration_in_millis: The duration_in_millis of this PipelineRunNode. # noqa: E501
:type duration_in_millis: int
:param edges: The edges of this PipelineRunNode. # noqa: E501
:type edges: List[PipelineRunNodeedges]
:param id: The id of this PipelineRunNode. # noqa: E501
:type id: str
:param result: The result of this PipelineRunNode. # noqa: E501
:type result: str
:param start_time: The start_time of this PipelineRunNode. # noqa: E501
:type start_time: str
:param state: The state of this PipelineRunNode. # noqa: E501
:type state: str
"""
self.openapi_types = {
'_class': str,
'display_name': str,
'duration_in_millis': int,
'edges': List[PipelineRunNodeedges],
'id': str,
'result': str,
'start_time': str,
'state': str
}
self.attribute_map = {
'_class': '_class',
'display_name': 'displayName',
'duration_in_millis': 'durationInMillis',
'edges': 'edges',
'id': 'id',
'result': 'result',
'start_time': 'startTime',
'state': 'state'
}
self.__class = _class
self._display_name = display_name
self._duration_in_millis = duration_in_millis
self._edges = edges
self._id = id
self._result = result
self._start_time = start_time
self._state = state
@classmethod
def from_dict(cls, dikt) -> 'PipelineRunNode':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The PipelineRunNode of this PipelineRunNode. # noqa: E501
:rtype: PipelineRunNode
"""
return util.deserialize_model(dikt, cls)
@property
def _class(self) -> str:
"""Gets the _class of this PipelineRunNode.
:return: The _class of this PipelineRunNode.
:rtype: str
"""
return self.__class
@_class.setter
def _class(self, _class: str):
"""Sets the _class of this PipelineRunNode.
:param _class: The _class of this PipelineRunNode.
:type _class: str
"""
self.__class = _class
@property
def display_name(self) -> str:
"""Gets the display_name of this PipelineRunNode.
:return: The display_name of this PipelineRunNode.
:rtype: str
"""
return self._display_name
@display_name.setter
def display_name(self, display_name: str):
"""Sets the display_name of this PipelineRunNode.
:param display_name: The display_name of this PipelineRunNode.
:type display_name: str
"""
self._display_name = display_name
@property
def duration_in_millis(self) -> int:
"""Gets the duration_in_millis of this PipelineRunNode.
:return: The duration_in_millis of this PipelineRunNode.
:rtype: int
"""
return self._duration_in_millis
@duration_in_millis.setter
def duration_in_millis(self, duration_in_millis: int):
"""Sets the duration_in_millis of this PipelineRunNode.
:param duration_in_millis: The duration_in_millis of this PipelineRunNode.
:type duration_in_millis: int
"""
self._duration_in_millis = duration_in_millis
@property
def edges(self) -> List[PipelineRunNodeedges]:
"""Gets the edges of this PipelineRunNode.
:return: The edges of this PipelineRunNode.
:rtype: List[PipelineRunNodeedges]
"""
return self._edges
@edges.setter
def edges(self, edges: List[PipelineRunNodeedges]):
"""Sets the edges of this PipelineRunNode.
:param edges: The edges of this PipelineRunNode.
:type edges: List[PipelineRunNodeedges]
"""
self._edges = edges
@property
def id(self) -> str:
"""Gets the id of this PipelineRunNode.
:return: The id of this PipelineRunNode.
:rtype: str
"""
return self._id
@id.setter
def id(self, id: str):
"""Sets the id of this PipelineRunNode.
:param id: The id of this PipelineRunNode.
:type id: str
"""
self._id = id
@property
def result(self) -> str:
"""Gets the result of this PipelineRunNode.
:return: The result of this PipelineRunNode.
:rtype: str
"""
return self._result
@result.setter
def result(self, result: str):
"""Sets the result of this PipelineRunNode.
:param result: The result of this PipelineRunNode.
:type result: str
"""
self._result = result
@property
def start_time(self) -> str:
"""Gets the start_time of this PipelineRunNode.
:return: The start_time of this PipelineRunNode.
:rtype: str
"""
return self._start_time
@start_time.setter
def start_time(self, start_time: str):
"""Sets the start_time of this PipelineRunNode.
:param start_time: The start_time of this PipelineRunNode.
:type start_time: str
"""
self._start_time = start_time
@property
def state(self) -> str:
"""Gets the state of this PipelineRunNode.
:return: The state of this PipelineRunNode.
:rtype: str
"""
return self._state
@state.setter
def state(self, state: str):
"""Sets the state of this PipelineRunNode.
:param state: The state of this PipelineRunNode.
:type state: str
"""
self._state = state
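
# Illustrative usage sketch (hedged: the camelCase keys mirror attribute_map
# above, but the field values here are made up).
if __name__ == "__main__":
    _node = PipelineRunNode.from_dict({
        "displayName": "Build",
        "durationInMillis": 1200,
        "id": "7",
        "result": "SUCCESS",
        "state": "FINISHED",
    })
    print(_node.display_name, _node.result)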
|
[
"cliffano@gmail.com"
] |
cliffano@gmail.com
|
4e4336b975c5ee46eb7645c1b114c235d4303c50
|
989f011a784015e1a33c41362ab4ec06e92b3339
|
/examples/07_functions/func_args_unpacking.py
|
1e837181f73877fce4b28831c45f757f1b3da290
|
[] |
no_license
|
yevgeniy-voloshin/pyneng-online-jun-jul-2017
|
b0be9df7d379e24b654172c1bc3f5cc0bdbbcd2f
|
050e43d7f582528189005c1b7c34970352e968f1
|
refs/heads/master
| 2021-01-21T16:22:27.347769
| 2017-05-19T17:35:16
| 2017-05-19T17:35:16
| 91,885,650
| 1
| 0
| null | 2017-05-20T11:46:28
| 2017-05-20T11:46:28
| null |
UTF-8
|
Python
| false
| false
| 3,132
|
py
|
# Unpacking positional arguments
def config_interface(intf_name, ip_address, cidr_mask):
interface = 'interface %s'
no_shut = 'no shutdown'
ip_addr = 'ip address %s %s'
result = []
result.append(interface % intf_name)
result.append(no_shut)
mask_bits = int(cidr_mask.split('/')[-1])
bin_mask = '1'*mask_bits + '0'*(32-mask_bits)
dec_mask = '.'.join([ str(int(bin_mask[i:i+8], 2)) for i in [0,8,16,24] ])
result.append(ip_addr % (ip_address, dec_mask))
return result
#print config_interface('Fa0/1', '10.0.1.1', '/25')
interfaces_info = [['Fa0/1', '10.0.1.1', '/24'],
['Fa0/2', '10.0.2.1', '/24'],
['Fa0/3', '10.0.3.1', '/24'],
['Fa0/4', '10.0.4.1', '/24'],
['Lo0', '10.0.0.1', '/32']]
for i in interfaces_info:
print config_interface(*i)
"""
Output:
['interface Fa0/1', 'no shutdown', 'ip address 10.0.1.1 255.255.255.0']
['interface Fa0/2', 'no shutdown', 'ip address 10.0.2.1 255.255.255.0']
['interface Fa0/3', 'no shutdown', 'ip address 10.0.3.1 255.255.255.0']
['interface Fa0/4', 'no shutdown', 'ip address 10.0.4.1 255.255.255.0']
['interface Lo0', 'no shutdown', 'ip address 10.0.0.1 255.255.255.255']
"""
# Unpacking keyword arguments
def config_to_list(cfg_file, delete_excl=True,
delete_empty=True, strip_end=True):
result = []
with open( cfg_file ) as f:
for line in f:
if strip_end:
line = line.rstrip()
if delete_empty and not line:
pass
elif delete_excl and line.startswith('!'):
pass
else:
result.append(line)
return result
cfg = [dict(cfg_file='r1.txt', delete_excl=True, delete_empty=True, strip_end=True),
dict(cfg_file='r2.txt', delete_excl=False, delete_empty=True, strip_end=True),
dict(cfg_file='r3.txt', delete_excl=True, delete_empty=False, strip_end=True),
dict(cfg_file='r4.txt', delete_excl=True, delete_empty=True, strip_end=False)]
for d in cfg:
print config_to_list(**d)
"""
Output:
['service timestamps debug datetime msec localtime show-timezone year', 'service timestamps log datetime msec localtime show-timezone year', 'service password-encryption', 'service sequence-numbers', 'no ip domain lookup', 'ip ssh version 2']
['!', 'service timestamps debug datetime msec localtime show-timezone year', 'service timestamps log datetime msec localtime show-timezone year', 'service password-encryption', 'service sequence-numbers', '!', 'no ip domain lookup', '!', 'ip ssh version 2', '!']
['service timestamps debug datetime msec localtime show-timezone year', 'service timestamps log datetime msec localtime show-timezone year', 'service password-encryption', 'service sequence-numbers', '', '', '', 'ip ssh version 2', '']
['service timestamps debug datetime msec localtime show-timezone year\n', 'service timestamps log datetime msec localtime show-timezone year\n', 'service password-encryption\n', 'service sequence-numbers\n', 'no ip domain lookup\n', 'ip ssh version 2\n']
"""
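
# Extra illustration: positional and keyword unpacking can also be combined in
# a single call (the interface name, address and mask below are made up).
args = ['Fa0/5', '10.0.5.1']
kwargs = {'cidr_mask': '/24'}
print(config_interface(*args, **kwargs))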
|
[
"pyneng.course@gmail.com"
] |
pyneng.course@gmail.com
|
21ce8528825c4e522f40d39c4685443bcdbcdac4
|
76f4443972ba066e9a3239d96416b8e807800d8f
|
/tensorflow/contrib/seq2seq/python/ops/helper.py
|
46d0563fe08b03a7eb96dfec092949761666d563
|
[
"Apache-2.0"
] |
permissive
|
ravyg/tensorflow
|
34702f5c0003b602431471987fe50b5ffb6d1912
|
2a83490455f0f02ea05fc447551cacea1c1b8cb6
|
refs/heads/master
| 2021-01-17T11:46:47.552165
| 2017-02-23T22:21:57
| 2017-02-23T22:21:57
| 60,307,856
| 3
| 2
| null | 2016-08-28T21:03:23
| 2016-06-03T01:07:06
|
C++
|
UTF-8
|
Python
| false
| false
| 14,187
|
py
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A library of helpers for use with SamplingDecoders.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
from tensorflow.contrib.distributions.python.ops import categorical
from tensorflow.contrib.seq2seq.python.ops import decoder
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.util import nest
__all__ = [
"Helper",
"TrainingHelper",
"GreedyEmbeddingHelper",
"CustomHelper",
"ScheduledEmbeddingTrainingHelper",
]
_transpose_batch_time = decoder._transpose_batch_time # pylint: disable=protected-access
@six.add_metaclass(abc.ABCMeta)
class Helper(object):
"""Helper interface. Helper instances are used by SamplingDecoder."""
@abc.abstractproperty
def batch_size(self):
"""Returns a scalar int32 tensor."""
raise NotImplementedError("batch_size has not been implemented")
@abc.abstractmethod
def initialize(self, name=None):
"""Returns `(initial_finished, initial_inputs)`."""
pass
@abc.abstractmethod
def sample(self, time, outputs, state, name=None):
"""Returns `sample_ids`."""
pass
@abc.abstractmethod
def next_inputs(self, time, outputs, state, sample_ids, name=None):
"""Returns `(finished, next_inputs, next_state)`."""
pass
class CustomHelper(Helper):
"""Base abstract class that allows the user to customize sampling."""
def __init__(self, initialize_fn, sample_fn, next_inputs_fn):
"""Initializer.
Args:
initialize_fn: callable that returns `(finished, next_inputs)`
for the first iteration.
sample_fn: callable that takes `(time, outputs, state)`
and emits tensor `sample_ids`.
next_inputs_fn: callable that takes `(time, outputs, state, sample_ids)`
and emits `(finished, next_inputs, next_state)`.
"""
self._initialize_fn = initialize_fn
self._sample_fn = sample_fn
self._next_inputs_fn = next_inputs_fn
self._batch_size = None
@property
def batch_size(self):
if self._batch_size is None:
raise ValueError("batch_size accessed before initialize was called")
return self._batch_size
def initialize(self, name=None):
with ops.name_scope(name, "%sInitialize" % type(self).__name__):
(finished, next_inputs) = self._initialize_fn()
if self._batch_size is None:
self._batch_size = array_ops.size(finished)
return (finished, next_inputs)
def sample(self, time, outputs, state, name=None):
with ops.name_scope(
name, "%sSample" % type(self).__name__, (time, outputs, state)):
return self._sample_fn(time=time, outputs=outputs, state=state)
def next_inputs(self, time, outputs, state, sample_ids, name=None):
with ops.name_scope(
name, "%sNextInputs" % type(self).__name__, (time, outputs, state)):
return self._next_inputs_fn(
time=time, outputs=outputs, state=state, sample_ids=sample_ids)
class TrainingHelper(Helper):
"""A helper for use during training. Only reads inputs.
Returned sample_ids are the argmax of the RNN output logits.
"""
def __init__(self, inputs, sequence_length, time_major=False, name=None):
"""Initializer.
Args:
inputs: A (structure of) input tensors.
sequence_length: An int32 vector tensor.
time_major: Python bool. Whether the tensors in `inputs` are time major.
If `False` (default), they are assumed to be batch major.
name: Name scope for any created operations.
Raises:
ValueError: if `sequence_length` is not a 1D tensor.
"""
with ops.name_scope(name, "TrainingHelper", [inputs, sequence_length]):
inputs = ops.convert_to_tensor(inputs, name="inputs")
if not time_major:
inputs = nest.map_structure(_transpose_batch_time, inputs)
def _unstack_ta(inp):
return tensor_array_ops.TensorArray(
dtype=inp.dtype, size=array_ops.shape(inp)[0],
element_shape=inp.get_shape()[1:]).unstack(inp)
self._input_tas = nest.map_structure(_unstack_ta, inputs)
self._sequence_length = ops.convert_to_tensor(
sequence_length, name="sequence_length")
if self._sequence_length.get_shape().ndims != 1:
raise ValueError(
"Expected sequence_length to be a vector, but received shape: %s" %
self._sequence_length.get_shape())
self._zero_inputs = nest.map_structure(
lambda inp: array_ops.zeros_like(inp[0, :]), inputs)
self._batch_size = array_ops.size(sequence_length)
@property
def batch_size(self):
return self._batch_size
def initialize(self, name=None):
with ops.name_scope(name, "TrainingHelperInitialize"):
finished = math_ops.equal(0, self._sequence_length)
all_finished = math_ops.reduce_all(finished)
next_inputs = control_flow_ops.cond(
all_finished, lambda: self._zero_inputs,
lambda: nest.map_structure(lambda inp: inp.read(0), self._input_tas))
return (finished, next_inputs)
def sample(self, time, outputs, name=None, **unused_kwargs):
with ops.name_scope(name, "TrainingHelperSample", [time, outputs]):
sample_ids = math_ops.cast(
math_ops.argmax(outputs, axis=-1), dtypes.int32)
return sample_ids
def next_inputs(self, time, outputs, state, name=None, **unused_kwargs):
"""next_inputs_fn for TrainingHelper."""
with ops.name_scope(name, "TrainingHelperNextInputs",
[time, outputs, state]):
next_time = time + 1
finished = (next_time >= self._sequence_length)
all_finished = math_ops.reduce_all(finished)
def read_from_ta(inp):
return inp.read(next_time)
next_inputs = control_flow_ops.cond(
all_finished, lambda: self._zero_inputs,
lambda: nest.map_structure(read_from_ta, self._input_tas))
return (finished, next_inputs, state)
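
# Illustrative construction sketch (hedged: assumes a TF1-style graph context;
# the shapes and lengths below are made up, and no session is run here).
if __name__ == "__main__":
    import numpy as np
    _inputs = np.zeros((2, 5, 4), dtype=np.float32)      # [batch, time, depth]
    _helper = TrainingHelper(inputs=_inputs, sequence_length=[5, 3])
    print(_helper.batch_size)                            # a scalar int32 tensor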
class ScheduledEmbeddingTrainingHelper(TrainingHelper):
"""A training helper that adds scheduled sampling.
Returns -1s for sample_ids where no sampling took place; valid sample id
values elsewhere.
"""
def __init__(self, inputs, sequence_length, embedding, sampling_probability,
time_major=False, seed=None, scheduling_seed=None, name=None):
"""Initializer.
Args:
inputs: A (structure of) input tensors.
sequence_length: An int32 vector tensor.
embedding: A callable that takes a vector tensor of `ids` (argmax ids),
or the `params` argument for `embedding_lookup`.
sampling_probability: A 0D `float32` tensor: the probability of sampling
categorically from the output ids instead of reading directly from the
inputs.
time_major: Python bool. Whether the tensors in `inputs` are time major.
If `False` (default), they are assumed to be batch major.
seed: The sampling seed.
scheduling_seed: The schedule decision rule sampling seed.
name: Name scope for any created operations.
Raises:
ValueError: if `sampling_probability` is not a scalar or vector.
"""
with ops.name_scope(name, "ScheduledEmbeddingSamplingWrapper",
[embedding, sampling_probability]):
if callable(embedding):
self._embedding_fn = embedding
else:
self._embedding_fn = (
lambda ids: embedding_ops.embedding_lookup(embedding, ids))
self._sampling_probability = ops.convert_to_tensor(
sampling_probability, name="sampling_probability")
if self._sampling_probability.get_shape().ndims not in (0, 1):
raise ValueError(
"sampling_probability must be either a scalar or a vector. "
"saw shape: %s" % (self._sampling_probability.get_shape()))
self._seed = seed
self._scheduling_seed = scheduling_seed
super(ScheduledEmbeddingTrainingHelper, self).__init__(
inputs=inputs,
sequence_length=sequence_length,
time_major=time_major,
name=name)
def initialize(self, name=None):
return super(ScheduledEmbeddingTrainingHelper, self).initialize(name=name)
def sample(self, time, outputs, state, name=None):
with ops.name_scope(name, "ScheduledEmbeddingTrainingHelperSample",
[time, outputs, state]):
# Return -1s where we did not sample, and sample_ids elsewhere
select_sample_noise = random_ops.random_uniform(
[self.batch_size], seed=self._scheduling_seed)
select_sample = (self._sampling_probability > select_sample_noise)
sample_id_sampler = categorical.Categorical(logits=outputs)
return array_ops.where(
select_sample,
sample_id_sampler.sample(seed=self._seed),
array_ops.tile([-1], [self.batch_size]))
def next_inputs(self, time, outputs, state, sample_ids, name=None):
with ops.name_scope(name, "ScheduledEmbeddingTrainingHelperSample",
[time, outputs, state, sample_ids]):
(finished, base_next_inputs, state) = (
super(ScheduledEmbeddingTrainingHelper, self).next_inputs(
time=time,
outputs=outputs,
state=state,
sample_ids=sample_ids,
name=name))
def maybe_sample():
"""Perform scheduled sampling."""
where_sampling = math_ops.cast(
array_ops.where(sample_ids > -1), dtypes.int32)
where_not_sampling = math_ops.cast(
array_ops.where(sample_ids <= -1), dtypes.int32)
where_sampling_flat = array_ops.reshape(where_sampling, [-1])
where_not_sampling_flat = array_ops.reshape(where_not_sampling, [-1])
sample_ids_sampling = array_ops.gather(sample_ids, where_sampling_flat)
inputs_not_sampling = array_ops.gather(
base_next_inputs, where_not_sampling_flat)
sampled_next_inputs = self._embedding_fn(sample_ids_sampling)
base_shape = array_ops.shape(base_next_inputs)
return (array_ops.scatter_nd(indices=where_sampling,
updates=sampled_next_inputs,
shape=base_shape)
+ array_ops.scatter_nd(indices=where_not_sampling,
updates=inputs_not_sampling,
shape=base_shape))
all_finished = math_ops.reduce_all(finished)
next_inputs = control_flow_ops.cond(
all_finished, lambda: base_next_inputs, maybe_sample)
return (finished, next_inputs, state)
class GreedyEmbeddingHelper(Helper):
"""A helper for use during inference.
Uses the argmax of the output (treated as logits) and passes the
result through an embedding layer to get the next input.
"""
def __init__(self, embedding, start_tokens, end_token):
"""Initializer.
Args:
embedding: A callable that takes a vector tensor of `ids` (argmax ids),
or the `params` argument for `embedding_lookup`.
start_tokens: `int32` vector shaped `[batch_size]`, the start tokens.
end_token: `int32` scalar, the token that marks end of decoding.
Raises:
ValueError: if `sequence_length` is not a 1D tensor.
"""
if callable(embedding):
self._embedding_fn = embedding
else:
self._embedding_fn = (
lambda ids: embedding_ops.embedding_lookup(embedding, ids))
self._start_tokens = ops.convert_to_tensor(
start_tokens, dtype=dtypes.int32, name="start_tokens")
self._end_token = ops.convert_to_tensor(
end_token, dtype=dtypes.int32, name="end_token")
if self._start_tokens.get_shape().ndims != 1:
raise ValueError("start_tokens must be a vector")
self._batch_size = array_ops.size(start_tokens)
if self._end_token.get_shape().ndims != 0:
raise ValueError("end_token must be a scalar")
self._start_inputs = self._embedding_fn(self._start_tokens)
@property
def batch_size(self):
return self._batch_size
def initialize(self, name=None):
finished = array_ops.tile([False], [self._batch_size])
return (finished, self._start_inputs)
def sample(self, time, outputs, state, name=None):
"""sample for GreedyEmbeddingHelper."""
del time, state # unused by sample_fn
# Outputs are logits, use argmax to get the most probable id
if not isinstance(outputs, ops.Tensor):
raise TypeError("Expected outputs to be a single Tensor, got: %s" %
outputs)
sample_ids = math_ops.cast(
math_ops.argmax(outputs, axis=-1), dtypes.int32)
return sample_ids
def next_inputs(self, time, outputs, state, sample_ids, name=None):
"""next_inputs_fn for GreedyEmbeddingHelper."""
del time, outputs # unused by next_inputs_fn
finished = math_ops.equal(sample_ids, self._end_token)
all_finished = math_ops.reduce_all(finished)
next_inputs = control_flow_ops.cond(
all_finished,
# If we're finished, the next_inputs value doesn't matter
lambda: self._start_inputs,
lambda: self._embedding_fn(sample_ids))
return (finished, next_inputs, state)
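
# Illustrative construction sketch (hedged: assumes a TF1-style graph context;
# the toy embedding table, start tokens and end token below are made up).
if __name__ == "__main__":
    import numpy as np
    _embedding = np.eye(6, dtype=np.float32)             # vocab of 6 toy ids
    _helper = GreedyEmbeddingHelper(embedding=_embedding,
                                    start_tokens=[0, 0], end_token=5)
    print(_helper.batch_size)                            # a scalar int32 tensor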
|
[
"gardener@tensorflow.org"
] |
gardener@tensorflow.org
|
408cd967099fe900471e4103edf3f71c8f1f8cd8
|
12e04c219d6911d06a048c913f8d8d6c00dad857
|
/chendian/api/blog/views.py
|
5deab03ba38adced720c0e0764230d3bc891c9a2
|
[
"MIT"
] |
permissive
|
mozillazg/chendian-plus
|
928e98beb77f351e08b25a5ba9671ad648dac4b5
|
893c62b4b855879006d4cb378faeb9d1c6635923
|
refs/heads/master
| 2023-09-04T09:58:58.112022
| 2017-04-04T09:44:28
| 2017-04-04T09:44:28
| 31,481,576
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,155
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
from django.shortcuts import get_object_or_404
from rest_framework.views import APIView
from rest_framework.permissions import IsAdminUser
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from api.permissions import IsAdminOrReadonly, IsAdminOrReadAndCreate
from blog.models import Article, Tag, Category
from .serializers import ArticleSerializer, TagSerializer, CategorySerializer
class CategoryViewSet(ModelViewSet):
queryset = Category.objects.all()
serializer_class = CategorySerializer
search_fields = ('name',)
filter_fields = ('id',)
permission_classes = (IsAdminOrReadonly,)
class TagViewSet(ModelViewSet):
queryset = Tag.objects.all()
serializer_class = TagSerializer
search_fields = ('name',)
filter_fields = ('id',)
class ArticleViewSet(ModelViewSet):
queryset = Article.objects.all()
serializer_class = ArticleSerializer
filter_fields = (
'id', 'author__nick_name',
'categories__slug', 'tags__slug'
)
search_fields = ('title',)
permission_classes = (IsAdminOrReadAndCreate,)
def get_queryset(self):
queryset = super(ArticleViewSet, self).get_queryset()
if not self.request.user.is_staff:
queryset = queryset.filter(status=Article.STATUS_APPROVED)
return queryset.select_related('author'
).prefetch_related('tags',
'categories')
class ArticleApprove(APIView):
permission_classes = (IsAdminUser,)
def put(self, request, pk, *args, **kwargs):
article = get_object_or_404(Article.objects.all(), pk=pk)
article.status = Article.STATUS_APPROVED
article.save()
return Response(status=204)
def delete(self, request, pk, *args, **kwargs):
article = get_object_or_404(Article.objects.all(), pk=pk)
article.status = Article.STATUS_DISAPPROVED
article.save()
return Response(status=204)
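
# Hedged wiring sketch (not taken from this project): one way these viewsets
# could be exposed through a DRF router; the URL prefixes are assumptions and
# this would normally live in a urls.py module, so it is shown as comments.
#
#   from rest_framework.routers import DefaultRouter
#   router = DefaultRouter()
#   router.register(r'categories', CategoryViewSet)
#   router.register(r'tags', TagViewSet)
#   router.register(r'articles', ArticleViewSet)
#   urlpatterns = router.urls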
|
[
"opensource.mozillazg@gmail.com"
] |
opensource.mozillazg@gmail.com
|
ab2ef61c7be69927178e04d9011d650cc23a1a87
|
2322b5a1e6443715d14029ed09d9091d221e4569
|
/tests/test_dfply_select.py
|
718f55fd119c9ec68b7fdbd216d3fa859c7dd4e2
|
[
"MIT"
] |
permissive
|
zdelrosario/py_grama
|
1e86454cb67dde055a332de023aab8a52eeb0d87
|
d5edbcd3c8dc8705362eb36d437890962851da1d
|
refs/heads/master
| 2023-09-05T01:44:57.707310
| 2023-08-29T12:51:56
| 2023-08-29T12:51:56
| 173,464,075
| 19
| 9
|
MIT
| 2023-08-16T20:14:08
| 2019-03-02T15:25:43
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 14,801
|
py
|
import numpy as np
import pandas as pd
from scipy.stats import norm
import unittest
from context import grama as gr
from context import data
X = gr.Intention()
##==============================================================================
## select and drop test functions
##==============================================================================
# 0 1 2 3 4 5 6 7 8 9
# carat cut color clarity depth table price x y z
# 0.23 Ideal E SI2 61.5 55.0 326 3.95 3.98 2.43
class TestSelect(unittest.TestCase):
def test_select(self):
df = data.df_diamonds[["carat", "cut", "price"]]
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_select("carat", "cut", "price"))
)
self.assertTrue(df.equals(data.df_diamonds >> gr.tf_select(0, 1, 6)))
self.assertTrue(df.equals(data.df_diamonds >> gr.tf_select(0, 1, "price")))
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_select([0, X.cut], X.price))
)
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_select(X.carat, X["cut"], X.price))
)
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_select(X[["carat", "cut", "price"]]))
)
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_select(X[["carat", "cut"]], X.price))
)
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_select(X.iloc[:, [0, 1, 6]]))
)
self.assertTrue(
df.equals(
data.df_diamonds >> gr.tf_select([X.loc[:, ["carat", "cut", "price"]]])
)
)
def test_select_inversion(self):
df = data.df_diamonds.iloc[:, 3:]
d = data.df_diamonds >> gr.tf_select(~X.carat, ~X.cut, ~X.color)
self.assertTrue(df.equals(d))
def test_drop(self):
df = data.df_diamonds.drop(["carat", "cut", "price"], axis=1)
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_drop("carat", "cut", "price"))
)
self.assertTrue(df.equals(data.df_diamonds >> gr.tf_drop(0, 1, 6)))
self.assertTrue(df.equals(data.df_diamonds >> gr.tf_drop(0, 1, "price")))
self.assertTrue(df.equals(data.df_diamonds >> gr.tf_drop([0, X.cut], X.price)))
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_drop(X.carat, X["cut"], X.price))
)
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_drop(X[["carat", "cut", "price"]]))
)
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_drop(X[["carat", "cut"]], X.price))
)
self.assertTrue(df.equals(data.df_diamonds >> gr.tf_drop(X.iloc[:, [0, 1, 6]])))
self.assertTrue(
df.equals(
data.df_diamonds >> gr.tf_drop([X.loc[:, ["carat", "cut", "price"]]])
)
)
def test_select_containing(self):
df = data.df_diamonds[["carat", "cut", "color", "clarity", "price"]]
assert df.equals(data.df_diamonds >> gr.tf_select(gr.contains("c")))
def test_drop_containing(self):
df = data.df_diamonds[["depth", "table", "x", "y", "z"]]
assert df.equals(data.df_diamonds >> gr.tf_drop(gr.contains("c")))
def test_select_matches(self):
df = data.df_diamonds[["carat", "cut", "color", "clarity", "price"]]
assert df.equals(data.df_diamonds >> gr.tf_select(gr.matches("^c[auol]|pri")))
def test_drop_matches(self):
df = data.df_diamonds[["depth", "table", "x", "y", "z"]]
assert df.equals(data.df_diamonds >> gr.tf_drop(gr.matches("^c[auol]|p.i")))
def test_select_startswith(self):
df = data.df_diamonds[["carat", "cut", "color", "clarity"]]
assert df.equals(data.df_diamonds >> gr.tf_select(gr.starts_with("c")))
def test_drop_startswith(self):
df = data.df_diamonds[["depth", "table", "price", "x", "y", "z"]]
assert df.equals(data.df_diamonds >> gr.tf_drop(gr.starts_with("c")))
def test_select_endswith(self):
df = data.df_diamonds[["table", "price"]]
assert df.equals(data.df_diamonds >> gr.tf_select(gr.ends_with("e")))
def test_drop_endswith(self):
df = data.df_diamonds.drop("z", axis=1)
assert df.equals(data.df_diamonds >> gr.tf_drop(gr.ends_with("z")))
def test_select_between(self):
df = data.df_diamonds[["cut", "color", "clarity"]]
self.assertTrue(
df.equals(
data.df_diamonds >> gr.tf_select(gr.columns_between(X.cut, X.clarity))
)
)
self.assertTrue(
df.equals(
data.df_diamonds >> gr.tf_select(gr.columns_between("cut", "clarity"))
)
)
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_select(gr.columns_between(1, 3)))
)
df = data.df_diamonds[["x", "y", "z"]]
assert df.equals(data.df_diamonds >> gr.tf_select(gr.columns_between("x", 20)))
def test_drop_between(self):
df = data.df_diamonds[["carat", "z"]]
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_drop(gr.columns_between("cut", "y")))
)
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_drop(gr.columns_between(X.cut, 8)))
)
df = data.df_diamonds[["carat", "cut"]]
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_drop(gr.columns_between(X.color, 20)))
)
def test_select_from(self):
df = data.df_diamonds[["x", "y", "z"]]
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_select(gr.columns_from("x")))
)
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_select(gr.columns_from(X.x)))
)
self.assertTrue(df.equals(data.df_diamonds >> gr.tf_select(gr.columns_from(7))))
self.assertTrue(
data.df_diamonds[[]].equals(
data.df_diamonds >> gr.tf_select(gr.columns_from(100))
)
)
def test_drop_from(self):
df = data.df_diamonds[["carat", "cut"]]
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_drop(gr.columns_from("color")))
)
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_drop(gr.columns_from(X.color)))
)
self.assertTrue(df.equals(data.df_diamonds >> gr.tf_drop(gr.columns_from(2))))
self.assertTrue(
data.df_diamonds[[]].equals(
data.df_diamonds >> gr.tf_drop(gr.columns_from(0))
)
)
def test_select_to(self):
df = data.df_diamonds[["carat", "cut"]]
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_select(gr.columns_to("color")))
)
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_select(gr.columns_to(X.color)))
)
self.assertTrue(df.equals(data.df_diamonds >> gr.tf_select(gr.columns_to(2))))
def test_drop_to(self):
df = data.df_diamonds[["x", "y", "z"]]
self.assertTrue(df.equals(data.df_diamonds >> gr.tf_drop(gr.columns_to("x"))))
self.assertTrue(df.equals(data.df_diamonds >> gr.tf_drop(gr.columns_to(X.x))))
self.assertTrue(df.equals(data.df_diamonds >> gr.tf_drop(gr.columns_to(7))))
def select_through(self):
df = data.df_diamonds[["carat", "cut", "color"]]
self.assertTrue(
df.equals(
data.df_diamonds >> gr.tf_select(gr.columns_to("color", inclusive=True))
)
)
self.assertTrue(
df.equals(
data.df_diamonds >> gr.tf_select(gr.columns_to(X.color, inclusive=True))
)
)
self.assertTrue(
df.equals(
data.df_diamonds >> gr.tf_select(gr.columns_to(2, inclusive=True))
)
)
def drop_through(self):
df = data.df_diamonds[["y", "z"]]
self.assertTrue(
df.equals(
data.df_diamonds >> gr.tf_drop(gr.columns_to("x", inclusive=True))
)
)
self.assertTrue(
df.equals(
data.df_diamonds >> gr.tf_drop(gr.columns_to(X.x, inclusive=True))
)
)
self.assertTrue(
df.equals(data.df_diamonds >> gr.tf_drop(gr.columns_to(7, inclusive=True)))
)
def test_select_if(self):
# test 1: manually build data.df_diamonds subset where columns are numeric and
# mean is greater than 3
cols = list()
for col in data.df_diamonds:
try:
if mean(data.df_diamonds[col]) > 3:
cols.append(col)
except:
pass
df_if = data.df_diamonds[cols]
self.assertTrue(
df_if.equals(data.df_diamonds >> gr.tf_select_if(lambda col: mean(col) > 3))
)
# test 2: use and
cols = list()
for col in data.df_diamonds:
try:
if mean(data.df_diamonds[col]) > 3 and max(data.df_diamonds[col]) < 50:
cols.append(col)
except:
pass
df_if = data.df_diamonds[cols]
self.assertTrue(
df_if.equals(
data.df_diamonds
>> gr.tf_select_if(lambda col: mean(col) > 3 and max(col) < 50)
)
)
# test 3: use or
cols = list()
for col in data.df_diamonds:
try:
if mean(data.df_diamonds[col]) > 3 or max(data.df_diamonds[col]) < 6:
cols.append(col)
except:
pass
df_if = data.df_diamonds[cols]
self.assertTrue(
df_if.equals(
data.df_diamonds
>> gr.tf_select_if(lambda col: mean(col) > 3 or max(col) < 6)
)
)
# test 4: string operations - contain a specific string
cols = list()
for col in data.df_diamonds:
try:
if any(data.df_diamonds[col].str.contains("Ideal")):
cols.append(col)
except:
pass
df_if = data.df_diamonds[cols]
self.assertTrue(
df_if.equals(
data.df_diamonds
>> gr.tf_select_if(lambda col: any(col.str.contains("Ideal")))
)
)
# test 5: get any text columns
# uses the special '.' regex symbol to find any text value
cols = list()
for col in data.df_diamonds:
try:
if any(data.df_diamonds[col].str.contains(".")):
cols.append(col)
except:
pass
df_if = data.df_diamonds[cols]
self.assertTrue(
df_if.equals(
data.df_diamonds
>> gr.tf_select_if(lambda col: any(col.str.contains(".")))
)
)
# test 6: is_numeric helper
self.assertEqual(
{"carat", "depth", "table", "price", "x", "y", "z"},
set(
(
data.df_diamonds
>> gr.tf_select_if(gr.is_numeric)
).columns
)
)
def test_drop_if(self):
        # test 1: returns a dataframe keeping only the columns whose mean is NOT greater than 3
        # this means numeric columns with mean <= 3, and also any non-numeric column
        # (since a mean cannot be computed for it, the predicate fails and the column is kept)
cols = list()
for col in data.df_diamonds:
try:
if mean(data.df_diamonds[col]) > 3:
cols.append(col)
except:
pass
inverse_cols = [col for col in data.df_diamonds if col not in cols]
df_if = data.df_diamonds[inverse_cols]
self.assertTrue(
df_if.equals(data.df_diamonds >> gr.tf_drop_if(lambda col: mean(col) > 3))
)
        # test 2: use and
        # returns the columns for which the combined condition is false,
        # i.e. it is not the case that both mean > 3 and max < 50 hold
        # again, this will include non-numeric columns
cols = list()
for col in data.df_diamonds:
try:
if mean(data.df_diamonds[col]) > 3 and max(data.df_diamonds[col]) < 50:
cols.append(col)
except:
pass
inverse_cols = [col for col in data.df_diamonds if col not in cols]
df_if = data.df_diamonds[inverse_cols]
self.assertTrue(
df_if.equals(
data.df_diamonds
>> gr.tf_drop_if(lambda col: mean(col) > 3 and max(col) < 50)
)
)
        # test 3: use or
        # this will return a dataframe keeping only the columns where both conditions are false:
        # neither mean > 3 nor max < 6 holds
cols = list()
for col in data.df_diamonds:
try:
if mean(data.df_diamonds[col]) > 3 or max(data.df_diamonds[col]) < 6:
cols.append(col)
except:
pass
inverse_cols = [col for col in data.df_diamonds if col not in cols]
df_if = data.df_diamonds[inverse_cols]
self.assertTrue(
df_if.equals(
data.df_diamonds
>> gr.tf_drop_if(lambda col: mean(col) > 3 or max(col) < 6)
)
)
# test 4: string operations - contain a specific string
# this will drop any columns if they contain the word 'Ideal'
cols = list()
for col in data.df_diamonds:
try:
if any(data.df_diamonds[col].str.contains("Ideal")):
cols.append(col)
except:
pass
inverse_cols = [col for col in data.df_diamonds if col not in cols]
df_if = data.df_diamonds[inverse_cols]
self.assertTrue(
df_if.equals(
data.df_diamonds
>> gr.tf_drop_if(lambda col: any(col.str.contains("Ideal")))
)
)
# test 5: drop any text columns
# uses the special '.' regex symbol to find any text value
cols = list()
for col in data.df_diamonds:
try:
if any(data.df_diamonds[col].str.contains(".")):
cols.append(col)
except:
pass
inverse_cols = [col for col in data.df_diamonds if col not in cols]
df_if = data.df_diamonds[inverse_cols]
self.assertTrue(
df_if.equals(
data.df_diamonds
>> gr.tf_drop_if(lambda col: any(col.str.contains(".")))
)
)
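# --- Hedged illustration (not part of the original test suite) ---------------
# The expected frames for test_select_if / test_drop_if are built by looping
# over columns and applying a predicate; the helper below shows the same idea
# in plain pandas. It is illustrative only and is not part of grama.
def _plain_pandas_select_if(df, predicate):
    """Keep only the columns of df for which predicate(column) is truthy."""
    kept = []
    for col in df:
        try:
            if predicate(df[col]):
                kept.append(col)
        except Exception:
            # e.g. mean() on a non-numeric column: the predicate fails, skip it
            pass
    return df[kept]
# Assumed usage: _plain_pandas_select_if(data.df_diamonds, lambda c: mean(c) > 3)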
|
[
"zdelrosario@outlook.com"
] |
zdelrosario@outlook.com
|
c3276098bbfeed6b97b1c4291af58b7939b17082
|
d94b6845aeeb412aac6850b70e22628bc84d1d6d
|
/multiple_user_representations/models/task.py
|
05df7200848d01a702e2184141f95464a2814410
|
[
"CC-BY-4.0",
"Apache-2.0"
] |
permissive
|
ishine/google-research
|
541aea114a68ced68736340e037fc0f8257d1ea2
|
c1ae273841592fce4c993bf35cdd0a6424e73da4
|
refs/heads/master
| 2023-06-08T23:02:25.502203
| 2023-05-31T01:00:56
| 2023-05-31T01:06:45
| 242,478,569
| 0
| 0
|
Apache-2.0
| 2020-06-23T01:55:11
| 2020-02-23T07:59:42
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 10,488
|
py
|
# coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines the task class that computes the loss and metrics for retrieval task."""
from typing import Optional
import tensorflow as tf
import tensorflow_recommenders as tfrs
class MultiShotRetrievalTask(tfrs.tasks.Retrieval):
"""Extends the tfrs retrieval task to support multiple user representation (MUR).
This class modifies the call function to support MUR. See base class for more
details. For details on MUR see http://shortn/_PO6OdvUuAs.
"""
def call(self,
user_embeddings,
candidate_embeddings,
sample_weight = None,
candidate_sampling_probability = None,
eval_candidate_embeddings = None,
compute_metrics = True,
is_head_item = None):
"""Computes the loss function for next-item prediction.
While computing the loss, the next-item's embedding is used as positive,
while remaining items in the batch are negatives (in-batch negatives).
Args:
user_embeddings: User Embeddings [B, H, D] from the user tower, where H
corresponds to the number of user representations.
candidate_embeddings: The candidate embeddings corresponding to ground
truth items that match the users. Shape: [B, D].
sample_weight: Tensor of sample weights [B].
candidate_sampling_probability: Optional tensor of candidate sampling
        probabilities. When given, it will be used to correct the logits to
reflect the sampling probability of in-batch negatives.
eval_candidate_embeddings: Candidate Embeddings [B, N, D] from the item
tower, where N corresponds to the number of candidates. The
eval_candidate_embeddings is only used to evaluate the metrics. See
evaluation (http://shortn/_PO6OdvUuAs#heading=h.tghsu9ag8g7x) with
negative samples. When not given, all candidates are considered for
evaluating metrics.
compute_metrics: Whether to compute metrics or not.
is_head_item: 1 if the positive candidate is a head item else 0. This is
used to compute head/tail metrics. Shape: [B, 1].
Returns:
loss: Loss value tensor.
"""
scores = tf.linalg.matmul(
user_embeddings, candidate_embeddings, transpose_b=True)
scores = tf.math.reduce_max(scores, axis=1)
batch_size = tf.shape(scores)[0]
num_candidates = tf.shape(scores)[-1]
labels = tf.eye(batch_size, num_candidates)
if self._temperature is not None:
scores = scores / self._temperature
if candidate_sampling_probability is not None:
candidate_sampling_probability = tf.cast(candidate_sampling_probability,
tf.float32)
scores = tfrs.layers.loss.SamplingProbablityCorrection()(
scores, candidate_sampling_probability)
loss = self._loss(y_true=labels, y_pred=scores, sample_weight=sample_weight)
if not compute_metrics:
return loss
if not self._factorized_metrics:
return loss
if eval_candidate_embeddings is None:
update_op = self._factorized_metrics.update_state(
user_embeddings, candidate_embeddings, is_head_item=is_head_item)
else:
update_op = self._factorized_metrics.update_state_with_negatives(
user_embeddings,
candidate_embeddings,
eval_candidate_embeddings,
is_head_item=is_head_item)
with tf.control_dependencies([update_op]):
return tf.identity(loss)
def compute_cosine_disagreement_loss(self,
query_head):
"""Computes the cosine disagreement loss given the query_head.
Args:
query_head: Tensor of shape [1, H, D].
Returns:
disagreement_loss: The cosine similarity computed across axis=1 for
different queries in the query_head.
"""
norm = tf.linalg.norm(query_head, axis=-1, keepdims=True)
disagreement_queries = query_head / tf.stop_gradient(norm)
query_embedding_score = tf.linalg.matmul(
disagreement_queries, disagreement_queries, transpose_b=True)
disagreement_loss = tf.reduce_sum(query_embedding_score)
disagreement_loss -= tf.reduce_sum(tf.linalg.trace(query_embedding_score))
return disagreement_loss
class MultiQueryStreaming(tfrs.layers.factorized_top_k.Streaming):
"""A wrapper for item candidates to efficiently retrieve top K scores.
This class extends the tfrs factorized_top_k.Streaming class, which only
supports a single user representation for retrieval. The class overrides the
_compute_score to extend the functionality to multiple representations.
See base class.
"""
def _compute_score(self, queries,
candidates):
"""Computes multi-query score by overriding _compute_score from parent.
Args:
queries: Multiple queries.
candidates: Candidate tensor.
Returns:
scores: Max score from multiple queries.
"""
scores = tf.matmul(queries, candidates, transpose_b=True)
scores = tf.reduce_max(scores, axis=1)
return scores
class MultiQueryFactorizedTopK(tfrs.metrics.FactorizedTopK):
"""Computes metrics for multi user representations across top K candidates.
We reuse the functionality from base class, while only modifying the
update_state function to support multiple representations.
See base class for details.
"""
def update_state(self,
query_embeddings,
true_candidate_embeddings,
is_head_item = None):
"""Updates the state of the FactorizedTopK Metric.
See the base class method `update_state` for details. This method extends
the functionality to multiple user representation case, i.e. when
query_embeddings is of shape [B, H, D].
Args:
query_embeddings: The query embeddings used to retrieve candidates.
true_candidate_embeddings: The positive candidate embeddings.
is_head_item: 1 if the positive candidate is a head item else 0. This is
used to compute head/tail metrics. Shape: [B, 1].
Returns:
update_ops: The metric update op. Used for tf.v1 functionality.
"""
# true_candidate_embeddings: [B, d]
true_candidate_embeddings = tf.expand_dims(
true_candidate_embeddings, axis=1)
# positive_scores: B x H x 1
positive_scores = tf.reduce_sum(
query_embeddings * true_candidate_embeddings, axis=2, keepdims=True)
# positive_scores: B x 1
positive_scores = tf.reduce_max(positive_scores, axis=1)
top_k_predictions, _ = self._candidates(query_embeddings, k=self._k)
y_true = tf.concat(
[tf.ones(tf.shape(positive_scores)),
tf.zeros_like(top_k_predictions)],
axis=1)
y_pred = tf.concat([positive_scores, top_k_predictions], axis=1)
update_ops = []
for metric in self._top_k_metrics:
if metric.name.startswith(
("head_", "Head_")) and is_head_item is not None:
update_ops.append(
metric.update_state(
y_true=y_true, y_pred=y_pred, sample_weight=is_head_item))
elif metric.name.startswith(
("tail_", "Tail_")) and is_head_item is not None:
# If item is not head, then the item is considered to be a tail item.
is_tail_item = 1.0 - is_head_item
update_ops.append(
metric.update_state(
y_true=y_true, y_pred=y_pred, sample_weight=is_tail_item))
else:
update_ops.append(metric.update_state(y_true=y_true, y_pred=y_pred))
return tf.group(update_ops)
def update_state_with_negatives(
self,
query_embeddings,
candidate_embeddings,
neg_candidate_embeddings,
is_head_item = None):
"""Updates the state of the FactorizedTopK Metric wrt the negative samples.
The function computes the metrics only wrt the given negative samples.
Unlike the update_state fn, this fn assumes that the number of candidate
samples are fewer while finding topK candidates.
Args:
query_embeddings: The query embeddings used for retrieval. Shape: [B,H,D].
candidate_embeddings: The candidate embeddings corresponding to ground
truth items that match the query. Shape: [B,D].
neg_candidate_embeddings: The negative candidate embeddings against which
we evaluate. Shape: [B,N,D].
is_head_item: 1 if the positive candidate is a head item else 0. This is
used to compute head/tail metrics. Shape: [B, 1].
Returns:
update_ops: The metric op used for tf.v1 functionality.
"""
pos_candidate_embeddings = tf.expand_dims(candidate_embeddings, axis=-1)
positive_scores = tf.matmul(query_embeddings, pos_candidate_embeddings)
positive_scores = tf.reduce_max(positive_scores, axis=1)
negative_scores = tf.matmul(
query_embeddings, neg_candidate_embeddings, transpose_b=True)
negative_scores = tf.reduce_max(negative_scores, axis=1)
y_true = tf.concat(
[tf.ones_like(positive_scores),
tf.zeros_like(negative_scores)],
axis=1)
y_pred = tf.concat([positive_scores, negative_scores], axis=1)
update_ops = []
for metric in self._top_k_metrics:
if metric.name.startswith(
("head_", "Head_")) and is_head_item is not None:
update_ops.append(
metric.update_state(
y_true=y_true, y_pred=y_pred, sample_weight=is_head_item))
elif metric.name.startswith(
("tail_", "Tail_")) and is_head_item is not None:
# If item is not head, then the item is considered to be a tail item.
is_tail_item = 1.0 - is_head_item
update_ops.append(
metric.update_state(
y_true=y_true, y_pred=y_pred, sample_weight=is_tail_item))
else:
update_ops.append(metric.update_state(y_true=y_true, y_pred=y_pred))
return tf.group(update_ops)
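# --- Hedged usage sketch (not part of the original module) -------------------
# Illustrates, with plain TensorFlow ops, the max-over-representations scoring
# and in-batch-negative labels described in MultiShotRetrievalTask.call. The
# shapes (B=4, H=3, D=8) are illustrative assumptions only.
if __name__ == "__main__":
  user_embeddings = tf.random.normal([4, 3, 8])    # [B, H, D]
  candidate_embeddings = tf.random.normal([4, 8])  # [B, D], one positive per user
  scores = tf.linalg.matmul(
      user_embeddings, candidate_embeddings, transpose_b=True)  # [B, H, B]
  scores = tf.math.reduce_max(scores, axis=1)                   # [B, B]
  labels = tf.eye(4, 4)  # diagonal = positives, off-diagonal = in-batch negatives
  loss = tf.keras.losses.CategoricalCrossentropy(from_logits=True)(labels, scores)
  print("in-batch softmax loss:", float(loss))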
|
[
"copybara-worker@google.com"
] |
copybara-worker@google.com
|
cd7945760a4924d927021ded1d1b0d4d2a70b9a4
|
390565b36da4e69a08a69f1d59b7d58a3ffd2fdf
|
/python/baseline/reader.py
|
e0d40319da1df29dfc0b91e4cd315de3788011b3
|
[
"Apache-2.0"
] |
permissive
|
demoninpiano/baseline
|
3a87afc831bbdef56f25652e2e90205dee33a290
|
e3e541a241f673813cc4628de90411d1ecee5e3f
|
refs/heads/master
| 2021-07-08T02:10:42.430721
| 2017-10-05T14:34:15
| 2017-10-05T14:34:15
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 18,106
|
py
|
import baseline.data
import numpy as np
from collections import Counter
import re
import codecs
from baseline.utils import import_user_module, revlut
import os
def num_lines(filename):
lines = 0
with codecs.open(filename, encoding='utf-8', mode='r') as f:
for _ in f:
lines = lines + 1
return lines
def _build_vocab_for_col(col, files):
vocab = Counter()
vocab['<GO>'] = 1
vocab['<EOS>'] = 1
for file in files:
if file is None:
continue
with codecs.open(file, encoding='utf-8', mode='r') as f:
for line in f:
cols = re.split("\t", line)
text = re.split("\s", cols[col])
for w in text:
w = w.strip()
vocab[w] += 1
return vocab
class ParallelCorpusReader(object):
def __init__(self,
max_sentence_length=1000,
vec_alloc=np.zeros,
src_vec_trans=None,
trim=False):
self.vec_alloc = vec_alloc
self.src_vec_trans = src_vec_trans
self.max_sentence_length = max_sentence_length
self.trim = trim
def build_vocabs(self, files):
pass
def load_examples(self, tsfile, vocab1, vocab2):
pass
def load(self, tsfile, vocab1, vocab2, batchsz, shuffle=False):
examples = self.load_examples(tsfile, vocab1, vocab2)
return baseline.data.Seq2SeqDataFeed(examples, batchsz,
shuffle=shuffle, src_vec_trans=self.src_vec_trans,
vec_alloc=self.vec_alloc, trim=self.trim)
class TSVParallelCorpusReader(ParallelCorpusReader):
def __init__(self,
max_sentence_length=1000,
vec_alloc=np.zeros,
src_vec_trans=None,
trim=False, src_col_num=0, dst_col_num=1):
super(TSVParallelCorpusReader, self).__init__(max_sentence_length, vec_alloc, src_vec_trans, trim)
self.src_col_num = src_col_num
self.dst_col_num = dst_col_num
def build_vocabs(self, files):
src_vocab = _build_vocab_for_col(self.src_col_num, files)
dst_vocab = _build_vocab_for_col(self.dst_col_num, files)
return src_vocab, dst_vocab
def load_examples(self, tsfile, vocab1, vocab2):
GO = vocab2['<GO>']
EOS = vocab2['<EOS>']
mxlen = self.max_sentence_length
ts = []
with codecs.open(tsfile, encoding='utf-8', mode='r') as f:
for line in f:
splits = re.split("\t", line.strip())
src = re.split("\s+", splits[0])
dst = re.split("\s+", splits[1])
srcl = self.vec_alloc(mxlen, dtype=np.int)
tgtl = self.vec_alloc(mxlen, dtype=np.int)
src_len = len(src)
tgt_len = len(dst) + 2 # <GO>,...,<EOS>
end1 = min(src_len, mxlen)
end2 = min(tgt_len, mxlen)-2
tgtl[0] = GO
src_len = end1
tgt_len = end2+2
for j in range(end1):
srcl[j] = vocab1[src[j]]
for j in range(end2):
tgtl[j + 1] = vocab2[dst[j]]
tgtl[end2] = EOS
ts.append((srcl, tgtl, src_len, tgt_len))
return baseline.data.Seq2SeqExamples(ts)
class MultiFileParallelCorpusReader(ParallelCorpusReader):
def __init__(self, src_suffix, dst_suffix,
max_sentence_length=1000,
vec_alloc=np.zeros,
src_vec_trans=None,
trim=False):
super(MultiFileParallelCorpusReader, self).__init__(max_sentence_length, vec_alloc, src_vec_trans, trim)
self.src_suffix = src_suffix
self.dst_suffix = dst_suffix
if not src_suffix.startswith('.'):
self.src_suffix = '.' + self.src_suffix
if not dst_suffix.startswith('.'):
self.dst_suffix = '.' + self.dst_suffix
def build_vocabs(self, files):
src_vocab = _build_vocab_for_col(0, files)
dst_vocab = src_vocab
return src_vocab, dst_vocab
def load_examples(self, tsfile, vocab1, vocab2):
PAD = vocab1['<PAD>']
GO = vocab2['<GO>']
EOS = vocab2['<EOS>']
mxlen = self.max_sentence_length
ts = []
with codecs.open(tsfile + self.src_suffix, encoding='utf-8', mode='r') as fsrc:
with codecs.open(tsfile + self.dst_suffix, encoding='utf-8', mode='r') as fdst:
for src, dst in zip(fsrc, fdst):
src = re.split("\s+", src.strip())
dst = re.split("\s+", dst.strip())
srcl = self.vec_alloc(mxlen, dtype=np.int)
tgtl = self.vec_alloc(mxlen, dtype=np.int)
src_len = len(src)
tgt_len = len(dst) + 2
end1 = min(src_len, mxlen)
end2 = min(tgt_len, mxlen)-2
tgtl[0] = GO
src_len = end1
tgt_len = end2+2
for j in range(end1):
srcl[j] = vocab1[src[j]]
for j in range(end2):
tgtl[j + 1] = vocab2[dst[j]]
tgtl[end2] = EOS
ts.append((srcl, tgtl, src_len, tgt_len))
return baseline.data.Seq2SeqExamples(ts)
def create_parallel_corpus_reader(mxlen, alloc_fn, trim, src_vec_trans, **kwargs):
reader_type = kwargs.get('reader_type', 'default')
if reader_type == 'default':
print('Reading parallel file corpus')
pair_suffix = kwargs.get('pair_suffix')
reader = MultiFileParallelCorpusReader(pair_suffix[0], pair_suffix[1],
mxlen, alloc_fn,
src_vec_trans, trim)
elif reader_type == 'tsv':
print('Reading tab-separated corpus')
reader = TSVParallelCorpusReader(mxlen, alloc_fn, src_vec_trans, trim)
else:
mod = import_user_module("reader", reader_type)
return mod.create_parallel_corpus_reader(mxlen, alloc_fn,
src_vec_trans, trim, **kwargs)
return reader
def identity_trans_fn(x):
return x
class CONLLSeqReader(object):
UNREP_EMOTICONS = (
':)',
':(((',
':D',
'=)',
':-)',
'=(',
'(=',
'=[[',
)
def __init__(self, max_sentence_length=-1, max_word_length=-1, word_trans_fn=None,
vec_alloc=np.zeros, vec_shape=np.shape, trim=False):
self.cleanup_fn = identity_trans_fn if word_trans_fn is None else word_trans_fn
self.max_sentence_length = max_sentence_length
self.max_word_length = max_word_length
self.vec_alloc = vec_alloc
self.vec_shape = vec_shape
self.trim = trim
self.label2index = {"<PAD>": 0, "<GO>": 1, "<EOS>": 2}
@staticmethod
def web_cleanup(word):
if word.startswith('http'): return 'URL'
if word.startswith('@'): return '@@@@'
if word.startswith('#'): return '####'
if word == '"': return ','
if word in CONLLSeqReader.UNREP_EMOTICONS: return ';)'
if word == '<3': return '<3'
return word
def build_vocab(self, files):
vocab_word = Counter()
vocab_ch = Counter()
maxw = 0
maxs = 0
for file in files:
if file is None:
continue
sl = 0
with codecs.open(file, encoding='utf-8', mode='r') as f:
for line in f:
line = line.strip()
if line == '':
maxs = max(maxs, sl)
sl = 0
else:
states = re.split("\s", line)
sl += 1
w = states[0]
vocab_word[self.cleanup_fn(w)] += 1
maxw = max(maxw, len(w))
for k in w:
vocab_ch[k] += 1
self.max_word_length = min(maxw, self.max_word_length) if self.max_word_length > 0 else maxw
self.max_sentence_length = min(maxs, self.max_sentence_length) if self.max_sentence_length > 0 else maxs
print('Max sentence length %d' % self.max_sentence_length)
print('Max word length %d' % self.max_word_length)
return vocab_ch, vocab_word
@staticmethod
def read_lines(tsfile):
txts = []
lbls = []
txt = []
lbl = []
with codecs.open(tsfile, encoding='utf-8', mode='r') as f:
for line in f:
states = re.split("\s", line.strip())
if len(states) > 1:
txt.append(states[0])
lbl.append(states[-1])
else:
txts.append(txt)
lbls.append(lbl)
txt = []
lbl = []
return txts, lbls
def load(self, filename, words_vocab, chars_vocab, batchsz, shuffle=False):
ts = []
        idx = 2  # <PAD>=0, <GO>=1, <EOS>=2 are already reserved in self.label2index
mxlen = self.max_sentence_length
maxw = self.max_word_length
txts, lbls = CONLLSeqReader.read_lines(filename)
for i in range(len(txts)):
xs_ch = self.vec_alloc((mxlen, maxw), dtype=np.int)
xs = self.vec_alloc((mxlen), dtype=np.int)
ys = self.vec_alloc((mxlen), dtype=np.int)
lv = lbls[i]
v = txts[i]
length = mxlen
for j in range(mxlen):
if j == len(v):
length = j
break
w = v[j]
nch = min(len(w), maxw)
label = lv[j]
if label not in self.label2index:
idx += 1
self.label2index[label] = idx
ys[j] = self.label2index[label]
xs[j] = words_vocab.get(self.cleanup_fn(w))
for k in range(nch):
xs_ch[j, k] = chars_vocab.get(w[k], 0)
ts.append((xs, xs_ch, ys, length, i))
examples = baseline.data.SeqWordCharTagExamples(ts)
return baseline.data.SeqWordCharLabelDataFeed(examples, batchsz=batchsz, shuffle=shuffle, vec_alloc=self.vec_alloc, vec_shape=self.vec_shape), txts
def create_seq_pred_reader(mxlen, mxwlen, word_trans_fn, vec_alloc, vec_shape, trim, **kwargs):
reader_type = kwargs.get('reader_type', 'default')
if reader_type == 'default':
print('Reading CONLL sequence file corpus')
reader = CONLLSeqReader(mxlen, mxwlen, word_trans_fn,
vec_alloc, vec_shape, trim)
else:
mod = import_user_module("reader", reader_type)
reader = mod.create_seq_pred_reader(mxlen, mxwlen, word_trans_fn,
vec_alloc, vec_shape, trim, **kwargs)
return reader
class SeqLabelReader(object):
def __init__(self):
pass
def build_vocab(self, files, **kwargs):
pass
def load(self, filename, index, batchsz, **kwargs):
pass
class TSVSeqLabelReader(SeqLabelReader):
REPLACE = { "'s": " 's ",
"'ve": " 've ",
"n't": " n't ",
"'re": " 're ",
"'d": " 'd ",
"'ll": " 'll ",
",": " , ",
"!": " ! ",
}
def __init__(self, mxlen=1000, mxfiltsz=0, clean_fn=None, vec_alloc=np.zeros, src_vec_trans=None):
super(TSVSeqLabelReader, self).__init__()
self.vocab = None
self.label2index = {}
self.clean_fn = clean_fn
self.mxlen = mxlen
self.mxfiltsz = mxfiltsz
self.vec_alloc = vec_alloc
if self.clean_fn is None:
self.clean_fn = lambda x: x
self.src_vec_trans = src_vec_trans
@staticmethod
def splits(text):
return list(filter(lambda s: len(s) != 0, re.split('\s+', text)))
@staticmethod
def do_clean(l):
l = l.lower()
l = re.sub(r"[^A-Za-z0-9(),!?\'\`]", " ", l)
for k, v in TSVSeqLabelReader.REPLACE.items():
l = l.replace(k, v)
return l.strip()
@staticmethod
def label_and_sentence(line, clean_fn):
label_text = re.split('[\t\s]+', line)
label = label_text[0]
text = label_text[1:]
text = ' '.join(list(filter(lambda s: len(s) != 0, [clean_fn(w) for w in text])))
return label, text
def build_vocab(self, files, **kwargs):
"""Take a directory (as a string), or an array of files and build a vocabulary
Take in a directory or an array of individual files (as a list). If the argument is
a string, it may be a directory, in which case, all files in the directory will be loaded
to form a vocabulary.
:param files: Either a directory (str), or an array of individual files
:return:
"""
label_idx = len(self.label2index)
if type(files) == str:
if os.path.isdir(files):
base = files
files = filter(os.path.isfile, [os.path.join(base, x) for x in os.listdir(base)])
else:
files = [files]
vocab = Counter()
for file in files:
if file is None:
continue
with codecs.open(file, encoding='utf-8', mode='r') as f:
for line in f:
label, text = TSVSeqLabelReader.label_and_sentence(line, self.clean_fn)
if label not in self.label2index:
self.label2index[label] = label_idx
label_idx += 1
for w in TSVSeqLabelReader.splits(text):
vocab[w] += 1
return vocab, self.get_labels()
def get_labels(self):
labels = [''] * len(self.label2index)
for label, index in self.label2index.items():
labels[index] = label
return labels
def load(self, filename, index, batchsz, **kwargs):
PAD = index['<PAD>']
shuffle = kwargs.get('shuffle', False)
halffiltsz = self.mxfiltsz // 2
nozplen = self.mxlen - 2*halffiltsz
examples = []
with codecs.open(filename, encoding='utf-8', mode='r') as f:
for offset, line in enumerate(f):
label, text = TSVSeqLabelReader.label_and_sentence(line, self.clean_fn)
y = self.label2index[label]
toks = TSVSeqLabelReader.splits(text)
mx = min(len(toks), nozplen)
toks = toks[:mx]
x = self.vec_alloc(self.mxlen, dtype=int)
for j in range(len(toks)):
w = toks[j]
key = index.get(w, PAD)
x[j+halffiltsz] = key
examples.append((x, y))
return baseline.data.SeqLabelDataFeed(baseline.data.SeqLabelExamples(examples),
batchsz=batchsz, shuffle=shuffle, vec_alloc=self.vec_alloc, src_vec_trans=self.src_vec_trans)
def create_pred_reader(mxlen, zeropadding, clean_fn, vec_alloc, src_vec_trans, **kwargs):
reader_type = kwargs.get('reader_type', 'default')
if reader_type == 'default':
reader = TSVSeqLabelReader(mxlen, zeropadding, clean_fn, vec_alloc, src_vec_trans)
else:
mod = import_user_module("reader", reader_type)
reader = mod.create_pred_reader(mxlen, zeropadding, clean_fn, vec_alloc, src_vec_trans, **kwargs)
return reader
class PTBSeqReader(object):
def __init__(self, max_word_length, nbptt):
self.max_word_length = max_word_length
self.nbptt = nbptt
def build_vocab(self, files):
vocab_word = Counter()
vocab_ch = Counter()
maxw = 0
num_words_in_files = []
for file in files:
if file is None:
continue
with codecs.open(file, encoding='utf-8', mode='r') as f:
num_words = 0
for line in f:
sentence = line.split() + ['<EOS>']
num_words += len(sentence)
for w in sentence:
vocab_word[w] += 1
maxw = max(maxw, len(w))
for k in w:
vocab_ch[k] += 1
num_words_in_files.append(num_words)
self.max_word_length = min(maxw, self.max_word_length)
print('Max word length %d' % self.max_word_length)
return vocab_ch, vocab_word, num_words_in_files
def load(self, filename, words_vocab, chars_vocab, num_words, batchsz, vec_alloc=np.zeros):
xch = vec_alloc((num_words, self.max_word_length), np.int)
x = vec_alloc((num_words), np.int)
i = 0
with codecs.open(filename, encoding='utf-8', mode='r') as f:
for line in f:
sentence = line.split() + ['<EOS>']
num_words += len(sentence)
for w in sentence:
x[i] = words_vocab.get(w)
nch = min(len(w), self.max_word_length)
for k in range(nch):
xch[i, k] = chars_vocab.get(w[k], 0)
i += 1
return baseline.data.SeqWordCharDataFeed(x, xch, self.nbptt, batchsz, self.max_word_length)
def create_lm_reader(max_word_length, nbptt, **kwargs):
reader_type = kwargs.get('reader_type', 'default')
if reader_type == 'default':
reader = PTBSeqReader(max_word_length, nbptt)
else:
mod = import_user_module("reader", reader_type)
reader = mod.create_lm_reader(max_word_length, nbptt, **kwargs)
return reader
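# --- Hedged illustration (not part of the original module) -------------------
# TSVSeqLabelReader expects one "label<tab>sentence" record per line; the two
# static helpers can be exercised without any data files. The sample line
# below is an assumption for illustration only.
if __name__ == '__main__':
    sample = "positive\tI really LIKED this movie, it's great!"
    label, text = TSVSeqLabelReader.label_and_sentence(sample, TSVSeqLabelReader.do_clean)
    print(label, '->', text)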
|
[
"dpressel@gmail.com"
] |
dpressel@gmail.com
|
ddf15062b858f78fb39fed56808c8b1e276647cd
|
f4b3be2a3955c26b4e05ab162fa4909cf9a14f11
|
/CRB/validators/subsystems/enforcements/enf088.py
|
c28e582eaead1d50d8b94f0b33352ef67a14a38f
|
[] |
no_license
|
njovujsh/crbdjango
|
fd1f61403c1fbdac01b1bda5145faeb4b9ef9608
|
fdf5cc6ca5920a596c5463187d29202719664144
|
refs/heads/master
| 2022-12-04T18:13:07.709963
| 2018-05-14T09:07:47
| 2018-05-14T09:07:47
| 133,333,767
| 0
| 0
| null | 2022-11-22T01:44:28
| 2018-05-14T09:04:17
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 599
|
py
|
from validators.subsystems.enforcements import enf001
class ENF088(enf001.ENF001):
def __init__(self, mobject, field, priority, action):
super(ENF088, self).__init__(mobject, field, priority, action)
self.status = None
self.fcs = None
def validate_field(self, field, records):
try:
if(records.Applicant_Classification == "1"):
if(field):
return True
else:
return False
else:
return True
except:
raise
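# Hedged note (not part of the original file): the rule enforced here is that
# when records.Applicant_Classification == "1" the validated field must be
# non-empty (truthy); for any other classification the field always passes.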
|
[
"njovujsh@gmail.com"
] |
njovujsh@gmail.com
|
878d117d208c8bddc445d9193bd60d9962bc2d04
|
ad553dd718a8df51dabc9ba636040da740db57cf
|
/.history/app_20181202205024.py
|
2016dd936c4c5606bd2c690a9091adbc44772a0d
|
[] |
no_license
|
NergisAktug/E-Commerce-PythonWithFlask-Sqlite3
|
8e67f12c28b11a7a30d13788f8dc991f80ac7696
|
69ff4433aa7ae52ef854d5e25472dbd67fd59106
|
refs/heads/main
| 2023-01-01T14:03:40.897592
| 2020-10-19T20:36:19
| 2020-10-19T20:36:19
| 300,379,376
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,828
|
py
|
"""Flask Login Example and instagram fallowing find"""
from flask import Flask, url_for, render_template, request, redirect, session, escape
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.secret_key = 'any random string'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///kullanicilar.db'
db = SQLAlchemy(app)
class User(db.Model):
""" Create user table"""
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
password = db.Column(db.String(80))
def __init__(self, username, password):
self.username = username
self.password = password
@app.route('/')
def index():
if not session.get('giris_yap'):
return render_template('index.html')
else:
if request.method == 'POST':
return render_template('index.html')
return render_template('index.html')
@app.route('/üye')
def uye():
return render_template('/üye.html')
@app.route('/giris', methods=['GET', 'POST'])
def giris():
if request.method == 'GET':
return render_template('kayit.html')
else:
name = request.form['username']
passw = request.form['password']
data = User.query.filter_by(username=name, password=passw).first()
if data is not None:
session['giris_yap'] = True
return redirect(url_for('index'))
else:
return render_template('index.html')
@app.route('/kayit', methods=['GET', 'POST'])
def kayit():
"""Register Form"""
if request.method == 'POST':
new_user = User(username=request.form.get('username'), password=request.form.get('password'))
db.session.add(new_user)
db.session.commit()
return render_template('üye.html')
return render_template('kayit.html')
@app.route("/cıkıs")
def cıkıs():
    session['giris_yap'] = False
return redirect(url_for('index'))
if __name__ == '__main__':
db.create_all()
app.run(debug=True)
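# Hedged usage note (not part of the original file): the routes above render
# templates/index.html, kayit.html and üye.html, and db.create_all() creates
# the kullanicilar.db SQLite file with the User table before the app serves.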
|
[
"nergis.aktug2014@gmail.com"
] |
nergis.aktug2014@gmail.com
|
c266c889f792a3b3629b97cb48f01a1e98e7ab09
|
4ca0cb74402be70c63ad8e1c67b529cd7770ba38
|
/19_model-view_controller/mvc.py
|
f0b57f822676e41408bad59aeb0327aba2d02a44
|
[] |
no_license
|
alxfed/python-design-patterns
|
06af6f8e47925bcafe39a117943dd8287a6fe567
|
b1a1ffb02b6e81e44bc7f0491376f9121b325a09
|
refs/heads/master
| 2020-04-02T04:34:18.060976
| 2019-12-18T16:08:00
| 2019-12-18T16:08:00
| 154,022,815
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 680
|
py
|
"""
mvc.py
"""
import sys
class GenericController(object):
def __init__(self):
self.model = GenericModel()
self.view = GenericView()
def handle(self, request):
data = self.model.get_data(request)
self.view.generate_response(data)
class GenericModel(object):
def __init__(self):
pass
def get_data(self, request):
return {'request': request}
class GenericView(object):
def __init__(self):
pass
def generate_response(self, data):
print(data)
def main(name):
request_handler = GenericController()
request_handler.handle(name)
if __name__ == "__main__":
main(sys.argv[1])
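# Hedged usage note (not part of the original file): running
#   python mvc.py hello
# routes "hello" through GenericController -> GenericModel -> GenericView and
# prints {'request': 'hello'}.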
|
[
"alxfed@gmail.com"
] |
alxfed@gmail.com
|
66379b12d4d5befc395446e0bd7e8fd9610fbfe9
|
7626a8371c7a847f93bdae5e1d6e03ee9667c3ba
|
/func/print_area_kz/venv/bin/sqlformat
|
87ba197dbb67cb1b0c1fd9666d91f0b0353cc1f2
|
[] |
no_license
|
zzyzx4/sp
|
52c815fd115b4605942baa73687838f64cd41864
|
90c7a90b3de27af674422e2c8892bad5ba7891e8
|
refs/heads/master
| 2020-05-23T21:20:28.166932
| 2019-07-19T11:56:49
| 2019-07-19T11:56:49
| 186,950,380
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 260
|
#!/home/user/PycharmProjects/print_area_kz/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from sqlparse.__main__ import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"dastik0101@gmail.com"
] |
dastik0101@gmail.com
|
|
7da11b2b6560f70a07cfc5ee79bacf3b82b37c85
|
926f23a55dbe360a67bcda9714e1d28a300501bc
|
/stylelens_search_vector/api_client.py
|
e401df529385b97e7649f581d5080a6fcf9ecbad
|
[] |
no_license
|
bluehackmaster/stylelens-search-vector
|
08dd24b38e410b40710a7cbeb2dd4a303c981669
|
a45a089039dcfa0fbfbe77ac3c12b39088147303
|
refs/heads/master
| 2021-05-08T04:27:06.091937
| 2017-10-26T13:08:31
| 2017-10-26T13:08:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 24,215
|
py
|
# coding: utf-8
"""
stylelens-search-vector
    This is an API document for Vector search on bl-search-faiss
OpenAPI spec version: 0.0.1
Contact: bluehackmaster@bluehack.net
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import re
import json
import mimetypes
import tempfile
import threading
from datetime import date, datetime
# python 2 and python 3 compatibility library
from six import PY3, integer_types, iteritems, text_type
from six.moves.urllib.parse import quote
from . import models
from .configuration import Configuration
from .rest import ApiException, RESTClientObject
class ApiClient(object):
"""
Generic API client for Swagger client library builds.
Swagger generic API client. This client handles the client-
server communication, and is invariant across implementations. Specifics of
the methods and models for each application are generated from the Swagger
templates.
NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
Do not edit the class manually.
:param host: The base path for the server to call.
:param header_name: a header to pass when making calls to the API.
:param header_value: a header value to pass when making calls to the API.
"""
PRIMITIVE_TYPES = (float, bool, bytes, text_type) + integer_types
NATIVE_TYPES_MAPPING = {
'int': int,
'long': int if PY3 else long,
'float': float,
'str': str,
'bool': bool,
'date': date,
'datetime': datetime,
'object': object,
}
def __init__(self, host=None, header_name=None, header_value=None, cookie=None):
"""
Constructor of the class.
"""
self.rest_client = RESTClientObject()
self.default_headers = {}
if header_name is not None:
self.default_headers[header_name] = header_value
if host is None:
self.host = Configuration().host
else:
self.host = host
self.cookie = cookie
# Set default User-Agent.
self.user_agent = 'Swagger-Codegen/1.0.0/python'
@property
def user_agent(self):
"""
Gets user agent.
"""
return self.default_headers['User-Agent']
@user_agent.setter
def user_agent(self, value):
"""
Sets user agent.
"""
self.default_headers['User-Agent'] = value
def set_default_header(self, header_name, header_value):
self.default_headers[header_name] = header_value
def __call_api(self, resource_path, method,
path_params=None, query_params=None, header_params=None,
body=None, post_params=None, files=None,
response_type=None, auth_settings=None, callback=None,
_return_http_data_only=None, collection_formats=None, _preload_content=True,
_request_timeout=None):
config = Configuration()
# header parameters
header_params = header_params or {}
header_params.update(self.default_headers)
if self.cookie:
header_params['Cookie'] = self.cookie
if header_params:
header_params = self.sanitize_for_serialization(header_params)
header_params = dict(self.parameters_to_tuples(header_params,
collection_formats))
# path parameters
if path_params:
path_params = self.sanitize_for_serialization(path_params)
path_params = self.parameters_to_tuples(path_params,
collection_formats)
for k, v in path_params:
# specified safe chars, encode everything
resource_path = resource_path.replace(
'{%s}' % k, quote(str(v), safe=config.safe_chars_for_path_param))
# query parameters
if query_params:
query_params = self.sanitize_for_serialization(query_params)
query_params = self.parameters_to_tuples(query_params,
collection_formats)
# post parameters
if post_params or files:
post_params = self.prepare_post_parameters(post_params, files)
post_params = self.sanitize_for_serialization(post_params)
post_params = self.parameters_to_tuples(post_params,
collection_formats)
# auth setting
self.update_params_for_auth(header_params, query_params, auth_settings)
# body
if body:
body = self.sanitize_for_serialization(body)
# request url
url = self.host + resource_path
# perform request and return response
response_data = self.request(method, url,
query_params=query_params,
headers=header_params,
post_params=post_params, body=body,
_preload_content=_preload_content,
_request_timeout=_request_timeout)
self.last_response = response_data
return_data = response_data
if _preload_content:
# deserialize response data
if response_type:
return_data = self.deserialize(response_data, response_type)
else:
return_data = None
if callback:
if _return_http_data_only:
callback(return_data)
else:
callback((return_data, response_data.status, response_data.getheaders()))
elif _return_http_data_only:
return (return_data)
else:
return (return_data, response_data.status, response_data.getheaders())
def sanitize_for_serialization(self, obj):
"""
Builds a JSON POST object.
If obj is None, return None.
If obj is str, int, long, float, bool, return directly.
If obj is datetime.datetime, datetime.date
convert to string in iso8601 format.
If obj is list, sanitize each element in the list.
If obj is dict, return the dict.
If obj is swagger model, return the properties dict.
:param obj: The data to serialize.
:return: The serialized form of data.
"""
if obj is None:
return None
elif isinstance(obj, self.PRIMITIVE_TYPES):
return obj
elif isinstance(obj, list):
return [self.sanitize_for_serialization(sub_obj)
for sub_obj in obj]
elif isinstance(obj, tuple):
return tuple(self.sanitize_for_serialization(sub_obj)
for sub_obj in obj)
elif isinstance(obj, (datetime, date)):
return obj.isoformat()
if isinstance(obj, dict):
obj_dict = obj
else:
# Convert model obj to dict except
# attributes `swagger_types`, `attribute_map`
# and attributes which value is not None.
# Convert attribute name to json key in
# model definition for request.
obj_dict = {obj.attribute_map[attr]: getattr(obj, attr)
for attr, _ in iteritems(obj.swagger_types)
if getattr(obj, attr) is not None}
return {key: self.sanitize_for_serialization(val)
for key, val in iteritems(obj_dict)}
def deserialize(self, response, response_type):
"""
Deserializes response into an object.
:param response: RESTResponse object to be deserialized.
:param response_type: class literal for
deserialized object, or string of class name.
:return: deserialized object.
"""
# handle file downloading
# save response body into a tmp file and return the instance
if response_type == "file":
return self.__deserialize_file(response)
# fetch data from response object
try:
data = json.loads(response.data)
except ValueError:
data = response.data
return self.__deserialize(data, response_type)
def __deserialize(self, data, klass):
"""
Deserializes dict, list, str into an object.
:param data: dict, list or str.
:param klass: class literal, or string of class name.
:return: object.
"""
if data is None:
return None
if type(klass) == str:
if klass.startswith('list['):
sub_kls = re.match('list\[(.*)\]', klass).group(1)
return [self.__deserialize(sub_data, sub_kls)
for sub_data in data]
if klass.startswith('dict('):
sub_kls = re.match('dict\(([^,]*), (.*)\)', klass).group(2)
return {k: self.__deserialize(v, sub_kls)
for k, v in iteritems(data)}
# convert str to class
if klass in self.NATIVE_TYPES_MAPPING:
klass = self.NATIVE_TYPES_MAPPING[klass]
else:
klass = getattr(models, klass)
if klass in self.PRIMITIVE_TYPES:
return self.__deserialize_primitive(data, klass)
elif klass == object:
return self.__deserialize_object(data)
elif klass == date:
return self.__deserialize_date(data)
elif klass == datetime:
return self.__deserialize_datatime(data)
else:
return self.__deserialize_model(data, klass)
def call_api(self, resource_path, method,
path_params=None, query_params=None, header_params=None,
body=None, post_params=None, files=None,
response_type=None, auth_settings=None, callback=None,
_return_http_data_only=None, collection_formats=None, _preload_content=True,
_request_timeout=None):
"""
Makes the HTTP request (synchronous) and return the deserialized data.
To make an async request, define a function for callback.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be
placed in the request header.
:param body: Request body.
:param post_params dict: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param auth_settings list: Auth Settings names for the request.
        :param response_type: Response data type.
:param files dict: key -> filename, value -> filepath,
for `multipart/form-data`.
:param callback function: Callback function for asynchronous request.
If provide this parameter,
the request will be called asynchronously.
:param _return_http_data_only: response data without head status code and headers
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:param _preload_content: if False, the urllib3.HTTPResponse object will be returned without
reading/decoding response data. Default is True.
:param _request_timeout: timeout setting for this request. If one number provided, it will be total request
timeout. It can also be a pair (tuple) of (connection, read) timeouts.
:return:
If provide parameter callback,
the request will be called asynchronously.
The method will return the request thread.
If parameter callback is None,
then the method will return the response directly.
"""
if callback is None:
return self.__call_api(resource_path, method,
path_params, query_params, header_params,
body, post_params, files,
response_type, auth_settings, callback,
_return_http_data_only, collection_formats, _preload_content, _request_timeout)
else:
thread = threading.Thread(target=self.__call_api,
args=(resource_path, method,
path_params, query_params,
header_params, body,
post_params, files,
response_type, auth_settings,
callback, _return_http_data_only,
collection_formats, _preload_content, _request_timeout))
thread.start()
return thread
def request(self, method, url, query_params=None, headers=None,
post_params=None, body=None, _preload_content=True, _request_timeout=None):
"""
Makes the HTTP request using RESTClient.
"""
if method == "GET":
return self.rest_client.GET(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "HEAD":
return self.rest_client.HEAD(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "OPTIONS":
return self.rest_client.OPTIONS(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "POST":
return self.rest_client.POST(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PUT":
return self.rest_client.PUT(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PATCH":
return self.rest_client.PATCH(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "DELETE":
return self.rest_client.DELETE(url,
query_params=query_params,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
else:
raise ValueError(
"http method must be `GET`, `HEAD`, `OPTIONS`,"
" `POST`, `PATCH`, `PUT` or `DELETE`."
)
def parameters_to_tuples(self, params, collection_formats):
"""
Get parameters as list of tuples, formatting collections.
:param params: Parameters as dict or list of two-tuples
:param dict collection_formats: Parameter collection formats
:return: Parameters as list of tuples, collections formatted
"""
new_params = []
if collection_formats is None:
collection_formats = {}
for k, v in iteritems(params) if isinstance(params, dict) else params:
if k in collection_formats:
collection_format = collection_formats[k]
if collection_format == 'multi':
new_params.extend((k, value) for value in v)
else:
if collection_format == 'ssv':
delimiter = ' '
elif collection_format == 'tsv':
delimiter = '\t'
elif collection_format == 'pipes':
delimiter = '|'
else: # csv is the default
delimiter = ','
new_params.append(
(k, delimiter.join(str(value) for value in v)))
else:
new_params.append((k, v))
return new_params
def prepare_post_parameters(self, post_params=None, files=None):
"""
Builds form parameters.
:param post_params: Normal form parameters.
:param files: File parameters.
:return: Form parameters with files.
"""
params = []
if post_params:
params = post_params
if files:
for k, v in iteritems(files):
if not v:
continue
file_names = v if type(v) is list else [v]
for n in file_names:
with open(n, 'rb') as f:
filename = os.path.basename(f.name)
filedata = f.read()
mimetype = mimetypes.\
guess_type(filename)[0] or 'application/octet-stream'
params.append(tuple([k, tuple([filename, filedata, mimetype])]))
return params
def select_header_accept(self, accepts):
"""
Returns `Accept` based on an array of accepts provided.
:param accepts: List of headers.
:return: Accept (e.g. application/json).
"""
if not accepts:
return
accepts = [x.lower() for x in accepts]
if 'application/json' in accepts:
return 'application/json'
else:
return ', '.join(accepts)
def select_header_content_type(self, content_types):
"""
Returns `Content-Type` based on an array of content_types provided.
:param content_types: List of content-types.
:return: Content-Type (e.g. application/json).
"""
if not content_types:
return 'application/json'
content_types = [x.lower() for x in content_types]
if 'application/json' in content_types or '*/*' in content_types:
return 'application/json'
else:
return content_types[0]
def update_params_for_auth(self, headers, querys, auth_settings):
"""
Updates header and query params based on authentication setting.
:param headers: Header parameters dict to be updated.
:param querys: Query parameters tuple list to be updated.
:param auth_settings: Authentication setting identifiers list.
"""
config = Configuration()
if not auth_settings:
return
for auth in auth_settings:
auth_setting = config.auth_settings().get(auth)
if auth_setting:
if not auth_setting['value']:
continue
elif auth_setting['in'] == 'header':
headers[auth_setting['key']] = auth_setting['value']
elif auth_setting['in'] == 'query':
querys.append((auth_setting['key'], auth_setting['value']))
else:
raise ValueError(
'Authentication token must be in `query` or `header`'
)
def __deserialize_file(self, response):
"""
Saves response body into a file in a temporary folder,
using the filename from the `Content-Disposition` header if provided.
:param response: RESTResponse.
:return: file path.
"""
config = Configuration()
fd, path = tempfile.mkstemp(dir=config.temp_folder_path)
os.close(fd)
os.remove(path)
content_disposition = response.getheader("Content-Disposition")
if content_disposition:
filename = re.\
search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', content_disposition).\
group(1)
path = os.path.join(os.path.dirname(path), filename)
with open(path, "w") as f:
f.write(response.data)
return path
def __deserialize_primitive(self, data, klass):
"""
Deserializes string to primitive type.
:param data: str.
:param klass: class literal.
:return: int, long, float, str, bool.
"""
try:
return klass(data)
except UnicodeEncodeError:
return unicode(data)
except TypeError:
return data
def __deserialize_object(self, value):
"""
Return a original value.
:return: object.
"""
return value
def __deserialize_date(self, string):
"""
Deserializes string to date.
:param string: str.
:return: date.
"""
try:
from dateutil.parser import parse
return parse(string).date()
except ImportError:
return string
except ValueError:
raise ApiException(
status=0,
reason="Failed to parse `{0}` into a date object".format(string)
)
def __deserialize_datatime(self, string):
"""
Deserializes string to datetime.
The string should be in iso8601 datetime format.
:param string: str.
:return: datetime.
"""
try:
from dateutil.parser import parse
return parse(string)
except ImportError:
return string
except ValueError:
raise ApiException(
status=0,
reason=(
"Failed to parse `{0}` into a datetime object"
.format(string)
)
)
def __deserialize_model(self, data, klass):
"""
Deserializes list or dict to model.
:param data: dict, list.
:param klass: class literal.
:return: model object.
"""
if not klass.swagger_types:
return data
kwargs = {}
for attr, attr_type in iteritems(klass.swagger_types):
if data is not None \
and klass.attribute_map[attr] in data \
and isinstance(data, (list, dict)):
value = data[klass.attribute_map[attr]]
kwargs[attr] = self.__deserialize(value, attr_type)
instance = klass(**kwargs)
return instance
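# --- Hedged usage sketch (not part of the generated client) ------------------
# Demonstrates two helpers documented above: sanitize_for_serialization turns
# datetimes into ISO-8601 strings, and parameters_to_tuples applies collection
# formats ('csv' shown here). The host URL is a placeholder assumption.
if __name__ == "__main__":
    client = ApiClient(host="http://localhost")
    print(client.sanitize_for_serialization({"when": datetime(2017, 10, 26)}))
    # -> {'when': '2017-10-26T00:00:00'}
    print(client.parameters_to_tuples({"ids": [1, 2, 3]}, {"ids": "csv"}))
    # -> [('ids', '1,2,3')]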
|
[
"master@bluehack.net"
] |
master@bluehack.net
|
1c59053d7c6f0cc642b0dbe1ecc9f46b90c2c6f1
|
34745a8d54fa7e3d9e4237415eb52e507508ad79
|
/Python_Advanced/05_Functions Advanced/Exercise/04_negative_vs_positive.py
|
db0c4a49d3f9878fa05166a33c9f802e169e1017
|
[] |
no_license
|
DilyanTsenkov/SoftUni-Software-Engineering
|
50476af0dc88b267d72c56fa87eeb88d841164b2
|
fe446e3a50a00bb2e48d71ab8f783e0a4a406094
|
refs/heads/main
| 2023-08-12T18:18:42.144210
| 2021-09-25T11:10:38
| 2021-09-25T11:10:38
| 317,235,419
| 1
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,072
|
py
|
def absolute(negative_sum):
return abs(negative_sum)
def compare_negative_positive_sum(negative_sum, positive_sum):
if positive_sum >= negative_sum:
return True
else:
return False
def negative_separator(numbers):
if numbers < 0:
return True
def positive_separator(number):
if number >= 0:
return True
def printer(true_or_false, positive_sum, negative_sum):
print(negative_sum)
print(positive_sum)
    if true_or_false:
print(f"The positives are stronger than the negatives")
else:
print(f"The negatives are stronger than the positives")
def sum_calc(nums):
return sum(nums)
numbers = [int(el) for el in input().split()]
negative = list(filter(negative_separator, numbers))
positive = list(filter(positive_separator, numbers))
negative_sum = sum_calc(negative)
positive_sum = sum_calc(positive)
negative_abs_sum = absolute(negative_sum)
printer(compare_negative_positive_sum(negative_abs_sum, positive_sum), positive_sum, negative_sum)
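# Hedged worked example (not part of the original solution), assuming the usual
# single line of space-separated integers on stdin:
#   input:  3 -2 5 -6
#   output: -8
#           8
#           The positives are stronger than the negatives
# (negatives sum to -8, positives to 8; abs(-8) <= 8, so the positives win)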
|
[
"noreply@github.com"
] |
DilyanTsenkov.noreply@github.com
|
a370c978a47bc4b67c07d327141825fd9ce68d99
|
b441503bcdb484d098885b19a989932b8d053a71
|
/neural_sp/evaluators/wordpiece.py
|
aae95de11e128601df6e62d74b585a82e86bef85
|
[
"Apache-2.0"
] |
permissive
|
entn-at/neural_sp
|
a266594b357b175b0fea18253433e32adc62810c
|
9dbbb4ab3985b825f8e9120a603a6caa141c8bdd
|
refs/heads/master
| 2020-08-28T05:48:28.928667
| 2020-06-22T19:17:53
| 2020-06-22T19:17:53
| 217,611,439
| 0
| 0
| null | 2019-10-25T20:40:18
| 2019-10-25T20:40:18
| null |
UTF-8
|
Python
| false
| false
| 7,250
|
py
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2018 Kyoto University (Hirofumi Inaguma)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
"""Evaluate the wordpiece-level model by WER."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
from tqdm import tqdm
from neural_sp.evaluators.edit_distance import compute_wer
from neural_sp.utils import mkdir_join
logger = logging.getLogger(__name__)
def eval_wordpiece(models, dataset, recog_params, epoch,
recog_dir=None, streaming=False, progressbar=False,
fine_grained=False):
"""Evaluate the wordpiece-level model by WER.
Args:
models (list): models to evaluate
dataset (Dataset): evaluation dataset
recog_params (dict):
epoch (int):
recog_dir (str):
streaming (bool): streaming decoding for the session-level evaluation
progressbar (bool): visualize the progressbar
fine_grained (bool): calculate fine-grained WER distributions based on input lengths
Returns:
wer (float): Word error rate
cer (float): Character error rate
"""
# Reset data counter
dataset.reset(recog_params['recog_batch_size'])
if recog_dir is None:
recog_dir = 'decode_' + dataset.set + '_ep' + str(epoch) + '_beam' + str(recog_params['recog_beam_width'])
recog_dir += '_lp' + str(recog_params['recog_length_penalty'])
recog_dir += '_cp' + str(recog_params['recog_coverage_penalty'])
recog_dir += '_' + str(recog_params['recog_min_len_ratio']) + '_' + str(recog_params['recog_max_len_ratio'])
recog_dir += '_lm' + str(recog_params['recog_lm_weight'])
ref_trn_save_path = mkdir_join(models[0].save_path, recog_dir, 'ref.trn')
hyp_trn_save_path = mkdir_join(models[0].save_path, recog_dir, 'hyp.trn')
else:
ref_trn_save_path = mkdir_join(recog_dir, 'ref.trn')
hyp_trn_save_path = mkdir_join(recog_dir, 'hyp.trn')
wer, cer = 0, 0
n_sub_w, n_ins_w, n_del_w = 0, 0, 0
n_sub_c, n_ins_c, n_del_c = 0, 0, 0
n_word, n_char = 0, 0
n_streamable, quantity_rate, n_utt = 0, 0, 0
last_success_frame_ratio = 0
if progressbar:
pbar = tqdm(total=len(dataset))
# calculate WER distribution based on input lengths
wer_dist = {}
with open(hyp_trn_save_path, 'w') as f_hyp, open(ref_trn_save_path, 'w') as f_ref:
while True:
batch, is_new_epoch = dataset.next(recog_params['recog_batch_size'])
if streaming or recog_params['recog_chunk_sync']:
best_hyps_id, _ = models[0].decode_streaming(
batch['xs'], recog_params, dataset.idx2token[0],
exclude_eos=True)
else:
best_hyps_id, _ = models[0].decode(
batch['xs'], recog_params,
idx2token=dataset.idx2token[0] if progressbar else None,
exclude_eos=True,
refs_id=batch['ys'],
utt_ids=batch['utt_ids'],
speakers=batch['sessions' if dataset.corpus == 'swbd' else 'speakers'],
ensemble_models=models[1:] if len(models) > 1 else [])
for b in range(len(batch['xs'])):
ref = batch['text'][b]
if ref[0] == '<':
ref = ref.split('>')[1]
hyp = dataset.idx2token[0](best_hyps_id[b])
# Write to trn
speaker = str(batch['speakers'][b]).replace('-', '_')
if streaming:
utt_id = str(batch['utt_ids'][b]) + '_0000000_0000001'
else:
utt_id = str(batch['utt_ids'][b])
f_ref.write(ref + ' (' + speaker + '-' + utt_id + ')\n')
f_hyp.write(hyp + ' (' + speaker + '-' + utt_id + ')\n')
logger.debug('utt-id: %s' % utt_id)
logger.debug('Ref: %s' % ref)
logger.debug('Hyp: %s' % hyp)
logger.debug('-' * 150)
if not streaming:
# Compute WER
wer_b, sub_b, ins_b, del_b = compute_wer(ref=ref.split(' '),
hyp=hyp.split(' '),
normalize=False)
wer += wer_b
n_sub_w += sub_b
n_ins_w += ins_b
n_del_w += del_b
n_word += len(ref.split(' '))
if fine_grained:
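                        # Group utterances into 200-frame input-length bins for the fine-grained WER breakdown.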
xlen_bin = (batch['xlens'][b] // 200 + 1) * 200
if xlen_bin in wer_dist.keys():
wer_dist[xlen_bin] += [wer_b / 100]
else:
wer_dist[xlen_bin] = [wer_b / 100]
# Compute CER
if dataset.corpus == 'csj':
ref = ref.replace(' ', '')
hyp = hyp.replace(' ', '')
cer_b, sub_b, ins_b, del_b = compute_wer(ref=list(ref),
hyp=list(hyp),
normalize=False)
cer += cer_b
n_sub_c += sub_b
n_ins_c += ins_b
n_del_c += del_b
n_char += len(ref)
if models[0].streamable():
n_streamable += 1
else:
last_success_frame_ratio += models[0].last_success_frame_ratio()
quantity_rate += models[0].quantity_rate()
n_utt += 1
if progressbar:
pbar.update(1)
if is_new_epoch:
break
if progressbar:
pbar.close()
# Reset data counters
dataset.reset()
if not streaming:
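        # Convert the accumulated edit counts into rates by dividing by the
        # total number of reference words (for WER) and characters (for CER).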
wer /= n_word
n_sub_w /= n_word
n_ins_w /= n_word
n_del_w /= n_word
cer /= n_char
n_sub_c /= n_char
n_ins_c /= n_char
n_del_c /= n_char
if n_utt - n_streamable > 0:
last_success_frame_ratio /= (n_utt - n_streamable)
n_streamable /= n_utt
quantity_rate /= n_utt
if fine_grained:
for len_bin, wers in sorted(wer_dist.items(), key=lambda x: x[0]):
logger.info(' WER (%s): %.2f %% (%d)' % (dataset.set, sum(wers) / len(wers), len_bin))
logger.debug('WER (%s): %.2f %%' % (dataset.set, wer))
logger.debug('SUB: %.2f / INS: %.2f / DEL: %.2f' % (n_sub_w, n_ins_w, n_del_w))
logger.debug('CER (%s): %.2f %%' % (dataset.set, cer))
logger.debug('SUB: %.2f / INS: %.2f / DEL: %.2f' % (n_sub_c, n_ins_c, n_del_c))
        logger.info('Streamability (%s): %.2f %%' % (dataset.set, n_streamable * 100))
logger.info('Quantity rate (%s): %.2f %%' % (dataset.set, quantity_rate * 100))
logger.info('Last success frame ratio (%s): %.2f %%' % (dataset.set, last_success_frame_ratio))
return wer, cer
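

# Usage sketch (added remark, not part of the original file). The decoding
# hyper-parameters read above are collected in a plain dict; how `models` and
# `dataset` are built depends on the surrounding training code, so only the
# parameter dict and the call are shown here as an assumed example:
#
#   recog_params = {
#       'recog_batch_size': 1,
#       'recog_beam_width': 10,
#       'recog_length_penalty': 0.0,
#       'recog_coverage_penalty': 0.0,
#       'recog_min_len_ratio': 0.0,
#       'recog_max_len_ratio': 1.0,
#       'recog_lm_weight': 0.0,
#       'recog_chunk_sync': False,
#   }
#   wer, cer = eval_wordpiece([model], dev_set, recog_params, epoch=25,
#                             progressbar=True)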
|
[
"hiro.mhbc@gmail.com"
] |
hiro.mhbc@gmail.com
|
f98a1584a105d194c9e6e6a5e93adcc623f4cfab
|
cb61ba31b27b232ebc8c802d7ca40c72bcdfe152
|
/leetcode/931. Minimum Falling Path Sum/soln.py
|
1e4b6c29b5e019be51d4b3abc9a345a86c121f90
|
[
"Apache-2.0"
] |
permissive
|
saisankargochhayat/algo_quest
|
c7c48187c76b5cd7c2ec3f0557432606e9096241
|
a24f9a22c019ab31d56bd5a7ca5ba790d54ce5dc
|
refs/heads/master
| 2021-07-04T15:21:33.606174
| 2021-02-07T23:42:43
| 2021-02-07T23:42:43
| 67,831,927
| 5
| 1
|
Apache-2.0
| 2019-10-28T03:51:03
| 2016-09-09T20:51:29
|
Python
|
UTF-8
|
Python
| false
| false
| 1,022
|
py
|
# We create a DP table and, starting from the last row, build up towards the upper rows of the dp 2d array.
# dp[i][j] represents the minimum falling path sum to reach that element under the given constraints.
from typing import List


class Solution:
    def minFallingPathSum(self, A: List[List[int]]) -> int:
dp = [[0 for j in range(len(A[0]))] for i in range(len(A))]
# we fill the last row with original values as the min path from last row to last row is its own value
for i in range(len(A)-1, -1, -1):
for j in range(len(A[0])-1, -1, -1):
# Fill last row
if i == len(A)-1:
dp[i][j] = A[i][j]
# left corner
elif j == 0:
dp[i][j] = A[i][j] + min(dp[i+1][j], dp[i+1][j+1])
elif j == len(A[0])-1:
dp[i][j] = A[i][j] + min(dp[i+1][j-1], dp[i+1][j])
else:
dp[i][j] = A[i][j] + min(dp[i+1][j-1], dp[i+1][j], dp[i+1][j+1])
return min(dp[0])
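

# Quick sanity check (an added usage example, not part of the original submission).
# The grid and expected answer come from the LeetCode 931 problem statement:
# the cheapest falling path in [[2,1,3],[6,5,4],[7,8,9]] is 1 -> 4 -> 8 = 13.
if __name__ == '__main__':
    assert Solution().minFallingPathSum([[2, 1, 3], [6, 5, 4], [7, 8, 9]]) == 13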
|
[
"saisankargochhayat@gmail.com"
] |
saisankargochhayat@gmail.com
|
b96f2c76b38323327b3fd2cd6fe341d4e3148b74
|
ec4ce2cc5e08e032f2bdb7d8e6ba616e80e6f5f7
|
/chapter11_test_code/test_cities.py
|
f669296324136f72db551ae3d88cecb53a02dda6
|
[] |
no_license
|
AiZhanghan/python-crash-course-a-hands-on-project-based-introduction-to-programming
|
8fc54ef69636c88985df00b546bc49c4a2378e79
|
9d8c9fde7d6ab9fe664fa718e1516d7442eafd00
|
refs/heads/master
| 2020-09-28T18:28:56.558413
| 2019-12-12T11:05:43
| 2019-12-12T11:05:43
| 226,835,456
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 772
|
py
|
import unittest
from city_functions import get_formated_city_name


class CityTestCase(unittest.TestCase):
    '''Tests for city_functions.py.'''

    def test_city_country(self):
        '''Does it correctly handle a city/country pair like Santiago, Chile?'''
        formatted_city_name = get_formated_city_name('santiago', 'chile')
        self.assertEqual(formatted_city_name, 'Santiago, Chile')

    def test_city_country_population(self):
        '''
        Does it correctly handle a value like Santiago, Chile - population 5000000?
        '''
        formatted_city_name = get_formated_city_name('santiago',
                                                     'chile', 5000000)
        self.assertEqual(formatted_city_name,
                         'Santiago, Chile - population 5000000')


unittest.main()
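

# For reference, a minimal sketch of the city_functions module these tests import
# (the actual exercise module is not included in this file, so the implementation
# below is an assumption inferred from what the assertions expect):
#
#   def get_formated_city_name(city, country, population=None):
#       '''Return a string like "Santiago, Chile - population 5000000".'''
#       name = f"{city.title()}, {country.title()}"
#       if population is not None:
#           name += f" - population {population}"
#       return name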
|
[
"35103759+AiZhanghan@users.noreply.github.com"
] |
35103759+AiZhanghan@users.noreply.github.com
|
dcc2d399258f579438cf9daa73f67a2279579a6f
|
731a33f8bb92bad31ab233416d8ef6eb3a9f3fe0
|
/minlplib_instances/smallinvSNPr1b050-055.py
|
b776f5ed09f35dcb7ccc13d0d18939f16d47a7d3
|
[] |
no_license
|
ChristophNeumann/IPCP
|
d34c7ec3730a5d0dcf3ec14f023d4b90536c1e31
|
6e3d14cc9ed43f3c4f6c070ebbce21da5a059cb7
|
refs/heads/main
| 2023-02-22T09:54:39.412086
| 2021-01-27T17:30:50
| 2021-01-27T17:30:50
| 319,694,028
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 167,364
|
py
|
# MINLP written by GAMS Convert at 02/15/18 11:44:28
#
# Equation counts
# Total E G L N X C B
# 4 0 2 2 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 101 1 0 100 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 401 301 100 0
from pyomo.environ import *
model = m = ConcreteModel()
m.i1 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i2 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i3 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i4 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i5 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i6 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i7 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i8 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i9 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i10 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i11 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i12 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i13 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i14 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i15 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i16 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i17 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i18 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i19 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i20 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i21 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i22 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i23 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i24 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i25 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i26 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i27 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i28 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i29 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i30 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i31 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i32 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i33 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i34 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i35 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i36 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i37 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i38 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i39 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i40 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i41 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i42 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i43 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i44 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i45 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i46 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i47 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i48 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i49 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i50 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i51 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i52 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i53 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i54 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i55 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i56 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i57 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i58 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i59 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i60 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i61 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i62 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i63 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i64 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i65 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i66 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i67 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i68 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i69 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i70 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i71 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i72 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i73 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i74 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i75 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i76 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i77 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i78 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i79 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i80 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i81 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i82 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i83 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i84 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i85 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i86 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i87 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i88 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i89 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i90 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i91 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i92 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i93 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i94 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i95 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i96 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i97 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i98 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i99 = Var(within=Integers,bounds=(0,1E20),initialize=0)
m.i100 = Var(within=Integers,bounds=(0,1E20),initialize=0)
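# Added remark (not part of the GAMS Convert output): the 100 integer variables
# above all share the same domain, bounds and initial value, so an equivalent,
# more compact declaration would use an indexed Var, e.g.
#   m.I = RangeSet(1, 100)
#   m.i = Var(m.I, within=Integers, bounds=(0, 1E20), initialize=0)
# The expanded form is kept here because the constraints below refer to each
# variable by its individual name (m.i1 ... m.i100).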
m.x101 = Var(within=Reals,bounds=(None,None),initialize=0)
m.obj = Objective(expr=m.x101, sense=minimize)
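# Added remark: solving this model requires an MINLP-capable solver registered
# with Pyomo (the solver name below is an assumption; any installed MINLP
# solver such as BONMIN, Couenne or SCIP would do):
#   from pyomo.environ import SolverFactory
#   SolverFactory('bonmin').solve(m, tee=True)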
m.c1 = Constraint(expr=0.00841507*m.i1**2 + 0.0222536*m.i2**2 + 0.0056479*m.i3**2 + 0.00333322*m.i4**2 + 0.00490963*m.i5
**2 + 0.0221034*m.i6**2 + 0.00509899*m.i7**2 + 0.049464*m.i8**2 + 0.0171508*m.i9**2 + 0.0064643*
m.i10**2 + 0.0218437*m.i11**2 + 0.00346366*m.i12**2 + 0.0458502*m.i13**2 + 0.0747061*m.i14**2 +
0.0196511*m.i15**2 + 0.014222*m.i16**2 + 0.0147535*m.i17**2 + 0.00398615*m.i18**2 + 0.00644484*
m.i19**2 + 0.0322232*m.i20**2 + 0.00887889*m.i21**2 + 0.0434025*m.i22**2 + 0.00981376*m.i23**2 +
0.0133193*m.i24**2 + 0.00471036*m.i25**2 + 0.00359843*m.i26**2 + 0.0112312*m.i27**2 + 0.00476479*
m.i28**2 + 0.00356255*m.i29**2 + 0.0730121*m.i30**2 + 0.00785721*m.i31**2 + 0.0243787*m.i32**2 +
0.0171188*m.i33**2 + 0.00439547*m.i34**2 + 0.00502594*m.i35**2 + 0.0580619*m.i36**2 + 0.0135984*
m.i37**2 + 0.00254137*m.i38**2 + 0.0153341*m.i39**2 + 0.109758*m.i40**2 + 0.0346065*m.i41**2 +
0.0127589*m.i42**2 + 0.011147*m.i43**2 + 0.0156318*m.i44**2 + 0.00556588*m.i45**2 + 0.00302864*
m.i46**2 + 0.0214898*m.i47**2 + 0.00499587*m.i48**2 + 0.00864393*m.i49**2 + 0.0228248*m.i50**2 +
0.0077726*m.i51**2 + 0.00992767*m.i52**2 + 0.0184506*m.i53**2 + 0.0113481*m.i54**2 + 0.0067583*
m.i55**2 + 0.0150416*m.i56**2 + 0.00324193*m.i57**2 + 0.00478196*m.i58**2 + 0.0132471*m.i59**2 +
0.00273446*m.i60**2 + 0.0282459*m.i61**2 + 0.0230221*m.i62**2 + 0.0240972*m.i63**2 + 0.00829946*
m.i64**2 + 0.00688665*m.i65**2 + 0.00858803*m.i66**2 + 0.00778038*m.i67**2 + 0.0082583*m.i68**2
+ 0.022885*m.i69**2 + 0.00568332*m.i70**2 + 0.0234021*m.i71**2 + 0.00924249*m.i72**2 +
0.00669675*m.i73**2 + 0.0109501*m.i74**2 + 0.00663385*m.i75**2 + 0.00328058*m.i76**2 + 0.0112814*
m.i77**2 + 0.00341076*m.i78**2 + 0.0400653*m.i79**2 + 0.00876827*m.i80**2 + 0.0138276*m.i81**2 +
0.00246987*m.i82**2 + 0.0406516*m.i83**2 + 0.00947194*m.i84**2 + 0.00647449*m.i85**2 + 0.0107715*
m.i86**2 + 0.00803069*m.i87**2 + 0.106502*m.i88**2 + 0.00815263*m.i89**2 + 0.0171707*m.i90**2 +
0.0163522*m.i91**2 + 0.00911726*m.i92**2 + 0.00287317*m.i93**2 + 0.00360309*m.i94**2 + 0.00699161
*m.i95**2 + 0.0340959*m.i96**2 + 0.00958446*m.i97**2 + 0.0147951*m.i98**2 + 0.0177595*m.i99**2 +
0.0208523*m.i100**2 + 0.00692522*m.i1*m.i2 + 0.00066464*m.i1*m.i3 + 0.00388744*m.i1*m.i4 +
0.001108218*m.i1*m.i5 + 0.0046712*m.i1*m.i6 + 0.00771824*m.i1*m.i7 + 0.0020653*m.i1*m.i8 +
0.001524626*m.i1*m.i9 + 0.00484724*m.i1*m.i10 + 0.00733242*m.i1*m.i11 + 0.00556218*m.i1*m.i12 +
0.0052571*m.i1*m.i13 + 0.0218926*m.i1*m.i14 + 0.01352862*m.i1*m.i15 + 0.00549784*m.i1*m.i16 +
0.00235342*m.i1*m.i17 + 0.00448206*m.i1*m.i18 + 0.0072148*m.i1*m.i19 + 0.00958894*m.i1*m.i20 +
0.00376328*m.i1*m.i21 + 0.0117501*m.i1*m.i22 + 0.00575998*m.i1*m.i23 - 0.000109147*m.i1*m.i24 +
0.000604944*m.i1*m.i25 + 0.00473296*m.i1*m.i26 + 0.000356572*m.i1*m.i27 - 0.001552262*m.i1*m.i28
+ 0.00119092*m.i1*m.i29 + 0.01373684*m.i1*m.i30 + 0.0059113*m.i1*m.i31 + 0.00623524*m.i1*m.i32
+ 0.00801204*m.i1*m.i33 + 0.00108736*m.i1*m.i34 + 0.001491474*m.i1*m.i35 + 0.01080356*m.i1*m.i36
+ 0.00559202*m.i1*m.i37 + 7.8057e-6*m.i1*m.i38 + 0.00831004*m.i1*m.i39 + 0.001096208*m.i1*m.i40
+ 0.001136658*m.i1*m.i41 + 0.0073715*m.i1*m.i42 + 0.000726938*m.i1*m.i43 + 0.00621872*m.i1*m.i44
+ 0.00646596*m.i1*m.i45 + 0.00441466*m.i1*m.i46 + 0.001262528*m.i1*m.i47 + 0.00567366*m.i1*m.i48
+ 0.00690472*m.i1*m.i49 + 0.01140754*m.i1*m.i50 + 0.00275514*m.i1*m.i51 + 0.00633434*m.i1*m.i52
+ 0.00842252*m.i1*m.i53 + 0.00674544*m.i1*m.i54 + 0.00577156*m.i1*m.i55 + 0.000723972*m.i1*m.i56
+ 0.00617654*m.i1*m.i57 + 0.00426758*m.i1*m.i58 + 0.00581362*m.i1*m.i59 + 0.00305964*m.i1*m.i60
+ 0.00915838*m.i1*m.i61 + 0.00408204*m.i1*m.i62 + 0.00526036*m.i1*m.i63 + 0.00641708*m.i1*m.i64
+ 0.001311362*m.i1*m.i65 + 0.00589896*m.i1*m.i66 + 0.001450664*m.i1*m.i67 + 0.0054669*m.i1*m.i68
+ 0.00759698*m.i1*m.i69 + 0.0069591*m.i1*m.i70 + 0.0023689*m.i1*m.i71 + 0.0026146*m.i1*m.i72 +
0.00520422*m.i1*m.i73 + 0.00959956*m.i1*m.i74 + 0.00799166*m.i1*m.i75 + 0.00256248*m.i1*m.i76 +
0.01210352*m.i1*m.i77 + 0.00469514*m.i1*m.i78 + 0.00329676*m.i1*m.i79 + 0.0068214*m.i1*m.i80 +
0.00190637*m.i1*m.i81 + 0.00256972*m.i1*m.i82 - 0.00577696*m.i1*m.i83 + 0.00245394*m.i1*m.i84 +
0.00585966*m.i1*m.i85 + 0.00330078*m.i1*m.i86 + 0.00362852*m.i1*m.i87 + 0.0064137*m.i1*m.i88 +
0.00375038*m.i1*m.i89 + 0.00666048*m.i1*m.i90 + 0.00942176*m.i1*m.i91 + 0.00379828*m.i1*m.i92 +
0.00246526*m.i1*m.i93 + 0.0029997*m.i1*m.i94 + 0.00592606*m.i1*m.i95 + 0.0136565*m.i1*m.i96 +
0.00562112*m.i1*m.i97 + 0.0031101*m.i1*m.i98 + 0.00328418*m.i1*m.i99 + 0.00992138*m.i1*m.i100 +
0.01159836*m.i2*m.i3 + 0.00432612*m.i2*m.i4 + 0.01055774*m.i2*m.i5 + 0.0235592*m.i2*m.i6 +
0.0053913*m.i2*m.i7 + 0.01748966*m.i2*m.i8 + 0.01322526*m.i2*m.i9 + 0.01103896*m.i2*m.i10 +
0.001420928*m.i2*m.i11 + 0.00303766*m.i2*m.i12 + 0.0325414*m.i2*m.i13 + 0.0528886*m.i2*m.i14 +
0.0344486*m.i2*m.i15 + 0.01889664*m.i2*m.i16 + 0.01085498*m.i2*m.i17 + 0.01133696*m.i2*m.i18 +
0.0105108*m.i2*m.i19 + 0.041965*m.i2*m.i20 + 0.01908526*m.i2*m.i21 + 0.0438608*m.i2*m.i22 +
0.01760436*m.i2*m.i23 + 0.0177692*m.i2*m.i24 + 0.01401386*m.i2*m.i25 + 0.01130076*m.i2*m.i26 +
0.0201926*m.i2*m.i27 + 0.00893526*m.i2*m.i28 + 0.01013464*m.i2*m.i29 + 0.0522552*m.i2*m.i30 +
0.00674062*m.i2*m.i31 + 0.0386894*m.i2*m.i32 + 0.01840562*m.i2*m.i33 + 0.0079061*m.i2*m.i34 +
0.01050574*m.i2*m.i35 + 0.038882*m.i2*m.i36 + 0.0209782*m.i2*m.i37 + 0.00569346*m.i2*m.i38 +
0.0259324*m.i2*m.i39 + 0.0472088*m.i2*m.i40 + 0.0282636*m.i2*m.i41 + 0.0225892*m.i2*m.i42 +
0.01104052*m.i2*m.i43 + 0.0218496*m.i2*m.i44 + 0.00682534*m.i2*m.i45 + 0.01022898*m.i2*m.i46 +
0.0273094*m.i2*m.i47 + 0.01045064*m.i2*m.i48 + 0.01767338*m.i2*m.i49 + 0.0311902*m.i2*m.i50 +
0.0126455*m.i2*m.i51 + 0.0206168*m.i2*m.i52 + 0.0261894*m.i2*m.i53 + 0.024527*m.i2*m.i54 +
0.01734138*m.i2*m.i55 + 0.01224052*m.i2*m.i56 + 0.01152072*m.i2*m.i57 + 0.01028864*m.i2*m.i58 +
0.01883544*m.i2*m.i59 + 0.00908648*m.i2*m.i60 + 0.0449708*m.i2*m.i61 + 0.0363664*m.i2*m.i62 +
0.01577062*m.i2*m.i63 + 0.01266282*m.i2*m.i64 + 0.01385216*m.i2*m.i65 + 0.00440902*m.i2*m.i66 +
0.01711764*m.i2*m.i67 + 0.0110787*m.i2*m.i68 + 0.0341778*m.i2*m.i69 + 0.0156542*m.i2*m.i70 +
0.01891112*m.i2*m.i71 + 0.0216326*m.i2*m.i72 + 0.01534328*m.i2*m.i73 + 0.01661334*m.i2*m.i74 +
0.01534594*m.i2*m.i75 + 0.01116732*m.i2*m.i76 + 0.01402982*m.i2*m.i77 + 0.00963242*m.i2*m.i78 +
0.0200668*m.i2*m.i79 + 0.01379116*m.i2*m.i80 + 0.01910046*m.i2*m.i81 + 0.0077605*m.i2*m.i82 -
0.000954558*m.i2*m.i83 + 0.01255918*m.i2*m.i84 + 0.0126639*m.i2*m.i85 + 0.0201936*m.i2*m.i86 +
0.017931*m.i2*m.i87 + 0.0389418*m.i2*m.i88 + 0.00845916*m.i2*m.i89 + 0.0267914*m.i2*m.i90 +
0.0193905*m.i2*m.i91 + 0.01261014*m.i2*m.i92 + 0.0069012*m.i2*m.i93 + 0.00876014*m.i2*m.i94 +
0.01829908*m.i2*m.i95 + 0.0373396*m.i2*m.i96 + 0.0211262*m.i2*m.i97 + 0.01549032*m.i2*m.i98 +
0.0247114*m.i2*m.i99 + 0.0324248*m.i2*m.i100 - 0.000720538*m.i3*m.i4 + 0.00453322*m.i3*m.i5 +
0.00638226*m.i3*m.i6 + 0.000938158*m.i3*m.i7 + 0.0035154*m.i3*m.i8 + 0.00681962*m.i3*m.i9 +
0.006345*m.i3*m.i10 + 0.00232904*m.i3*m.i11 - 0.00054599*m.i3*m.i12 + 0.01850556*m.i3*m.i13 +
0.01892336*m.i3*m.i14 + 0.00820906*m.i3*m.i15 + 0.00848796*m.i3*m.i16 + 0.0100743*m.i3*m.i17 +
0.00327798*m.i3*m.i18 + 0.000498452*m.i3*m.i19 + 0.01775572*m.i3*m.i20 + 0.00919688*m.i3*m.i21 +
0.01282772*m.i3*m.i22 + 0.00853066*m.i3*m.i23 + 0.00506148*m.i3*m.i24 + 0.004557*m.i3*m.i25 +
0.001737768*m.i3*m.i26 + 0.00560326*m.i3*m.i27 + 0.00374962*m.i3*m.i28 + 0.000427408*m.i3*m.i29
+ 0.01831098*m.i3*m.i30 + 0.00791496*m.i3*m.i31 + 0.01306*m.i3*m.i32 + 0.0143109*m.i3*m.i33 +
0.00324578*m.i3*m.i34 + 0.00289704*m.i3*m.i35 + 0.01899172*m.i3*m.i36 + 0.00855898*m.i3*m.i37 +
0.000764782*m.i3*m.i38 + 0.01045622*m.i3*m.i39 + 0.0241684*m.i3*m.i40 + 0.01022702*m.i3*m.i41 +
0.0096569*m.i3*m.i42 + 0.00605256*m.i3*m.i43 + 0.0087656*m.i3*m.i44 + 0.00231868*m.i3*m.i45 +
0.003075*m.i3*m.i46 + 0.00904418*m.i3*m.i47 + 0.00346386*m.i3*m.i48 + 0.00970054*m.i3*m.i49 +
0.0107517*m.i3*m.i50 + 0.00833706*m.i3*m.i51 + 0.00601022*m.i3*m.i52 + 0.00885472*m.i3*m.i53 +
0.0087269*m.i3*m.i54 + 0.00799796*m.i3*m.i55 + 0.0077742*m.i3*m.i56 + 0.00233028*m.i3*m.i57 +
0.00392772*m.i3*m.i58 + 0.00960436*m.i3*m.i59 + 0.000506858*m.i3*m.i60 + 0.01485036*m.i3*m.i61 +
0.01172454*m.i3*m.i62 + 0.00763564*m.i3*m.i63 + 0.00510368*m.i3*m.i64 + 0.00739458*m.i3*m.i65 +
0.00321864*m.i3*m.i66 + 0.00506992*m.i3*m.i67 + 0.001582392*m.i3*m.i68 + 0.0133327*m.i3*m.i69 +
0.00346984*m.i3*m.i70 + 0.00591914*m.i3*m.i71 + 0.0050918*m.i3*m.i72 + 0.00762942*m.i3*m.i73 +
0.0072567*m.i3*m.i74 + 0.0028432*m.i3*m.i75 + 0.00258746*m.i3*m.i76 + 0.00665946*m.i3*m.i77 +
0.001559716*m.i3*m.i78 + 0.0114221*m.i3*m.i79 + 0.00359546*m.i3*m.i80 + 0.00675946*m.i3*m.i81 +
0.001328782*m.i3*m.i82 + 0.00450512*m.i3*m.i83 + 0.00859628*m.i3*m.i84 + 0.00541618*m.i3*m.i85 +
0.01126372*m.i3*m.i86 + 0.00604642*m.i3*m.i87 + 0.01802074*m.i3*m.i88 + 0.0056414*m.i3*m.i89 +
0.00952436*m.i3*m.i90 + 0.00568388*m.i3*m.i91 + 0.0086732*m.i3*m.i92 + 0.001482822*m.i3*m.i93 +
0.0026677*m.i3*m.i94 + 0.00675394*m.i3*m.i95 + 0.01169216*m.i3*m.i96 + 0.0076724*m.i3*m.i97 +
0.00761804*m.i3*m.i98 + 0.01192344*m.i3*m.i99 + 0.01326866*m.i3*m.i100 + 0.00169903*m.i4*m.i5 +
0.00300136*m.i4*m.i6 + 0.00385392*m.i4*m.i7 + 0.00382362*m.i4*m.i8 + 0.00575034*m.i4*m.i9 +
0.00125203*m.i4*m.i10 + 0.000828078*m.i4*m.i11 + 0.00404896*m.i4*m.i12 - 0.001180878*m.i4*m.i13
+ 0.00956206*m.i4*m.i14 + 0.00571904*m.i4*m.i15 + 0.0047927*m.i4*m.i16 + 0.001736122*m.i4*m.i17
+ 0.001900434*m.i4*m.i18 + 0.00498296*m.i4*m.i19 + 0.0055112*m.i4*m.i20 + 0.00199047*m.i4*m.i21
+ 0.00302926*m.i4*m.i22 + 0.001107052*m.i4*m.i23 + 0.0032099*m.i4*m.i24 + 0.00202704*m.i4*m.i25
+ 0.0049441*m.i4*m.i26 + 0.00296714*m.i4*m.i27 + 0.001430786*m.i4*m.i28 + 0.00335542*m.i4*m.i29
+ 0.0072271*m.i4*m.i30 + 0.001983328*m.i4*m.i31 + 0.00263338*m.i4*m.i32 + 0.0034098*m.i4*m.i33
+ 0.001978102*m.i4*m.i34 + 0.00248436*m.i4*m.i35 + 0.001037234*m.i4*m.i36 + 0.001931824*m.i4*
m.i37 + 0.00154955*m.i4*m.i38 + 0.00293776*m.i4*m.i39 - 0.01282698*m.i4*m.i40 + 0.001937926*m.i4*
m.i41 + 0.0052959*m.i4*m.i42 + 0.001856036*m.i4*m.i43 + 0.000740384*m.i4*m.i44 + 0.00372246*m.i4*
m.i45 + 0.00362974*m.i4*m.i46 + 0.001687258*m.i4*m.i47 + 0.00297792*m.i4*m.i48 + 0.0024381*m.i4*
m.i49 + 0.00581304*m.i4*m.i50 + 0.000775592*m.i4*m.i51 + 0.00512872*m.i4*m.i52 + 0.00302932*m.i4*
m.i53 + 0.00451004*m.i4*m.i54 + 0.00355054*m.i4*m.i55 + 0.000365898*m.i4*m.i56 + 0.00396452*m.i4*
m.i57 + 0.00218522*m.i4*m.i58 + 0.001602712*m.i4*m.i59 + 0.00378946*m.i4*m.i60 + 0.00528342*m.i4*
m.i61 + 0.00345546*m.i4*m.i62 + 0.0072364*m.i4*m.i63 + 0.00460504*m.i4*m.i64 + 0.00362066*m.i4*
m.i65 + 0.00176825*m.i4*m.i66 + 0.00326082*m.i4*m.i67 + 0.00494324*m.i4*m.i68 + 0.00478058*m.i4*
m.i69 + 0.0047424*m.i4*m.i70 + 0.00406804*m.i4*m.i71 + 0.00356438*m.i4*m.i72 + 0.0039191*m.i4*
m.i73 + 0.00506266*m.i4*m.i74 + 0.005213*m.i4*m.i75 + 0.00334114*m.i4*m.i76 + 0.00410168*m.i4*
m.i77 + 0.00325268*m.i4*m.i78 + 0.000621396*m.i4*m.i79 + 0.00679868*m.i4*m.i80 + 0.001665408*m.i4
*m.i81 + 0.00231708*m.i4*m.i82 - 0.0025243*m.i4*m.i83 + 0.00277762*m.i4*m.i84 + 0.0040202*m.i4*
m.i85 + 0.001500566*m.i4*m.i86 + 0.001680814*m.i4*m.i87 + 0.00640404*m.i4*m.i88 + 0.00397656*m.i4
*m.i89 + 0.000508164*m.i4*m.i90 + 0.00565534*m.i4*m.i91 + 0.0031999*m.i4*m.i92 + 0.0007233*m.i4*
m.i93 + 0.001347788*m.i4*m.i94 + 0.00386662*m.i4*m.i95 + 0.0056032*m.i4*m.i96 + 0.00392786*m.i4*
m.i97 + 0.0032706*m.i4*m.i98 + 0.000716722*m.i4*m.i99 + 0.00200998*m.i4*m.i100 + 0.00725878*m.i5*
m.i6 + 0.000634496*m.i5*m.i7 + 0.0112129*m.i5*m.i8 + 0.006535*m.i5*m.i9 + 0.0076756*m.i5*m.i10 -
0.00455426*m.i5*m.i11 + 0.001111236*m.i5*m.i12 + 0.01473142*m.i5*m.i13 + 0.01556352*m.i5*m.i14 +
0.00889148*m.i5*m.i15 + 0.00833956*m.i5*m.i16 + 0.01155304*m.i5*m.i17 + 0.0044319*m.i5*m.i18 +
0.0061696*m.i5*m.i19 + 0.01660846*m.i5*m.i20 + 0.00921042*m.i5*m.i21 + 0.01240074*m.i5*m.i22 +
0.00930536*m.i5*m.i23 + 0.00636938*m.i5*m.i24 + 0.00582298*m.i5*m.i25 + 0.00314834*m.i5*m.i26 +
0.00569034*m.i5*m.i27 + 0.00513186*m.i5*m.i28 + 0.00443806*m.i5*m.i29 + 0.01398194*m.i5*m.i30 +
0.00649478*m.i5*m.i31 + 0.01579432*m.i5*m.i32 + 0.00734872*m.i5*m.i33 + 0.0056108*m.i5*m.i34 +
0.00623672*m.i5*m.i35 + 0.01544598*m.i5*m.i36 + 0.01144796*m.i5*m.i37 + 0.0024117*m.i5*m.i38 +
0.00970728*m.i5*m.i39 + 0.0182302*m.i5*m.i40 + 0.00790876*m.i5*m.i41 + 0.00731488*m.i5*m.i42 +
0.00543454*m.i5*m.i43 + 0.00647722*m.i5*m.i44 + 0.0035064*m.i5*m.i45 + 0.00307696*m.i5*m.i46 +
0.00716814*m.i5*m.i47 + 0.001828662*m.i5*m.i48 + 0.00846664*m.i5*m.i49 + 0.01292148*m.i5*m.i50 +
0.0081737*m.i5*m.i51 + 0.00647086*m.i5*m.i52 + 0.00609644*m.i5*m.i53 + 0.00842446*m.i5*m.i54 +
0.00619594*m.i5*m.i55 + 0.01114364*m.i5*m.i56 + 0.00464056*m.i5*m.i57 + 0.00294786*m.i5*m.i58 +
0.01085566*m.i5*m.i59 + 0.00324938*m.i5*m.i60 + 0.01321296*m.i5*m.i61 + 0.00956118*m.i5*m.i62 +
0.00799502*m.i5*m.i63 + 0.00255928*m.i5*m.i64 + 0.00635808*m.i5*m.i65 + 0.00425494*m.i5*m.i66 +
0.00743456*m.i5*m.i67 + 0.003997*m.i5*m.i68 + 0.01327542*m.i5*m.i69 + 0.00624764*m.i5*m.i70 +
0.00544782*m.i5*m.i71 + 0.00583882*m.i5*m.i72 + 0.00712322*m.i5*m.i73 + 0.00675538*m.i5*m.i74 +
0.00471928*m.i5*m.i75 + 0.00331686*m.i5*m.i76 + 0.0064726*m.i5*m.i77 + 0.0043073*m.i5*m.i78 +
0.01376458*m.i5*m.i79 + 0.00590054*m.i5*m.i80 + 0.00544478*m.i5*m.i81 + 0.00433406*m.i5*m.i82 +
0.0018936*m.i5*m.i83 + 0.00732892*m.i5*m.i84 + 0.00654804*m.i5*m.i85 + 0.00769986*m.i5*m.i86 +
0.00924248*m.i5*m.i87 + 0.01858866*m.i5*m.i88 + 0.00588762*m.i5*m.i89 + 0.00671372*m.i5*m.i90 +
0.00513832*m.i5*m.i91 + 0.00597632*m.i5*m.i92 + 0.0033572*m.i5*m.i93 + 0.00718978*m.i5*m.i94 +
0.00692006*m.i5*m.i95 + 0.0082357*m.i5*m.i96 + 0.00798976*m.i5*m.i97 + 0.00578018*m.i5*m.i98 +
0.00997244*m.i5*m.i99 + 0.00861536*m.i5*m.i100 + 0.00682146*m.i6*m.i7 + 0.00318158*m.i6*m.i8 +
0.01402384*m.i6*m.i9 + 0.01146794*m.i6*m.i10 + 0.00514562*m.i6*m.i11 + 0.001749894*m.i6*m.i12 +
0.0349226*m.i6*m.i13 + 0.0204032*m.i6*m.i14 + 0.0257432*m.i6*m.i15 + 0.01758104*m.i6*m.i16 +
0.01908054*m.i6*m.i17 + 0.00928378*m.i6*m.i18 + 0.00320468*m.i6*m.i19 + 0.0315536*m.i6*m.i20 +
0.01792788*m.i6*m.i21 + 0.0231518*m.i6*m.i22 + 0.01485588*m.i6*m.i23 + 0.01959078*m.i6*m.i24 +
0.01015748*m.i6*m.i25 + 0.00771848*m.i6*m.i26 + 0.0203708*m.i6*m.i27 + 0.00861336*m.i6*m.i28 +
0.00733064*m.i6*m.i29 + 0.0211284*m.i6*m.i30 + 0.01136376*m.i6*m.i31 + 0.0298052*m.i6*m.i32 +
0.01763386*m.i6*m.i33 + 0.01196962*m.i6*m.i34 + 0.00970124*m.i6*m.i35 + 0.0426536*m.i6*m.i36 +
0.0162704*m.i6*m.i37 + 0.00511032*m.i6*m.i38 + 0.0211034*m.i6*m.i39 + 0.0536216*m.i6*m.i40 +
0.0314338*m.i6*m.i41 + 0.0212846*m.i6*m.i42 + 0.01544516*m.i6*m.i43 + 0.0203852*m.i6*m.i44 +
0.00711214*m.i6*m.i45 + 0.01012528*m.i6*m.i46 + 0.0378006*m.i6*m.i47 + 0.00769828*m.i6*m.i48 +
0.01043538*m.i6*m.i49 + 0.0235092*m.i6*m.i50 + 0.00574084*m.i6*m.i51 + 0.01540822*m.i6*m.i52 +
0.01066192*m.i6*m.i53 + 0.01947344*m.i6*m.i54 + 0.01212224*m.i6*m.i55 + 0.01841288*m.i6*m.i56 +
0.00863178*m.i6*m.i57 + 0.0123986*m.i6*m.i58 + 0.01033934*m.i6*m.i59 + 0.00473636*m.i6*m.i60 +
0.0271978*m.i6*m.i61 + 0.0244978*m.i6*m.i62 + 0.0206042*m.i6*m.i63 + 0.0123061*m.i6*m.i64 +
0.00969592*m.i6*m.i65 + 0.0105285*m.i6*m.i66 + 0.01296694*m.i6*m.i67 + 0.00467684*m.i6*m.i68 +
0.0206522*m.i6*m.i69 + 0.01181216*m.i6*m.i70 + 0.034569*m.i6*m.i71 + 0.01713412*m.i6*m.i72 +
0.00997084*m.i6*m.i73 + 0.00934556*m.i6*m.i74 + 0.00446476*m.i6*m.i75 + 0.00591468*m.i6*m.i76 +
0.00902732*m.i6*m.i77 + 0.00684842*m.i6*m.i78 + 0.000346556*m.i6*m.i79 + 0.01344964*m.i6*m.i80 +
0.028585*m.i6*m.i81 + 0.00365848*m.i6*m.i82 + 0.0233826*m.i6*m.i83 + 0.01097966*m.i6*m.i84 +
0.01159854*m.i6*m.i85 + 0.0132315*m.i6*m.i86 + 0.00973116*m.i6*m.i87 + 0.01749474*m.i6*m.i88 +
0.00153948*m.i6*m.i89 + 0.01386412*m.i6*m.i90 + 0.01199914*m.i6*m.i91 + 0.0141917*m.i6*m.i92 +
0.001321806*m.i6*m.i93 + 0.00438272*m.i6*m.i94 + 0.01131596*m.i6*m.i95 + 0.01535776*m.i6*m.i96 +
0.01709068*m.i6*m.i97 + 0.024088*m.i6*m.i98 + 0.0176488*m.i6*m.i99 + 0.0244376*m.i6*m.i100 +
0.00488516*m.i7*m.i8 + 0.00626372*m.i7*m.i9 + 0.001990118*m.i7*m.i10 + 0.00360408*m.i7*m.i11 +
0.0044488*m.i7*m.i12 + 0.00345036*m.i7*m.i13 + 0.01022598*m.i7*m.i14 + 0.00914736*m.i7*m.i15 +
0.00744612*m.i7*m.i16 + 0.0041386*m.i7*m.i17 + 0.00439536*m.i7*m.i18 + 0.00478826*m.i7*m.i19 +
0.00946126*m.i7*m.i20 + 0.00383118*m.i7*m.i21 + 0.00577738*m.i7*m.i22 + 0.0023517*m.i7*m.i23 +
0.0050588*m.i7*m.i24 + 0.0021953*m.i7*m.i25 + 0.00304582*m.i7*m.i26 + 0.0025687*m.i7*m.i27 +
0.001019412*m.i7*m.i28 + 0.001803492*m.i7*m.i29 + 0.00840076*m.i7*m.i30 + 0.00405006*m.i7*m.i31
+ 0.00330894*m.i7*m.i32 + 0.00379124*m.i7*m.i33 + 0.00297878*m.i7*m.i34 + 0.00257924*m.i7*m.i35
+ 0.00710268*m.i7*m.i36 + 0.00290856*m.i7*m.i37 + 0.00084645*m.i7*m.i38 + 0.00616224*m.i7*m.i39
+ 0.00012188*m.i7*m.i40 + 0.00931498*m.i7*m.i41 + 0.00783*m.i7*m.i42 + 0.00769852*m.i7*m.i43 +
0.00783756*m.i7*m.i44 + 0.0049081*m.i7*m.i45 + 0.00379762*m.i7*m.i46 + 0.00691856*m.i7*m.i47 +
0.00516014*m.i7*m.i48 + 0.00525658*m.i7*m.i49 + 0.00529626*m.i7*m.i50 + 0.00103022*m.i7*m.i51 +
0.00545452*m.i7*m.i52 + 0.00609146*m.i7*m.i53 + 0.0066465*m.i7*m.i54 + 0.0057959*m.i7*m.i55 +
0.00384568*m.i7*m.i56 + 0.00518642*m.i7*m.i57 + 0.0049888*m.i7*m.i58 + 0.00240984*m.i7*m.i59 +
0.001870666*m.i7*m.i60 + 0.00856542*m.i7*m.i61 + 0.00433228*m.i7*m.i62 + 0.00926318*m.i7*m.i63 +
0.00802564*m.i7*m.i64 + 0.002679*m.i7*m.i65 + 0.00656044*m.i7*m.i66 + 0.00189873*m.i7*m.i67 +
0.00559974*m.i7*m.i68 + 0.0059088*m.i7*m.i69 + 0.00502274*m.i7*m.i70 + 0.00714092*m.i7*m.i71 +
0.00451814*m.i7*m.i72 + 0.0055096*m.i7*m.i73 + 0.0054579*m.i7*m.i74 + 0.00428152*m.i7*m.i75 +
0.00201372*m.i7*m.i76 + 0.00763776*m.i7*m.i77 + 0.001767634*m.i7*m.i78 - 0.00404984*m.i7*m.i79 +
0.00693072*m.i7*m.i80 + 0.00453578*m.i7*m.i81 + 0.001431356*m.i7*m.i82 + 0.001000832*m.i7*m.i83
+ 0.00363592*m.i7*m.i84 + 0.00399748*m.i7*m.i85 + 0.00244412*m.i7*m.i86 - 0.00038172*m.i7*m.i87
+ 0.00670104*m.i7*m.i88 + 0.00351634*m.i7*m.i89 + 0.000192176*m.i7*m.i90 + 0.00766242*m.i7*m.i91
+ 0.00431432*m.i7*m.i92 + 0.00099522*m.i7*m.i93 + 0.00215394*m.i7*m.i94 + 0.00467712*m.i7*m.i95
+ 0.00551306*m.i7*m.i96 + 0.00524514*m.i7*m.i97 + 0.00715168*m.i7*m.i98 + 0.00269474*m.i7*m.i99
+ 0.006577*m.i7*m.i100 + 0.01497394*m.i8*m.i9 + 0.0108969*m.i8*m.i10 + 0.00659842*m.i8*m.i11 +
0.00635336*m.i8*m.i12 + 0.0313098*m.i8*m.i13 + 0.0387588*m.i8*m.i14 + 0.01963812*m.i8*m.i15 +
0.00587206*m.i8*m.i16 + 0.0158028*m.i8*m.i17 + 0.00433344*m.i8*m.i18 + 0.01027216*m.i8*m.i19 +
0.0310764*m.i8*m.i20 + 0.01480666*m.i8*m.i21 + 0.0292324*m.i8*m.i22 + 0.01097454*m.i8*m.i23 +
0.01637932*m.i8*m.i24 + 0.0081932*m.i8*m.i25 + 0.00625414*m.i8*m.i26 + 0.01206926*m.i8*m.i27 +
0.00960586*m.i8*m.i28 + 0.00767454*m.i8*m.i29 + 0.0389634*m.i8*m.i30 + 0.01047056*m.i8*m.i31 +
0.0243166*m.i8*m.i32 + 0.01490526*m.i8*m.i33 + 0.0048023*m.i8*m.i34 + 0.00582726*m.i8*m.i35 +
0.0310084*m.i8*m.i36 + 0.01520046*m.i8*m.i37 + 0.00435652*m.i8*m.i38 + 0.01820518*m.i8*m.i39 +
0.028962*m.i8*m.i40 + 0.0236162*m.i8*m.i41 + 0.0089807*m.i8*m.i42 + 0.01679084*m.i8*m.i43 +
0.01575264*m.i8*m.i44 - 0.00596962*m.i8*m.i45 + 0.0045504*m.i8*m.i46 + 0.0135935*m.i8*m.i47 +
0.00528224*m.i8*m.i48 + 0.01215584*m.i8*m.i49 + 0.01116408*m.i8*m.i50 + 0.00976906*m.i8*m.i51 +
0.01011206*m.i8*m.i52 + 0.0224104*m.i8*m.i53 + 0.01007602*m.i8*m.i54 + 0.01583128*m.i8*m.i55 +
0.00761084*m.i8*m.i56 + 0.00804396*m.i8*m.i57 + 0.01038608*m.i8*m.i58 + 0.01602498*m.i8*m.i59 +
0.00380248*m.i8*m.i60 + 0.0227414*m.i8*m.i61 + 0.0208778*m.i8*m.i62 + 0.01278874*m.i8*m.i63 +
0.00882622*m.i8*m.i64 + 0.01253422*m.i8*m.i65 + 0.00938202*m.i8*m.i66 + 0.0132364*m.i8*m.i67 +
0.00341364*m.i8*m.i68 + 0.0217686*m.i8*m.i69 + 0.01082106*m.i8*m.i70 + 0.0109575*m.i8*m.i71 +
0.01032418*m.i8*m.i72 + 0.01203924*m.i8*m.i73 + 0.01820078*m.i8*m.i74 + 0.00454846*m.i8*m.i75 +
0.00699592*m.i8*m.i76 + 0.017175*m.i8*m.i77 + 0.00418326*m.i8*m.i78 + 0.003044*m.i8*m.i79 +
0.00913958*m.i8*m.i80 + 0.01058642*m.i8*m.i81 + 0.00609436*m.i8*m.i82 + 0.00939194*m.i8*m.i83 +
0.01860882*m.i8*m.i84 + 0.00544766*m.i8*m.i85 + 0.00672898*m.i8*m.i86 + 0.00847128*m.i8*m.i87 +
0.0399532*m.i8*m.i88 + 0.00230258*m.i8*m.i89 + 0.00647968*m.i8*m.i90 + 0.00663734*m.i8*m.i91 +
0.00723392*m.i8*m.i92 + 0.0028363*m.i8*m.i93 + 0.01094692*m.i8*m.i94 + 0.01122622*m.i8*m.i95 +
0.01922686*m.i8*m.i96 + 0.0178042*m.i8*m.i97 + 0.00987488*m.i8*m.i98 + 0.0201768*m.i8*m.i99 +
0.00916962*m.i8*m.i100 + 0.00380196*m.i9*m.i10 + 0.000241806*m.i9*m.i11 + 0.00422182*m.i9*m.i12
+ 0.01745366*m.i9*m.i13 + 0.01560378*m.i9*m.i14 + 0.01797116*m.i9*m.i15 + 0.0104377*m.i9*m.i16
+ 0.01789532*m.i9*m.i17 + 0.0058031*m.i9*m.i18 + 0.00524852*m.i9*m.i19 + 0.0217664*m.i9*m.i20 +
0.0137801*m.i9*m.i21 + 0.00556924*m.i9*m.i22 + 0.00707894*m.i9*m.i23 + 0.00383446*m.i9*m.i24 +
0.00797136*m.i9*m.i25 + 0.00671112*m.i9*m.i26 + 0.00962638*m.i9*m.i27 + 0.00548282*m.i9*m.i28 +
0.00537842*m.i9*m.i29 + 0.01125578*m.i9*m.i30 + 0.01033708*m.i9*m.i31 + 0.01741482*m.i9*m.i32 +
0.01282666*m.i9*m.i33 + 0.00490948*m.i9*m.i34 + 0.00344028*m.i9*m.i35 + 0.01643714*m.i9*m.i36 +
0.00871578*m.i9*m.i37 + 0.002884*m.i9*m.i38 + 0.01596496*m.i9*m.i39 + 0.0171071*m.i9*m.i40 +
0.0282184*m.i9*m.i41 + 0.0157083*m.i9*m.i42 + 0.01908622*m.i9*m.i43 + 0.01887462*m.i9*m.i44 +
0.00621506*m.i9*m.i45 + 0.00706654*m.i9*m.i46 + 0.01685764*m.i9*m.i47 + 0.0046064*m.i9*m.i48 +
0.01393082*m.i9*m.i49 + 0.01366172*m.i9*m.i50 + 0.00974224*m.i9*m.i51 + 0.01117786*m.i9*m.i52 +
0.0105042*m.i9*m.i53 + 0.01603942*m.i9*m.i54 + 0.01154502*m.i9*m.i55 + 0.0187017*m.i9*m.i56 +
0.0063051*m.i9*m.i57 + 0.01180982*m.i9*m.i58 + 0.01148738*m.i9*m.i59 + 0.0045111*m.i9*m.i60 +
0.01782442*m.i9*m.i61 + 0.01261594*m.i9*m.i62 + 0.0275116*m.i9*m.i63 + 0.01370986*m.i9*m.i64 +
0.01301448*m.i9*m.i65 + 0.00909146*m.i9*m.i66 + 0.00880956*m.i9*m.i67 + 0.00542126*m.i9*m.i68 +
0.0173699*m.i9*m.i69 + 0.0063573*m.i9*m.i70 + 0.01464082*m.i9*m.i71 + 0.01030184*m.i9*m.i72 +
0.01342364*m.i9*m.i73 + 0.01050302*m.i9*m.i74 + 0.00580926*m.i9*m.i75 + 0.00669824*m.i9*m.i76 +
0.0154461*m.i9*m.i77 + 0.00331996*m.i9*m.i78 - 0.00117976*m.i9*m.i79 + 0.0134427*m.i9*m.i80 +
0.01200946*m.i9*m.i81 + 0.00261992*m.i9*m.i82 + 0.01802554*m.i9*m.i83 + 0.01281546*m.i9*m.i84 +
0.00817562*m.i9*m.i85 + 0.01353278*m.i9*m.i86 + 0.0065419*m.i9*m.i87 + 0.0287756*m.i9*m.i88 +
0.00438656*m.i9*m.i89 + 0.006514*m.i9*m.i90 + 0.00948704*m.i9*m.i91 + 0.01460712*m.i9*m.i92 +
0.00442406*m.i9*m.i93 + 0.00525338*m.i9*m.i94 + 0.01080594*m.i9*m.i95 + 0.007284*m.i9*m.i96 +
0.01145784*m.i9*m.i97 + 0.01167366*m.i9*m.i98 + 0.01306896*m.i9*m.i99 + 0.01230056*m.i9*m.i100 +
0.00390108*m.i10*m.i11 + 0.00306506*m.i10*m.i12 + 0.0266658*m.i10*m.i13 + 0.027667*m.i10*m.i14 +
0.01278752*m.i10*m.i15 + 0.01031474*m.i10*m.i16 + 0.01126594*m.i10*m.i17 + 0.00489102*m.i10*m.i18
+ 0.00513038*m.i10*m.i19 + 0.01899656*m.i10*m.i20 + 0.01116072*m.i10*m.i21 + 0.0218888*m.i10*
m.i22 + 0.01101148*m.i10*m.i23 + 0.00938786*m.i10*m.i24 + 0.00495956*m.i10*m.i25 + 0.00409492*
m.i10*m.i26 + 0.00774196*m.i10*m.i27 + 0.00563678*m.i10*m.i28 + 0.00452506*m.i10*m.i29 +
0.0234496*m.i10*m.i30 + 0.00879878*m.i10*m.i31 + 0.01816086*m.i10*m.i32 + 0.01204676*m.i10*m.i33
+ 0.00474448*m.i10*m.i34 + 0.00478426*m.i10*m.i35 + 0.0297012*m.i10*m.i36 + 0.0151832*m.i10*
m.i37 + 0.00256504*m.i10*m.i38 + 0.01482468*m.i10*m.i39 + 0.0351312*m.i10*m.i40 + 0.00722204*
m.i10*m.i41 + 0.00911442*m.i10*m.i42 + 0.00459148*m.i10*m.i43 + 0.00643892*m.i10*m.i44 +
0.00232242*m.i10*m.i45 + 0.00525016*m.i10*m.i46 + 0.00918898*m.i10*m.i47 + 0.00604914*m.i10*m.i48
+ 0.00855226*m.i10*m.i49 + 0.01758968*m.i10*m.i50 + 0.00905476*m.i10*m.i51 + 0.0076611*m.i10*
m.i52 + 0.01159398*m.i10*m.i53 + 0.00933998*m.i10*m.i54 + 0.00932956*m.i10*m.i55 + 0.0077777*
m.i10*m.i56 + 0.00585234*m.i10*m.i57 + 0.00494612*m.i10*m.i58 + 0.01267098*m.i10*m.i59 +
0.0025072*m.i10*m.i60 + 0.01652258*m.i10*m.i61 + 0.0113132*m.i10*m.i62 + 0.00647572*m.i10*m.i63
+ 0.00509638*m.i10*m.i64 + 0.00796924*m.i10*m.i65 + 0.00671784*m.i10*m.i66 + 0.00876736*m.i10*
m.i67 + 0.00330284*m.i10*m.i68 + 0.0143256*m.i10*m.i69 + 0.00658518*m.i10*m.i70 + 0.00751304*
m.i10*m.i71 + 0.00447272*m.i10*m.i72 + 0.00707326*m.i10*m.i73 + 0.01022514*m.i10*m.i74 +
0.00629098*m.i10*m.i75 + 0.00437386*m.i10*m.i76 + 0.0069722*m.i10*m.i77 + 0.00631338*m.i10*m.i78
+ 0.01475202*m.i10*m.i79 + 0.00722624*m.i10*m.i80 + 0.00973154*m.i10*m.i81 + 0.00371556*m.i10*
m.i82 + 0.00253096*m.i10*m.i83 + 0.008833*m.i10*m.i84 + 0.00871744*m.i10*m.i85 + 0.0101816*m.i10*
m.i86 + 0.01000738*m.i10*m.i87 + 0.01974334*m.i10*m.i88 + 0.00587674*m.i10*m.i89 + 0.0124516*
m.i10*m.i90 + 0.00915752*m.i10*m.i91 + 0.00913708*m.i10*m.i92 + 0.00200378*m.i10*m.i93 +
0.00536928*m.i10*m.i94 + 0.00823672*m.i10*m.i95 + 0.01736144*m.i10*m.i96 + 0.01105742*m.i10*m.i97
+ 0.01023842*m.i10*m.i98 + 0.01685104*m.i10*m.i99 + 0.01457986*m.i10*m.i100 + 0.000833086*m.i11*
m.i12 + 0.00999478*m.i11*m.i13 + 0.01344484*m.i11*m.i14 + 0.0031808*m.i11*m.i15 + 0.01117228*
m.i11*m.i16 + 0.000697152*m.i11*m.i17 + 0.000585828*m.i11*m.i18 + 0.00585952*m.i11*m.i19 +
0.00859976*m.i11*m.i20 + 0.00502902*m.i11*m.i21 + 0.00447154*m.i11*m.i22 + 0.001969568*m.i11*
m.i23 + 0.0049358*m.i11*m.i24 - 0.00029705*m.i11*m.i25 + 0.0008833*m.i11*m.i26 + 0.00788936*m.i11
*m.i27 + 0.00223564*m.i11*m.i28 - 0.001370818*m.i11*m.i29 + 0.0148367*m.i11*m.i30 + 0.01084338*
m.i11*m.i31 + 0.000606756*m.i11*m.i32 + 0.00591896*m.i11*m.i33 - 0.00408456*m.i11*m.i34 -
0.002724*m.i11*m.i35 + 0.01495302*m.i11*m.i36 + 0.0001528802*m.i11*m.i37 + 0.000200858*m.i11*
m.i38 + 0.00843216*m.i11*m.i39 + 0.01341476*m.i11*m.i40 + 0.01160686*m.i11*m.i41 + 0.00464728*
m.i11*m.i42 + 0.00803576*m.i11*m.i43 + 0.00270742*m.i11*m.i44 - 0.00352162*m.i11*m.i45 +
0.000947796*m.i11*m.i46 + 0.00388898*m.i11*m.i47 + 0.00557236*m.i11*m.i48 + 0.00208008*m.i11*
m.i49 + 0.000931698*m.i11*m.i50 + 0.000654446*m.i11*m.i51 + 0.00650504*m.i11*m.i52 + 0.000501194*
m.i11*m.i53 + 0.00681518*m.i11*m.i54 + 0.00601122*m.i11*m.i55 - 0.00507122*m.i11*m.i56 +
0.000483176*m.i11*m.i57 + 0.00482018*m.i11*m.i58 + 0.0064067*m.i11*m.i59 - 0.000166498*m.i11*
m.i60 + 0.00575774*m.i11*m.i61 + 0.00725456*m.i11*m.i62 + 0.00219412*m.i11*m.i63 + 0.0084673*
m.i11*m.i64 + 0.000333436*m.i11*m.i65 + 0.00655332*m.i11*m.i66 - 0.00257168*m.i11*m.i67 +
0.01199786*m.i11*m.i68 + 0.0059299*m.i11*m.i69 + 0.001843394*m.i11*m.i70 + 0.01060724*m.i11*m.i71
+ 0.00647206*m.i11*m.i72 + 0.00231676*m.i11*m.i73 + 0.00580344*m.i11*m.i74 + 0.00620538*m.i11*
m.i75 - 0.000334258*m.i11*m.i76 + 0.00656424*m.i11*m.i77 - 0.001286316*m.i11*m.i78 + 0.00546106*
m.i11*m.i79 - 0.000202642*m.i11*m.i80 + 0.00426114*m.i11*m.i81 - 0.00204892*m.i11*m.i82 +
0.01117602*m.i11*m.i83 + 0.01034244*m.i11*m.i84 + 0.00449542*m.i11*m.i85 + 0.00797378*m.i11*m.i86
- 0.000792844*m.i11*m.i87 + 0.01939124*m.i11*m.i88 + 0.00432784*m.i11*m.i89 + 0.00204578*m.i11*
m.i90 + 0.021152*m.i11*m.i91 + 0.00283286*m.i11*m.i92 - 0.00407532*m.i11*m.i93 - 0.001198622*
m.i11*m.i94 + 0.0056114*m.i11*m.i95 + 0.00560696*m.i11*m.i96 + 0.00867776*m.i11*m.i97 +
0.01208222*m.i11*m.i98 + 0.00209588*m.i11*m.i99 + 0.0061276*m.i11*m.i100 + 0.00580036*m.i12*m.i13
+ 0.01674486*m.i12*m.i14 + 0.00758412*m.i12*m.i15 + 0.0061097*m.i12*m.i16 + 0.00406024*m.i12*
m.i17 + 0.00246134*m.i12*m.i18 + 0.00422294*m.i12*m.i19 + 0.00359302*m.i12*m.i20 + 0.0027503*
m.i12*m.i21 + 0.01042736*m.i12*m.i22 + 0.001094158*m.i12*m.i23 + 0.00410122*m.i12*m.i24 +
0.0025257*m.i12*m.i25 + 0.00319626*m.i12*m.i26 + 0.00241386*m.i12*m.i27 + 0.001365712*m.i12*m.i28
+ 0.00285332*m.i12*m.i29 + 0.01617908*m.i12*m.i30 + 0.00231724*m.i12*m.i31 + 0.00343892*m.i12*
m.i32 + 0.00256516*m.i12*m.i33 + 0.001014308*m.i12*m.i34 + 0.001643396*m.i12*m.i35 + 0.00879946*
m.i12*m.i36 + 0.00422942*m.i12*m.i37 + 0.001108756*m.i12*m.i38 + 0.0068803*m.i12*m.i39 -
0.00375268*m.i12*m.i40 + 0.0029422*m.i12*m.i41 + 0.00429146*m.i12*m.i42 + 0.00277958*m.i12*m.i43
+ 0.00284814*m.i12*m.i44 + 0.001633544*m.i12*m.i45 + 0.00422296*m.i12*m.i46 + 0.000606884*m.i12*
m.i47 + 0.0041981*m.i12*m.i48 + 0.00378962*m.i12*m.i49 + 0.00842602*m.i12*m.i50 + 0.002132*m.i12*
m.i51 + 0.00482062*m.i12*m.i52 + 0.00806126*m.i12*m.i53 + 0.00387284*m.i12*m.i54 + 0.0039366*
m.i12*m.i55 + 0.000612768*m.i12*m.i56 + 0.0044852*m.i12*m.i57 + 0.00284844*m.i12*m.i58 +
0.00336708*m.i12*m.i59 + 0.0030099*m.i12*m.i60 + 0.00693418*m.i12*m.i61 + 0.0046908*m.i12*m.i62
+ 0.00538386*m.i12*m.i63 + 0.00560854*m.i12*m.i64 + 0.00360994*m.i12*m.i65 + 0.00317544*m.i12*
m.i66 + 0.00443286*m.i12*m.i67 + 0.00420074*m.i12*m.i68 + 0.00506986*m.i12*m.i69 + 0.00415464*
m.i12*m.i70 + 0.00220046*m.i12*m.i71 + 0.00230386*m.i12*m.i72 + 0.00311708*m.i12*m.i73 +
0.00731294*m.i12*m.i74 + 0.0048156*m.i12*m.i75 + 0.00332812*m.i12*m.i76 + 0.00439802*m.i12*m.i77
+ 0.00371872*m.i12*m.i78 + 0.00601328*m.i12*m.i79 + 0.00749754*m.i12*m.i80 + 0.00280082*m.i12*
m.i81 + 0.00202854*m.i12*m.i82 + 0.001389608*m.i12*m.i83 + 0.00387764*m.i12*m.i84 + 0.00354982*
m.i12*m.i85 + 0.00265444*m.i12*m.i86 + 0.0022211*m.i12*m.i87 + 0.00666916*m.i12*m.i88 +
0.00412408*m.i12*m.i89 + 0.00421336*m.i12*m.i90 + 0.00306034*m.i12*m.i91 + 0.00210254*m.i12*m.i92
+ 0.001819242*m.i12*m.i93 + 0.0007903*m.i12*m.i94 + 0.00409078*m.i12*m.i95 + 0.00988156*m.i12*
m.i96 + 0.00522182*m.i12*m.i97 + 0.00482098*m.i12*m.i98 + 0.0042136*m.i12*m.i99 + 0.00408986*
m.i12*m.i100 + 0.0674968*m.i13*m.i14 + 0.0344974*m.i13*m.i15 + 0.0330226*m.i13*m.i16 + 0.0319354*
m.i13*m.i17 + 0.01218366*m.i13*m.i18 + 0.00519196*m.i13*m.i19 + 0.044536*m.i13*m.i20 + 0.0277772*
m.i13*m.i21 + 0.0622606*m.i13*m.i22 + 0.0259408*m.i13*m.i23 + 0.0302608*m.i13*m.i24 + 0.0163455*
m.i13*m.i25 + 0.0077583*m.i13*m.i26 + 0.0227636*m.i13*m.i27 + 0.01173702*m.i13*m.i28 + 0.00769116
*m.i13*m.i29 + 0.0709126*m.i13*m.i30 + 0.01974624*m.i13*m.i31 + 0.0471936*m.i13*m.i32 + 0.0320402
*m.i13*m.i33 + 0.0107856*m.i13*m.i34 + 0.00663924*m.i13*m.i35 + 0.0963608*m.i13*m.i36 + 0.0383208
*m.i13*m.i37 + 0.00629602*m.i13*m.i38 + 0.0436584*m.i13*m.i39 + 0.113305*m.i13*m.i40 + 0.030603*
m.i13*m.i41 + 0.0334486*m.i13*m.i42 + 0.0221094*m.i13*m.i43 + 0.0261022*m.i13*m.i44 + 0.00384036*
m.i13*m.i45 + 0.01393368*m.i13*m.i46 + 0.0390862*m.i13*m.i47 + 0.01408516*m.i13*m.i48 + 0.0200136
*m.i13*m.i49 + 0.0473844*m.i13*m.i50 + 0.0233922*m.i13*m.i51 + 0.0267544*m.i13*m.i52 + 0.0382128*
m.i13*m.i53 + 0.026998*m.i13*m.i54 + 0.0232812*m.i13*m.i55 + 0.0210468*m.i13*m.i56 + 0.01155576*
m.i13*m.i57 + 0.01460704*m.i13*m.i58 + 0.0315638*m.i13*m.i59 + 0.00606798*m.i13*m.i60 + 0.048913*
m.i13*m.i61 + 0.0422528*m.i13*m.i62 + 0.0227364*m.i13*m.i63 + 0.0218176*m.i13*m.i64 + 0.020181*
m.i13*m.i65 + 0.0171918*m.i13*m.i66 + 0.0231896*m.i13*m.i67 + 0.00653966*m.i13*m.i68 + 0.0386908*
m.i13*m.i69 + 0.01310368*m.i13*m.i70 + 0.0233574*m.i13*m.i71 + 0.01370986*m.i13*m.i72 +
0.01644046*m.i13*m.i73 + 0.0239108*m.i13*m.i74 + 0.01209114*m.i13*m.i75 + 0.00733894*m.i13*m.i76
+ 0.01831752*m.i13*m.i77 + 0.01361596*m.i13*m.i78 + 0.0349392*m.i13*m.i79 + 0.01738086*m.i13*
m.i80 + 0.0327952*m.i13*m.i81 + 0.00370036*m.i13*m.i82 + 0.0275306*m.i13*m.i83 + 0.0237408*m.i13*
m.i84 + 0.023854*m.i13*m.i85 + 0.0298082*m.i13*m.i86 + 0.01954408*m.i13*m.i87 + 0.0427146*m.i13*
m.i88 + 0.00800344*m.i13*m.i89 + 0.0379614*m.i13*m.i90 + 0.0237386*m.i13*m.i91 + 0.0280402*m.i13*
m.i92 + 0.00539152*m.i13*m.i93 + 0.00878456*m.i13*m.i94 + 0.0258544*m.i13*m.i95 + 0.0525716*m.i13
*m.i96 + 0.0324866*m.i13*m.i97 + 0.03178*m.i13*m.i98 + 0.0440898*m.i13*m.i99 + 0.0425102*m.i13*
m.i100 + 0.0526828*m.i14*m.i15 + 0.037439*m.i14*m.i16 + 0.0256328*m.i14*m.i17 + 0.0100326*m.i14*
m.i18 + 0.02287*m.i14*m.i19 + 0.05764*m.i14*m.i20 + 0.0305304*m.i14*m.i21 + 0.0790588*m.i14*m.i22
+ 0.0273134*m.i14*m.i23 + 0.0226144*m.i14*m.i24 + 0.01919436*m.i14*m.i25 + 0.01634394*m.i14*
m.i26 + 0.0200216*m.i14*m.i27 + 0.01187024*m.i14*m.i28 + 0.0175096*m.i14*m.i29 + 0.1303416*m.i14*
m.i30 + 0.01783484*m.i14*m.i31 + 0.0483706*m.i14*m.i32 + 0.0389666*m.i14*m.i33 + 0.00488422*m.i14
*m.i34 + 0.01045608*m.i14*m.i35 + 0.0811654*m.i14*m.i36 + 0.0367626*m.i14*m.i37 + 0.00522434*
m.i14*m.i38 + 0.05055*m.i14*m.i39 + 0.0849278*m.i14*m.i40 + 0.0341058*m.i14*m.i41 + 0.029549*
m.i14*m.i42 + 0.0119177*m.i14*m.i43 + 0.034956*m.i14*m.i44 + 0.0084943*m.i14*m.i45 + 0.01853266*
m.i14*m.i46 + 0.01893124*m.i14*m.i47 + 0.0205662*m.i14*m.i48 + 0.0326974*m.i14*m.i49 + 0.0610942*
m.i14*m.i50 + 0.0265816*m.i14*m.i51 + 0.0345152*m.i14*m.i52 + 0.0602904*m.i14*m.i53 + 0.0299894*
m.i14*m.i54 + 0.029724*m.i14*m.i55 + 0.00991024*m.i14*m.i56 + 0.0212834*m.i14*m.i57 + 0.01611994*
m.i14*m.i58 + 0.0349608*m.i14*m.i59 + 0.01544524*m.i14*m.i60 + 0.0660828*m.i14*m.i61 + 0.0517844*
m.i14*m.i62 + 0.0288716*m.i14*m.i63 + 0.02065*m.i14*m.i64 + 0.0285834*m.i14*m.i65 + 0.01348302*
m.i14*m.i66 + 0.0306592*m.i14*m.i67 + 0.01828946*m.i14*m.i68 + 0.0537368*m.i14*m.i69 + 0.0271944*
m.i14*m.i70 + 0.01793364*m.i14*m.i71 + 0.0206146*m.i14*m.i72 + 0.0281438*m.i14*m.i73 + 0.038653*
m.i14*m.i74 + 0.0322466*m.i14*m.i75 + 0.0212534*m.i14*m.i76 + 0.0336072*m.i14*m.i77 + 0.01910646*
m.i14*m.i78 + 0.0653414*m.i14*m.i79 + 0.0269972*m.i14*m.i80 + 0.0273492*m.i14*m.i81 + 0.01038358*
m.i14*m.i82 + 0.00619204*m.i14*m.i83 + 0.0273406*m.i14*m.i84 + 0.0211516*m.i14*m.i85 + 0.0382364*
m.i14*m.i86 + 0.0345294*m.i14*m.i87 + 0.1230516*m.i14*m.i88 + 0.032645*m.i14*m.i89 + 0.0494242*
m.i14*m.i90 + 0.030464*m.i14*m.i91 + 0.0229316*m.i14*m.i92 + 0.01328606*m.i14*m.i93 + 0.01219994*
m.i14*m.i94 + 0.0308436*m.i14*m.i95 + 0.0853596*m.i14*m.i96 + 0.0354032*m.i14*m.i97 + 0.0262134*
m.i14*m.i98 + 0.0473304*m.i14*m.i99 + 0.037143*m.i14*m.i100 + 0.01723066*m.i15*m.i16 + 0.0144032*
m.i15*m.i17 + 0.01011568*m.i15*m.i18 + 0.01071386*m.i15*m.i19 + 0.0363128*m.i15*m.i20 + 0.0200062
*m.i15*m.i21 + 0.0429276*m.i15*m.i22 + 0.01550086*m.i15*m.i23 + 0.01336936*m.i15*m.i24 +
0.01153424*m.i15*m.i25 + 0.01291552*m.i15*m.i26 + 0.01571376*m.i15*m.i27 + 0.0057752*m.i15*m.i28
+ 0.01132328*m.i15*m.i29 + 0.04615*m.i15*m.i30 + 0.0095472*m.i15*m.i31 + 0.0348208*m.i15*m.i32
+ 0.01999334*m.i15*m.i33 + 0.00687142*m.i15*m.i34 + 0.00887602*m.i15*m.i35 + 0.0412134*m.i15*
m.i36 + 0.0222294*m.i15*m.i37 + 0.0044452*m.i15*m.i38 + 0.0275012*m.i15*m.i39 + 0.0449902*m.i15*
m.i40 + 0.0316194*m.i15*m.i41 + 0.021335*m.i15*m.i42 + 0.01203424*m.i15*m.i43 + 0.0250958*m.i15*
m.i44 + 0.00747774*m.i15*m.i45 + 0.01208838*m.i15*m.i46 + 0.0258298*m.i15*m.i47 + 0.01217868*
m.i15*m.i48 + 0.0181139*m.i15*m.i49 + 0.0324096*m.i15*m.i50 + 0.01156602*m.i15*m.i51 + 0.01869794
*m.i15*m.i52 + 0.0276488*m.i15*m.i53 + 0.0230496*m.i15*m.i54 + 0.0171536*m.i15*m.i55 + 0.01527606
*m.i15*m.i56 + 0.01288824*m.i15*m.i57 + 0.014014*m.i15*m.i58 + 0.01657292*m.i15*m.i59 + 0.0080112
*m.i15*m.i60 + 0.0380938*m.i15*m.i61 + 0.0298954*m.i15*m.i62 + 0.0218266*m.i15*m.i63 + 0.01580514
*m.i15*m.i64 + 0.01327226*m.i15*m.i65 + 0.01171988*m.i15*m.i66 + 0.01749552*m.i15*m.i67 +
0.00958228*m.i15*m.i68 + 0.02991*m.i15*m.i69 + 0.01687722*m.i15*m.i70 + 0.0214718*m.i15*m.i71 +
0.0177952*m.i15*m.i72 + 0.01429134*m.i15*m.i73 + 0.01835742*m.i15*m.i74 + 0.014413*m.i15*m.i75 +
0.01215492*m.i15*m.i76 + 0.01888264*m.i15*m.i77 + 0.01135654*m.i15*m.i78 + 0.01419354*m.i15*m.i79
+ 0.01589948*m.i15*m.i80 + 0.01996746*m.i15*m.i81 + 0.00616376*m.i15*m.i82 + 0.00905236*m.i15*
m.i83 + 0.01329424*m.i15*m.i84 + 0.01265054*m.i15*m.i85 + 0.01743812*m.i15*m.i86 + 0.01662354*
m.i15*m.i87 + 0.0326642*m.i15*m.i88 + 0.00648876*m.i15*m.i89 + 0.0255582*m.i15*m.i90 + 0.01710528
*m.i15*m.i91 + 0.01530604*m.i15*m.i92 + 0.00729364*m.i15*m.i93 + 0.00786908*m.i15*m.i94 +
0.0169034*m.i15*m.i95 + 0.034265*m.i15*m.i96 + 0.0206426*m.i15*m.i97 + 0.01574576*m.i15*m.i98 +
0.0251768*m.i15*m.i99 + 0.0302234*m.i15*m.i100 + 0.0180502*m.i16*m.i17 + 0.00797572*m.i16*m.i18
+ 0.00993386*m.i16*m.i19 + 0.0236072*m.i16*m.i20 + 0.01425014*m.i16*m.i21 + 0.0269392*m.i16*
m.i22 + 0.01322908*m.i16*m.i23 + 0.01719786*m.i16*m.i24 + 0.00995474*m.i16*m.i25 + 0.00544834*
m.i16*m.i26 + 0.01319632*m.i16*m.i27 + 0.00695148*m.i16*m.i28 + 0.00568042*m.i16*m.i29 + 0.045082
*m.i16*m.i30 + 0.01190474*m.i16*m.i31 + 0.01955462*m.i16*m.i32 + 0.0138212*m.i16*m.i33 +
0.00642106*m.i16*m.i34 + 0.00665524*m.i16*m.i35 + 0.0380492*m.i16*m.i36 + 0.01602708*m.i16*m.i37
+ 0.00369958*m.i16*m.i38 + 0.0220792*m.i16*m.i39 + 0.0304262*m.i16*m.i40 + 0.01843444*m.i16*
m.i41 + 0.021247*m.i16*m.i42 + 0.01518988*m.i16*m.i43 + 0.01406774*m.i16*m.i44 + 0.0051723*m.i16*
m.i45 + 0.0080675*m.i16*m.i46 + 0.0176419*m.i16*m.i47 + 0.0090298*m.i16*m.i48 + 0.0126196*m.i16*
m.i49 + 0.025967*m.i16*m.i50 + 0.01140228*m.i16*m.i51 + 0.01900414*m.i16*m.i52 + 0.01781402*m.i16
*m.i53 + 0.0194748*m.i16*m.i54 + 0.01211848*m.i16*m.i55 + 0.01166912*m.i16*m.i56 + 0.00870972*
m.i16*m.i57 + 0.00719416*m.i16*m.i58 + 0.01574372*m.i16*m.i59 + 0.00725944*m.i16*m.i60 +
0.0294988*m.i16*m.i61 + 0.0260914*m.i16*m.i62 + 0.01974094*m.i16*m.i63 + 0.01434116*m.i16*m.i64
+ 0.00954816*m.i16*m.i65 + 0.0087947*m.i16*m.i66 + 0.01216302*m.i16*m.i67 + 0.01307338*m.i16*
m.i68 + 0.023669*m.i16*m.i69 + 0.01061826*m.i16*m.i70 + 0.01531198*m.i16*m.i71 + 0.01282252*m.i16
*m.i72 + 0.01136194*m.i16*m.i73 + 0.01289612*m.i16*m.i74 + 0.0111961*m.i16*m.i75 + 0.00467394*
m.i16*m.i76 + 0.0120207*m.i16*m.i77 + 0.00634502*m.i16*m.i78 + 0.0272842*m.i16*m.i79 + 0.01354848
*m.i16*m.i80 + 0.01491878*m.i16*m.i81 + 0.00372788*m.i16*m.i82 + 0.01347184*m.i16*m.i83 +
0.01367452*m.i16*m.i84 + 0.01430584*m.i16*m.i85 + 0.01662228*m.i16*m.i86 + 0.01019354*m.i16*m.i87
+ 0.031864*m.i16*m.i88 + 0.01389622*m.i16*m.i89 + 0.01404588*m.i16*m.i90 + 0.01898344*m.i16*
m.i91 + 0.01310136*m.i16*m.i92 + 0.00293122*m.i16*m.i93 + 0.00548746*m.i16*m.i94 + 0.01674526*
m.i16*m.i95 + 0.0263504*m.i16*m.i96 + 0.0187966*m.i16*m.i97 + 0.0198675*m.i16*m.i98 + 0.0160833*
m.i16*m.i99 + 0.01885334*m.i16*m.i100 + 0.00599666*m.i17*m.i18 + 0.0047675*m.i17*m.i19 +
0.0265872*m.i17*m.i20 + 0.01628802*m.i17*m.i21 + 0.01871884*m.i17*m.i22 + 0.01233104*m.i17*m.i23
+ 0.01365522*m.i17*m.i24 + 0.00989432*m.i17*m.i25 + 0.00330258*m.i17*m.i26 + 0.0116841*m.i17*
m.i27 + 0.0079471*m.i17*m.i28 + 0.0045994*m.i17*m.i29 + 0.0254766*m.i17*m.i30 + 0.01659406*m.i17*
m.i31 + 0.0220846*m.i17*m.i32 + 0.01861566*m.i17*m.i33 + 0.00948066*m.i17*m.i34 + 0.0090429*m.i17
*m.i35 + 0.0337978*m.i17*m.i36 + 0.01595384*m.i17*m.i37 + 0.00235078*m.i17*m.i38 + 0.0201494*
m.i17*m.i39 + 0.0342284*m.i17*m.i40 + 0.0277738*m.i17*m.i41 + 0.01731318*m.i17*m.i42 + 0.01753214
*m.i17*m.i43 + 0.01978996*m.i17*m.i44 + 0.00369934*m.i17*m.i45 + 0.00718436*m.i17*m.i46 +
0.01949342*m.i17*m.i47 + 0.00499956*m.i17*m.i48 + 0.01707236*m.i17*m.i49 + 0.0203004*m.i17*m.i50
+ 0.01279548*m.i17*m.i51 + 0.011643*m.i17*m.i52 + 0.01115602*m.i17*m.i53 + 0.01587576*m.i17*
m.i54 + 0.010193*m.i17*m.i55 + 0.0217498*m.i17*m.i56 + 0.0064957*m.i17*m.i57 + 0.00989022*m.i17*
m.i58 + 0.01554654*m.i17*m.i59 + 0.00382894*m.i17*m.i60 + 0.01868378*m.i17*m.i61 + 0.01822302*
m.i17*m.i62 + 0.0270002*m.i17*m.i63 + 0.01054316*m.i17*m.i64 + 0.01114578*m.i17*m.i65 + 0.010706*
m.i17*m.i66 + 0.01057722*m.i17*m.i67 + 0.00541042*m.i17*m.i68 + 0.022045*m.i17*m.i69 + 0.00933892
*m.i17*m.i70 + 0.0217256*m.i17*m.i71 + 0.010527*m.i17*m.i72 + 0.01245986*m.i17*m.i73 + 0.01462496
*m.i17*m.i74 + 0.00471612*m.i17*m.i75 + 0.00385082*m.i17*m.i76 + 0.0150046*m.i17*m.i77 +
0.00469912*m.i17*m.i78 + 0.01570408*m.i17*m.i79 + 0.01238884*m.i17*m.i80 + 0.0167981*m.i17*m.i81
+ 0.00275656*m.i17*m.i82 + 0.0264668*m.i17*m.i83 + 0.01754616*m.i17*m.i84 + 0.0104241*m.i17*
m.i85 + 0.0155118*m.i17*m.i86 + 0.00992204*m.i17*m.i87 + 0.0334656*m.i17*m.i88 + 0.0100102*m.i17*
m.i89 + 0.00830234*m.i17*m.i90 + 0.00830522*m.i17*m.i91 + 0.01347376*m.i17*m.i92 + 0.00371114*
m.i17*m.i93 + 0.00721878*m.i17*m.i94 + 0.01197232*m.i17*m.i95 + 0.01097582*m.i17*m.i96 +
0.0153446*m.i17*m.i97 + 0.01911512*m.i17*m.i98 + 0.0158341*m.i17*m.i99 + 0.01647016*m.i17*m.i100
+ 0.0038501*m.i18*m.i19 + 0.01438424*m.i18*m.i20 + 0.00575166*m.i18*m.i21 + 0.01286738*m.i18*
m.i22 + 0.0072269*m.i18*m.i23 + 0.00577628*m.i18*m.i24 + 0.00353166*m.i18*m.i25 + 0.00406754*
m.i18*m.i26 + 0.00586712*m.i18*m.i27 + 0.00246394*m.i18*m.i28 + 0.00208424*m.i18*m.i29 +
0.00868042*m.i18*m.i30 + 0.00488392*m.i18*m.i31 + 0.01139774*m.i18*m.i32 + 0.00652178*m.i18*m.i33
+ 0.00514824*m.i18*m.i34 + 0.00420068*m.i18*m.i35 + 0.01314078*m.i18*m.i36 + 0.00738678*m.i18*
m.i37 + 0.00212172*m.i18*m.i38 + 0.00767338*m.i18*m.i39 + 0.01491396*m.i18*m.i40 + 0.00689198*
m.i18*m.i41 + 0.00941516*m.i18*m.i42 + 0.00703674*m.i18*m.i43 + 0.00623926*m.i18*m.i44 +
0.0042213*m.i18*m.i45 + 0.00377366*m.i18*m.i46 + 0.01005392*m.i18*m.i47 + 0.00385304*m.i18*m.i48
+ 0.0061538*m.i18*m.i49 + 0.00828744*m.i18*m.i50 + 0.00452496*m.i18*m.i51 + 0.00647618*m.i18*
m.i52 + 0.00595912*m.i18*m.i53 + 0.00909974*m.i18*m.i54 + 0.00683082*m.i18*m.i55 + 0.00696058*
m.i18*m.i56 + 0.00489492*m.i18*m.i57 + 0.00399036*m.i18*m.i58 + 0.0071619*m.i18*m.i59 +
0.00282566*m.i18*m.i60 + 0.01253118*m.i18*m.i61 + 0.01017836*m.i18*m.i62 + 0.0054806*m.i18*m.i63
+ 0.00679494*m.i18*m.i64 + 0.00492774*m.i18*m.i65 + 0.00294036*m.i18*m.i66 + 0.00302154*m.i18*
m.i67 + 0.00492864*m.i18*m.i68 + 0.01002278*m.i18*m.i69 + 0.00498708*m.i18*m.i70 + 0.00467346*
m.i18*m.i71 + 0.00622154*m.i18*m.i72 + 0.0060522*m.i18*m.i73 + 0.00606086*m.i18*m.i74 +
0.00435108*m.i18*m.i75 + 0.00246578*m.i18*m.i76 + 0.00518572*m.i18*m.i77 + 0.00318624*m.i18*m.i78
+ 0.00460288*m.i18*m.i79 + 0.007017*m.i18*m.i80 + 0.00647242*m.i18*m.i81 + 0.00407958*m.i18*
m.i82 - 0.000888864*m.i18*m.i83 + 0.00537106*m.i18*m.i84 + 0.00634694*m.i18*m.i85 + 0.00514234*
m.i18*m.i86 + 0.00350408*m.i18*m.i87 - 0.00202898*m.i18*m.i88 + 0.001751682*m.i18*m.i89 +
0.0065019*m.i18*m.i90 + 0.007451*m.i18*m.i91 + 0.0035437*m.i18*m.i92 + 0.001995674*m.i18*m.i93 +
0.00436006*m.i18*m.i94 + 0.00715274*m.i18*m.i95 + 0.00776482*m.i18*m.i96 + 0.00710082*m.i18*m.i97
+ 0.00609606*m.i18*m.i98 + 0.00652362*m.i18*m.i99 + 0.01247386*m.i18*m.i100 + 0.01204848*m.i19*
m.i20 + 0.00628788*m.i19*m.i21 + 0.00938206*m.i19*m.i22 + 0.00540152*m.i19*m.i23 + 0.00366816*
m.i19*m.i24 + 0.00424804*m.i19*m.i25 + 0.00443146*m.i19*m.i26 + 0.00550836*m.i19*m.i27 +
0.00441186*m.i19*m.i28 + 0.00464964*m.i19*m.i29 + 0.0215394*m.i19*m.i30 + 0.00534434*m.i19*m.i31
+ 0.01089826*m.i19*m.i32 + 0.00384858*m.i19*m.i33 + 0.00271286*m.i19*m.i34 + 0.00459438*m.i19*
m.i35 + 0.00753494*m.i19*m.i36 + 0.00675858*m.i19*m.i37 + 0.00330138*m.i19*m.i38 + 0.01012594*
m.i19*m.i39 + 0.00097236*m.i19*m.i40 + 0.00697634*m.i19*m.i41 + 0.0055734*m.i19*m.i42 +
0.00439042*m.i19*m.i43 + 0.00466626*m.i19*m.i44 + 0.0056599*m.i19*m.i45 + 0.00343664*m.i19*m.i46
+ 0.00191227*m.i19*m.i47 + 0.00409474*m.i19*m.i48 + 0.00728426*m.i19*m.i49 + 0.0118005*m.i19*
m.i50 + 0.00439032*m.i19*m.i51 + 0.00819602*m.i19*m.i52 + 0.00683532*m.i19*m.i53 + 0.00927236*
m.i19*m.i54 + 0.00638082*m.i19*m.i55 + 0.0049778*m.i19*m.i56 + 0.0064092*m.i19*m.i57 + 0.00332368
*m.i19*m.i58 + 0.00797006*m.i19*m.i59 + 0.00515114*m.i19*m.i60 + 0.0140857*m.i19*m.i61 +
0.00824548*m.i19*m.i62 + 0.00645382*m.i19*m.i63 + 0.00492056*m.i19*m.i64 + 0.0040063*m.i19*m.i65
+ 0.00621702*m.i19*m.i66 + 0.00486474*m.i19*m.i67 + 0.01089728*m.i19*m.i68 + 0.01064856*m.i19*
m.i69 + 0.00763898*m.i19*m.i70 + 0.00304924*m.i19*m.i71 + 0.00746516*m.i19*m.i72 + 0.0073895*
m.i19*m.i73 + 0.008372*m.i19*m.i74 + 0.0096269*m.i19*m.i75 + 0.00403824*m.i19*m.i76 + 0.00896868*
m.i19*m.i77 + 0.00369816*m.i19*m.i78 + 0.01338638*m.i19*m.i79 + 0.00702566*m.i19*m.i80 +
0.00204776*m.i19*m.i81 + 0.0040369*m.i19*m.i82 - 0.00617474*m.i19*m.i83 + 0.00664876*m.i19*m.i84
+ 0.00640014*m.i19*m.i85 + 0.00537574*m.i19*m.i86 + 0.00744762*m.i19*m.i87 + 0.0288232*m.i19*
m.i88 + 0.0089059*m.i19*m.i89 + 0.00438344*m.i19*m.i90 + 0.01192674*m.i19*m.i91 + 0.00326376*
m.i19*m.i92 + 0.00330764*m.i19*m.i93 + 0.00649262*m.i19*m.i94 + 0.0076392*m.i19*m.i95 +
0.01075072*m.i19*m.i96 + 0.00749846*m.i19*m.i97 + 0.00563188*m.i19*m.i98 + 0.00430788*m.i19*m.i99
+ 0.00505074*m.i19*m.i100 + 0.026993*m.i20*m.i21 + 0.0407142*m.i20*m.i22 + 0.0262048*m.i20*m.i23
+ 0.0233804*m.i20*m.i24 + 0.01566388*m.i20*m.i25 + 0.01254316*m.i20*m.i26 + 0.0230746*m.i20*
m.i27 + 0.01228074*m.i20*m.i28 + 0.01141404*m.i20*m.i29 + 0.046979*m.i20*m.i30 + 0.01956928*m.i20
*m.i31 + 0.0444886*m.i20*m.i32 + 0.0345924*m.i20*m.i33 + 0.01450852*m.i20*m.i34 + 0.01607032*
m.i20*m.i35 + 0.0534276*m.i20*m.i36 + 0.027915*m.i20*m.i37 + 0.00446976*m.i20*m.i38 + 0.0310128*
m.i20*m.i39 + 0.0617194*m.i20*m.i40 + 0.0418284*m.i20*m.i41 + 0.0284554*m.i20*m.i42 + 0.0202322*
m.i20*m.i43 + 0.0309222*m.i20*m.i44 + 0.00850138*m.i20*m.i45 + 0.01226594*m.i20*m.i46 + 0.0355744
*m.i20*m.i47 + 0.01044628*m.i20*m.i48 + 0.0261968*m.i20*m.i49 + 0.0353182*m.i20*m.i50 +
0.01768812*m.i20*m.i51 + 0.0227266*m.i20*m.i52 + 0.0229416*m.i20*m.i53 + 0.0285392*m.i20*m.i54 +
0.024215*m.i20*m.i55 + 0.0227*m.i20*m.i56 + 0.01349126*m.i20*m.i57 + 0.01576804*m.i20*m.i58 +
0.0251472*m.i20*m.i59 + 0.00678918*m.i20*m.i60 + 0.0460104*m.i20*m.i61 + 0.0362612*m.i20*m.i62 +
0.0246576*m.i20*m.i63 + 0.01897386*m.i20*m.i64 + 0.021042*m.i20*m.i65 + 0.01449872*m.i20*m.i66 +
0.01901978*m.i20*m.i67 + 0.01289314*m.i20*m.i68 + 0.04318*m.i20*m.i69 + 0.0192612*m.i20*m.i70 +
0.0319956*m.i20*m.i71 + 0.0241418*m.i20*m.i72 + 0.0231068*m.i20*m.i73 + 0.0232748*m.i20*m.i74 +
0.01394672*m.i20*m.i75 + 0.01233534*m.i20*m.i76 + 0.0250086*m.i20*m.i77 + 0.01003866*m.i20*m.i78
+ 0.01782134*m.i20*m.i79 + 0.0175231*m.i20*m.i80 + 0.0266842*m.i20*m.i81 + 0.00899148*m.i20*
m.i82 + 0.01916166*m.i20*m.i83 + 0.0237898*m.i20*m.i84 + 0.01674726*m.i20*m.i85 + 0.0243836*m.i20
*m.i86 + 0.0205712*m.i20*m.i87 + 0.0526016*m.i20*m.i88 + 0.01299922*m.i20*m.i89 + 0.0223216*m.i20
*m.i90 + 0.0221722*m.i20*m.i91 + 0.0200512*m.i20*m.i92 + 0.00605128*m.i20*m.i93 + 0.01422172*
m.i20*m.i94 + 0.0209666*m.i20*m.i95 + 0.0316224*m.i20*m.i96 + 0.0278754*m.i20*m.i97 + 0.0266692*
m.i20*m.i98 + 0.032317*m.i20*m.i99 + 0.0372718*m.i20*m.i100 + 0.0225584*m.i21*m.i22 + 0.01330824*
m.i21*m.i23 + 0.01120138*m.i21*m.i24 + 0.00988644*m.i21*m.i25 + 0.0053562*m.i21*m.i26 +
0.01171726*m.i21*m.i27 + 0.0075308*m.i21*m.i28 + 0.0062293*m.i21*m.i29 + 0.028151*m.i21*m.i30 +
0.01116532*m.i21*m.i31 + 0.024731*m.i21*m.i32 + 0.01403094*m.i21*m.i33 + 0.0053378*m.i21*m.i34 +
0.0062169*m.i21*m.i35 + 0.0322338*m.i21*m.i36 + 0.0173092*m.i21*m.i37 + 0.00310282*m.i21*m.i38 +
0.01943686*m.i21*m.i39 + 0.0397312*m.i21*m.i40 + 0.0227668*m.i21*m.i41 + 0.01402322*m.i21*m.i42
+ 0.01184862*m.i21*m.i43 + 0.01574106*m.i21*m.i44 + 0.00351088*m.i21*m.i45 + 0.00692094*m.i21*
m.i46 + 0.01710158*m.i21*m.i47 + 0.00581758*m.i21*m.i48 + 0.013985*m.i21*m.i49 + 0.0205976*m.i21*
m.i50 + 0.01286968*m.i21*m.i51 + 0.01222018*m.i21*m.i52 + 0.01492284*m.i21*m.i53 + 0.01502328*
m.i21*m.i54 + 0.01279528*m.i21*m.i55 + 0.01443928*m.i21*m.i56 + 0.00711002*m.i21*m.i57 +
0.00897148*m.i21*m.i58 + 0.0175601*m.i21*m.i59 + 0.00366562*m.i21*m.i60 + 0.0240206*m.i21*m.i61
+ 0.01871124*m.i21*m.i62 + 0.01471548*m.i21*m.i63 + 0.00910326*m.i21*m.i64 + 0.01121548*m.i21*
m.i65 + 0.0093615*m.i21*m.i66 + 0.0129081*m.i21*m.i67 + 0.0055548*m.i21*m.i68 + 0.0214638*m.i21*
m.i69 + 0.00932128*m.i21*m.i70 + 0.01654162*m.i21*m.i71 + 0.01150414*m.i21*m.i72 + 0.01130758*
m.i21*m.i73 + 0.01195864*m.i21*m.i74 + 0.00685764*m.i21*m.i75 + 0.00673976*m.i21*m.i76 +
0.01092518*m.i21*m.i77 + 0.00610126*m.i21*m.i78 + 0.0166491*m.i21*m.i79 + 0.00973956*m.i21*m.i80
+ 0.01360816*m.i21*m.i81 + 0.00413938*m.i21*m.i82 + 0.01295166*m.i21*m.i83 + 0.01359658*m.i21*
m.i84 + 0.0100056*m.i21*m.i85 + 0.01591198*m.i21*m.i86 + 0.01302584*m.i21*m.i87 + 0.0321888*m.i21
*m.i88 + 0.0069057*m.i21*m.i89 + 0.01467542*m.i21*m.i90 + 0.0104985*m.i21*m.i91 + 0.01203108*
m.i21*m.i92 + 0.00438602*m.i21*m.i93 + 0.0064228*m.i21*m.i94 + 0.0109577*m.i21*m.i95 + 0.01683074
*m.i21*m.i96 + 0.01510662*m.i21*m.i97 + 0.013665*m.i21*m.i98 + 0.01994166*m.i21*m.i99 + 0.0184821
*m.i21*m.i100 + 0.01713984*m.i22*m.i23 + 0.0290628*m.i22*m.i24 + 0.01659484*m.i22*m.i25 +
0.01330504*m.i22*m.i26 + 0.0220338*m.i22*m.i27 + 0.0096401*m.i22*m.i28 + 0.01336178*m.i22*m.i29
+ 0.0794522*m.i22*m.i30 + 0.00912184*m.i22*m.i31 + 0.0466568*m.i22*m.i32 + 0.0203942*m.i22*m.i33
+ 0.00695226*m.i22*m.i34 + 0.0125215*m.i22*m.i35 + 0.0728992*m.i22*m.i36 + 0.0354588*m.i22*m.i37
+ 0.00691112*m.i22*m.i38 + 0.037201*m.i22*m.i39 + 0.0756082*m.i22*m.i40 + 0.0292772*m.i22*m.i41
+ 0.0266054*m.i22*m.i42 + 0.01269282*m.i22*m.i43 + 0.0230306*m.i22*m.i44 + 0.000804368*m.i22*
m.i45 + 0.01545384*m.i22*m.i46 + 0.0296748*m.i22*m.i47 + 0.0193381*m.i22*m.i48 + 0.0200644*m.i22*
m.i49 + 0.0450946*m.i22*m.i50 + 0.01567104*m.i22*m.i51 + 0.0202574*m.i22*m.i52 + 0.0456018*m.i22*
m.i53 + 0.024727*m.i22*m.i54 + 0.01871804*m.i22*m.i55 + 0.01574656*m.i22*m.i56 + 0.01426746*m.i22
*m.i57 + 0.0112117*m.i22*m.i58 + 0.0237092*m.i22*m.i59 + 0.01100176*m.i22*m.i60 + 0.0484136*m.i22
*m.i61 + 0.0477626*m.i22*m.i62 + 0.01715072*m.i22*m.i63 + 0.01569402*m.i22*m.i64 + 0.0163363*
m.i22*m.i65 + 0.00819194*m.i22*m.i66 + 0.0250362*m.i22*m.i67 + 0.01191736*m.i22*m.i68 + 0.0445474
*m.i22*m.i69 + 0.0208408*m.i22*m.i70 + 0.0196514*m.i22*m.i71 + 0.01993902*m.i22*m.i72 +
0.01317816*m.i22*m.i73 + 0.0290184*m.i22*m.i74 + 0.022028*m.i22*m.i75 + 0.01241074*m.i22*m.i76 +
0.01467528*m.i22*m.i77 + 0.0179883*m.i22*m.i78 + 0.040464*m.i22*m.i79 + 0.01646476*m.i22*m.i80 +
0.0251454*m.i22*m.i81 + 0.00665554*m.i22*m.i82 - 0.00094782*m.i22*m.i83 + 0.01809638*m.i22*m.i84
+ 0.01658492*m.i22*m.i85 + 0.0242392*m.i22*m.i86 + 0.0215874*m.i22*m.i87 + 0.0229098*m.i22*m.i88
+ 0.01114584*m.i22*m.i89 + 0.046945*m.i22*m.i90 + 0.0230318*m.i22*m.i91 + 0.01381346*m.i22*m.i92
+ 0.0100301*m.i22*m.i93 + 0.00837496*m.i22*m.i94 + 0.0250054*m.i22*m.i95 + 0.0620424*m.i22*m.i96
+ 0.0302296*m.i22*m.i97 + 0.0248336*m.i22*m.i98 + 0.0372288*m.i22*m.i99 + 0.0441042*m.i22*m.i100
+ 0.00618108*m.i23*m.i24 + 0.00567144*m.i23*m.i25 + 0.0048866*m.i23*m.i26 + 0.00839514*m.i23*
m.i27 + 0.00487436*m.i23*m.i28 + 0.004356*m.i23*m.i29 + 0.024299*m.i23*m.i30 + 0.00996842*m.i23*
m.i31 + 0.0204928*m.i23*m.i32 + 0.01726232*m.i23*m.i33 + 0.00564344*m.i23*m.i34 + 0.00506272*
m.i23*m.i35 + 0.027322*m.i23*m.i36 + 0.01648718*m.i23*m.i37 + 0.001813512*m.i23*m.i38 + 0.0143408
*m.i23*m.i39 + 0.0410642*m.i23*m.i40 + 0.00822668*m.i23*m.i41 + 0.01397884*m.i23*m.i42 +
0.00751294*m.i23*m.i43 + 0.01081252*m.i23*m.i44 + 0.00375058*m.i23*m.i45 + 0.00488444*m.i23*m.i46
+ 0.01210078*m.i23*m.i47 + 0.0050334*m.i23*m.i48 + 0.01042672*m.i23*m.i49 + 0.01834872*m.i23*
m.i50 + 0.0122672*m.i23*m.i51 + 0.01291522*m.i23*m.i52 + 0.01243908*m.i23*m.i53 + 0.01372984*
m.i23*m.i54 + 0.0114482*m.i23*m.i55 + 0.0105593*m.i23*m.i56 + 0.00644542*m.i23*m.i57 + 0.00648944
*m.i23*m.i58 + 0.01543002*m.i23*m.i59 + 0.0037869*m.i23*m.i60 + 0.0214726*m.i23*m.i61 +
0.01495998*m.i23*m.i62 + 0.00692592*m.i23*m.i63 + 0.00648514*m.i23*m.i64 + 0.00794602*m.i23*m.i65
+ 0.00558232*m.i23*m.i66 + 0.0093087*m.i23*m.i67 + 0.000819996*m.i23*m.i68 + 0.01512186*m.i23*
m.i69 + 0.0070338*m.i23*m.i70 + 0.00840292*m.i23*m.i71 + 0.00668858*m.i23*m.i72 + 0.00956292*
m.i23*m.i73 + 0.00972254*m.i23*m.i74 + 0.00409738*m.i23*m.i75 + 0.00544566*m.i23*m.i76 +
0.01207296*m.i23*m.i77 + 0.00561846*m.i23*m.i78 + 0.01639358*m.i23*m.i79 + 0.00769632*m.i23*m.i80
+ 0.01062502*m.i23*m.i81 + 0.0060578*m.i23*m.i82 + 0.00866906*m.i23*m.i83 + 0.00707332*m.i23*
m.i84 + 0.01006612*m.i23*m.i85 + 0.01147664*m.i23*m.i86 + 0.0127172*m.i23*m.i87 + 0.01718458*
m.i23*m.i88 + 0.00499896*m.i23*m.i89 + 0.01300446*m.i23*m.i90 + 0.00824348*m.i23*m.i91 +
0.01100222*m.i23*m.i92 + 0.00359882*m.i23*m.i93 + 0.00760194*m.i23*m.i94 + 0.01026304*m.i23*m.i95
+ 0.01748628*m.i23*m.i96 + 0.01222018*m.i23*m.i97 + 0.00656104*m.i23*m.i98 + 0.01929844*m.i23*
m.i99 + 0.01526792*m.i23*m.i100 + 0.01061256*m.i24*m.i25 + 0.00390748*m.i24*m.i26 + 0.0176534*
m.i24*m.i27 + 0.00973526*m.i24*m.i28 + 0.00580416*m.i24*m.i29 + 0.0308904*m.i24*m.i30 +
0.00564094*m.i24*m.i31 + 0.0202996*m.i24*m.i32 + 0.00846578*m.i24*m.i33 + 0.00878324*m.i24*m.i34
+ 0.0092725*m.i24*m.i35 + 0.0386418*m.i24*m.i36 + 0.01405906*m.i24*m.i37 + 0.0050169*m.i24*m.i38
+ 0.01753958*m.i24*m.i39 + 0.0277342*m.i24*m.i40 + 0.0200538*m.i24*m.i41 + 0.0160148*m.i24*m.i42
+ 0.01157484*m.i24*m.i43 + 0.0097945*m.i24*m.i44 + 0.00047637*m.i24*m.i45 + 0.0074696*m.i24*
m.i46 + 0.0232922*m.i24*m.i47 + 0.0064693*m.i24*m.i48 + 0.0076863*m.i24*m.i49 + 0.01970906*m.i24*
m.i50 + 0.00539232*m.i24*m.i51 + 0.01285448*m.i24*m.i52 + 0.0120141*m.i24*m.i53 + 0.0124346*m.i24
*m.i54 + 0.00898946*m.i24*m.i55 + 0.00726448*m.i24*m.i56 + 0.0065436*m.i24*m.i57 + 0.00501008*
m.i24*m.i58 + 0.01101314*m.i24*m.i59 + 0.00470396*m.i24*m.i60 + 0.0237074*m.i24*m.i61 + 0.0228986
*m.i24*m.i62 + 0.01228188*m.i24*m.i63 + 0.01100376*m.i24*m.i64 + 0.00915078*m.i24*m.i65 +
0.0069269*m.i24*m.i66 + 0.01206108*m.i24*m.i67 + 0.00908652*m.i24*m.i68 + 0.0217466*m.i24*m.i69
+ 0.00887002*m.i24*m.i70 + 0.022452*m.i24*m.i71 + 0.0139555*m.i24*m.i72 + 0.00715706*m.i24*m.i73
+ 0.01096546*m.i24*m.i74 + 0.00744888*m.i24*m.i75 + 0.0028668*m.i24*m.i76 + 0.0036177*m.i24*
m.i77 + 0.00580328*m.i24*m.i78 + 0.0086669*m.i24*m.i79 + 0.00929752*m.i24*m.i80 + 0.01854944*
m.i24*m.i81 + 0.0023229*m.i24*m.i82 + 0.01207648*m.i24*m.i83 + 0.01205652*m.i24*m.i84 + 0.0096674
*m.i24*m.i85 + 0.0108503*m.i24*m.i86 + 0.00597266*m.i24*m.i87 + 0.0190243*m.i24*m.i88 +
0.00640978*m.i24*m.i89 + 0.01034642*m.i24*m.i90 + 0.01193214*m.i24*m.i91 + 0.00822214*m.i24*m.i92
+ 0.00070224*m.i24*m.i93 + 0.00307244*m.i24*m.i94 + 0.01092084*m.i24*m.i95 + 0.0203774*m.i24*
m.i96 + 0.01743418*m.i24*m.i97 + 0.0232524*m.i24*m.i98 + 0.01437366*m.i24*m.i99 + 0.01998814*
m.i24*m.i100 + 0.00270846*m.i25*m.i26 + 0.00878244*m.i25*m.i27 + 0.00564506*m.i25*m.i28 +
0.00404084*m.i25*m.i29 + 0.0227806*m.i25*m.i30 + 0.00477484*m.i25*m.i31 + 0.016725*m.i25*m.i32 +
0.00496432*m.i25*m.i33 + 0.00361518*m.i25*m.i34 + 0.00462338*m.i25*m.i35 + 0.0204146*m.i25*m.i36
+ 0.01087624*m.i25*m.i37 + 0.00256388*m.i25*m.i38 + 0.01236456*m.i25*m.i39 + 0.01769162*m.i25*
m.i40 + 0.01576792*m.i25*m.i41 + 0.00928236*m.i25*m.i42 + 0.00793946*m.i25*m.i43 + 0.00966756*
m.i25*m.i44 + 0.00248138*m.i25*m.i45 + 0.00485932*m.i25*m.i46 + 0.0122764*m.i25*m.i47 + 0.0023089
*m.i25*m.i48 + 0.00859364*m.i25*m.i49 + 0.01421118*m.i25*m.i50 + 0.00733214*m.i25*m.i51 +
0.00816206*m.i25*m.i52 + 0.00960248*m.i25*m.i53 + 0.00866518*m.i25*m.i54 + 0.00692386*m.i25*m.i55
+ 0.00882586*m.i25*m.i56 + 0.00434948*m.i25*m.i57 + 0.0041589*m.i25*m.i58 + 0.01055232*m.i25*
m.i59 + 0.00330494*m.i25*m.i60 + 0.01561392*m.i25*m.i61 + 0.0126551*m.i25*m.i62 + 0.00815092*
m.i25*m.i63 + 0.00612506*m.i25*m.i64 + 0.0070869*m.i25*m.i65 + 0.00424002*m.i25*m.i66 +
0.00879504*m.i25*m.i67 + 0.0058829*m.i25*m.i68 + 0.01439048*m.i25*m.i69 + 0.00610238*m.i25*m.i70
+ 0.01131906*m.i25*m.i71 + 0.00889538*m.i25*m.i72 + 0.00612414*m.i25*m.i73 + 0.00846104*m.i25*
m.i74 + 0.0057198*m.i25*m.i75 + 0.00393476*m.i25*m.i76 + 0.00432972*m.i25*m.i77 + 0.00446968*
m.i25*m.i78 + 0.0141591*m.i25*m.i79 + 0.00681524*m.i25*m.i80 + 0.00839778*m.i25*m.i81 +
0.00242412*m.i25*m.i82 + 0.0061299*m.i25*m.i83 + 0.00821362*m.i25*m.i84 + 0.0059951*m.i25*m.i85
+ 0.01036166*m.i25*m.i86 + 0.0075501*m.i25*m.i87 + 0.0208316*m.i25*m.i88 + 0.00461656*m.i25*
m.i89 + 0.01024232*m.i25*m.i90 + 0.00541446*m.i25*m.i91 + 0.0058998*m.i25*m.i92 + 0.00419408*
m.i25*m.i93 + 0.0034525*m.i25*m.i94 + 0.00742618*m.i25*m.i95 + 0.01117296*m.i25*m.i96 +
0.00976304*m.i25*m.i97 + 0.01005714*m.i25*m.i98 + 0.00997578*m.i25*m.i99 + 0.01119052*m.i25*
m.i100 + 0.0054348*m.i26*m.i27 + 0.00158545*m.i26*m.i28 + 0.00507804*m.i26*m.i29 + 0.01115184*
m.i26*m.i30 + 0.00280118*m.i26*m.i31 + 0.0103351*m.i26*m.i32 + 0.00796856*m.i26*m.i33 +
0.00322344*m.i26*m.i34 + 0.00410686*m.i26*m.i35 + 0.00922294*m.i26*m.i36 + 0.00708292*m.i26*m.i37
+ 0.00218796*m.i26*m.i38 + 0.00667316*m.i26*m.i39 + 0.00604564*m.i26*m.i40 + 0.00774532*m.i26*
m.i41 + 0.00814596*m.i26*m.i42 + 0.0026451*m.i26*m.i43 + 0.00582206*m.i26*m.i44 + 0.00332382*
m.i26*m.i45 + 0.00451686*m.i26*m.i46 + 0.00733916*m.i26*m.i47 + 0.00476946*m.i26*m.i48 +
0.00485772*m.i26*m.i49 + 0.0100103*m.i26*m.i50 + 0.00280844*m.i26*m.i51 + 0.00687248*m.i26*m.i52
+ 0.00732458*m.i26*m.i53 + 0.00815206*m.i26*m.i54 + 0.00612236*m.i26*m.i55 + 0.00307146*m.i26*
m.i56 + 0.0049056*m.i26*m.i57 + 0.00412472*m.i26*m.i58 + 0.0040935*m.i26*m.i59 + 0.0040596*m.i26*
m.i60 + 0.01138906*m.i26*m.i61 + 0.00976836*m.i26*m.i62 + 0.0087752*m.i26*m.i63 + 0.00574374*
m.i26*m.i64 + 0.00539202*m.i26*m.i65 + 0.0020772*m.i26*m.i66 + 0.00535872*m.i26*m.i67 + 0.0041987
*m.i26*m.i68 + 0.00941624*m.i26*m.i69 + 0.00708368*m.i26*m.i70 + 0.00623148*m.i26*m.i71 +
0.0059506*m.i26*m.i72 + 0.00509138*m.i26*m.i73 + 0.00640786*m.i26*m.i74 + 0.00599214*m.i26*m.i75
+ 0.00535234*m.i26*m.i76 + 0.0061449*m.i26*m.i77 + 0.0049639*m.i26*m.i78 + 0.00212662*m.i26*
m.i79 + 0.00709762*m.i26*m.i80 + 0.00556936*m.i26*m.i81 + 0.0033022*m.i26*m.i82 - 0.0001706112*
m.i26*m.i83 + 0.0042184*m.i26*m.i84 + 0.00533878*m.i26*m.i85 + 0.00407216*m.i26*m.i86 + 0.0050287
*m.i26*m.i87 + 0.00492458*m.i26*m.i88 + 0.00236614*m.i26*m.i89 + 0.0069424*m.i26*m.i90 +
0.00767098*m.i26*m.i91 + 0.00534286*m.i26*m.i92 + 0.001624812*m.i26*m.i93 + 0.00309366*m.i26*
m.i94 + 0.00617648*m.i26*m.i95 + 0.01108742*m.i26*m.i96 + 0.0068572*m.i26*m.i97 + 0.00411952*
m.i26*m.i98 + 0.00653102*m.i26*m.i99 + 0.00944332*m.i26*m.i100 + 0.01236278*m.i27*m.i28 +
0.00615174*m.i27*m.i29 + 0.0284656*m.i27*m.i30 + 0.00531366*m.i27*m.i31 + 0.0227234*m.i27*m.i32
+ 0.01239532*m.i27*m.i33 + 0.00873604*m.i27*m.i34 + 0.01006162*m.i27*m.i35 + 0.0244272*m.i27*
m.i36 + 0.01206064*m.i27*m.i37 + 0.00764146*m.i27*m.i38 + 0.01638042*m.i27*m.i39 + 0.0281728*
m.i27*m.i40 + 0.0236864*m.i27*m.i41 + 0.01394576*m.i27*m.i42 + 0.01151236*m.i27*m.i43 +
0.00967762*m.i27*m.i44 + 0.0001345884*m.i27*m.i45 + 0.00656542*m.i27*m.i46 + 0.0226088*m.i27*
m.i47 + 0.00665866*m.i27*m.i48 + 0.00867994*m.i27*m.i49 + 0.01519986*m.i27*m.i50 + 0.00516678*
m.i27*m.i51 + 0.01290734*m.i27*m.i52 + 0.00750112*m.i27*m.i53 + 0.015481*m.i27*m.i54 + 0.00918208
*m.i27*m.i55 + 0.01133662*m.i27*m.i56 + 0.00655584*m.i27*m.i57 + 0.00645326*m.i27*m.i58 +
0.01022706*m.i27*m.i59 + 0.00655942*m.i27*m.i60 + 0.0230718*m.i27*m.i61 + 0.0200196*m.i27*m.i62
+ 0.01214952*m.i27*m.i63 + 0.00996324*m.i27*m.i64 + 0.00982212*m.i27*m.i65 + 0.00606814*m.i27*
m.i66 + 0.00854006*m.i27*m.i67 + 0.00819936*m.i27*m.i68 + 0.01608286*m.i27*m.i69 + 0.00821942*
m.i27*m.i70 + 0.0230626*m.i27*m.i71 + 0.01648106*m.i27*m.i72 + 0.00833058*m.i27*m.i73 + 0.0119455
*m.i27*m.i74 + 0.0073591*m.i27*m.i75 + 0.00553444*m.i27*m.i76 + 0.00629646*m.i27*m.i77 +
0.00406434*m.i27*m.i78 + 0.00760068*m.i27*m.i79 + 0.00662478*m.i27*m.i80 + 0.0198678*m.i27*m.i81
+ 0.0044671*m.i27*m.i82 + 0.01205228*m.i27*m.i83 + 0.0106948*m.i27*m.i84 + 0.00763694*m.i27*
m.i85 + 0.01122432*m.i27*m.i86 + 0.00899094*m.i27*m.i87 + 0.0237458*m.i27*m.i88 + 0.00548044*
m.i27*m.i89 + 0.01135562*m.i27*m.i90 + 0.01131762*m.i27*m.i91 + 0.00767916*m.i27*m.i92 +
0.00281062*m.i27*m.i93 + 0.00450634*m.i27*m.i94 + 0.01029564*m.i27*m.i95 + 0.01573164*m.i27*m.i96
+ 0.01494338*m.i27*m.i97 + 0.01900252*m.i27*m.i98 + 0.01470772*m.i27*m.i99 + 0.01866828*m.i27*
m.i100 + 0.00362518*m.i28*m.i29 + 0.01640256*m.i28*m.i30 + 0.00349192*m.i28*m.i31 + 0.0129237*
m.i28*m.i32 + 0.00538584*m.i28*m.i33 + 0.00533474*m.i28*m.i34 + 0.00643216*m.i28*m.i35 +
0.01292206*m.i28*m.i36 + 0.00798078*m.i28*m.i37 + 0.0054977*m.i28*m.i38 + 0.00885966*m.i28*m.i39
+ 0.016828*m.i28*m.i40 + 0.01167374*m.i28*m.i41 + 0.00549216*m.i28*m.i42 + 0.00692364*m.i28*
m.i43 + 0.00370672*m.i28*m.i44 + 0.000284348*m.i28*m.i45 + 0.00277668*m.i28*m.i46 + 0.00936392*
m.i28*m.i47 + 0.00267238*m.i28*m.i48 + 0.00522892*m.i28*m.i49 + 0.00779258*m.i28*m.i50 +
0.0043462*m.i28*m.i51 + 0.00591302*m.i28*m.i52 + 0.00320368*m.i28*m.i53 + 0.00698682*m.i28*m.i54
+ 0.00560018*m.i28*m.i55 + 0.0075828*m.i28*m.i56 + 0.00361162*m.i28*m.i57 + 0.00229658*m.i28*
m.i58 + 0.00780328*m.i28*m.i59 + 0.0033416*m.i28*m.i60 + 0.01168298*m.i28*m.i61 + 0.0082366*m.i28
*m.i62 + 0.00465746*m.i28*m.i63 + 0.00328332*m.i28*m.i64 + 0.00685966*m.i28*m.i65 + 0.00386632*
m.i28*m.i66 + 0.0053142*m.i28*m.i67 + 0.00432904*m.i28*m.i68 + 0.00791276*m.i28*m.i69 + 0.0040137
*m.i28*m.i70 + 0.01081358*m.i28*m.i71 + 0.00841874*m.i28*m.i72 + 0.00534694*m.i28*m.i73 +
0.00677544*m.i28*m.i74 + 0.00391198*m.i28*m.i75 + 0.00308942*m.i28*m.i76 + 0.00250778*m.i28*m.i77
+ 0.00189916*m.i28*m.i78 + 0.00856184*m.i28*m.i79 + 0.00337182*m.i28*m.i80 + 0.00959416*m.i28*
m.i81 + 0.00329038*m.i28*m.i82 + 0.00388664*m.i28*m.i83 + 0.00685968*m.i28*m.i84 + 0.00406002*
m.i28*m.i85 + 0.00658126*m.i28*m.i86 + 0.00646838*m.i28*m.i87 + 0.0218548*m.i28*m.i88 +
0.00541992*m.i28*m.i89 + 0.00503116*m.i28*m.i90 + 0.00418236*m.i28*m.i91 + 0.0040874*m.i28*m.i92
+ 0.0022624*m.i28*m.i93 + 0.00392254*m.i28*m.i94 + 0.00482686*m.i28*m.i95 + 0.00726382*m.i28*
m.i96 + 0.00767472*m.i28*m.i97 + 0.01066418*m.i28*m.i98 + 0.00883358*m.i28*m.i99 + 0.0070211*
m.i28*m.i100 + 0.0147917*m.i29*m.i30 + 0.001068816*m.i29*m.i31 + 0.0105712*m.i29*m.i32 +
0.00407766*m.i29*m.i33 + 0.00300076*m.i29*m.i34 + 0.00524794*m.i29*m.i35 + 0.01016322*m.i29*m.i36
+ 0.00841674*m.i29*m.i37 + 0.00258632*m.i29*m.i38 + 0.00698836*m.i29*m.i39 + 0.01223674*m.i29*
m.i40 + 0.01128912*m.i29*m.i41 + 0.00481604*m.i29*m.i42 + 0.00316394*m.i29*m.i43 + 0.00690116*
m.i29*m.i44 + 0.00082418*m.i29*m.i45 + 0.00343988*m.i29*m.i46 + 0.00660586*m.i29*m.i47 +
0.00315994*m.i29*m.i48 + 0.004109*m.i29*m.i49 + 0.01072766*m.i29*m.i50 + 0.00295018*m.i29*m.i51
+ 0.00574084*m.i29*m.i52 + 0.00735384*m.i29*m.i53 + 0.00646518*m.i29*m.i54 + 0.00437712*m.i29*
m.i55 + 0.0050201*m.i29*m.i56 + 0.00428602*m.i29*m.i57 + 0.00339284*m.i29*m.i58 + 0.00395186*
m.i29*m.i59 + 0.00369852*m.i29*m.i60 + 0.01069104*m.i29*m.i61 + 0.00877524*m.i29*m.i62 +
0.00780122*m.i29*m.i63 + 0.00319846*m.i29*m.i64 + 0.00522668*m.i29*m.i65 + 0.00318906*m.i29*m.i66
+ 0.00765554*m.i29*m.i67 + 0.00353436*m.i29*m.i68 + 0.0090668*m.i29*m.i69 + 0.0062235*m.i29*
m.i70 + 0.00879038*m.i29*m.i71 + 0.00661754*m.i29*m.i72 + 0.00355728*m.i29*m.i73 + 0.0041974*
m.i29*m.i74 + 0.00530048*m.i29*m.i75 + 0.00543652*m.i29*m.i76 + 0.00436164*m.i29*m.i77 +
0.00450742*m.i29*m.i78 + 0.00725294*m.i29*m.i79 + 0.00491692*m.i29*m.i80 + 0.00689594*m.i29*m.i81
+ 0.00288614*m.i29*m.i82 + 0.005327*m.i29*m.i83 + 0.00356482*m.i29*m.i84 + 0.00320232*m.i29*
m.i85 + 0.00401206*m.i29*m.i86 + 0.00746968*m.i29*m.i87 + 0.01484586*m.i29*m.i88 + 0.00405332*
m.i29*m.i89 + 0.00646554*m.i29*m.i90 + 0.00398186*m.i29*m.i91 + 0.0045419*m.i29*m.i92 +
0.00249602*m.i29*m.i93 + 0.00344506*m.i29*m.i94 + 0.0046313*m.i29*m.i95 + 0.01012898*m.i29*m.i96
+ 0.00666118*m.i29*m.i97 + 0.00510452*m.i29*m.i98 + 0.00865974*m.i29*m.i99 + 0.00556162*m.i29*
m.i100 + 0.01432038*m.i30*m.i31 + 0.048762*m.i30*m.i32 + 0.03246*m.i30*m.i33 + 0.00510162*m.i30*
m.i34 + 0.00990812*m.i30*m.i35 + 0.0782504*m.i30*m.i36 + 0.0336068*m.i30*m.i37 + 0.00740496*m.i30
*m.i38 + 0.0520556*m.i30*m.i39 + 0.0689666*m.i30*m.i40 + 0.0338084*m.i30*m.i41 + 0.0303886*m.i30*
m.i42 + 0.01530392*m.i30*m.i43 + 0.0286584*m.i30*m.i44 + 0.001838718*m.i30*m.i45 + 0.01735792*
m.i30*m.i46 + 0.0257124*m.i30*m.i47 + 0.01952576*m.i30*m.i48 + 0.0285968*m.i30*m.i49 + 0.0597966*
m.i30*m.i50 + 0.0235442*m.i30*m.i51 + 0.0356002*m.i30*m.i52 + 0.056815*m.i30*m.i53 + 0.031993*
m.i30*m.i54 + 0.0256864*m.i30*m.i55 + 0.012682*m.i30*m.i56 + 0.01927838*m.i30*m.i57 + 0.0132181*
m.i30*m.i58 + 0.0308396*m.i30*m.i59 + 0.01646776*m.i30*m.i60 + 0.0691402*m.i30*m.i61 + 0.0539688*
m.i30*m.i62 + 0.0253122*m.i30*m.i63 + 0.0217306*m.i30*m.i64 + 0.0238236*m.i30*m.i65 + 0.01199066*
m.i30*m.i66 + 0.0301278*m.i30*m.i67 + 0.0209952*m.i30*m.i68 + 0.0484514*m.i30*m.i69 + 0.0226726*
m.i30*m.i70 + 0.02153*m.i30*m.i71 + 0.023498*m.i30*m.i72 + 0.0217474*m.i30*m.i73 + 0.0363548*
m.i30*m.i74 + 0.0290864*m.i30*m.i75 + 0.01738014*m.i30*m.i76 + 0.0248066*m.i30*m.i77 + 0.01560782
*m.i30*m.i78 + 0.0735134*m.i30*m.i79 + 0.0216582*m.i30*m.i80 + 0.030706*m.i30*m.i81 + 0.00888388*
m.i30*m.i82 + 0.00819988*m.i30*m.i83 + 0.02421*m.i30*m.i84 + 0.01903928*m.i30*m.i85 + 0.0384208*
m.i30*m.i86 + 0.0308632*m.i30*m.i87 + 0.112101*m.i30*m.i88 + 0.0313082*m.i30*m.i89 + 0.0480838*
m.i30*m.i90 + 0.0265036*m.i30*m.i91 + 0.0219052*m.i30*m.i92 + 0.01243318*m.i30*m.i93 + 0.00866336
*m.i30*m.i94 + 0.0318698*m.i30*m.i95 + 0.0809696*m.i30*m.i96 + 0.0362056*m.i30*m.i97 + 0.0307602*
m.i30*m.i98 + 0.0452826*m.i30*m.i99 + 0.0359652*m.i30*m.i100 + 0.01352968*m.i31*m.i32 +
0.01461656*m.i31*m.i33 + 0.00410226*m.i31*m.i34 + 0.00308616*m.i31*m.i35 + 0.0221942*m.i31*m.i36
+ 0.0095014*m.i31*m.i37 + 0.0001894118*m.i31*m.i38 + 0.01328104*m.i31*m.i39 + 0.0207254*m.i31*
m.i40 + 0.01363894*m.i31*m.i41 + 0.01129202*m.i31*m.i42 + 0.0108266*m.i31*m.i43 + 0.01097008*
m.i31*m.i44 + 0.00461712*m.i31*m.i45 + 0.00463752*m.i31*m.i46 + 0.00929264*m.i31*m.i47 +
0.00473752*m.i31*m.i48 + 0.0114599*m.i31*m.i49 + 0.0117742*m.i31*m.i50 + 0.0088573*m.i31*m.i51 +
0.0075837*m.i31*m.i52 + 0.00658756*m.i31*m.i53 + 0.0113218*m.i31*m.i54 + 0.00930362*m.i31*m.i55
+ 0.01063604*m.i31*m.i56 + 0.00432704*m.i31*m.i57 + 0.00804616*m.i31*m.i58 + 0.01180986*m.i31*
m.i59 + 0.0009047*m.i31*m.i60 + 0.01200762*m.i31*m.i61 + 0.00940268*m.i31*m.i62 + 0.01417994*
m.i31*m.i63 + 0.0076164*m.i31*m.i64 + 0.00575322*m.i31*m.i65 + 0.00834872*m.i31*m.i66 +
0.00454676*m.i31*m.i67 + 0.00544346*m.i31*m.i68 + 0.0132866*m.i31*m.i69 + 0.00553084*m.i31*m.i70
+ 0.01147094*m.i31*m.i71 + 0.00577578*m.i31*m.i72 + 0.00887008*m.i31*m.i73 + 0.01059428*m.i31*
m.i74 + 0.0040723*m.i31*m.i75 + 0.00207936*m.i31*m.i76 + 0.01175316*m.i31*m.i77 + 0.00278464*
m.i31*m.i78 + 0.00880162*m.i31*m.i79 + 0.0087823*m.i31*m.i80 + 0.00669872*m.i31*m.i81 +
0.001695732*m.i31*m.i82 + 0.01128974*m.i31*m.i83 + 0.0131319*m.i31*m.i84 + 0.00861518*m.i31*m.i85
+ 0.01080682*m.i31*m.i86 + 0.00523332*m.i31*m.i87 + 0.0207656*m.i31*m.i88 + 0.00591302*m.i31*
m.i89 + 0.00439716*m.i31*m.i90 + 0.0115743*m.i31*m.i91 + 0.00995262*m.i31*m.i92 + 0.000428388*
m.i31*m.i93 + 0.00464012*m.i31*m.i94 + 0.00813868*m.i31*m.i95 + 0.00570582*m.i31*m.i96 +
0.00954936*m.i31*m.i97 + 0.01038358*m.i31*m.i98 + 0.00920842*m.i31*m.i99 + 0.01146966*m.i31*
m.i100 + 0.0209668*m.i32*m.i33 + 0.0108011*m.i32*m.i34 + 0.01248282*m.i32*m.i35 + 0.0530038*m.i32
*m.i36 + 0.0301486*m.i32*m.i37 + 0.00760388*m.i32*m.i38 + 0.0317898*m.i32*m.i39 + 0.0642986*m.i32
*m.i40 + 0.0332684*m.i32*m.i41 + 0.0235182*m.i32*m.i42 + 0.0143552*m.i32*m.i43 + 0.0235288*m.i32*
m.i44 + 0.00682838*m.i32*m.i45 + 0.01137478*m.i32*m.i46 + 0.0318282*m.i32*m.i47 + 0.00984204*
m.i32*m.i48 + 0.0207836*m.i32*m.i49 + 0.0371082*m.i32*m.i50 + 0.01715818*m.i32*m.i51 + 0.0184894*
m.i32*m.i52 + 0.0241264*m.i32*m.i53 + 0.0254814*m.i32*m.i54 + 0.01913224*m.i32*m.i55 + 0.0212986*
m.i32*m.i56 + 0.01167336*m.i32*m.i57 + 0.01191892*m.i32*m.i58 + 0.0246844*m.i32*m.i59 +
0.00772776*m.i32*m.i60 + 0.0424102*m.i32*m.i61 + 0.0330624*m.i32*m.i62 + 0.0190237*m.i32*m.i63 +
0.01185726*m.i32*m.i64 + 0.01593976*m.i32*m.i65 + 0.00931156*m.i32*m.i66 + 0.01976096*m.i32*m.i67
+ 0.00940704*m.i32*m.i68 + 0.0353824*m.i32*m.i69 + 0.01637874*m.i32*m.i70 + 0.0234414*m.i32*
m.i71 + 0.01981882*m.i32*m.i72 + 0.01518934*m.i32*m.i73 + 0.0206944*m.i32*m.i74 + 0.01368518*
m.i32*m.i75 + 0.01085922*m.i32*m.i76 + 0.0142422*m.i32*m.i77 + 0.01225292*m.i32*m.i78 + 0.025216*
m.i32*m.i79 + 0.01581384*m.i32*m.i80 + 0.0226748*m.i32*m.i81 + 0.0078489*m.i32*m.i82 + 0.00488232
*m.i32*m.i83 + 0.01715432*m.i32*m.i84 + 0.01617784*m.i32*m.i85 + 0.0224728*m.i32*m.i86 +
0.0213528*m.i32*m.i87 + 0.0404024*m.i32*m.i88 + 0.00700416*m.i32*m.i89 + 0.0284686*m.i32*m.i90 +
0.01764584*m.i32*m.i91 + 0.01747106*m.i32*m.i92 + 0.00781272*m.i32*m.i93 + 0.01173676*m.i32*m.i94
+ 0.01901852*m.i32*m.i95 + 0.032411*m.i32*m.i96 + 0.0238232*m.i32*m.i97 + 0.021198*m.i32*m.i98
+ 0.0300116*m.i32*m.i99 + 0.0354006*m.i32*m.i100 + 0.0090127*m.i33*m.i34 + 0.00772724*m.i33*
m.i35 + 0.0313702*m.i33*m.i36 + 0.01413346*m.i33*m.i37 + 0.001835906*m.i33*m.i38 + 0.01789618*
m.i33*m.i39 + 0.0342932*m.i33*m.i40 + 0.0203234*m.i33*m.i41 + 0.01859662*m.i33*m.i42 + 0.00949822
*m.i33*m.i43 + 0.0173394*m.i33*m.i44 + 0.00462026*m.i33*m.i45 + 0.0076766*m.i33*m.i46 + 0.0195887
*m.i33*m.i47 + 0.00677792*m.i33*m.i48 + 0.01593666*m.i33*m.i49 + 0.0205366*m.i33*m.i50 +
0.01028686*m.i33*m.i51 + 0.01380638*m.i33*m.i52 + 0.0139701*m.i33*m.i53 + 0.016589*m.i33*m.i54 +
0.0139115*m.i33*m.i55 + 0.01339328*m.i33*m.i56 + 0.00706492*m.i33*m.i57 + 0.01010916*m.i33*m.i58
+ 0.0112109*m.i33*m.i59 + 0.0038394*m.i33*m.i60 + 0.0232104*m.i33*m.i61 + 0.01960694*m.i33*m.i62
+ 0.01805454*m.i33*m.i63 + 0.01327968*m.i33*m.i64 + 0.0135282*m.i33*m.i65 + 0.0101248*m.i33*
m.i66 + 0.00800254*m.i33*m.i67 + 0.0030849*m.i33*m.i68 + 0.0205056*m.i33*m.i69 + 0.00997944*m.i33
*m.i70 + 0.01867754*m.i33*m.i71 + 0.01023414*m.i33*m.i72 + 0.01414764*m.i33*m.i73 + 0.01623304*
m.i33*m.i74 + 0.00580254*m.i33*m.i75 + 0.00688906*m.i33*m.i76 + 0.01955742*m.i33*m.i77 +
0.0043617*m.i33*m.i78 + 0.0110714*m.i33*m.i79 + 0.00837212*m.i33*m.i80 + 0.0186224*m.i33*m.i81 +
0.0038599*m.i33*m.i82 + 0.01828456*m.i33*m.i83 + 0.01460176*m.i33*m.i84 + 0.00984126*m.i33*m.i85
+ 0.01375926*m.i33*m.i86 + 0.01081848*m.i33*m.i87 + 0.0294078*m.i33*m.i88 + 0.00904426*m.i33*
m.i89 + 0.01335384*m.i33*m.i90 + 0.00944562*m.i33*m.i91 + 0.01586856*m.i33*m.i92 + 0.00253356*
m.i33*m.i93 + 0.00579828*m.i33*m.i94 + 0.01264366*m.i33*m.i95 + 0.0212436*m.i33*m.i96 + 0.014968*
m.i33*m.i97 + 0.01459146*m.i33*m.i98 + 0.01990882*m.i33*m.i99 + 0.020898*m.i33*m.i100 + 0.0078456
*m.i34*m.i35 + 0.01102212*m.i34*m.i36 + 0.00676724*m.i34*m.i37 + 0.00365266*m.i34*m.i38 +
0.00595098*m.i34*m.i39 + 0.01153866*m.i34*m.i40 + 0.01058304*m.i34*m.i41 + 0.00838326*m.i34*m.i42
+ 0.00601354*m.i34*m.i43 + 0.00621002*m.i34*m.i44 + 0.00388646*m.i34*m.i45 + 0.00291464*m.i34*
m.i46 + 0.01279302*m.i34*m.i47 + 0.001590652*m.i34*m.i48 + 0.00546164*m.i34*m.i49 + 0.00756668*
m.i34*m.i50 + 0.00255946*m.i34*m.i51 + 0.00586752*m.i34*m.i52 - 0.0001086844*m.i34*m.i53 +
0.00756758*m.i34*m.i54 + 0.00472132*m.i34*m.i55 + 0.0090114*m.i34*m.i56 + 0.00404276*m.i34*m.i57
+ 0.00259172*m.i34*m.i58 + 0.0043188*m.i34*m.i59 + 0.00265148*m.i34*m.i60 + 0.00988174*m.i34*
m.i61 + 0.00773706*m.i34*m.i62 + 0.00871216*m.i34*m.i63 + 0.0051719*m.i34*m.i64 + 0.005674*m.i34*
m.i65 + 0.0042472*m.i34*m.i66 + 0.0029352*m.i34*m.i67 + 0.00380488*m.i34*m.i68 + 0.00782908*m.i34
*m.i69 + 0.00528678*m.i34*m.i70 + 0.01141144*m.i34*m.i71 + 0.00731358*m.i34*m.i72 + 0.00557996*
m.i34*m.i73 + 0.00428558*m.i34*m.i74 + 0.00214164*m.i34*m.i75 + 0.001888024*m.i34*m.i76 +
0.00450712*m.i34*m.i77 + 0.001974898*m.i34*m.i78 + 0.000555542*m.i34*m.i79 + 0.004826*m.i34*m.i80
+ 0.01009798*m.i34*m.i81 + 0.00342408*m.i34*m.i82 + 0.0066259*m.i34*m.i83 + 0.00557372*m.i34*
m.i84 + 0.00493326*m.i34*m.i85 + 0.0033431*m.i34*m.i86 + 0.00355798*m.i34*m.i87 + 0.0070914*m.i34
*m.i88 + 0.00319452*m.i34*m.i89 + 0.001165088*m.i34*m.i90 + 0.00330168*m.i34*m.i91 + 0.00487072*
m.i34*m.i92 + 0.001039364*m.i34*m.i93 + 0.00462638*m.i34*m.i94 + 0.00474964*m.i34*m.i95 +
0.00307738*m.i34*m.i96 + 0.00634158*m.i34*m.i97 + 0.0093911*m.i34*m.i98 + 0.00479968*m.i34*m.i99
+ 0.00945466*m.i34*m.i100 + 0.00886108*m.i35*m.i36 + 0.008324*m.i35*m.i37 + 0.0042517*m.i35*
m.i38 + 0.0063195*m.i35*m.i39 + 0.00897334*m.i35*m.i40 + 0.01438534*m.i35*m.i41 + 0.00707384*
m.i35*m.i42 + 0.00524994*m.i35*m.i43 + 0.00729354*m.i35*m.i44 + 0.00231104*m.i35*m.i45 +
0.00317018*m.i35*m.i46 + 0.01095322*m.i35*m.i47 + 0.00256082*m.i35*m.i48 + 0.0066693*m.i35*m.i49
+ 0.00896786*m.i35*m.i50 + 0.00243944*m.i35*m.i51 + 0.00542922*m.i35*m.i52 + 0.001853016*m.i35*
m.i53 + 0.0080304*m.i35*m.i54 + 0.004194*m.i35*m.i55 + 0.00944224*m.i35*m.i56 + 0.0044097*m.i35*
m.i57 + 0.00234874*m.i35*m.i58 + 0.0045055*m.i35*m.i59 + 0.00387194*m.i35*m.i60 + 0.01070194*
m.i35*m.i61 + 0.01020854*m.i35*m.i62 + 0.00869604*m.i35*m.i63 + 0.0038381*m.i35*m.i64 +
0.00566828*m.i35*m.i65 + 0.00392276*m.i35*m.i66 + 0.00493806*m.i35*m.i67 + 0.00543634*m.i35*m.i68
+ 0.01090284*m.i35*m.i69 + 0.00744802*m.i35*m.i70 + 0.01323476*m.i35*m.i71 + 0.00994186*m.i35*
m.i72 + 0.00554564*m.i35*m.i73 + 0.00631474*m.i35*m.i74 + 0.00456554*m.i35*m.i75 + 0.00357674*
m.i35*m.i76 + 0.00520436*m.i35*m.i77 + 0.0030095*m.i35*m.i78 + 0.0057729*m.i35*m.i79 + 0.00411204
*m.i35*m.i80 + 0.00953392*m.i35*m.i81 + 0.00378046*m.i35*m.i82 + 0.00572152*m.i35*m.i83 +
0.00613732*m.i35*m.i84 + 0.00382166*m.i35*m.i85 + 0.00356476*m.i35*m.i86 + 0.00634394*m.i35*m.i87
+ 0.0111758*m.i35*m.i88 + 0.00567884*m.i35*m.i89 + 0.00368822*m.i35*m.i90 + 0.00382434*m.i35*
m.i91 + 0.00295216*m.i35*m.i92 + 0.00261056*m.i35*m.i93 + 0.00538486*m.i35*m.i94 + 0.00508518*
m.i35*m.i95 + 0.00571674*m.i35*m.i96 + 0.00749186*m.i35*m.i97 + 0.00986618*m.i35*m.i98 +
0.00565378*m.i35*m.i99 + 0.0094721*m.i35*m.i100 + 0.0440606*m.i36*m.i37 + 0.0069763*m.i36*m.i38
+ 0.0493166*m.i36*m.i39 + 0.121634*m.i36*m.i40 + 0.0358136*m.i36*m.i41 + 0.0380066*m.i36*m.i42
+ 0.0240066*m.i36*m.i43 + 0.0315302*m.i36*m.i44 + 0.00778714*m.i36*m.i45 + 0.01711478*m.i36*
m.i46 + 0.0433014*m.i36*m.i47 + 0.01592312*m.i36*m.i48 + 0.0219624*m.i36*m.i49 + 0.0584382*m.i36*
m.i50 + 0.0237454*m.i36*m.i51 + 0.030079*m.i36*m.i52 + 0.0450814*m.i36*m.i53 + 0.0285826*m.i36*
m.i54 + 0.0266392*m.i36*m.i55 + 0.01830758*m.i36*m.i56 + 0.01364522*m.i36*m.i57 + 0.01568*m.i36*
m.i58 + 0.0359108*m.i36*m.i59 + 0.00643528*m.i36*m.i60 + 0.056249*m.i36*m.i61 + 0.0503568*m.i36*
m.i62 + 0.0221574*m.i36*m.i63 + 0.023432*m.i36*m.i64 + 0.0219264*m.i36*m.i65 + 0.01946022*m.i36*
m.i66 + 0.0301552*m.i36*m.i67 + 0.00986666*m.i36*m.i68 + 0.0496472*m.i36*m.i69 + 0.0177644*m.i36*
m.i70 + 0.0308856*m.i36*m.i71 + 0.01899074*m.i36*m.i72 + 0.01805938*m.i36*m.i73 + 0.0273694*m.i36
*m.i74 + 0.01662774*m.i36*m.i75 + 0.00832596*m.i36*m.i76 + 0.0203852*m.i36*m.i77 + 0.0174271*
m.i36*m.i78 + 0.039217*m.i36*m.i79 + 0.0232082*m.i36*m.i80 + 0.0357644*m.i36*m.i81 + 0.00331724*
m.i36*m.i82 + 0.0276304*m.i36*m.i83 + 0.0267904*m.i36*m.i84 + 0.02756*m.i36*m.i85 + 0.0320374*
m.i36*m.i86 + 0.0222598*m.i36*m.i87 + 0.0496644*m.i36*m.i88 + 0.01118028*m.i36*m.i89 + 0.0432572*
m.i36*m.i90 + 0.027434*m.i36*m.i91 + 0.0293774*m.i36*m.i92 + 0.0055352*m.i36*m.i93 + 0.00852418*
m.i36*m.i94 + 0.028037*m.i36*m.i95 + 0.0642512*m.i36*m.i96 + 0.0386458*m.i36*m.i97 + 0.040981*
m.i36*m.i98 + 0.04604*m.i36*m.i99 + 0.0478424*m.i36*m.i100 + 0.00525362*m.i37*m.i38 + 0.0212576*
m.i37*m.i39 + 0.0543916*m.i37*m.i40 + 0.018282*m.i37*m.i41 + 0.01700698*m.i37*m.i42 + 0.00953368*
m.i37*m.i43 + 0.0147155*m.i37*m.i44 + 0.00425042*m.i37*m.i45 + 0.00777022*m.i37*m.i46 +
0.01646346*m.i37*m.i47 + 0.00740598*m.i37*m.i48 + 0.01274586*m.i37*m.i49 + 0.0282742*m.i37*m.i50
+ 0.01506898*m.i37*m.i51 + 0.01409464*m.i37*m.i52 + 0.01916222*m.i37*m.i53 + 0.01572296*m.i37*
m.i54 + 0.01361714*m.i37*m.i55 + 0.01302042*m.i37*m.i56 + 0.00807862*m.i37*m.i57 + 0.00701644*
m.i37*m.i58 + 0.0201438*m.i37*m.i59 + 0.00497496*m.i37*m.i60 + 0.0259544*m.i37*m.i61 + 0.01982096
*m.i37*m.i62 + 0.01082904*m.i37*m.i63 + 0.00909066*m.i37*m.i64 + 0.0112364*m.i37*m.i65 +
0.0089483*m.i37*m.i66 + 0.01522148*m.i37*m.i67 + 0.00459152*m.i37*m.i68 + 0.0214858*m.i37*m.i69
+ 0.01075074*m.i37*m.i70 + 0.0132224*m.i37*m.i71 + 0.00980738*m.i37*m.i72 + 0.00885252*m.i37*
m.i73 + 0.01427422*m.i37*m.i74 + 0.00903996*m.i37*m.i75 + 0.00768272*m.i37*m.i76 + 0.0103221*
m.i37*m.i77 + 0.01082002*m.i37*m.i78 + 0.0248284*m.i37*m.i79 + 0.01098172*m.i37*m.i80 +
0.01335848*m.i37*m.i81 + 0.00545734*m.i37*m.i82 + 0.00921544*m.i37*m.i83 + 0.0110069*m.i37*m.i84
+ 0.01385998*m.i37*m.i85 + 0.01437348*m.i37*m.i86 + 0.01621552*m.i37*m.i87 + 0.01981332*m.i37*
m.i88 + 0.00549314*m.i37*m.i89 + 0.0210958*m.i37*m.i90 + 0.0116061*m.i37*m.i91 + 0.01444326*m.i37
*m.i92 + 0.00631646*m.i37*m.i93 + 0.00847398*m.i37*m.i94 + 0.0132838*m.i37*m.i95 + 0.0257442*
m.i37*m.i96 + 0.01746728*m.i37*m.i97 + 0.01331586*m.i37*m.i98 + 0.0246618*m.i37*m.i99 + 0.0231186
*m.i37*m.i100 + 0.00427726*m.i38*m.i39 + 0.00960742*m.i38*m.i40 + 0.00588794*m.i38*m.i41 +
0.0040899*m.i38*m.i42 + 0.00370486*m.i38*m.i43 + 0.001581616*m.i38*m.i44 + 0.00157779*m.i38*m.i45
+ 0.001517842*m.i38*m.i46 + 0.00577098*m.i38*m.i47 + 0.00184948*m.i38*m.i48 + 0.001412132*m.i38*
m.i49 + 0.00473326*m.i38*m.i50 + 0.001265572*m.i38*m.i51 + 0.00389392*m.i38*m.i52 + 0.00195541*
m.i38*m.i53 + 0.0045747*m.i38*m.i54 + 0.003024*m.i38*m.i55 + 0.00322834*m.i38*m.i56 + 0.00240162*
m.i38*m.i57 + 0.000494648*m.i38*m.i58 + 0.0035117*m.i38*m.i59 + 0.00302272*m.i38*m.i60 +
0.0067192*m.i38*m.i61 + 0.00576934*m.i38*m.i62 + 0.00236514*m.i38*m.i63 + 0.00208302*m.i38*m.i64
+ 0.00359594*m.i38*m.i65 + 0.001590092*m.i38*m.i66 + 0.00239398*m.i38*m.i67 + 0.00302224*m.i38*
m.i68 + 0.00326928*m.i38*m.i69 + 0.00302294*m.i38*m.i70 + 0.0049377*m.i38*m.i71 + 0.00553496*
m.i38*m.i72 + 0.00229972*m.i38*m.i73 + 0.00318332*m.i38*m.i74 + 0.00325074*m.i38*m.i75 +
0.001803886*m.i38*m.i76 + 0.000902562*m.i38*m.i77 + 0.001651326*m.i38*m.i78 + 0.0039935*m.i38*
m.i79 + 0.00233242*m.i38*m.i80 + 0.00546644*m.i38*m.i81 + 0.00223454*m.i38*m.i82 - 0.001681894*
m.i38*m.i83 + 0.0025273*m.i38*m.i84 + 0.0032781*m.i38*m.i85 + 0.001557044*m.i38*m.i86 +
0.00327138*m.i38*m.i87 + 0.00674346*m.i38*m.i88 + 0.0020784*m.i38*m.i89 + 0.00343958*m.i38*m.i90
+ 0.00324954*m.i38*m.i91 + 0.00206404*m.i38*m.i92 + 0.00161462*m.i38*m.i93 + 0.00247166*m.i38*
m.i94 + 0.00341238*m.i38*m.i95 + 0.00585902*m.i38*m.i96 + 0.00423638*m.i38*m.i97 + 0.00566634*
m.i38*m.i98 + 0.00315378*m.i38*m.i99 + 0.00449598*m.i38*m.i100 + 0.0491892*m.i39*m.i40 +
0.0262408*m.i39*m.i41 + 0.0205234*m.i39*m.i42 + 0.01409356*m.i39*m.i43 + 0.0195666*m.i39*m.i44 +
0.00525174*m.i39*m.i45 + 0.01076856*m.i39*m.i46 + 0.0216478*m.i39*m.i47 + 0.01097136*m.i39*m.i48
+ 0.0178672*m.i39*m.i49 + 0.0324104*m.i39*m.i50 + 0.0147971*m.i39*m.i51 + 0.01855664*m.i39*m.i52
+ 0.0250992*m.i39*m.i53 + 0.0213078*m.i39*m.i54 + 0.01575182*m.i39*m.i55 + 0.01438592*m.i39*
m.i56 + 0.0105253*m.i39*m.i57 + 0.01177712*m.i39*m.i58 + 0.0207946*m.i39*m.i59 + 0.00650454*m.i39
*m.i60 + 0.036126*m.i39*m.i61 + 0.0278076*m.i39*m.i62 + 0.0206546*m.i39*m.i63 + 0.01499036*m.i39*
m.i64 + 0.01276412*m.i39*m.i65 + 0.0125414*m.i39*m.i66 + 0.01617824*m.i39*m.i67 + 0.010394*m.i39*
m.i68 + 0.0290228*m.i39*m.i69 + 0.01190924*m.i39*m.i70 + 0.01824964*m.i39*m.i71 + 0.014012*m.i39*
m.i72 + 0.01408568*m.i39*m.i73 + 0.0192582*m.i39*m.i74 + 0.01283914*m.i39*m.i75 + 0.00757714*
m.i39*m.i76 + 0.0157748*m.i39*m.i77 + 0.00886562*m.i39*m.i78 + 0.0226622*m.i39*m.i79 + 0.01506442
*m.i39*m.i80 + 0.01868878*m.i39*m.i81 + 0.00371016*m.i39*m.i82 + 0.01245306*m.i39*m.i83 +
0.01693888*m.i39*m.i84 + 0.0145704*m.i39*m.i85 + 0.0207926*m.i39*m.i86 + 0.01487822*m.i39*m.i87
+ 0.0465058*m.i39*m.i88 + 0.01052428*m.i39*m.i89 + 0.0220072*m.i39*m.i90 + 0.01887928*m.i39*
m.i91 + 0.01597714*m.i39*m.i92 + 0.00531126*m.i39*m.i93 + 0.00658506*m.i39*m.i94 + 0.01713092*
m.i39*m.i95 + 0.0328166*m.i39*m.i96 + 0.0213542*m.i39*m.i97 + 0.0210286*m.i39*m.i98 + 0.0255336*
m.i39*m.i99 + 0.0274274*m.i39*m.i100 + 0.0504412*m.i40*m.i41 + 0.0336102*m.i40*m.i42 + 0.0294804*
m.i40*m.i43 + 0.0424704*m.i40*m.i44 + 0.0030095*m.i40*m.i45 + 0.01146224*m.i40*m.i46 + 0.0507426*
m.i40*m.i47 + 0.01585054*m.i40*m.i48 + 0.0217164*m.i40*m.i49 + 0.0491478*m.i40*m.i50 + 0.0317926*
m.i40*m.i51 + 0.0284682*m.i40*m.i52 + 0.0468934*m.i40*m.i53 + 0.0309254*m.i40*m.i54 + 0.028626*
m.i40*m.i55 + 0.0309698*m.i40*m.i56 + 0.01062184*m.i40*m.i57 + 0.01987174*m.i40*m.i58 + 0.0429952
*m.i40*m.i59 + 0.00300922*m.i40*m.i60 + 0.0574936*m.i40*m.i61 + 0.0496304*m.i40*m.i62 +
0.01678646*m.i40*m.i63 + 0.0153295*m.i40*m.i64 + 0.0230176*m.i40*m.i65 + 0.0200972*m.i40*m.i66 +
0.0274442*m.i40*m.i67 - 0.00465404*m.i40*m.i68 + 0.0404524*m.i40*m.i69 + 0.01243058*m.i40*m.i70
+ 0.0333654*m.i40*m.i71 + 0.01847532*m.i40*m.i72 + 0.01863464*m.i40*m.i73 + 0.01865328*m.i40*
m.i74 + 0.0086314*m.i40*m.i75 + 0.0107773*m.i40*m.i76 + 0.0203618*m.i40*m.i77 + 0.01445046*m.i40*
m.i78 + 0.0410886*m.i40*m.i79 + 0.01194082*m.i40*m.i80 + 0.044529*m.i40*m.i81 + 0.00528742*m.i40*
m.i82 + 0.0445722*m.i40*m.i83 + 0.0229102*m.i40*m.i84 + 0.0241064*m.i40*m.i85 + 0.0368384*m.i40*
m.i86 + 0.0327072*m.i40*m.i87 + 0.0612044*m.i40*m.i88 + 0.0029601*m.i40*m.i89 + 0.0534994*m.i40*
m.i90 + 0.0258428*m.i40*m.i91 + 0.0317582*m.i40*m.i92 + 0.00965728*m.i40*m.i93 + 0.01437522*m.i40
*m.i94 + 0.0249652*m.i40*m.i95 + 0.0605768*m.i40*m.i96 + 0.0345084*m.i40*m.i97 + 0.0313726*m.i40*
m.i98 + 0.064674*m.i40*m.i99 + 0.0504464*m.i40*m.i100 + 0.0211266*m.i41*m.i42 + 0.0280268*m.i41*
m.i43 + 0.0396958*m.i41*m.i44 + 0.00245084*m.i41*m.i45 + 0.00955952*m.i41*m.i46 + 0.0396834*m.i41
*m.i47 + 0.0061862*m.i41*m.i48 + 0.02227*m.i41*m.i49 + 0.0217142*m.i41*m.i50 + 0.00978418*m.i41*
m.i51 + 0.01479238*m.i41*m.i52 + 0.016171*m.i41*m.i53 + 0.0243916*m.i41*m.i54 + 0.01422356*m.i41*
m.i55 + 0.0283342*m.i41*m.i56 + 0.00801394*m.i41*m.i57 + 0.01783044*m.i41*m.i58 + 0.01283818*
m.i41*m.i59 + 0.00500652*m.i41*m.i60 + 0.0289002*m.i41*m.i61 + 0.0313062*m.i41*m.i62 + 0.0372108*
m.i41*m.i63 + 0.0192516*m.i41*m.i64 + 0.0152555*m.i41*m.i65 + 0.01848886*m.i41*m.i66 + 0.01396382
*m.i41*m.i67 + 0.01323774*m.i41*m.i68 + 0.0319484*m.i41*m.i69 + 0.01505338*m.i41*m.i70 +
0.0464724*m.i41*m.i71 + 0.0275962*m.i41*m.i72 + 0.01531976*m.i41*m.i73 + 0.0159052*m.i41*m.i74 +
0.00897454*m.i41*m.i75 + 0.00931212*m.i41*m.i76 + 0.01958562*m.i41*m.i77 + 0.00344582*m.i41*m.i78
+ 0.00874906*m.i41*m.i79 + 0.01063594*m.i41*m.i80 + 0.02994*m.i41*m.i81 + 0.000668906*m.i41*
m.i82 + 0.0436128*m.i41*m.i83 + 0.0233408*m.i41*m.i84 + 0.00754018*m.i41*m.i85 + 0.01805636*m.i41
*m.i86 + 0.01281402*m.i41*m.i87 + 0.0523726*m.i41*m.i88 + 0.00844562*m.i41*m.i89 + 0.01302218*
m.i41*m.i90 + 0.01396562*m.i41*m.i91 + 0.01458222*m.i41*m.i92 + 0.0072903*m.i41*m.i93 +
0.00709746*m.i41*m.i94 + 0.01473562*m.i41*m.i95 + 0.01085782*m.i41*m.i96 + 0.021406*m.i41*m.i97
+ 0.0295828*m.i41*m.i98 + 0.01994264*m.i41*m.i99 + 0.0263314*m.i41*m.i100 + 0.01525376*m.i42*
m.i43 + 0.01763084*m.i42*m.i44 + 0.00749008*m.i42*m.i45 + 0.00916454*m.i42*m.i46 + 0.0235102*
m.i42*m.i47 + 0.00921988*m.i42*m.i48 + 0.01347394*m.i42*m.i49 + 0.0247352*m.i42*m.i50 +
0.01120346*m.i42*m.i51 + 0.01858118*m.i42*m.i52 + 0.01723882*m.i42*m.i53 + 0.0208142*m.i42*m.i54
+ 0.01360838*m.i42*m.i55 + 0.0118194*m.i42*m.i56 + 0.00860676*m.i42*m.i57 + 0.00935934*m.i42*
m.i58 + 0.01516418*m.i42*m.i59 + 0.0068076*m.i42*m.i60 + 0.028779*m.i42*m.i61 + 0.0258494*m.i42*
m.i62 + 0.0233604*m.i42*m.i63 + 0.01573382*m.i42*m.i64 + 0.01049188*m.i42*m.i65 + 0.00740748*
m.i42*m.i66 + 0.01082116*m.i42*m.i67 + 0.00777482*m.i42*m.i68 + 0.0240088*m.i42*m.i69 +
0.01102072*m.i42*m.i70 + 0.01820862*m.i42*m.i71 + 0.01298112*m.i42*m.i72 + 0.01234456*m.i42*m.i73
+ 0.0141652*m.i42*m.i74 + 0.00934936*m.i42*m.i75 + 0.00505832*m.i42*m.i76 + 0.01458566*m.i42*
m.i77 + 0.00728638*m.i42*m.i78 + 0.0099359*m.i42*m.i79 + 0.01486474*m.i42*m.i80 + 0.01668502*
m.i42*m.i81 + 0.00373442*m.i42*m.i82 + 0.01190258*m.i42*m.i83 + 0.01201006*m.i42*m.i84 +
0.0151776*m.i42*m.i85 + 0.0145938*m.i42*m.i86 + 0.00824462*m.i42*m.i87 + 0.0160982*m.i42*m.i88 +
0.006593*m.i42*m.i89 + 0.01418496*m.i42*m.i90 + 0.01803698*m.i42*m.i91 + 0.0159653*m.i42*m.i92 +
0.00291508*m.i42*m.i93 + 0.00538746*m.i42*m.i94 + 0.01644022*m.i42*m.i95 + 0.0250208*m.i42*m.i96
+ 0.018306*m.i42*m.i97 + 0.01797718*m.i42*m.i98 + 0.01649756*m.i42*m.i99 + 0.025412*m.i42*m.i100
+ 0.01762524*m.i43*m.i44 + 0.0026577*m.i43*m.i45 + 0.00500594*m.i43*m.i46 + 0.01987672*m.i43*
m.i47 + 0.00486026*m.i43*m.i48 + 0.01054502*m.i43*m.i49 + 0.00887754*m.i43*m.i50 + 0.00693606*
m.i43*m.i51 + 0.01006578*m.i43*m.i52 + 0.01002454*m.i43*m.i53 + 0.0138188*m.i43*m.i54 +
0.00975298*m.i43*m.i55 + 0.01686962*m.i43*m.i56 + 0.00490722*m.i43*m.i57 + 0.00949952*m.i43*m.i58
+ 0.01032096*m.i43*m.i59 + 0.00313858*m.i43*m.i60 + 0.01509816*m.i43*m.i61 + 0.0162044*m.i43*
m.i62 + 0.01875628*m.i43*m.i63 + 0.01240346*m.i43*m.i64 + 0.0085184*m.i43*m.i65 + 0.0097536*m.i43
*m.i66 + 0.00601436*m.i43*m.i67 + 0.0069333*m.i43*m.i68 + 0.01534648*m.i43*m.i69 + 0.00585324*
m.i43*m.i70 + 0.01833662*m.i43*m.i71 + 0.01219044*m.i43*m.i72 + 0.00997222*m.i43*m.i73 +
0.00950324*m.i43*m.i74 + 0.00395808*m.i43*m.i75 + 0.00230734*m.i43*m.i76 + 0.01177946*m.i43*m.i77
+ 0.00120913*m.i43*m.i78 + 0.00451336*m.i43*m.i79 + 0.0087064*m.i43*m.i80 + 0.01415418*m.i43*
m.i81 + 0.00158382*m.i43*m.i82 + 0.01934448*m.i43*m.i83 + 0.01332798*m.i43*m.i84 + 0.0073079*
m.i43*m.i85 + 0.01024086*m.i43*m.i86 + 0.00333288*m.i43*m.i87 + 0.01697646*m.i43*m.i88 +
0.00457426*m.i43*m.i89 + 0.00557218*m.i43*m.i90 + 0.0103559*m.i43*m.i91 + 0.00897022*m.i43*m.i92
+ 0.00315402*m.i43*m.i93 + 0.00504118*m.i43*m.i94 + 0.01075858*m.i43*m.i95 + 0.00678594*m.i43*
m.i96 + 0.01260626*m.i43*m.i97 + 0.0163881*m.i43*m.i98 + 0.01009846*m.i43*m.i99 + 0.01154306*
m.i43*m.i100 + 0.00483446*m.i44*m.i45 + 0.00652268*m.i44*m.i46 + 0.0242272*m.i44*m.i47 +
0.00478826*m.i44*m.i48 + 0.01685648*m.i44*m.i49 + 0.020425*m.i44*m.i50 + 0.00923526*m.i44*m.i51
+ 0.01214276*m.i44*m.i52 + 0.01807778*m.i44*m.i53 + 0.01714928*m.i44*m.i54 + 0.0117815*m.i44*
m.i55 + 0.01675568*m.i44*m.i56 + 0.0065756*m.i44*m.i57 + 0.01226174*m.i44*m.i58 + 0.0107529*m.i44
*m.i59 + 0.00316098*m.i44*m.i60 + 0.0237412*m.i44*m.i61 + 0.023095*m.i44*m.i62 + 0.0261176*m.i44*
m.i63 + 0.01217274*m.i44*m.i64 + 0.01008618*m.i44*m.i65 + 0.0100818*m.i44*m.i66 + 0.01058518*
m.i44*m.i67 + 0.00547734*m.i44*m.i68 + 0.0242058*m.i44*m.i69 + 0.01131642*m.i44*m.i70 + 0.0238346
*m.i44*m.i71 + 0.01469328*m.i44*m.i72 + 0.01153818*m.i44*m.i73 + 0.0107527*m.i44*m.i74 +
0.00664436*m.i44*m.i75 + 0.00643936*m.i44*m.i76 + 0.01819866*m.i44*m.i77 + 0.00401038*m.i44*m.i78
+ 0.00860378*m.i44*m.i79 + 0.01052694*m.i44*m.i80 + 0.01791956*m.i44*m.i81 + 0.001302356*m.i44*
m.i82 + 0.024415*m.i44*m.i83 + 0.01318656*m.i44*m.i84 + 0.00691488*m.i44*m.i85 + 0.0134211*m.i44*
m.i86 + 0.01005166*m.i44*m.i87 + 0.036692*m.i44*m.i88 + 0.00614716*m.i44*m.i89 + 0.0120958*m.i44*
m.i90 + 0.00884752*m.i44*m.i91 + 0.01296164*m.i44*m.i92 + 0.00513894*m.i44*m.i93 + 0.00596534*
m.i44*m.i94 + 0.01196692*m.i44*m.i95 + 0.01664976*m.i44*m.i96 + 0.01462126*m.i44*m.i97 +
0.0157382*m.i44*m.i98 + 0.01533824*m.i44*m.i99 + 0.0188597*m.i44*m.i100 + 0.00317774*m.i45*m.i46
+ 0.00420624*m.i45*m.i47 + 0.00199361*m.i45*m.i48 + 0.0050265*m.i45*m.i49 + 0.00894044*m.i45*
m.i50 + 0.00284776*m.i45*m.i51 + 0.00547162*m.i45*m.i52 + 0.00269966*m.i45*m.i53 + 0.0064379*
m.i45*m.i54 + 0.00472118*m.i45*m.i55 + 0.0042126*m.i45*m.i56 + 0.00394074*m.i45*m.i57 +
0.00265196*m.i45*m.i58 + 0.00448504*m.i45*m.i59 + 0.001797504*m.i45*m.i60 + 0.00867806*m.i45*
m.i61 + 0.00322858*m.i45*m.i62 + 0.00607352*m.i45*m.i63 + 0.00436738*m.i45*m.i64 + 0.00237578*
m.i45*m.i65 + 0.0044976*m.i45*m.i66 + 0.00181419*m.i45*m.i67 + 0.00495262*m.i45*m.i68 +
0.00570214*m.i45*m.i69 + 0.00422674*m.i45*m.i70 + 0.001748284*m.i45*m.i71 + 0.00347868*m.i45*
m.i72 + 0.00586478*m.i45*m.i73 + 0.00333902*m.i45*m.i74 + 0.0046385*m.i45*m.i75 + 0.001228842*
m.i45*m.i76 + 0.00595824*m.i45*m.i77 + 0.0027183*m.i45*m.i78 + 0.00108409*m.i45*m.i79 +
0.00761658*m.i45*m.i80 + 0.0005468*m.i45*m.i81 + 0.001647768*m.i45*m.i82 - 0.00572218*m.i45*m.i83
+ 0.00291394*m.i45*m.i84 + 0.00667112*m.i45*m.i85 + 0.00283124*m.i45*m.i86 + 0.00214236*m.i45*
m.i87 + 0.00913532*m.i45*m.i88 + 0.0031579*m.i45*m.i89 + 0.001671266*m.i45*m.i90 + 0.007457*m.i45
*m.i91 + 0.00539294*m.i45*m.i92 + 0.001548892*m.i45*m.i93 + 0.00325768*m.i45*m.i94 + 0.00415906*
m.i45*m.i95 + 0.00472416*m.i45*m.i96 + 0.00257908*m.i45*m.i97 + 0.00311904*m.i45*m.i98 -
0.00028754*m.i45*m.i99 + 0.00641254*m.i45*m.i100 + 0.00936266*m.i46*m.i47 + 0.00551424*m.i46*
m.i48 + 0.00665328*m.i46*m.i49 + 0.01254298*m.i46*m.i50 + 0.00457552*m.i46*m.i51 + 0.00723508*
m.i46*m.i52 + 0.01013924*m.i46*m.i53 + 0.00835722*m.i46*m.i54 + 0.00612552*m.i46*m.i55 +
0.00568528*m.i46*m.i56 + 0.00506602*m.i46*m.i57 + 0.00547684*m.i46*m.i58 + 0.00630834*m.i46*m.i59
+ 0.0034076*m.i46*m.i60 + 0.01269782*m.i46*m.i61 + 0.01056202*m.i46*m.i62 + 0.00905674*m.i46*
m.i63 + 0.00727642*m.i46*m.i64 + 0.0053986*m.i46*m.i65 + 0.00499194*m.i46*m.i66 + 0.00693256*
m.i46*m.i67 + 0.00384534*m.i46*m.i68 + 0.01113952*m.i46*m.i69 + 0.00571676*m.i46*m.i70 +
0.00918194*m.i46*m.i71 + 0.00582038*m.i46*m.i72 + 0.00587208*m.i46*m.i73 + 0.00927628*m.i46*m.i74
+ 0.00540062*m.i46*m.i75 + 0.00399822*m.i46*m.i76 + 0.00599102*m.i46*m.i77 + 0.00478388*m.i46*
m.i78 + 0.0052496*m.i46*m.i79 + 0.0080323*m.i46*m.i80 + 0.00786638*m.i46*m.i81 + 0.001854684*
m.i46*m.i82 + 0.00407872*m.i46*m.i83 + 0.00621788*m.i46*m.i84 + 0.00606418*m.i46*m.i85 +
0.00669516*m.i46*m.i86 + 0.00483036*m.i46*m.i87 + 0.00889994*m.i46*m.i88 + 0.00341184*m.i46*m.i89
+ 0.00883678*m.i46*m.i90 + 0.00699852*m.i46*m.i91 + 0.00577214*m.i46*m.i92 + 0.00238288*m.i46*
m.i93 + 0.001681122*m.i46*m.i94 + 0.00660328*m.i46*m.i95 + 0.0125098*m.i46*m.i96 + 0.00829924*
m.i46*m.i97 + 0.00843732*m.i46*m.i98 + 0.00930502*m.i46*m.i99 + 0.01141018*m.i46*m.i100 +
0.00622806*m.i47*m.i48 + 0.01275134*m.i47*m.i49 + 0.0219686*m.i47*m.i50 + 0.00559252*m.i47*m.i51
+ 0.014742*m.i47*m.i52 + 0.01293552*m.i47*m.i53 + 0.0202408*m.i47*m.i54 + 0.01276622*m.i47*m.i55
+ 0.0211842*m.i47*m.i56 + 0.00751862*m.i47*m.i57 + 0.01167596*m.i47*m.i58 + 0.0096102*m.i47*
m.i59 + 0.00476024*m.i47*m.i60 + 0.0291008*m.i47*m.i61 + 0.0293252*m.i47*m.i62 + 0.0218568*m.i47*
m.i63 + 0.01597818*m.i47*m.i64 + 0.01230724*m.i47*m.i65 + 0.01074494*m.i47*m.i66 + 0.01192482*
m.i47*m.i67 + 0.0072756*m.i47*m.i68 + 0.0259978*m.i47*m.i69 + 0.01196354*m.i47*m.i70 + 0.0346772*
m.i47*m.i71 + 0.01997802*m.i47*m.i72 + 0.0109755*m.i47*m.i73 + 0.01126216*m.i47*m.i74 +
0.00543986*m.i47*m.i75 + 0.00507998*m.i47*m.i76 + 0.01031016*m.i47*m.i77 + 0.0051788*m.i47*m.i78
+ 0.001275304*m.i47*m.i79 + 0.00993436*m.i47*m.i80 + 0.0302174*m.i47*m.i81 + 0.0025327*m.i47*
m.i82 + 0.0227778*m.i47*m.i83 + 0.01358392*m.i47*m.i84 + 0.01015524*m.i47*m.i85 + 0.01402648*
m.i47*m.i86 + 0.00789154*m.i47*m.i87 + 0.0151434*m.i47*m.i88 + 0.001278866*m.i47*m.i89 +
0.0158996*m.i47*m.i90 + 0.01154264*m.i47*m.i91 + 0.01393698*m.i47*m.i92 + 0.00304714*m.i47*m.i93
+ 0.00512466*m.i47*m.i94 + 0.01429612*m.i47*m.i95 + 0.01681572*m.i47*m.i96 + 0.01931984*m.i47*
m.i97 + 0.0267484*m.i47*m.i98 + 0.01797768*m.i47*m.i99 + 0.0282598*m.i47*m.i100 + 0.00546656*
m.i48*m.i49 + 0.01037534*m.i48*m.i50 + 0.00353598*m.i48*m.i51 + 0.00756044*m.i48*m.i52 +
0.01216498*m.i48*m.i53 + 0.00967664*m.i48*m.i54 + 0.00647364*m.i48*m.i55 + 0.00302706*m.i48*m.i56
+ 0.0053717*m.i48*m.i57 + 0.00577622*m.i48*m.i58 + 0.00544272*m.i48*m.i59 + 0.00352554*m.i48*
m.i60 + 0.01442968*m.i48*m.i61 + 0.0109524*m.i48*m.i62 + 0.00913756*m.i48*m.i63 + 0.00640136*
m.i48*m.i64 + 0.00303604*m.i48*m.i65 + 0.00380586*m.i48*m.i66 + 0.00547728*m.i48*m.i67 +
0.00370642*m.i48*m.i68 + 0.00883124*m.i48*m.i69 + 0.00549652*m.i48*m.i70 + 0.00566248*m.i48*m.i71
+ 0.00467596*m.i48*m.i72 + 0.00529964*m.i48*m.i73 + 0.00953518*m.i48*m.i74 + 0.00623786*m.i48*
m.i75 + 0.00402142*m.i48*m.i76 + 0.00662892*m.i48*m.i77 + 0.004711*m.i48*m.i78 + 0.001686804*
m.i48*m.i79 + 0.00761384*m.i48*m.i80 + 0.0057658*m.i48*m.i81 + 0.00181049*m.i48*m.i82 -
0.00054847*m.i48*m.i83 + 0.0048793*m.i48*m.i84 + 0.00598068*m.i48*m.i85 + 0.00652398*m.i48*m.i86
+ 0.0036324*m.i48*m.i87 + 0.00674584*m.i48*m.i88 + 0.00354232*m.i48*m.i89 + 0.00923644*m.i48*
m.i90 + 0.01247554*m.i48*m.i91 + 0.00613734*m.i48*m.i92 + 0.000820814*m.i48*m.i93 + 0.001893008*
m.i48*m.i94 + 0.00690274*m.i48*m.i95 + 0.01623126*m.i48*m.i96 + 0.00810288*m.i48*m.i97 +
0.00702362*m.i48*m.i98 + 0.01027006*m.i48*m.i99 + 0.01224198*m.i48*m.i100 + 0.01829412*m.i49*
m.i50 + 0.0119479*m.i49*m.i51 + 0.01038228*m.i49*m.i52 + 0.01375438*m.i49*m.i53 + 0.01480194*
m.i49*m.i54 + 0.01103368*m.i49*m.i55 + 0.01464938*m.i49*m.i56 + 0.00724638*m.i49*m.i57 +
0.00857364*m.i49*m.i58 + 0.0149174*m.i49*m.i59 + 0.00407556*m.i49*m.i60 + 0.0214208*m.i49*m.i61
+ 0.01655784*m.i49*m.i62 + 0.01832206*m.i49*m.i63 + 0.0099515*m.i49*m.i64 + 0.01025382*m.i49*
m.i65 + 0.00862324*m.i49*m.i66 + 0.00863512*m.i49*m.i67 + 0.0076467*m.i49*m.i68 + 0.0220404*m.i49
*m.i69 + 0.0095053*m.i49*m.i70 + 0.01307838*m.i49*m.i71 + 0.01047408*m.i49*m.i72 + 0.01294838*
m.i49*m.i73 + 0.01471132*m.i49*m.i74 + 0.00851398*m.i49*m.i75 + 0.00575748*m.i49*m.i76 +
0.0145716*m.i49*m.i77 + 0.00460678*m.i49*m.i78 + 0.01570596*m.i49*m.i79 + 0.00985226*m.i49*m.i80
+ 0.01023644*m.i49*m.i81 + 0.00369278*m.i49*m.i82 + 0.00860988*m.i49*m.i83 + 0.01393008*m.i49*
m.i84 + 0.00839504*m.i49*m.i85 + 0.01483048*m.i49*m.i86 + 0.01071222*m.i49*m.i87 + 0.0344974*
m.i49*m.i88 + 0.00962838*m.i49*m.i89 + 0.01169418*m.i49*m.i90 + 0.01045396*m.i49*m.i91 +
0.0095482*m.i49*m.i92 + 0.00539536*m.i49*m.i93 + 0.00663516*m.i49*m.i94 + 0.01120512*m.i49*m.i95
+ 0.01484196*m.i49*m.i96 + 0.0127009*m.i49*m.i97 + 0.01167858*m.i49*m.i98 + 0.01477446*m.i49*
m.i99 + 0.01842494*m.i49*m.i100 + 0.01663076*m.i50*m.i51 + 0.021828*m.i50*m.i52 + 0.029083*m.i50*
m.i53 + 0.0230518*m.i50*m.i54 + 0.01639088*m.i50*m.i55 + 0.01308142*m.i50*m.i56 + 0.01225642*
m.i50*m.i57 + 0.0094199*m.i50*m.i58 + 0.0222192*m.i50*m.i59 + 0.00884396*m.i50*m.i60 + 0.0415716*
m.i50*m.i61 + 0.032076*m.i50*m.i62 + 0.021259*m.i50*m.i63 + 0.01432872*m.i50*m.i64 + 0.01445944*
m.i50*m.i65 + 0.01098896*m.i50*m.i66 + 0.0219658*m.i50*m.i67 + 0.01066588*m.i50*m.i68 + 0.0354768
*m.i50*m.i69 + 0.01575178*m.i50*m.i70 + 0.01775054*m.i50*m.i71 + 0.01436852*m.i50*m.i72 +
0.01353572*m.i50*m.i73 + 0.01936092*m.i50*m.i74 + 0.01665002*m.i50*m.i75 + 0.00971184*m.i50*m.i76
+ 0.01642836*m.i50*m.i77 + 0.01382168*m.i50*m.i78 + 0.0341934*m.i50*m.i79 + 0.01843884*m.i50*
m.i80 + 0.01940942*m.i50*m.i81 + 0.00527464*m.i50*m.i82 + 0.00829608*m.i50*m.i83 + 0.0138699*
m.i50*m.i84 + 0.01840912*m.i50*m.i85 + 0.0210266*m.i50*m.i86 + 0.0205286*m.i50*m.i87 + 0.0451728*
m.i50*m.i88 + 0.01361116*m.i50*m.i89 + 0.0277252*m.i50*m.i90 + 0.01783032*m.i50*m.i91 +
0.01982086*m.i50*m.i92 + 0.00668064*m.i50*m.i93 + 0.00765962*m.i50*m.i94 + 0.01980832*m.i50*m.i95
+ 0.043863*m.i50*m.i96 + 0.0241266*m.i50*m.i97 + 0.0216094*m.i50*m.i98 + 0.0284306*m.i50*m.i99
+ 0.0308476*m.i50*m.i100 + 0.01058872*m.i51*m.i52 + 0.01279448*m.i51*m.i53 + 0.0112444*m.i51*
m.i54 + 0.00990216*m.i51*m.i55 + 0.00896022*m.i51*m.i56 + 0.00513818*m.i51*m.i57 + 0.00543454*
m.i51*m.i58 + 0.01870256*m.i51*m.i59 + 0.00309084*m.i51*m.i60 + 0.01767624*m.i51*m.i61 +
0.01208918*m.i51*m.i62 + 0.01086364*m.i51*m.i63 + 0.00670046*m.i51*m.i64 + 0.00877154*m.i51*m.i65
+ 0.00557174*m.i51*m.i66 + 0.00887856*m.i51*m.i67 + 0.00260902*m.i51*m.i68 + 0.01536338*m.i51*
m.i69 + 0.00483316*m.i51*m.i70 + 0.00448378*m.i51*m.i71 + 0.0043601*m.i51*m.i72 + 0.00929772*
m.i51*m.i73 + 0.00989476*m.i51*m.i74 + 0.00528028*m.i51*m.i75 + 0.00446022*m.i51*m.i76 +
0.00845848*m.i51*m.i77 + 0.00509916*m.i51*m.i78 + 0.0204202*m.i51*m.i79 + 0.00800384*m.i51*m.i80
+ 0.00529538*m.i51*m.i81 + 0.0038846*m.i51*m.i82 + 0.00772216*m.i51*m.i83 + 0.009979*m.i51*m.i84
+ 0.010097*m.i51*m.i85 + 0.0139755*m.i51*m.i86 + 0.01131734*m.i51*m.i87 + 0.02533*m.i51*m.i88 +
0.00621034*m.i51*m.i89 + 0.01160734*m.i51*m.i90 + 0.00843408*m.i51*m.i91 + 0.00995326*m.i51*m.i92
+ 0.00455616*m.i51*m.i93 + 0.00533468*m.i51*m.i94 + 0.00929878*m.i51*m.i95 + 0.0142337*m.i51*
m.i96 + 0.01066822*m.i51*m.i97 + 0.00526832*m.i51*m.i98 + 0.01737382*m.i51*m.i99 + 0.01465192*
m.i51*m.i100 + 0.01484222*m.i52*m.i53 + 0.0171371*m.i52*m.i54 + 0.01181392*m.i52*m.i55 +
0.00600344*m.i52*m.i56 + 0.00840878*m.i52*m.i57 + 0.0071463*m.i52*m.i58 + 0.01536778*m.i52*m.i59
+ 0.0071369*m.i52*m.i60 + 0.0280962*m.i52*m.i61 + 0.0210708*m.i52*m.i62 + 0.01590808*m.i52*m.i63
+ 0.01317442*m.i52*m.i64 + 0.0091774*m.i52*m.i65 + 0.0068045*m.i52*m.i66 + 0.01047574*m.i52*
m.i67 + 0.00882116*m.i52*m.i68 + 0.01759098*m.i52*m.i69 + 0.00927774*m.i52*m.i70 + 0.01307496*
m.i52*m.i71 + 0.0115876*m.i52*m.i72 + 0.01090888*m.i52*m.i73 + 0.0112976*m.i52*m.i74 + 0.00919952
*m.i52*m.i75 + 0.00611904*m.i52*m.i76 + 0.0126521*m.i52*m.i77 + 0.0063454*m.i52*m.i78 +
0.01337936*m.i52*m.i79 + 0.01210696*m.i52*m.i80 + 0.01264942*m.i52*m.i81 + 0.00476554*m.i52*m.i82
+ 0.01346924*m.i52*m.i83 + 0.01007318*m.i52*m.i84 + 0.0127267*m.i52*m.i85 + 0.01394736*m.i52*
m.i86 + 0.0099746*m.i52*m.i87 + 0.0311922*m.i52*m.i88 + 0.0079236*m.i52*m.i89 + 0.01182038*m.i52*
m.i90 + 0.01651678*m.i52*m.i91 + 0.01241554*m.i52*m.i92 + 0.0030009*m.i52*m.i93 + 0.00533038*
m.i52*m.i94 + 0.0132025*m.i52*m.i95 + 0.0243106*m.i52*m.i96 + 0.01594256*m.i52*m.i97 + 0.01260958
*m.i52*m.i98 + 0.0156343*m.i52*m.i99 + 0.01771086*m.i52*m.i100 + 0.0153737*m.i53*m.i54 +
0.01383672*m.i53*m.i55 + 0.00715324*m.i53*m.i56 + 0.00943676*m.i53*m.i57 + 0.00990018*m.i53*m.i58
+ 0.01573366*m.i53*m.i59 + 0.00657884*m.i53*m.i60 + 0.0319944*m.i53*m.i61 + 0.029398*m.i53*m.i62
+ 0.01378922*m.i53*m.i63 + 0.01107682*m.i53*m.i64 + 0.01095454*m.i53*m.i65 + 0.00681218*m.i53*
m.i66 + 0.01767184*m.i53*m.i67 + 0.00360916*m.i53*m.i68 + 0.0271974*m.i53*m.i69 + 0.01108326*
m.i53*m.i70 + 0.00659666*m.i53*m.i71 + 0.00877032*m.i53*m.i72 + 0.01135242*m.i53*m.i73 +
0.01814298*m.i53*m.i74 + 0.01264072*m.i53*m.i75 + 0.00851402*m.i53*m.i76 + 0.01433306*m.i53*m.i77
+ 0.00973382*m.i53*m.i78 + 0.025286*m.i53*m.i79 + 0.01345344*m.i53*m.i80 + 0.01259382*m.i53*
m.i81 + 0.0027805*m.i53*m.i82 + 0.000307752*m.i53*m.i83 + 0.0107134*m.i53*m.i84 + 0.01054482*
m.i53*m.i85 + 0.0158905*m.i53*m.i86 + 0.01354224*m.i53*m.i87 + 0.0304602*m.i53*m.i88 + 0.0090225*
m.i53*m.i89 + 0.0279162*m.i53*m.i90 + 0.01259072*m.i53*m.i91 + 0.01154418*m.i53*m.i92 +
0.00696904*m.i53*m.i93 + 0.0036836*m.i53*m.i94 + 0.01605638*m.i53*m.i95 + 0.0430698*m.i53*m.i96
+ 0.01780592*m.i53*m.i97 + 0.01137144*m.i53*m.i98 + 0.0256234*m.i53*m.i99 + 0.0212362*m.i53*
m.i100 + 0.01304758*m.i54*m.i55 + 0.01398616*m.i54*m.i56 + 0.00915664*m.i54*m.i57 + 0.01070596*
m.i54*m.i58 + 0.01499*m.i54*m.i59 + 0.0070249*m.i54*m.i60 + 0.0302542*m.i54*m.i61 + 0.0244214*
m.i54*m.i62 + 0.0228504*m.i54*m.i63 + 0.01378888*m.i54*m.i64 + 0.00915648*m.i54*m.i65 + 0.0089268
*m.i54*m.i66 + 0.010488*m.i54*m.i67 + 0.00997224*m.i54*m.i68 + 0.0229576*m.i54*m.i69 + 0.01077794
*m.i54*m.i70 + 0.01825372*m.i54*m.i71 + 0.01517784*m.i54*m.i72 + 0.01258444*m.i54*m.i73 +
0.01361126*m.i54*m.i74 + 0.01029832*m.i54*m.i75 + 0.00657472*m.i54*m.i76 + 0.01463254*m.i54*m.i77
+ 0.00613474*m.i54*m.i78 + 0.01201368*m.i54*m.i79 + 0.013126*m.i54*m.i80 + 0.01505614*m.i54*
m.i81 + 0.00467872*m.i54*m.i82 + 0.01050702*m.i54*m.i83 + 0.01265914*m.i54*m.i84 + 0.01318044*
m.i54*m.i85 + 0.01473222*m.i54*m.i86 + 0.01110614*m.i54*m.i87 + 0.0261814*m.i54*m.i88 +
0.00783796*m.i54*m.i89 + 0.01294844*m.i54*m.i90 + 0.0192808*m.i54*m.i91 + 0.0139507*m.i54*m.i92
+ 0.00351228*m.i54*m.i93 + 0.0068612*m.i54*m.i94 + 0.01527036*m.i54*m.i95 + 0.0205052*m.i54*
m.i96 + 0.01688726*m.i54*m.i97 + 0.01524852*m.i54*m.i98 + 0.0174601*m.i54*m.i99 + 0.0244266*m.i54
*m.i100 + 0.00673562*m.i55*m.i56 + 0.00707698*m.i55*m.i57 + 0.00734322*m.i55*m.i58 + 0.01405048*
m.i55*m.i59 + 0.00334038*m.i55*m.i60 + 0.0222096*m.i55*m.i61 + 0.01523028*m.i55*m.i62 + 0.0102055
*m.i55*m.i63 + 0.01002768*m.i55*m.i64 + 0.01048288*m.i55*m.i65 + 0.00635712*m.i55*m.i66 +
0.00874464*m.i55*m.i67 + 0.00593524*m.i55*m.i68 + 0.01648812*m.i55*m.i69 + 0.0080135*m.i55*m.i70
+ 0.00887592*m.i55*m.i71 + 0.00847214*m.i55*m.i72 + 0.01055314*m.i55*m.i73 + 0.01129422*m.i55*
m.i74 + 0.00699156*m.i55*m.i75 + 0.00627446*m.i55*m.i76 + 0.01024268*m.i55*m.i77 + 0.00531432*
m.i55*m.i78 + 0.0098513*m.i55*m.i79 + 0.01065934*m.i55*m.i80 + 0.00967318*m.i55*m.i81 +
0.00462964*m.i55*m.i82 + 0.00334858*m.i55*m.i83 + 0.01100528*m.i55*m.i84 + 0.00975296*m.i55*m.i85
+ 0.01214742*m.i55*m.i86 + 0.00846042*m.i55*m.i87 + 0.0242638*m.i55*m.i88 + 0.0054702*m.i55*
m.i89 + 0.01124098*m.i55*m.i90 + 0.0118002*m.i55*m.i91 + 0.01077996*m.i55*m.i92 + 0.00250778*
m.i55*m.i93 + 0.00555816*m.i55*m.i94 + 0.01037364*m.i55*m.i95 + 0.0175302*m.i55*m.i96 +
0.01283314*m.i55*m.i97 + 0.01054116*m.i55*m.i98 + 0.01565736*m.i55*m.i99 + 0.01643682*m.i55*
m.i100 + 0.00563824*m.i56*m.i57 + 0.00909602*m.i56*m.i58 + 0.0103611*m.i56*m.i59 + 0.00370386*
m.i56*m.i60 + 0.01345496*m.i56*m.i61 + 0.01240364*m.i56*m.i62 + 0.01894134*m.i56*m.i63 +
0.00842246*m.i56*m.i64 + 0.00913306*m.i56*m.i65 + 0.0128603*m.i56*m.i66 + 0.00789202*m.i56*m.i67
+ 0.0049437*m.i56*m.i68 + 0.0172921*m.i56*m.i69 + 0.00742364*m.i56*m.i70 + 0.0201228*m.i56*m.i71
+ 0.0118952*m.i56*m.i72 + 0.01088666*m.i56*m.i73 + 0.0107701*m.i56*m.i74 + 0.00409754*m.i56*
m.i75 + 0.00366002*m.i56*m.i76 + 0.01236854*m.i56*m.i77 + 0.00300872*m.i56*m.i78 + 0.0135613*
m.i56*m.i79 + 0.00480806*m.i56*m.i80 + 0.01596128*m.i56*m.i81 + 0.00309564*m.i56*m.i82 +
0.01777436*m.i56*m.i83 + 0.01193038*m.i56*m.i84 + 0.00565974*m.i56*m.i85 + 0.01170688*m.i56*m.i86
+ 0.01022376*m.i56*m.i87 + 0.0163427*m.i56*m.i88 + 0.00612568*m.i56*m.i89 + 0.01115784*m.i56*
m.i90 + 0.00381802*m.i56*m.i91 + 0.0089326*m.i56*m.i92 + 0.0075443*m.i56*m.i93 + 0.00818402*m.i56
*m.i94 + 0.00966992*m.i56*m.i95 + 0.00265106*m.i56*m.i96 + 0.01019204*m.i56*m.i97 + 0.01329902*
m.i56*m.i98 + 0.01411634*m.i56*m.i99 + 0.0138779*m.i56*m.i100 + 0.00474894*m.i57*m.i58 +
0.00767974*m.i57*m.i59 + 0.0043561*m.i57*m.i60 + 0.01478228*m.i57*m.i61 + 0.00989558*m.i57*m.i62
+ 0.00895424*m.i57*m.i63 + 0.0066828*m.i57*m.i64 + 0.00578744*m.i57*m.i65 + 0.00498864*m.i57*
m.i66 + 0.00614268*m.i57*m.i67 + 0.0054738*m.i57*m.i68 + 0.01078148*m.i57*m.i69 + 0.00688352*
m.i57*m.i70 + 0.0068114*m.i57*m.i71 + 0.00628102*m.i57*m.i72 + 0.00701898*m.i57*m.i73 +
0.00848154*m.i57*m.i74 + 0.0066742*m.i57*m.i75 + 0.00450208*m.i57*m.i76 + 0.0074907*m.i57*m.i77
+ 0.00457588*m.i57*m.i78 + 0.00668368*m.i57*m.i79 + 0.00806954*m.i57*m.i80 + 0.00702352*m.i57*
m.i81 + 0.0038917*m.i57*m.i82 + 0.000255196*m.i57*m.i83 + 0.00565464*m.i57*m.i84 + 0.00629044*
m.i57*m.i85 + 0.00649918*m.i57*m.i86 + 0.00619514*m.i57*m.i87 + 0.01578988*m.i57*m.i88 +
0.00523946*m.i57*m.i89 + 0.00717944*m.i57*m.i90 + 0.0080494*m.i57*m.i91 + 0.00534064*m.i57*m.i92
+ 0.00276512*m.i57*m.i93 + 0.00412012*m.i57*m.i94 + 0.00715034*m.i57*m.i95 + 0.01300638*m.i57*
m.i96 + 0.00826382*m.i57*m.i97 + 0.0068466*m.i57*m.i98 + 0.00897648*m.i57*m.i99 + 0.01037138*
m.i57*m.i100 + 0.00646004*m.i58*m.i59 + 0.00186599*m.i58*m.i60 + 0.01246886*m.i58*m.i61 +
0.00999352*m.i58*m.i62 + 0.01381952*m.i58*m.i63 + 0.00855014*m.i58*m.i64 + 0.00465434*m.i58*m.i65
+ 0.00825376*m.i58*m.i66 + 0.00576402*m.i58*m.i67 + 0.00273548*m.i58*m.i68 + 0.01035762*m.i58*
m.i69 + 0.004824*m.i58*m.i70 + 0.01355144*m.i58*m.i71 + 0.00700278*m.i58*m.i72 + 0.00707718*m.i58
*m.i73 + 0.00851974*m.i58*m.i74 + 0.00330912*m.i58*m.i75 + 0.00401842*m.i58*m.i76 + 0.00999942*
m.i58*m.i77 + 0.00277578*m.i58*m.i78 - 0.000989722*m.i58*m.i79 + 0.00742188*m.i58*m.i80 +
0.00901096*m.i58*m.i81 + 0.000981242*m.i58*m.i82 + 0.01290728*m.i58*m.i83 + 0.0083181*m.i58*m.i84
+ 0.00517936*m.i58*m.i85 + 0.00723458*m.i58*m.i86 + 0.0044253*m.i58*m.i87 + 0.0137847*m.i58*
m.i88 + 0.001547694*m.i58*m.i89 + 0.00582604*m.i58*m.i90 + 0.00844516*m.i58*m.i91 + 0.00776542*
m.i58*m.i92 + 0.00182761*m.i58*m.i93 + 0.0023829*m.i58*m.i94 + 0.00628056*m.i58*m.i95 +
0.00690478*m.i58*m.i96 + 0.00802988*m.i58*m.i97 + 0.0076502*m.i58*m.i98 + 0.01085276*m.i58*m.i99
+ 0.0112764*m.i58*m.i100 + 0.00476864*m.i59*m.i60 + 0.025812*m.i59*m.i61 + 0.01805478*m.i59*
m.i62 + 0.0109551*m.i59*m.i63 + 0.00938908*m.i59*m.i64 + 0.01178962*m.i59*m.i65 + 0.0076335*m.i59
*m.i66 + 0.01177666*m.i59*m.i67 + 0.0070214*m.i59*m.i68 + 0.0221478*m.i59*m.i69 + 0.007972*m.i59*
m.i70 + 0.0074733*m.i59*m.i71 + 0.0088486*m.i59*m.i72 + 0.01271666*m.i59*m.i73 + 0.0141508*m.i59*
m.i74 + 0.00914726*m.i59*m.i75 + 0.00537448*m.i59*m.i76 + 0.01084216*m.i59*m.i77 + 0.0073258*
m.i59*m.i78 + 0.0246694*m.i59*m.i79 + 0.01112936*m.i59*m.i80 + 0.00816652*m.i59*m.i81 +
0.00597972*m.i59*m.i82 + 0.00662172*m.i59*m.i83 + 0.01458364*m.i59*m.i84 + 0.01429256*m.i59*m.i85
+ 0.01882618*m.i59*m.i86 + 0.01439702*m.i59*m.i87 + 0.034478*m.i59*m.i88 + 0.0080275*m.i59*m.i89
+ 0.01623632*m.i59*m.i90 + 0.01482176*m.i59*m.i91 + 0.01127396*m.i59*m.i92 + 0.00550568*m.i59*
m.i93 + 0.00798042*m.i59*m.i94 + 0.01294416*m.i59*m.i95 + 0.0212862*m.i59*m.i96 + 0.01627426*
m.i59*m.i97 + 0.0106876*m.i59*m.i98 + 0.021021*m.i59*m.i99 + 0.0210024*m.i59*m.i100 + 0.01016558*
m.i60*m.i61 + 0.00950624*m.i60*m.i62 + 0.00759926*m.i60*m.i63 + 0.00405624*m.i60*m.i64 +
0.00408766*m.i60*m.i65 + 0.001012866*m.i60*m.i66 + 0.00434698*m.i60*m.i67 + 0.00457798*m.i60*
m.i68 + 0.0080193*m.i60*m.i69 + 0.0054101*m.i60*m.i70 + 0.0046192*m.i60*m.i71 + 0.00570946*m.i60*
m.i72 + 0.00452172*m.i60*m.i73 + 0.00634618*m.i60*m.i74 + 0.00624388*m.i60*m.i75 + 0.0033187*
m.i60*m.i76 + 0.00483228*m.i60*m.i77 + 0.00344686*m.i60*m.i78 + 0.0083673*m.i60*m.i79 +
0.00518592*m.i60*m.i80 + 0.00542166*m.i60*m.i81 + 0.0031059*m.i60*m.i82 - 0.001025068*m.i60*m.i83
+ 0.0028835*m.i60*m.i84 + 0.00445296*m.i60*m.i85 + 0.00423572*m.i60*m.i86 + 0.0051822*m.i60*
m.i87 + 0.01112192*m.i60*m.i88 + 0.00500464*m.i60*m.i89 + 0.0062184*m.i60*m.i90 + 0.00602*m.i60*
m.i91 + 0.00246398*m.i60*m.i92 + 0.00288384*m.i60*m.i93 + 0.00278724*m.i60*m.i94 + 0.00626372*
m.i60*m.i95 + 0.01170704*m.i60*m.i96 + 0.00615192*m.i60*m.i97 + 0.00462302*m.i60*m.i98 +
0.00471294*m.i60*m.i99 + 0.00588256*m.i60*m.i100 + 0.0418718*m.i61*m.i62 + 0.0230598*m.i61*m.i63
+ 0.01842282*m.i61*m.i64 + 0.01721234*m.i61*m.i65 + 0.00990124*m.i61*m.i66 + 0.0216044*m.i61*
m.i67 + 0.01473812*m.i61*m.i68 + 0.0394464*m.i61*m.i69 + 0.01716988*m.i61*m.i70 + 0.0195513*m.i61
*m.i71 + 0.0219932*m.i61*m.i72 + 0.01943214*m.i61*m.i73 + 0.020134*m.i61*m.i74 + 0.0174732*m.i61*
m.i75 + 0.01174406*m.i61*m.i76 + 0.01834496*m.i61*m.i77 + 0.01109086*m.i61*m.i78 + 0.0264464*
m.i61*m.i79 + 0.01965936*m.i61*m.i80 + 0.0227546*m.i61*m.i81 + 0.00831452*m.i61*m.i82 +
0.00631004*m.i61*m.i83 + 0.01801602*m.i61*m.i84 + 0.01882322*m.i61*m.i85 + 0.026381*m.i61*m.i86
+ 0.0201168*m.i61*m.i87 + 0.0582994*m.i61*m.i88 + 0.01420784*m.i61*m.i89 + 0.0279352*m.i61*m.i90
+ 0.0260044*m.i61*m.i91 + 0.01994278*m.i61*m.i92 + 0.00558188*m.i61*m.i93 + 0.0100806*m.i61*
m.i94 + 0.0228614*m.i61*m.i95 + 0.0472894*m.i61*m.i96 + 0.0277624*m.i61*m.i97 + 0.0233414*m.i61*
m.i98 + 0.0320998*m.i61*m.i99 + 0.037788*m.i61*m.i100 + 0.0226754*m.i62*m.i63 + 0.01497022*m.i62*
m.i64 + 0.0138219*m.i62*m.i65 + 0.00559668*m.i62*m.i66 + 0.01850946*m.i62*m.i67 + 0.01131414*
m.i62*m.i68 + 0.0392412*m.i62*m.i69 + 0.01609634*m.i62*m.i70 + 0.0216048*m.i62*m.i71 + 0.0216526*
m.i62*m.i72 + 0.0150155*m.i62*m.i73 + 0.01738604*m.i62*m.i74 + 0.01374744*m.i62*m.i75 +
0.00779326*m.i62*m.i76 + 0.01429558*m.i62*m.i77 + 0.0081994*m.i62*m.i78 + 0.024889*m.i62*m.i79 +
0.01494124*m.i62*m.i80 + 0.0229898*m.i62*m.i81 + 0.00445144*m.i62*m.i82 + 0.01114552*m.i62*m.i83
+ 0.01793036*m.i62*m.i84 + 0.01444614*m.i62*m.i85 + 0.01879448*m.i62*m.i86 + 0.01466504*m.i62*
m.i87 + 0.0326604*m.i62*m.i88 + 0.01169144*m.i62*m.i89 + 0.0254028*m.i62*m.i90 + 0.01965996*m.i62
*m.i91 + 0.01132102*m.i62*m.i92 + 0.0046546*m.i62*m.i93 + 0.00635342*m.i62*m.i94 + 0.0209304*
m.i62*m.i95 + 0.040751*m.i62*m.i96 + 0.0251822*m.i62*m.i97 + 0.0238578*m.i62*m.i98 + 0.0225858*
m.i62*m.i99 + 0.0313134*m.i62*m.i100 + 0.01545704*m.i63*m.i64 + 0.01086358*m.i63*m.i65 +
0.00996396*m.i63*m.i66 + 0.00982328*m.i63*m.i67 + 0.00892944*m.i63*m.i68 + 0.024956*m.i63*m.i69
+ 0.0125295*m.i63*m.i70 + 0.0274234*m.i63*m.i71 + 0.0136346*m.i63*m.i72 + 0.0143589*m.i63*m.i73
+ 0.01281966*m.i63*m.i74 + 0.009889*m.i63*m.i75 + 0.00617316*m.i63*m.i76 + 0.0195622*m.i63*m.i77
+ 0.00502572*m.i63*m.i78 + 0.00153262*m.i63*m.i79 + 0.01706792*m.i63*m.i80 + 0.01790944*m.i63*
m.i81 + 0.001490592*m.i63*m.i82 + 0.0267338*m.i63*m.i83 + 0.01586496*m.i63*m.i84 + 0.01166282*
m.i63*m.i85 + 0.01568614*m.i63*m.i86 + 0.00753188*m.i63*m.i87 + 0.0417782*m.i63*m.i88 + 0.0112216
*m.i63*m.i89 + 0.00371206*m.i63*m.i90 + 0.01829192*m.i63*m.i91 + 0.01841964*m.i63*m.i92 +
0.00206622*m.i63*m.i93 + 0.00505172*m.i63*m.i94 + 0.01487174*m.i63*m.i95 + 0.01414348*m.i63*m.i96
+ 0.0156802*m.i63*m.i97 + 0.01823426*m.i63*m.i98 + 0.01258764*m.i63*m.i99 + 0.01994098*m.i63*
m.i100 + 0.00854654*m.i64*m.i65 + 0.01079866*m.i64*m.i66 + 0.00602732*m.i64*m.i67 + 0.00921276*
m.i64*m.i68 + 0.01464414*m.i64*m.i69 + 0.00664932*m.i64*m.i70 + 0.0144736*m.i64*m.i71 +
0.00978338*m.i64*m.i72 + 0.00959208*m.i64*m.i73 + 0.0112566*m.i64*m.i74 + 0.00671142*m.i64*m.i75
+ 0.00408206*m.i64*m.i76 + 0.01167568*m.i64*m.i77 + 0.00375274*m.i64*m.i78 + 0.00404336*m.i64*
m.i79 + 0.00963238*m.i64*m.i80 + 0.0122908*m.i64*m.i81 + 0.001806772*m.i64*m.i82 + 0.01577266*
m.i64*m.i83 + 0.01128074*m.i64*m.i84 + 0.0095111*m.i64*m.i85 + 0.0097723*m.i64*m.i86 + 0.00346618
*m.i64*m.i87 + 0.01289324*m.i64*m.i88 + 0.00453186*m.i64*m.i89 + 0.0078486*m.i64*m.i90 +
0.01310134*m.i64*m.i91 + 0.00985686*m.i64*m.i92 + 0.00257788*m.i64*m.i93 + 0.00260324*m.i64*m.i94
+ 0.0108877*m.i64*m.i95 + 0.01349616*m.i64*m.i96 + 0.01306042*m.i64*m.i97 + 0.01405114*m.i64*
m.i98 + 0.0115142*m.i64*m.i99 + 0.01728302*m.i64*m.i100 + 0.0048225*m.i65*m.i66 + 0.00871696*
m.i65*m.i67 + 0.00504014*m.i65*m.i68 + 0.01673796*m.i65*m.i69 + 0.00728674*m.i65*m.i70 +
0.00969202*m.i65*m.i71 + 0.0082057*m.i65*m.i72 + 0.0103704*m.i65*m.i73 + 0.00998004*m.i65*m.i74
+ 0.00672722*m.i65*m.i75 + 0.00633346*m.i65*m.i76 + 0.00774852*m.i65*m.i77 + 0.00440922*m.i65*
m.i78 + 0.01343946*m.i65*m.i79 + 0.00798994*m.i65*m.i80 + 0.01225132*m.i65*m.i81 + 0.00444398*
m.i65*m.i82 + 0.00673302*m.i65*m.i83 + 0.0109598*m.i65*m.i84 + 0.00683186*m.i65*m.i85 +
0.01183874*m.i65*m.i86 + 0.0090907*m.i65*m.i87 + 0.0283952*m.i65*m.i88 + 0.00785096*m.i65*m.i89
+ 0.01125058*m.i65*m.i90 + 0.00510526*m.i65*m.i91 + 0.00837574*m.i65*m.i92 + 0.00385798*m.i65*
m.i93 + 0.00464904*m.i65*m.i94 + 0.00896456*m.i65*m.i95 + 0.0160694*m.i65*m.i96 + 0.0113557*m.i65
*m.i97 + 0.01155766*m.i65*m.i98 + 0.01443876*m.i65*m.i99 + 0.01238186*m.i65*m.i100 + 0.00548536*
m.i66*m.i67 + 0.00630564*m.i66*m.i68 + 0.00939978*m.i66*m.i69 + 0.00431468*m.i66*m.i70 +
0.01542742*m.i66*m.i71 + 0.0071665*m.i66*m.i72 + 0.00755022*m.i66*m.i73 + 0.00838922*m.i66*m.i74
+ 0.00386922*m.i66*m.i75 + 0.001951058*m.i66*m.i76 + 0.01146338*m.i66*m.i77 + 0.001980078*m.i66*
m.i78 + 0.00444902*m.i66*m.i79 + 0.00356762*m.i66*m.i80 + 0.00956806*m.i66*m.i81 - 0.00023183*
m.i66*m.i82 + 0.01703884*m.i66*m.i83 + 0.01002452*m.i66*m.i84 + 0.0062546*m.i66*m.i85 +
0.00563304*m.i66*m.i86 + 0.00514984*m.i66*m.i87 + 0.01908326*m.i66*m.i88 + 0.00457928*m.i66*m.i89
+ 0.003995*m.i66*m.i90 + 0.0080501*m.i66*m.i91 + 0.00810108*m.i66*m.i92 + 0.00328186*m.i66*m.i93
+ 0.00369064*m.i66*m.i94 + 0.0058103*m.i66*m.i95 + 0.00438208*m.i66*m.i96 + 0.00867896*m.i66*
m.i97 + 0.0114927*m.i66*m.i98 + 0.01103938*m.i66*m.i99 + 0.00981454*m.i66*m.i100 + 0.00310364*
m.i67*m.i68 + 0.0195756*m.i67*m.i69 + 0.00833924*m.i67*m.i70 + 0.01122*m.i67*m.i71 + 0.00862168*
m.i67*m.i72 + 0.00711248*m.i67*m.i73 + 0.00958304*m.i67*m.i74 + 0.00671208*m.i67*m.i75 +
0.00667666*m.i67*m.i76 + 0.00639998*m.i67*m.i77 + 0.00746068*m.i67*m.i78 + 0.0164696*m.i67*m.i79
+ 0.00952472*m.i67*m.i80 + 0.01054908*m.i67*m.i81 + 0.00295206*m.i67*m.i82 + 0.00786538*m.i67*
m.i83 + 0.00812566*m.i67*m.i84 + 0.00774908*m.i67*m.i85 + 0.01084866*m.i67*m.i86 + 0.01179554*
m.i67*m.i87 + 0.022894*m.i67*m.i88 + 0.00619526*m.i67*m.i89 + 0.01517056*m.i67*m.i90 + 0.00567344
*m.i67*m.i91 + 0.00901318*m.i67*m.i92 + 0.00388018*m.i67*m.i93 + 0.0036956*m.i67*m.i94 + 0.008896
*m.i67*m.i95 + 0.021896*m.i67*m.i96 + 0.01327636*m.i67*m.i97 + 0.0109*m.i67*m.i98 + 0.0178563*
m.i67*m.i99 + 0.01328366*m.i67*m.i100 + 0.01361686*m.i68*m.i69 + 0.00764086*m.i68*m.i70 +
0.00794036*m.i68*m.i71 + 0.01077146*m.i68*m.i72 + 0.00701056*m.i68*m.i73 + 0.00764336*m.i68*m.i74
+ 0.01085638*m.i68*m.i75 + 0.00267198*m.i68*m.i76 + 0.00622086*m.i68*m.i77 + 0.0026961*m.i68*
m.i78 + 0.01283914*m.i68*m.i79 + 0.00651186*m.i68*m.i80 + 0.00444824*m.i68*m.i81 + 0.00245108*
m.i68*m.i82 - 0.000724804*m.i68*m.i83 + 0.01001432*m.i68*m.i84 + 0.00659112*m.i68*m.i85 +
0.00798872*m.i68*m.i86 + 0.00378278*m.i68*m.i87 + 0.0249894*m.i68*m.i88 + 0.00935338*m.i68*m.i89
+ 0.00406214*m.i68*m.i90 + 0.01547864*m.i68*m.i91 + 0.0026383*m.i68*m.i92 + 0.001956366*m.i68*
m.i93 + 0.00433104*m.i68*m.i94 + 0.0086862*m.i68*m.i95 + 0.00871594*m.i68*m.i96 + 0.00917804*
m.i68*m.i97 + 0.01147728*m.i68*m.i98 + 0.000904318*m.i68*m.i99 + 0.0095902*m.i68*m.i100 +
0.01716134*m.i69*m.i70 + 0.0210128*m.i69*m.i71 + 0.01970512*m.i69*m.i72 + 0.01824406*m.i69*m.i73
+ 0.0202038*m.i69*m.i74 + 0.0166321*m.i69*m.i75 + 0.0080034*m.i69*m.i76 + 0.01785698*m.i69*m.i77
+ 0.00956708*m.i69*m.i78 + 0.0273938*m.i69*m.i79 + 0.01578286*m.i69*m.i80 + 0.01986548*m.i69*
m.i81 + 0.00472512*m.i69*m.i82 + 0.0064477*m.i69*m.i83 + 0.0205866*m.i69*m.i84 + 0.01485404*m.i69
*m.i85 + 0.0219926*m.i69*m.i86 + 0.01726592*m.i69*m.i87 + 0.044296*m.i69*m.i88 + 0.01519388*m.i69
*m.i89 + 0.0245318*m.i69*m.i90 + 0.019668*m.i69*m.i91 + 0.01322886*m.i69*m.i92 + 0.00622812*m.i69
*m.i93 + 0.00886068*m.i69*m.i94 + 0.0207946*m.i69*m.i95 + 0.0369544*m.i69*m.i96 + 0.0252064*m.i69
*m.i97 + 0.0246794*m.i69*m.i98 + 0.0240826*m.i69*m.i99 + 0.0322226*m.i69*m.i100 + 0.01122678*
m.i70*m.i71 + 0.00985708*m.i70*m.i72 + 0.00817346*m.i70*m.i73 + 0.01042594*m.i70*m.i74 +
0.0087512*m.i70*m.i75 + 0.00587552*m.i70*m.i76 + 0.00956692*m.i70*m.i77 + 0.00604702*m.i70*m.i78
+ 0.01012786*m.i70*m.i79 + 0.00894572*m.i70*m.i80 + 0.00937532*m.i70*m.i81 + 0.0040741*m.i70*
m.i82 + 0.001290572*m.i70*m.i83 + 0.00820512*m.i70*m.i84 + 0.00683756*m.i70*m.i85 + 0.00768078*
m.i70*m.i86 + 0.00827048*m.i70*m.i87 + 0.01990564*m.i70*m.i88 + 0.007123*m.i70*m.i89 + 0.00998564
*m.i70*m.i90 + 0.00953688*m.i70*m.i91 + 0.00558782*m.i70*m.i92 + 0.00342686*m.i70*m.i93 +
0.00568486*m.i70*m.i94 + 0.00914938*m.i70*m.i95 + 0.01630104*m.i70*m.i96 + 0.01110616*m.i70*m.i97
+ 0.010247*m.i70*m.i98 + 0.00833958*m.i70*m.i99 + 0.01265252*m.i70*m.i100 + 0.0220254*m.i71*
m.i72 + 0.0095213*m.i71*m.i73 + 0.01209936*m.i71*m.i74 + 0.00527094*m.i71*m.i75 + 0.00557218*
m.i71*m.i76 + 0.01262004*m.i71*m.i77 + 0.0037353*m.i71*m.i78 - 0.000223588*m.i71*m.i79 +
0.00801532*m.i71*m.i80 + 0.0286786*m.i71*m.i81 + 0.000788336*m.i71*m.i82 + 0.0387752*m.i71*m.i83
+ 0.01552284*m.i71*m.i84 + 0.00720994*m.i71*m.i85 + 0.01148132*m.i71*m.i86 + 0.00870698*m.i71*
m.i87 + 0.028675*m.i71*m.i88 + 0.00544718*m.i71*m.i89 + 0.00673884*m.i71*m.i90 + 0.01008984*m.i71
*m.i91 + 0.01241834*m.i71*m.i92 + 0.0025495*m.i71*m.i93 + 0.00280272*m.i71*m.i94 + 0.00947552*
m.i71*m.i95 + 0.0070495*m.i71*m.i96 + 0.0170916*m.i71*m.i97 + 0.0269036*m.i71*m.i98 + 0.01506306*
m.i71*m.i99 + 0.0206782*m.i71*m.i100 + 0.00925426*m.i72*m.i73 + 0.00967792*m.i72*m.i74 +
0.00847338*m.i72*m.i75 + 0.005213*m.i72*m.i76 + 0.00908662*m.i72*m.i77 + 0.00316872*m.i72*m.i78
+ 0.00898138*m.i72*m.i79 + 0.0069179*m.i72*m.i80 + 0.0151281*m.i72*m.i81 + 0.00348424*m.i72*
m.i82 + 0.01111986*m.i72*m.i83 + 0.01165966*m.i72*m.i84 + 0.0064802*m.i72*m.i85 + 0.00959246*
m.i72*m.i86 + 0.0084611*m.i72*m.i87 + 0.0240956*m.i72*m.i88 + 0.00687054*m.i72*m.i89 + 0.0094553*
m.i72*m.i90 + 0.0110757*m.i72*m.i91 + 0.00543508*m.i72*m.i92 + 0.0037306*m.i72*m.i93 + 0.00500972
*m.i72*m.i94 + 0.01005818*m.i72*m.i95 + 0.01294332*m.i72*m.i96 + 0.01344022*m.i72*m.i97 +
0.01593132*m.i72*m.i98 + 0.0093216*m.i72*m.i99 + 0.01640118*m.i72*m.i100 + 0.01198598*m.i73*m.i74
+ 0.00750544*m.i73*m.i75 + 0.0050216*m.i73*m.i76 + 0.01285904*m.i73*m.i77 + 0.00339452*m.i73*
m.i78 + 0.00891788*m.i73*m.i79 + 0.00948614*m.i73*m.i80 + 0.00944098*m.i73*m.i81 + 0.00409826*
m.i73*m.i82 + 0.00488372*m.i73*m.i83 + 0.01210326*m.i73*m.i84 + 0.00827726*m.i73*m.i85 +
0.0117403*m.i73*m.i86 + 0.00812428*m.i73*m.i87 + 0.031499*m.i73*m.i88 + 0.00909586*m.i73*m.i89 +
0.00829156*m.i73*m.i90 + 0.0112196*m.i73*m.i91 + 0.00781298*m.i73*m.i92 + 0.00380884*m.i73*m.i93
+ 0.00624296*m.i73*m.i94 + 0.01005016*m.i73*m.i95 + 0.01437472*m.i73*m.i96 + 0.011277*m.i73*
m.i97 + 0.01058622*m.i73*m.i98 + 0.0121454*m.i73*m.i99 + 0.01373088*m.i73*m.i100 + 0.01080198*
m.i74*m.i75 + 0.00656182*m.i74*m.i76 + 0.01437682*m.i74*m.i77 + 0.00746976*m.i74*m.i78 +
0.0158128*m.i74*m.i79 + 0.01161714*m.i74*m.i80 + 0.01098286*m.i74*m.i81 + 0.00409892*m.i74*m.i82
+ 0.00263806*m.i74*m.i83 + 0.01368742*m.i74*m.i84 + 0.00966578*m.i74*m.i85 + 0.0126469*m.i74*
m.i86 + 0.0097362*m.i74*m.i87 + 0.0236752*m.i74*m.i88 + 0.0087263*m.i74*m.i89 + 0.01653132*m.i74*
m.i90 + 0.01259886*m.i74*m.i91 + 0.0074886*m.i74*m.i92 + 0.00612882*m.i74*m.i93 + 0.00553384*
m.i74*m.i94 + 0.01256076*m.i74*m.i95 + 0.022837*m.i74*m.i96 + 0.01489052*m.i74*m.i97 + 0.0138654*
m.i74*m.i98 + 0.01608016*m.i74*m.i99 + 0.0185439*m.i74*m.i100 + 0.00483222*m.i75*m.i76 +
0.0084646*m.i75*m.i77 + 0.00605234*m.i75*m.i78 + 0.01627408*m.i75*m.i79 + 0.00784142*m.i75*m.i80
+ 0.00564276*m.i75*m.i81 + 0.00324588*m.i75*m.i82 - 0.00767236*m.i75*m.i83 + 0.00699372*m.i75*
m.i84 + 0.00737608*m.i75*m.i85 + 0.00954642*m.i75*m.i86 + 0.00823136*m.i75*m.i87 + 0.0262748*
m.i75*m.i88 + 0.00948902*m.i75*m.i89 + 0.01252876*m.i75*m.i90 + 0.01423104*m.i75*m.i91 +
0.00521492*m.i75*m.i92 + 0.00397698*m.i75*m.i93 + 0.00422896*m.i75*m.i94 + 0.01025216*m.i75*m.i95
+ 0.021456*m.i75*m.i96 + 0.01000128*m.i75*m.i97 + 0.00860654*m.i75*m.i98 + 0.0079023*m.i75*m.i99
+ 0.01223272*m.i75*m.i100 + 0.00508036*m.i76*m.i77 + 0.00440326*m.i76*m.i78 + 0.00722936*m.i76*
m.i79 + 0.00592748*m.i76*m.i80 + 0.00543106*m.i76*m.i81 + 0.00352072*m.i76*m.i82 + 0.00282876*
m.i76*m.i83 + 0.00421804*m.i76*m.i84 + 0.00327576*m.i76*m.i85 + 0.00605002*m.i76*m.i86 +
0.00724932*m.i76*m.i87 + 0.01581762*m.i76*m.i88 + 0.00366428*m.i76*m.i89 + 0.00812736*m.i76*m.i90
+ 0.00388382*m.i76*m.i91 + 0.0047062*m.i76*m.i92 + 0.00287772*m.i76*m.i93 + 0.00297876*m.i76*
m.i94 + 0.00459654*m.i76*m.i95 + 0.01070758*m.i76*m.i96 + 0.0061617*m.i76*m.i97 + 0.00324936*
m.i76*m.i98 + 0.00970994*m.i76*m.i99 + 0.00690694*m.i76*m.i100 + 0.00393188*m.i77*m.i78 +
0.00648912*m.i77*m.i79 + 0.00880144*m.i77*m.i80 + 0.00990674*m.i77*m.i81 + 0.00277832*m.i77*m.i82
+ 0.01369158*m.i77*m.i83 + 0.01108874*m.i77*m.i84 + 0.00804488*m.i77*m.i85 + 0.0100624*m.i77*
m.i86 + 0.00868852*m.i77*m.i87 + 0.0320896*m.i77*m.i88 + 0.00865688*m.i77*m.i89 + 0.00846622*
m.i77*m.i90 + 0.01262084*m.i77*m.i91 + 0.01111726*m.i77*m.i92 + 0.00462154*m.i77*m.i93 +
0.00718072*m.i77*m.i94 + 0.01147082*m.i77*m.i95 + 0.0176254*m.i77*m.i96 + 0.01224072*m.i77*m.i97
+ 0.00939734*m.i77*m.i98 + 0.012534*m.i77*m.i99 + 0.01295098*m.i77*m.i100 + 0.00907912*m.i78*
m.i79 + 0.00720642*m.i78*m.i80 + 0.00426806*m.i78*m.i81 + 0.0028332*m.i78*m.i82 - 0.001354666*
m.i78*m.i83 + 0.00318608*m.i78*m.i84 + 0.00627032*m.i78*m.i85 + 0.00574778*m.i78*m.i86 +
0.00663794*m.i78*m.i87 + 0.00493084*m.i78*m.i88 + 0.00225816*m.i78*m.i89 + 0.01063042*m.i78*m.i90
+ 0.0063342*m.i78*m.i91 + 0.00541402*m.i78*m.i92 + 0.00268782*m.i78*m.i93 + 0.00290288*m.i78*
m.i94 + 0.00588184*m.i78*m.i95 + 0.01436716*m.i78*m.i96 + 0.00728756*m.i78*m.i97 + 0.00442972*
m.i78*m.i98 + 0.00924454*m.i78*m.i99 + 0.00979098*m.i78*m.i100 + 0.0060581*m.i79*m.i80 +
0.00755126*m.i79*m.i81 + 0.00637932*m.i79*m.i82 - 0.00105651*m.i79*m.i83 + 0.01349704*m.i79*m.i84
+ 0.01178354*m.i79*m.i85 + 0.0220208*m.i79*m.i86 + 0.0245836*m.i79*m.i87 + 0.0524002*m.i79*m.i88
+ 0.0230428*m.i79*m.i89 + 0.0314514*m.i79*m.i90 + 0.00636018*m.i79*m.i91 + 0.0061917*m.i79*m.i92
+ 0.01207768*m.i79*m.i93 + 0.00753416*m.i79*m.i94 + 0.01719794*m.i79*m.i95 + 0.0367202*m.i79*
m.i96 + 0.01636496*m.i79*m.i97 + 0.01053626*m.i79*m.i98 + 0.0223148*m.i79*m.i99 + 0.0125583*m.i79
*m.i100 + 0.00731124*m.i80*m.i81 + 0.0043053*m.i80*m.i82 + 0.00250064*m.i80*m.i83 + 0.00942746*
m.i80*m.i84 + 0.01109824*m.i80*m.i85 + 0.0094077*m.i80*m.i86 + 0.00584688*m.i80*m.i87 +
0.01773876*m.i80*m.i88 + 0.00587054*m.i80*m.i89 + 0.0073899*m.i80*m.i90 + 0.01217556*m.i80*m.i91
+ 0.0092825*m.i80*m.i92 + 0.001672258*m.i80*m.i93 + 0.00403362*m.i80*m.i94 + 0.01001412*m.i80*
m.i95 + 0.01641906*m.i80*m.i96 + 0.01159292*m.i80*m.i97 + 0.01062798*m.i80*m.i98 + 0.00967468*
m.i80*m.i99 + 0.0140493*m.i80*m.i100 + 0.00288116*m.i81*m.i82 + 0.022981*m.i81*m.i83 + 0.01105584
*m.i81*m.i84 + 0.00722284*m.i81*m.i85 + 0.01178602*m.i81*m.i86 + 0.00945868*m.i81*m.i87 +
0.024973*m.i81*m.i88 + 0.00575624*m.i81*m.i89 + 0.01415098*m.i81*m.i90 + 0.0066048*m.i81*m.i91 +
0.01072344*m.i81*m.i92 + 0.00322326*m.i81*m.i93 + 0.00351188*m.i81*m.i94 + 0.01127788*m.i81*m.i95
+ 0.01956074*m.i81*m.i96 + 0.01617428*m.i81*m.i97 + 0.0227228*m.i81*m.i98 + 0.01855842*m.i81*
m.i99 + 0.01991896*m.i81*m.i100 - 0.00333172*m.i82*m.i83 + 0.00228114*m.i82*m.i84 + 0.00336158*
m.i82*m.i85 + 0.00354748*m.i82*m.i86 + 0.00514572*m.i82*m.i87 + 0.00636398*m.i82*m.i88 +
0.00276272*m.i82*m.i89 + 0.00394504*m.i82*m.i90 + 0.00242814*m.i82*m.i91 + 0.00151634*m.i82*m.i92
+ 0.00205258*m.i82*m.i93 + 0.00416174*m.i82*m.i94 + 0.0036601*m.i82*m.i95 + 0.00573294*m.i82*
m.i96 + 0.0040347*m.i82*m.i97 + 0.001040396*m.i82*m.i98 + 0.00519918*m.i82*m.i99 + 0.00479088*
m.i82*m.i100 + 0.01497528*m.i83*m.i84 + 0.0032291*m.i83*m.i85 + 0.01011148*m.i83*m.i86 +
0.00471364*m.i83*m.i87 + 0.0246434*m.i83*m.i88 + 0.000996878*m.i83*m.i89 - 0.00262512*m.i83*m.i90
- 0.000789784*m.i83*m.i91 + 0.01304756*m.i83*m.i92 + 0.000531142*m.i83*m.i93 - 0.000443948*m.i83
*m.i94 + 0.00279848*m.i83*m.i95 - 0.0065326*m.i83*m.i96 + 0.01221224*m.i83*m.i97 + 0.01799712*
m.i83*m.i98 + 0.0158385*m.i83*m.i99 + 0.0071337*m.i83*m.i100 + 0.00892568*m.i84*m.i85 +
0.01364388*m.i84*m.i86 + 0.0072533*m.i84*m.i87 + 0.0326884*m.i84*m.i88 + 0.00896504*m.i84*m.i89
+ 0.00823562*m.i84*m.i90 + 0.0125821*m.i84*m.i91 + 0.00787816*m.i84*m.i92 + 0.00249586*m.i84*
m.i93 + 0.00519262*m.i84*m.i94 + 0.01044988*m.i84*m.i95 + 0.01107886*m.i84*m.i96 + 0.0139867*
m.i84*m.i97 + 0.01596046*m.i84*m.i98 + 0.01218826*m.i84*m.i99 + 0.01543212*m.i84*m.i100 +
0.00990954*m.i85*m.i86 + 0.00725662*m.i85*m.i87 + 0.0133432*m.i85*m.i88 + 0.00507396*m.i85*m.i89
+ 0.00930526*m.i85*m.i90 + 0.01462284*m.i85*m.i91 + 0.01055408*m.i85*m.i92 + 0.00190258*m.i85*
m.i93 + 0.00468802*m.i85*m.i94 + 0.0107648*m.i85*m.i95 + 0.01646608*m.i85*m.i96 + 0.01215728*
m.i85*m.i97 + 0.01028698*m.i85*m.i98 + 0.01183266*m.i85*m.i99 + 0.01660366*m.i85*m.i100 +
0.0120373*m.i86*m.i87 + 0.0422718*m.i86*m.i88 + 0.00969238*m.i86*m.i89 + 0.01765146*m.i86*m.i90
+ 0.01429788*m.i86*m.i91 + 0.0124585*m.i86*m.i92 + 0.0040945*m.i86*m.i93 + 0.0046898*m.i86*m.i94
+ 0.01232074*m.i86*m.i95 + 0.0222548*m.i86*m.i96 + 0.0145479*m.i86*m.i97 + 0.0128277*m.i86*m.i98
+ 0.0192244*m.i86*m.i99 + 0.01947568*m.i86*m.i100 + 0.032904*m.i87*m.i88 + 0.0084843*m.i87*m.i89
+ 0.01591916*m.i87*m.i90 + 0.0059879*m.i87*m.i91 + 0.00789644*m.i87*m.i92 + 0.00607862*m.i87*
m.i93 + 0.00667478*m.i87*m.i94 + 0.0088746*m.i87*m.i95 + 0.01963916*m.i87*m.i96 + 0.01115822*
m.i87*m.i97 + 0.0065973*m.i87*m.i98 + 0.01821046*m.i87*m.i99 + 0.01269924*m.i87*m.i100 + 0.04164*
m.i88*m.i89 + 0.01700894*m.i88*m.i90 + 0.0282218*m.i88*m.i91 + 0.0247666*m.i88*m.i92 + 0.00860626
*m.i88*m.i93 + 0.0146832*m.i88*m.i94 + 0.0207292*m.i88*m.i95 + 0.0482992*m.i88*m.i96 + 0.026772*
m.i88*m.i97 + 0.0300758*m.i88*m.i98 + 0.0329128*m.i88*m.i99 + 0.01375988*m.i88*m.i100 +
0.00594302*m.i89*m.i90 + 0.00801468*m.i89*m.i91 + 0.00437824*m.i89*m.i92 + 0.00302882*m.i89*m.i93
+ 0.0041304*m.i89*m.i94 + 0.00803522*m.i89*m.i95 + 0.01620516*m.i89*m.i96 + 0.00836644*m.i89*
m.i97 + 0.01022328*m.i89*m.i98 + 0.0069101*m.i89*m.i99 + 0.00464412*m.i89*m.i100 + 0.01014268*
m.i90*m.i91 + 0.00890216*m.i90*m.i92 + 0.00857494*m.i90*m.i93 + 0.00416286*m.i90*m.i94 +
0.01435266*m.i90*m.i95 + 0.038709*m.i90*m.i96 + 0.01593092*m.i90*m.i97 + 0.0108455*m.i90*m.i98 +
0.0247362*m.i90*m.i99 + 0.0239224*m.i90*m.i100 + 0.01172504*m.i91*m.i92 - 3.25928e-5*m.i91*m.i93
+ 0.00582154*m.i91*m.i94 + 0.01455814*m.i91*m.i95 + 0.0217724*m.i91*m.i96 + 0.01520358*m.i91*
m.i97 + 0.01361584*m.i91*m.i98 + 0.01107608*m.i91*m.i99 + 0.0218082*m.i91*m.i100 + 0.000834202*
m.i92*m.i93 + 0.00361846*m.i92*m.i94 + 0.00964536*m.i92*m.i95 + 0.01621624*m.i92*m.i96 +
0.01139352*m.i92*m.i97 + 0.01032652*m.i92*m.i98 + 0.01663626*m.i92*m.i99 + 0.01551254*m.i92*
m.i100 + 0.00302326*m.i93*m.i94 + 0.0039602*m.i93*m.i95 + 0.0070366*m.i93*m.i96 + 0.0035814*m.i93
*m.i97 + 0.00156313*m.i93*m.i98 + 0.00599576*m.i93*m.i99 + 0.00427812*m.i93*m.i100 + 0.00550244*
m.i94*m.i95 + 0.00558508*m.i94*m.i96 + 0.0059384*m.i94*m.i97 + 0.00357124*m.i94*m.i98 + 0.0064057
*m.i94*m.i99 + 0.00623724*m.i94*m.i100 + 0.0227304*m.i95*m.i96 + 0.01445112*m.i95*m.i97 +
0.01257804*m.i95*m.i98 + 0.01368382*m.i95*m.i99 + 0.01773414*m.i95*m.i100 + 0.0257114*m.i96*m.i97
+ 0.01933344*m.i96*m.i98 + 0.0317874*m.i96*m.i99 + 0.0306278*m.i96*m.i100 + 0.01873902*m.i97*
m.i98 + 0.01912542*m.i97*m.i99 + 0.0219022*m.i97*m.i100 + 0.01388668*m.i98*m.i99 + 0.0207524*
m.i98*m.i100 + 0.0256994*m.i99*m.i100 - m.x101 <= 0)
m.c2 = Constraint(expr= 0.00411438*m.i1 - 0.0186628*m.i2 - 0.0124176*m.i3 - 0.00587102*m.i4 - 0.0137519*m.i5
- 0.0174501*m.i6 - 0.0143449*m.i7 - 0.135908*m.i8 + 0.0183991*m.i9 - 0.00059102*m.i10
- 0.0458625*m.i11 + 0.00263166*m.i12 - 0.00331355*m.i13 - 0.0367972*m.i14 - 0.0139845*m.i15
- 0.0094868*m.i16 + 0.0248532*m.i17 - 0.0094023*m.i18 + 0.0023017*m.i19 - 0.0464684*m.i20
- 0.00593531*m.i21 - 0.00567252*m.i22 - 0.0053525*m.i23 - 0.0195131*m.i24 - 0.00539281*m.i25
+ 0.00014069*m.i26 - 0.0192418*m.i27 - 0.0094094*m.i28 - 0.00628791*m.i29 - 0.0640481*m.i30
+ 0.00479685*m.i31 - 0.00773524*m.i32 - 0.0181879*m.i33 - 0.0252863*m.i34 - 0.0138439*m.i35
- 0.0175713*m.i36 - 0.0087821*m.i37 - 0.0159321*m.i38 - 0.0116042*m.i39 + 0.0157787*m.i40
- 0.0202007*m.i41 - 0.0126018*m.i42 - 0.00304129*m.i43 - 0.00993*m.i44 - 0.0128447*m.i45
- 0.00181865*m.i46 - 0.0158853*m.i47 - 0.00510726*m.i48 - 0.00213898*m.i49 - 0.030707*m.i50
+ 0.00148868*m.i51 - 0.0125947*m.i52 - 0.00471196*m.i53 - 0.0148213*m.i54 - 0.00451418*m.i55
+ 0.00107459*m.i56 - 0.00272748*m.i57 + 0.00192127*m.i58 - 0.00643836*m.i59 + 0.00659625*m.i60
- 0.0160773*m.i61 - 0.0311089*m.i62 - 0.0220835*m.i63 - 0.0123205*m.i64 - 0.00688571*m.i65
- 0.0329356*m.i66 + 0.00327885*m.i67 - 0.009863*m.i68 - 0.0161333*m.i69 - 0.00415196*m.i70
- 0.0234616*m.i71 - 0.00105996*m.i72 + 0.00381383*m.i73 - 0.00073674*m.i74 - 0.0169568*m.i75
- 0.00559808*m.i76 - 0.0098104*m.i77 - 0.00457398*m.i78 - 0.0417583*m.i79 + 0.00283802*m.i80
- 0.0168204*m.i81 - 0.00228309*m.i82 - 0.0197823*m.i83 - 0.0100875*m.i84 - 0.0118258*m.i85
- 0.00342073*m.i86 - 0.00803049*m.i87 + 0.0213439*m.i88 - 0.0213604*m.i89 - 0.0139007*m.i90
- 0.0183623*m.i91 - 0.012037*m.i92 - 0.00197365*m.i93 - 0.0102456*m.i94 - 0.00369496*m.i95
- 0.00582019*m.i96 - 0.00227006*m.i97 - 0.0248562*m.i98 - 0.0205847*m.i99 - 0.0221142*m.i100
>= 0)
m.c3 = Constraint(expr= 52.59*m.i1 + 28.87*m.i2 + 29.19*m.i3 + 46.55*m.i4 + 24.26*m.i5 + 42.53*m.i6 + 40.53*m.i7
+ 79.56*m.i8 + 108.9*m.i9 + 79.06*m.i10 + 20.15*m.i11 + 35.64*m.i12 + 39.55*m.i13 + 14.32*m.i14
+ 26.41*m.i15 + 62.48*m.i16 + 254.3*m.i17 + 32.42*m.i18 + 24.84*m.i19 + 10.1*m.i20 + 21.2*m.i21
+ 40.25*m.i22 + 17.32*m.i23 + 60.92*m.i24 + 54.73*m.i25 + 78.62*m.i26 + 49.24*m.i27
+ 68.19*m.i28 + 50.3*m.i29 + 3.83*m.i30 + 18.27*m.i31 + 59.67*m.i32 + 12.21*m.i33 + 38.09*m.i34
+ 71.72*m.i35 + 23.6*m.i36 + 70.71*m.i37 + 56.98*m.i38 + 34.47*m.i39 + 10.23*m.i40 + 59.19*m.i41
+ 58.61*m.i42 + 445.29*m.i43 + 131.69*m.i44 + 34.24*m.i45 + 43.11*m.i46 + 25.18*m.i47 + 28*m.i48
+ 19.43*m.i49 + 14.33*m.i50 + 28.41*m.i51 + 74.5*m.i52 + 36.54*m.i53 + 38.99*m.i54 + 43.15*m.i55
+ 199.55*m.i56 + 59.07*m.i57 + 123.55*m.i58 + 20.55*m.i59 + 66.72*m.i60 + 37.95*m.i61
+ 27.62*m.i62 + 23.21*m.i63 + 36.09*m.i64 + 23.09*m.i65 + 46.54*m.i66 + 67.89*m.i67
+ 34.83*m.i68 + 11.96*m.i69 + 45.77*m.i70 + 32.91*m.i71 + 77.37*m.i72 + 21.46*m.i73
+ 53.11*m.i74 + 14.29*m.i75 + 61.13*m.i76 + 32.79*m.i77 + 59.84*m.i78 + 6.59*m.i79 + 14.06*m.i80
+ 55.29*m.i81 + 33.33*m.i82 + 4.24*m.i83 + 23.21*m.i84 + 47.85*m.i85 + 48.99*m.i86 + 57.46*m.i87
+ 28.87*m.i88 + 24.6*m.i89 + 22.26*m.i90 + 28.31*m.i91 + 26.67*m.i92 + 48.1*m.i93 + 28.01*m.i94
+ 64.85*m.i95 + 25.54*m.i96 + 31.47*m.i97 + 18.31*m.i98 + 35.06*m.i99 + 8.06*m.i100 >= 5000)
m.c4 = Constraint(expr= 52.59*m.i1 + 28.87*m.i2 + 29.19*m.i3 + 46.55*m.i4 + 24.26*m.i5 + 42.53*m.i6 + 40.53*m.i7
+ 79.56*m.i8 + 108.9*m.i9 + 79.06*m.i10 + 20.15*m.i11 + 35.64*m.i12 + 39.55*m.i13 + 14.32*m.i14
+ 26.41*m.i15 + 62.48*m.i16 + 254.3*m.i17 + 32.42*m.i18 + 24.84*m.i19 + 10.1*m.i20 + 21.2*m.i21
+ 40.25*m.i22 + 17.32*m.i23 + 60.92*m.i24 + 54.73*m.i25 + 78.62*m.i26 + 49.24*m.i27
+ 68.19*m.i28 + 50.3*m.i29 + 3.83*m.i30 + 18.27*m.i31 + 59.67*m.i32 + 12.21*m.i33 + 38.09*m.i34
+ 71.72*m.i35 + 23.6*m.i36 + 70.71*m.i37 + 56.98*m.i38 + 34.47*m.i39 + 10.23*m.i40 + 59.19*m.i41
+ 58.61*m.i42 + 445.29*m.i43 + 131.69*m.i44 + 34.24*m.i45 + 43.11*m.i46 + 25.18*m.i47 + 28*m.i48
+ 19.43*m.i49 + 14.33*m.i50 + 28.41*m.i51 + 74.5*m.i52 + 36.54*m.i53 + 38.99*m.i54 + 43.15*m.i55
+ 199.55*m.i56 + 59.07*m.i57 + 123.55*m.i58 + 20.55*m.i59 + 66.72*m.i60 + 37.95*m.i61
+ 27.62*m.i62 + 23.21*m.i63 + 36.09*m.i64 + 23.09*m.i65 + 46.54*m.i66 + 67.89*m.i67
+ 34.83*m.i68 + 11.96*m.i69 + 45.77*m.i70 + 32.91*m.i71 + 77.37*m.i72 + 21.46*m.i73
+ 53.11*m.i74 + 14.29*m.i75 + 61.13*m.i76 + 32.79*m.i77 + 59.84*m.i78 + 6.59*m.i79 + 14.06*m.i80
+ 55.29*m.i81 + 33.33*m.i82 + 4.24*m.i83 + 23.21*m.i84 + 47.85*m.i85 + 48.99*m.i86 + 57.46*m.i87
+ 28.87*m.i88 + 24.6*m.i89 + 22.26*m.i90 + 28.31*m.i91 + 26.67*m.i92 + 48.1*m.i93 + 28.01*m.i94
+ 64.85*m.i95 + 25.54*m.i96 + 31.47*m.i97 + 18.31*m.i98 + 35.06*m.i99 + 8.06*m.i100 <= 5500)
|
[
"christoph.neumann@kit.edu"
] |
christoph.neumann@kit.edu
|
8d6d28f03e7dba2a24a1999e76fb628096a9fb19
|
486173e490129cec10b15c36903af3d13cfb0950
|
/FP-growth/fpGrowthTest.py
|
96ee73f6f17be8d5471447071182a3d3d5beda46
|
[] |
no_license
|
Hsingmin/MLinAction_on_Python2
|
ce3592297cbddf4e7a5c6525b6491b1b37b87ca5
|
ac5c5f8a167d3b4a5f7c7ee9e3409136db423ac0
|
refs/heads/master
| 2021-07-25T10:06:02.933608
| 2017-11-04T08:55:08
| 2017-11-04T08:55:08
| 103,387,222
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,117
|
py
|
# fpGrowthTest.py
import fpGrowth
from numpy import *
'''
# FP-Tree node create test
rootNode = fpGrowth.treeNode('pyramid', 9, None)
rootNode.children['eye'] = fpGrowth.treeNode('eye', 13, None)
rootNode.children['phoenix'] = fpGrowth.treeNode('phoenix', 3, None)
rootNode.disp()
'''
simData = fpGrowth.loadSimpleData()
print 'simData : ', simData
initSet = fpGrowth.createInitSet(simData)
print 'initSet : ', initSet
simFPTree, simHeaderTable = fpGrowth.createTree(initSet, 3)
simFPTree.disp()
freqItems = []
fpGrowth.mineTree(simFPTree, simHeaderTable, 3, set([]), freqItems)
print '============ news clicks digging =========== '
parseData = [line.split() for line in open('kosarak.dat').readlines()]
initSet = fpGrowth.createInitSet(parseData)
newFPTree, newFPHeaderTable = fpGrowth.createTree(initSet, 100000)
newFreqList = []
fpGrowth.mineTree(newFPTree, newFPHeaderTable, 100000, set([]), newFreqList)
print 'len(newFreqList) = ', len(newFreqList)
print '--------- newFreqList --------'
print newFreqList
|
[
"alfred_bit@sina.cn"
] |
alfred_bit@sina.cn
|
41706151fef1d1cd9a44b39dd33883328f4d028f
|
e6dab5aa1754ff13755a1f74a28a201681ab7e1c
|
/.parts/lib/django-1.4/tests/regressiontests/queryset_pickle/__init__.py
|
65c7eab6fc54959a02377165342999a088249224
|
[] |
no_license
|
ronkagan/Euler_1
|
67679203a9510147320f7c6513eefd391630703e
|
022633cc298475c4f3fd0c6e2bde4f4728713995
|
refs/heads/master
| 2021-01-06T20:45:52.901025
| 2014-09-06T22:34:16
| 2014-09-06T22:34:16
| 23,744,842
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 115
|
py
|
/home/action/.parts/packages/googleappengine/1.9.4/lib/django-1.4/tests/regressiontests/queryset_pickle/__init__.py
|
[
"ron.y.kagan@gmail.com"
] |
ron.y.kagan@gmail.com
|
c0814e43f0e2e81f7a2ea1755c40e656f739b742
|
38c10c01007624cd2056884f25e0d6ab85442194
|
/third_party/skia/gyp/tools.gyp
|
a4f8cfd62ab9ccea85060ad32dc63ec9fff9cb5e
|
[
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-public-domain"
] |
permissive
|
zenoalbisser/chromium
|
6ecf37b6c030c84f1b26282bc4ef95769c62a9b2
|
e71f21b9b4b9b839f5093301974a45545dad2691
|
refs/heads/master
| 2022-12-25T14:23:18.568575
| 2016-07-14T21:49:52
| 2016-07-23T08:02:51
| 63,980,627
| 0
| 2
|
BSD-3-Clause
| 2022-12-12T12:43:41
| 2016-07-22T20:14:04
| null |
UTF-8
|
Python
| false
| false
| 19,668
|
gyp
|
# Copyright 2015 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# GYP file to build various tools.
#
# To build on Linux:
# ./gyp_skia tools.gyp && make tools
#
{
'includes': [
'apptype_console.gypi',
],
'targets': [
{
# Build all executable targets defined below.
'target_name': 'tools',
'type': 'none',
'dependencies': [
'bitmap_region_decoder',
'chrome_fuzz',
'filter',
'gpuveto',
'imgblur',
'imgconv',
'imgslice',
'lua_app',
'lua_pictures',
'pinspect',
'render_pdfs',
'skdiff',
'skhello',
'skpdiff',
'skpinfo',
'skpmaker',
'test_image_decoder',
'test_public_includes',
'whitelist_typefaces',
],
'conditions': [
['skia_shared_lib',
{
'dependencies': [
'sklua', # This can only be built if skia is built as a shared library
],
},
],
],
},
{
'target_name': 'bitmap_region_decoder',
'type': 'static_library',
'sources': [
'../tools/SkBitmapRegionCanvas.cpp',
'../tools/SkBitmapRegionCodec.cpp',
'../tools/SkBitmapRegionDecoderInterface.cpp',
'../tools/SkBitmapRegionSampler.cpp',
],
'include_dirs': [
'../include/private',
'../src/codec',
],
'dependencies': [
'skia_lib.gyp:skia_lib',
],
},
{
'target_name': 'chrome_fuzz',
'type': 'executable',
'sources': [
'../tools/chrome_fuzz.cpp',
],
'dependencies': [
'skia_lib.gyp:skia_lib',
],
},
{
'target_name': 'crash_handler',
'type': 'static_library',
'sources': [ '../tools/CrashHandler.cpp' ],
'dependencies': [ 'skia_lib.gyp:skia_lib' ],
'direct_dependent_settings': {
'include_dirs': [ '../tools' ],
},
'conditions': [
[ 'skia_is_bot', {
'defines': [ 'SK_CRASH_HANDLER' ],
}],
],
'all_dependent_settings': {
'msvs_settings': {
'VCLinkerTool': {
'AdditionalDependencies': [ 'Dbghelp.lib' ],
}
},
}
},
{
'target_name': 'resources',
'type': 'static_library',
'sources': [ '../tools/Resources.cpp' ],
'dependencies': [
'flags.gyp:flags',
'skia_lib.gyp:skia_lib',
],
'direct_dependent_settings': {
'include_dirs': [ '../tools', ],
},
},
{
'target_name': 'sk_tool_utils',
'type': 'static_library',
'sources': [
'../tools/sk_tool_utils.cpp',
'../tools/sk_tool_utils_font.cpp',
],
'include_dirs': [
'../include/private',
'../src/fonts',
'../src/core',
],
'dependencies': [
'resources',
'flags.gyp:flags',
'skia_lib.gyp:skia_lib',
],
'direct_dependent_settings': {
'include_dirs': [ '../tools', ],
},
},
{
'target_name' : 'timer',
'type': 'static_library',
'sources': [ '../tools/timer/Timer.cpp' ],
'direct_dependent_settings': {
'include_dirs': ['../tools/timer'],
},
'dependencies': [ 'skia_lib.gyp:skia_lib' ],
},
{
'target_name': 'skdiff',
'type': 'executable',
'sources': [
'../tools/skdiff.cpp',
'../tools/skdiff.h',
'../tools/skdiff_html.cpp',
'../tools/skdiff_html.h',
'../tools/skdiff_main.cpp',
'../tools/skdiff_utils.cpp',
'../tools/skdiff_utils.h',
],
'dependencies': [
'skia_lib.gyp:skia_lib',
],
'xcode_settings': {
'conditions': [
[ 'skia_osx_deployment_target==""', {
'MACOSX_DEPLOYMENT_TARGET': '10.7', # -mmacos-version-min, passed in env to ld.
}, {
'MACOSX_DEPLOYMENT_TARGET': '<(skia_osx_deployment_target)',
}],
],
'CLANG_CXX_LIBRARY': 'libc++',
},
},
{
'target_name': 'skpdiff',
'type': 'executable',
'sources': [
'../tools/skpdiff/skpdiff_main.cpp',
'../tools/skpdiff/SkDiffContext.cpp',
'../tools/skpdiff/SkImageDiffer.cpp',
'../tools/skpdiff/SkPMetric.cpp',
'../tools/skpdiff/skpdiff_util.cpp',
],
'include_dirs': [
'../include/private',
'../src/core/', # needed for SkTLList.h
'../tools/', # needed for picture_utils::replace_char
],
'dependencies': [
'flags.gyp:flags',
'skia_lib.gyp:skia_lib',
'tools.gyp:picture_utils',
],
'cflags': [
'-O3',
],
'conditions': [
[ 'skia_os in ["linux", "freebsd", "openbsd", "solaris", "chromeos"]', {
'link_settings': {
'libraries': [
'-lrt',
'-pthread',
],
},
}],
['skia_opencl', {
'sources': [
'../tools/skpdiff/SkCLImageDiffer.cpp',
'../tools/skpdiff/SkDifferentPixelsMetric_opencl.cpp',
],
'conditions': [
[ 'skia_os == "mac"', {
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/OpenCL.framework',
]
}
}, {
'link_settings': {
'libraries': [
'-lOpenCL',
],
},
}],
],
}, { # !skia_opencl
'sources': [
'../tools/skpdiff/SkDifferentPixelsMetric_cpu.cpp',
],
}],
],
},
{
'target_name': 'skpmaker',
'type': 'executable',
'sources': [
'../tools/skpmaker.cpp',
],
'include_dirs': [
'../include/private',
'../src/core',
],
'dependencies': [
'flags.gyp:flags',
'skia_lib.gyp:skia_lib',
],
},
{
'target_name': 'skimagediff',
'type': 'executable',
'sources': [
'../tools/skdiff.cpp',
'../tools/skdiff.h',
'../tools/skdiff_html.cpp',
'../tools/skdiff_html.h',
'../tools/skdiff_image.cpp',
'../tools/skdiff_utils.cpp',
'../tools/skdiff_utils.h',
],
'dependencies': [
'skia_lib.gyp:skia_lib',
],
},
{
'target_name': 'skhello',
'type': 'executable',
'dependencies': [
'flags.gyp:flags',
'pdf.gyp:pdf',
'skia_lib.gyp:skia_lib',
],
'sources': [
'../tools/skhello.cpp',
],
},
{
'target_name': 'skpinfo',
'type': 'executable',
'sources': [
'../tools/skpinfo.cpp',
],
'include_dirs': [
'../include/private',
'../src/core/',
],
'dependencies': [
'flags.gyp:flags',
'skia_lib.gyp:skia_lib',
],
},
{
'target_name': 'imgblur',
'type': 'executable',
'sources': [
'../tools/imgblur.cpp',
],
'include_dirs': [
'../include/core',
],
'dependencies': [
'flags.gyp:flags',
'flags.gyp:flags_common',
'skia_lib.gyp:skia_lib',
'tools.gyp:sk_tool_utils',
],
},
{
'target_name': 'imgslice',
'type': 'executable',
'sources': [
'../tools/imgslice.cpp',
],
'include_dirs': [
'../include/core',
],
'dependencies': [
'flags.gyp:flags',
'skia_lib.gyp:skia_lib',
],
},
{
'target_name': 'lazy_decode_bitmap',
'type': 'static_library',
'sources': [ '../tools/LazyDecodeBitmap.cpp' ],
'include_dirs': [
'../include/private',
'../src/core',
'../src/lazy',
],
'dependencies': [
'flags.gyp:flags',
'skia_lib.gyp:skia_lib'
],
},
{
'target_name': 'gpuveto',
'type': 'executable',
'sources': [
'../tools/gpuveto.cpp',
],
'include_dirs': [
'../include/private',
'../src/core/',
'../src/images',
],
'dependencies': [
'lazy_decode_bitmap',
'flags.gyp:flags',
'skia_lib.gyp:skia_lib',
],
},
{
'target_name': 'lua_app',
'type': 'executable',
'sources': [
'../tools/lua/lua_app.cpp',
'../src/utils/SkLua.cpp',
],
'include_dirs': [
'../include/private',
# Lua exposes GrReduceClip which in turn requires src/core for SkTLList
'../src/gpu/',
'../src/core/',
],
'dependencies': [
'effects.gyp:effects',
'images.gyp:images',
'lua.gyp:lua',
'pdf.gyp:pdf',
'ports.gyp:ports',
'skia_lib.gyp:skia_lib',
],
},
{
'target_name': 'lua_pictures',
'type': 'executable',
'sources': [
'../tools/lua/lua_pictures.cpp',
'../src/utils/SkLuaCanvas.cpp',
'../src/utils/SkLua.cpp',
],
'include_dirs': [
'../include/private',
# Lua exposes GrReduceClip which in turn requires src/core for SkTLList
'../src/gpu/',
'../src/core/',
],
'dependencies': [
'lazy_decode_bitmap',
'effects.gyp:effects',
'flags.gyp:flags',
'images.gyp:images',
'lua.gyp:lua',
'tools.gyp:picture_utils',
'pdf.gyp:pdf',
'ports.gyp:ports',
'skia_lib.gyp:skia_lib',
],
},
{
'target_name': 'picture_renderer',
'type': 'static_library',
'sources': [
'../tools/PictureRenderer.h',
'../tools/PictureRenderer.cpp',
],
'include_dirs': [
'../include/private',
'../src/core',
'../src/images',
'../src/lazy',
'../src/pipe/utils/',
'../src/utils/',
],
'dependencies': [
'lazy_decode_bitmap',
'flags.gyp:flags',
'jsoncpp.gyp:jsoncpp',
'skia_lib.gyp:skia_lib',
'tools.gyp:picture_utils',
],
'conditions': [
['skia_gpu == 1',
{
'include_dirs' : [
'../src/gpu',
],
'dependencies': [
'gputest.gyp:skgputest',
],
'export_dependent_settings': [
'gputest.gyp:skgputest',
],
},
],
],
},
{
'target_name': 'render_pdfs',
'type': 'executable',
'sources': [
'../tools/render_pdfs_main.cpp',
],
'include_dirs': [
'../include/private',
'../src/core',
'../src/pipe/utils/',
'../src/utils/',
],
'dependencies': [
'flags.gyp:flags',
'pdf.gyp:pdf',
'skia_lib.gyp:skia_lib',
'tools.gyp:picture_utils',
'tools.gyp:proc_stats',
],
'conditions': [
['skia_win_debuggers_path and skia_os == "win"',
{
'dependencies': [
'tools.gyp:win_dbghelp',
],
},
],
# VS static libraries don't have a linker option. We must set a global
# project linker option, or add it to each executable.
['skia_win_debuggers_path and skia_os == "win" and '
'skia_arch_type == "x86_64"',
{
'msvs_settings': {
'VCLinkerTool': {
'AdditionalDependencies': [
'<(skia_win_debuggers_path)/x64/DbgHelp.lib',
],
},
},
},
],
['skia_win_debuggers_path and skia_os == "win" and '
'skia_arch_type == "x86"',
{
'msvs_settings': {
'VCLinkerTool': {
'AdditionalDependencies': [
'<(skia_win_debuggers_path)/DbgHelp.lib',
],
},
},
},
],
],
},
{
'target_name': 'picture_utils',
'type': 'static_library',
'sources': [
'../tools/picture_utils.cpp',
'../tools/picture_utils.h',
],
'dependencies': [
'skia_lib.gyp:skia_lib',
],
'direct_dependent_settings': {
'include_dirs': [
'../tools/',
],
},
},
{
'target_name': 'pinspect',
'type': 'executable',
'sources': [
'../tools/pinspect.cpp',
],
'dependencies': [
'lazy_decode_bitmap',
'flags.gyp:flags',
'skia_lib.gyp:skia_lib',
],
},
{
'target_name': 'imgconv',
'type': 'executable',
'sources': [
'../tools/imgconv.cpp',
],
'dependencies': [
'flags.gyp:flags',
'skia_lib.gyp:skia_lib',
],
},
{
'target_name': 'filter',
'type': 'executable',
'include_dirs' : [
'../include/private',
'../src/core',
'../src/utils/debugger',
],
'sources': [
'../tools/filtermain.cpp',
'../src/utils/debugger/SkDrawCommand.h',
'../src/utils/debugger/SkDrawCommand.cpp',
'../src/utils/debugger/SkDebugCanvas.h',
'../src/utils/debugger/SkDebugCanvas.cpp',
'../src/utils/debugger/SkObjectParser.h',
'../src/utils/debugger/SkObjectParser.cpp',
],
'dependencies': [
'skia_lib.gyp:skia_lib',
'tools.gyp:picture_utils',
],
},
{
'target_name': 'test_image_decoder',
'type': 'executable',
'sources': [
'../tools/test_image_decoder.cpp',
],
'dependencies': [
'skia_lib.gyp:skia_lib',
],
},
{
'target_name': 'proc_stats',
'type': 'static_library',
'sources': [
'../tools/ProcStats.h',
'../tools/ProcStats.cpp',
],
'direct_dependent_settings': {
'include_dirs': [ '../tools', ],
},
},
{
'target_name': 'whitelist_typefaces',
'type': 'executable',
'sources': [
'../tools/whitelist_typefaces.cpp',
],
'dependencies': [
'skia_lib.gyp:skia_lib',
],
},
{
'target_name': 'test_public_includes',
'type': 'static_library',
# Ensure that our public headers don't have unused params so that clients
# (e.g. Android) that include us can build with these warnings enabled
'cflags!': [ '-Wno-unused-parameter' ],
'variables': {
'includes_to_test': [
'<(skia_include_path)/animator',
'<(skia_include_path)/c',
'<(skia_include_path)/config',
'<(skia_include_path)/core',
'<(skia_include_path)/effects',
'<(skia_include_path)/gpu',
'<(skia_include_path)/images',
'<(skia_include_path)/pathops',
'<(skia_include_path)/pipe',
'<(skia_include_path)/ports',
'<(skia_include_path)/svg/parser',
'<(skia_include_path)/utils',
'<(skia_include_path)/views',
'<(skia_include_path)/xml',
],
'paths_to_ignore': [
'<(skia_include_path)/gpu/gl/GrGLConfig_chrome.h',
'<(skia_include_path)/ports/SkAtomics_std.h',
'<(skia_include_path)/ports/SkAtomics_atomic.h',
'<(skia_include_path)/ports/SkAtomics_sync.h',
'<(skia_include_path)/ports/SkFontMgr_fontconfig.h',
'<(skia_include_path)/ports/SkTypeface_mac.h',
'<(skia_include_path)/ports/SkTypeface_win.h',
'<(skia_include_path)/utils/ios',
'<(skia_include_path)/utils/mac',
'<(skia_include_path)/utils/win',
'<(skia_include_path)/utils/SkDebugUtils.h',
'<(skia_include_path)/utils/SkJSONCPP.h',
'<(skia_include_path)/views/animated',
'<(skia_include_path)/views/SkOSWindow_Android.h',
'<(skia_include_path)/views/SkOSWindow_iOS.h',
'<(skia_include_path)/views/SkOSWindow_Mac.h',
'<(skia_include_path)/views/SkOSWindow_SDL.h',
'<(skia_include_path)/views/SkOSWindow_Unix.h',
'<(skia_include_path)/views/SkOSWindow_Win.h',
'<(skia_include_path)/views/SkWindow.h',
],
},
'include_dirs': [
'<@(includes_to_test)',
],
'sources': [
# unused_param_test.cpp is generated by the action below.
'<(INTERMEDIATE_DIR)/test_public_includes.cpp',
],
'actions': [
{
'action_name': 'generate_includes_cpp',
'inputs': [
'../tools/generate_includes_cpp.py',
'<@(includes_to_test)',
# This causes the gyp generator on mac to fail
#'<@(paths_to_ignore)',
],
'outputs': [
'<(INTERMEDIATE_DIR)/test_public_includes.cpp',
],
'action': ['python', '../tools/generate_includes_cpp.py',
'--ignore', '<(paths_to_ignore)',
'<@(_outputs)', '<@(includes_to_test)'],
},
],
},
],
'conditions': [
['skia_shared_lib',
{
'targets': [
{
'target_name': 'sklua',
'product_name': 'skia',
'product_prefix': '',
'product_dir': '<(PRODUCT_DIR)/',
'type': 'shared_library',
'sources': [
'../src/utils/SkLuaCanvas.cpp',
'../src/utils/SkLua.cpp',
],
'include_dirs': [
'../include/private',
# Lua exposes GrReduceClip which in turn requires src/core for SkTLList
'../src/gpu/',
'../src/core/',
'../third_party/lua/src/',
],
'dependencies': [
'lua.gyp:lua',
'pdf.gyp:pdf',
'skia_lib.gyp:skia_lib',
],
'conditions': [
['skia_os != "win"',
{
'ldflags': [
'-Wl,-rpath,\$$ORIGIN,--enable-new-dtags',
],
},
],
],
},
],
},
],
['skia_win_debuggers_path and skia_os == "win"',
{
'targets': [
{
'target_name': 'win_dbghelp',
'type': 'static_library',
'defines': [
'SK_CDB_PATH="<(skia_win_debuggers_path)"',
],
'sources': [
'../tools/win_dbghelp.h',
'../tools/win_dbghelp.cpp',
],
},
],
},
],
['skia_os == "win"',
{
'targets': [
{
'target_name': 'win_lcid',
'type': 'executable',
'sources': [
'../tools/win_lcid.cpp',
],
},
],
},
],
['skia_os == "mac"',
{
'targets': [
{
'target_name': 'create_test_font',
'type': 'executable',
'sources': [
'../tools/create_test_font.cpp',
],
'include_dirs': [
'../include/private',
'../src/core',
],
'dependencies': [
'flags.gyp:flags',
'skia_lib.gyp:skia_lib',
'resources',
],
},
],
},
],
],
}
|
[
"zeno.albisser@hemispherian.com"
] |
zeno.albisser@hemispherian.com
|
f9198d9eb339474258efaac2ded39e65e899ec24
|
b8e249f2bf0aa175899090128f7a77fb34aa2c1b
|
/apps/users/migrations/0002_auto_20190523_2209.py
|
ad3a4736f1152245525812b35261e78189162d03
|
[] |
no_license
|
dojo-ninja-gold/ng-server
|
80d8568fa960e882df9e1a6fff7e020e93ff2990
|
fcd69744a2ebf99f0c24b3136ba7a2d8a4c683e1
|
refs/heads/master
| 2023-05-03T21:05:54.026847
| 2019-05-24T22:29:51
| 2019-05-24T22:29:51
| 187,918,381
| 0
| 0
| null | 2023-04-21T20:32:36
| 2019-05-21T21:49:40
|
Python
|
UTF-8
|
Python
| false
| false
| 830
|
py
|
# Generated by Django 2.2.1 on 2019-05-23 22:09
from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('users', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='first_name',
            field=models.CharField(default='', max_length=255),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='user',
            name='last_name',
            field=models.CharField(default='', max_length=255),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='user',
            name='pw_hash',
            field=models.CharField(default='password', max_length=500),
            preserve_default=False,
        ),
    ]
|
[
"wes@tao.team"
] |
wes@tao.team
|
1d35b0c6b7a5c4252763588c948c81d9b77ad15b
|
b458b2cf3011a73def66605b296144049909cd48
|
/tests/my_trade.py
|
e749520a049723eff15fa850405a79187d1d6f1f
|
[
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
shihliu/python-binance
|
8c5607a78a4794f9b42fe90092a149f4050d4710
|
c44f8a315df32c8b5d54750c27703060ec9060aa
|
refs/heads/master
| 2021-08-22T02:47:10.423523
| 2017-11-29T04:34:30
| 2017-11-29T04:34:30
| 111,865,384
| 0
| 0
| null | 2017-11-24T01:57:33
| 2017-11-24T01:57:33
| null |
UTF-8
|
Python
| false
| false
| 1,044
|
py
|
from binance.client import Client
import json
client = Client('yq67cDjrCxGl6eeKMyTeiK1zkeArFpu8v4uB4b6TWDQdgjDlH0KjmXfHBZ1NjvJj', 'DxE7Wugo75EK8mLmybY76dbZW6tROpyNjBRd9NHsEOXqBaKq6Awgul4390xwRUdc')
my_trade = client.get_my_trades(symbol='QSPETH')
all_buy_price = all_buy_amount= 0.0
all_sell_price = all_sell_amount= 0.0
for i in my_trade:
    if i["isBuyer"] is True:
        # accumulate buy-side value and quantity
        all_buy_price = all_buy_price + float(i["price"]) * float(i["qty"])
        all_buy_amount = all_buy_amount + float(i["qty"])
    else:
        # accumulate sell-side value and quantity
        all_sell_price = all_sell_price + float(i["price"]) * float(i["qty"])
        all_sell_amount = all_sell_amount + float(i["qty"])
avg_buy_price = all_buy_price / all_buy_amount
print "my total buy price is %f" %all_buy_price
print "my total buy amount is %f" %all_buy_amount
print "average buy price is %f" %avg_buy_price
avg_sell_price = all_sell_price / all_sell_amount
print "my total sell price is %f" %all_sell_price
print "my total sell amount is %f" %all_sell_amount
print "average sell price is %f" %avg_sell_price
|
[
"root@dhcp-129-210.nay.redhat.com"
] |
root@dhcp-129-210.nay.redhat.com
|