| column | dtype | values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 5 .. 2.06M |
| ext | string | 10 distinct values |
| lang | string | 1 distinct value |
| max_stars_repo_path | string | length 3 .. 248 |
| max_stars_repo_name | string | length 5 .. 125 |
| max_stars_repo_head_hexsha | string | length 40 .. 78 |
| max_stars_repo_licenses | list | length 1 .. 10 |
| max_stars_count | int64 | 1 .. 191k, nullable (⌀) |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable (⌀) |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable (⌀) |
| max_issues_repo_path | string | length 3 .. 248 |
| max_issues_repo_name | string | length 5 .. 125 |
| max_issues_repo_head_hexsha | string | length 40 .. 78 |
| max_issues_repo_licenses | list | length 1 .. 10 |
| max_issues_count | int64 | 1 .. 67k, nullable (⌀) |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable (⌀) |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable (⌀) |
| max_forks_repo_path | string | length 3 .. 248 |
| max_forks_repo_name | string | length 5 .. 125 |
| max_forks_repo_head_hexsha | string | length 40 .. 78 |
| max_forks_repo_licenses | list | length 1 .. 10 |
| max_forks_count | int64 | 1 .. 105k, nullable (⌀) |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable (⌀) |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable (⌀) |
| content | string | length 5 .. 2.06M |
| avg_line_length | float64 | 1 .. 1.02M |
| max_line_length | int64 | 3 .. 1.03M |
| alphanum_fraction | float64 | 0 .. 1 |
| count_classes | int64 | 0 .. 1.6M |
| score_classes | float64 | 0 .. 1 |
| count_generators | int64 | 0 .. 651k |
| score_generators | float64 | 0 .. 1 |
| count_decorators | int64 | 0 .. 990k |
| score_decorators | float64 | 0 .. 1 |
| count_async_functions | int64 | 0 .. 235k |
| score_async_functions | float64 | 0 .. 1 |
| count_documentation | int64 | 0 .. 1.04M |
| score_documentation | float64 | 0 .. 1 |

Each record below lists one source file: its repository metadata, the file content, and the per-file metrics.

hexsha: e9f668b9ca060060d4949971143a55425febaef0 | size: 1,323 | ext: py | lang: Python
max_stars: repo margarytaSadovets/nuclio @ 37bf21900d543a6340edf9374475b104ea963459, path hack/examples/python/sentiments/sentiments.py, licenses ["Apache-2.0"], stars 1 (first 2018-01-02T18:48:27.000Z, last 2018-01-02T18:48:27.000Z)
max_issues: repo ilaykav/nuclio @ 23a65b9f5c9e00afccbfbc62cd2a4dd2cc8a75dd, path hack/examples/python/sentiments/sentiments.py, licenses ["Apache-2.0"], issues null
max_forks: repo ilaykav/nuclio @ 23a65b9f5c9e00afccbfbc62cd2a4dd2cc8a75dd, path hack/examples/python/sentiments/sentiments.py, licenses ["Apache-2.0"], forks null
content:
# Copyright 2017 The Nuclio Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# uses vader lib (will be installed automatically via build commands) to identify sentiments in the body string
# return score result in the form of: {'neg': 0.0, 'neu': 0.323, 'pos': 0.677, 'compound': 0.6369}
#
# @nuclio.configure
#
# function.yaml:
# apiVersion: "nuclio.io/v1beta1"
# kind: "Function"
# spec:
# runtime: "python"
#
# build:
# commands:
# - "pip install requests vaderSentiment"
#
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
def handler(context, event):
body = event.body.decode('utf-8')
context.logger.debug_with('Analyzing ', 'sentence', body)
analyzer = SentimentIntensityAnalyzer()
score = analyzer.polarity_scores(body)
return str(score)
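
A minimal local sketch of exercising the handler outside the nuclio runtime, assuming vaderSentiment is installed; the stand-in context and event objects below are assumptions for illustration (the real ones are injected by nuclio):

import logging

class _StubLogger:
    # Stand-in for the nuclio structured logger used by the handler.
    def debug_with(self, message, *args):
        logging.debug("%s %s", message, args)

class _StubContext:
    logger = _StubLogger()

class _StubEvent:
    body = b"I love this product, it works wonderfully"

print(handler(_StubContext(), _StubEvent()))  # e.g. "{'neg': 0.0, 'neu': ..., 'pos': ..., 'compound': ...}"
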
avg_line_length: 30.767442 | max_line_length: 111 | alphanum_fraction: 0.721844
count_classes: 0 (score 0) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 1,010 (score 0.763416)

hexsha: e9f7cea197f517cec2cbd809a57d3dcde8bc48fa | size: 1,636 | ext: py | lang: Python
max_stars: repo HVHO/holiday-pharmacy @ e641dca93ed0cc0e3ffa28f54a1da6a86c1cfe22, path crawler/src/map_client/kakao_map_client.py, licenses ["MIT"], stars null
max_issues: repo HVHO/holiday-pharmacy @ e641dca93ed0cc0e3ffa28f54a1da6a86c1cfe22, path crawler/src/map_client/kakao_map_client.py, licenses ["MIT"], issues null
max_forks: repo HVHO/holiday-pharmacy @ e641dca93ed0cc0e3ffa28f54a1da6a86c1cfe22, path crawler/src/map_client/kakao_map_client.py, licenses ["MIT"], forks null
content:
import requests
class KakaoMapClient:
def __init__(self, kakao_auth_key):
self.kakao_map_url = "https://dapi.kakao.com/v2/local/search/address.json"
self.kakao_auth_key = kakao_auth_key
def get_latitude_and_longitudes(self, pharmacies):
searched_pharamacies = []
for pharmacy in pharmacies:
(latitude, longitude) = self.get_latitude_and_longitude(pharmacy[1])
pharmacy.append(latitude)
pharmacy.append(longitude)
searched_pharamacies.append(pharmacy)
return searched_pharamacies
def get_latitude_and_longitude(self, addr):
# parse addr by road name
parsed_addr=""
for idx, entity in reversed(list(enumerate(addr.split()))):
if ("로" in entity) or ("길" in entity):
parsed_addr = " ".join(addr.split()[:(idx+2)])
print("searching address : " + parsed_addr)
query_pharams = {
'query': parsed_addr,
'analyze_type': 'similar',
'page': 1,
'size': 10
}
auth_header = {
'Authorization': self.kakao_auth_key
}
response = requests.get(self.kakao_map_url, params=query_pharams, headers=auth_header)
latitude = "0"
longitude = "0"
        try:
            response_addr = response.json()["documents"][0]["address"]
            latitude = response_addr["y"]
            longitude = response_addr["x"]
        except (IndexError, KeyError, ValueError):
            # no match (or malformed response); keep the "0"/"0" defaults
            print("no info : " + parsed_addr)
# print("longitude: " + longitude + "latitude: " + latitude)
return (latitude, longitude)
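
A usage sketch, assuming a valid Kakao REST API key and network access; the key, the address, and the pharmacy row below are placeholders for illustration (the Kakao local API expects an Authorization value of the form "KakaoAK <key>"):

client = KakaoMapClient("KakaoAK <your-rest-api-key>")
latitude, longitude = client.get_latitude_and_longitude("서울특별시 종로구 세종대로 175")
print(latitude, longitude)  # "0", "0" when the address cannot be resolved

# Each pharmacy row is expected to carry its address at index 1, e.g. [name, address];
# latitude and longitude are appended to each row in place.
rows = client.get_latitude_and_longitudes([["example pharmacy", "서울특별시 종로구 세종대로 175"]])
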
avg_line_length: 33.387755 | max_line_length: 94 | alphanum_fraction: 0.590465
count_classes: 1,622 (score 0.989024) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 276 (score 0.168293)

hexsha: e9f9eaf439178a9738f5c3bed675e41c46a5be64 | size: 404 | ext: py | lang: Python
max_stars: repo Alenx58/python-mysql-elasticsearch @ a5deb16dcfce6d37c9c4a076f7ec6ff84ca967c3, path main.py, licenses ["MIT"], stars 1 (first 2021-04-27T06:32:18.000Z, last 2021-04-27T06:32:18.000Z)
max_issues: repo Alenx58/python-mysql-elasticsearch @ a5deb16dcfce6d37c9c4a076f7ec6ff84ca967c3, path main.py, licenses ["MIT"], issues null
max_forks: repo Alenx58/python-mysql-elasticsearch @ a5deb16dcfce6d37c9c4a076f7ec6ff84ca967c3, path main.py, licenses ["MIT"], forks null
content:
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Author : Alenx.Hai <alenx.hai@gmail.com>
# created time: 2020/12/21-10:49 AM
import asyncio
from src.mysql_elastic import MySQLElasticSearch
@asyncio.coroutine
def main():
elastic = MySQLElasticSearch()
yield elastic.put_data()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.wait([main()]))
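
The generator-based @asyncio.coroutine style used above is deprecated, and `yield elastic.put_data()` hands the coroutine object back to the loop instead of awaiting it; a sketch of the modern async/await equivalent, assuming MySQLElasticSearch.put_data() is awaitable:

import asyncio
from src.mysql_elastic import MySQLElasticSearch

async def main():
    elastic = MySQLElasticSearch()
    await elastic.put_data()  # actually awaits the transfer

if __name__ == '__main__':
    asyncio.run(main())
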
avg_line_length: 21.263158 | max_line_length: 51 | alphanum_fraction: 0.707921
count_classes: 0 (score 0) | count_generators: 75 (score 0.183824) | count_decorators: 94 (score 0.230392) | count_async_functions: 0 (score 0) | count_documentation: 135 (score 0.330882)

hexsha: e9fbb4ffd34a72b02bcdf9fee23d69719622bfd4 | size: 397 | ext: py | lang: Python
max_stars: repo adaatii/Python-Curso-em-Video- @ 30b37713b3685469558babb93b557b53210f010c, path PythonDesafios/d107/teste.py, licenses ["MIT"], stars null
max_issues: repo adaatii/Python-Curso-em-Video- @ 30b37713b3685469558babb93b557b53210f010c, path PythonDesafios/d107/teste.py, licenses ["MIT"], issues null
max_forks: repo adaatii/Python-Curso-em-Video- @ 30b37713b3685469558babb93b557b53210f010c, path PythonDesafios/d107/teste.py, licenses ["MIT"], forks null
content:
# Create a module named moeda.py containing the functions aumentar(), diminuir(), dobro() and metade().
# Also write a program that imports this module and uses some of those functions.
import moeda
p = float(input('Digite o preço: '))
print(f'A metade do {p} é R${moeda.metade(p)}')
print(f'O dobro de {p} é R${moeda.dobro(p)}')
print(f'Aumentando 10%, temos R${moeda.aumentar(p, 10)}')
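
A minimal sketch of the moeda.py companion module the exercise calls for; the percentage semantics below are the usual reading of the exercise and should be treated as an assumption:

# moeda.py (hypothetical companion module)
def aumentar(preco, taxa):
    """Return preco increased by taxa percent."""
    return preco + preco * taxa / 100

def diminuir(preco, taxa):
    """Return preco decreased by taxa percent."""
    return preco - preco * taxa / 100

def dobro(preco):
    """Return double of preco."""
    return preco * 2

def metade(preco):
    """Return half of preco."""
    return preco / 2
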
avg_line_length: 36.090909 | max_line_length: 116 | alphanum_fraction: 0.702771
count_classes: 0 (score 0) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 347 (score 0.85049)

hexsha: e9fc3b08d76230c48ce220e58abe719b3c7d3fe9 | size: 3,024 | ext: py | lang: Python
max_stars: repo mfrueh/home-assistant @ 5d64628b5bf4713016883282fd54de9c7d5089d0, path homeassistant/components/homekit/covers.py, licenses ["Apache-2.0"], stars null
max_issues: repo mfrueh/home-assistant @ 5d64628b5bf4713016883282fd54de9c7d5089d0, path homeassistant/components/homekit/covers.py, licenses ["Apache-2.0"], issues null
max_forks: repo mfrueh/home-assistant @ 5d64628b5bf4713016883282fd54de9c7d5089d0, path homeassistant/components/homekit/covers.py, licenses ["Apache-2.0"], forks null
content:
"""Class to hold all cover accessories."""
import logging
from homeassistant.components.cover import ATTR_CURRENT_POSITION
from homeassistant.helpers.event import async_track_state_change
from . import TYPES
from .accessories import HomeAccessory, add_preload_service
from .const import (
SERV_WINDOW_COVERING, CHAR_CURRENT_POSITION,
CHAR_TARGET_POSITION, CHAR_POSITION_STATE)
_LOGGER = logging.getLogger(__name__)
@TYPES.register('Window')
class Window(HomeAccessory):
"""Generate a Window accessory for a cover entity.
The cover entity must support: set_cover_position.
"""
def __init__(self, hass, entity_id, display_name):
"""Initialize a Window accessory object."""
super().__init__(display_name, entity_id, 'WINDOW')
self._hass = hass
self._entity_id = entity_id
self.current_position = None
self.homekit_target = None
self.serv_cover = add_preload_service(self, SERV_WINDOW_COVERING)
self.char_current_position = self.serv_cover. \
get_characteristic(CHAR_CURRENT_POSITION)
self.char_target_position = self.serv_cover. \
get_characteristic(CHAR_TARGET_POSITION)
self.char_position_state = self.serv_cover. \
get_characteristic(CHAR_POSITION_STATE)
self.char_target_position.setter_callback = self.move_cover
def run(self):
"""Method called be object after driver is started."""
state = self._hass.states.get(self._entity_id)
self.update_cover_position(new_state=state)
async_track_state_change(
self._hass, self._entity_id, self.update_cover_position)
def move_cover(self, value):
"""Move cover to value if call came from HomeKit."""
if value != self.current_position:
_LOGGER.debug("%s: Set position to %d", self._entity_id, value)
self.homekit_target = value
if value > self.current_position:
self.char_position_state.set_value(1)
elif value < self.current_position:
self.char_position_state.set_value(0)
self._hass.services.call(
'cover', 'set_cover_position',
{'entity_id': self._entity_id, 'position': value})
def update_cover_position(self, entity_id=None, old_state=None,
new_state=None):
"""Update cover position after state changed."""
if new_state is None:
return
current_position = new_state.attributes[ATTR_CURRENT_POSITION]
if current_position is None:
return
self.current_position = int(current_position)
self.char_current_position.set_value(self.current_position)
if self.homekit_target is None or \
abs(self.current_position - self.homekit_target) < 6:
self.char_target_position.set_value(self.current_position)
self.char_position_state.set_value(2)
self.homekit_target = None
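
update_cover_position treats a pending HomeKit target as reached once the reported position is within 6% of it, at which point the accessory reports the "stopped" position state (2); a standalone sketch of that rule with hypothetical names (not part of the component):

POSITION_CLOSING, POSITION_OPENING, POSITION_STOPPED = 0, 1, 2  # HomeKit position states

def position_state(current, target, tolerance=6):
    # Mirrors the logic above: close enough (or no pending target) means stopped.
    if target is None or abs(current - target) < tolerance:
        return POSITION_STOPPED
    return POSITION_OPENING if target > current else POSITION_CLOSING

assert position_state(48, 50) == POSITION_STOPPED
assert position_state(10, 50) == POSITION_OPENING
assert position_state(90, 50) == POSITION_CLOSING
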
avg_line_length: 36.878049 | max_line_length: 75 | alphanum_fraction: 0.67791
count_classes: 2,568 (score 0.849206) | count_generators: 0 (score 0) | count_decorators: 2,594 (score 0.857804) | count_async_functions: 0 (score 0) | count_documentation: 441 (score 0.145833)

hexsha: e9fce1f0a0567c478c06135a1b26bb39e2c00202 | size: 5,888 | ext: py | lang: Python
max_stars: repo yosukefk/plotter @ 16127ee7fc3105c717e92875ee3d61477bd41533, path plotter/hysplit_reader_long.py, licenses ["MIT"], stars null
max_issues: repo yosukefk/plotter @ 16127ee7fc3105c717e92875ee3d61477bd41533, path plotter/hysplit_reader_long.py, licenses ["MIT"], issues 6 (first 2021-05-25T15:51:27.000Z, last 2021-08-18T20:39:41.000Z)
max_forks: repo yosukefk/plotter @ 16127ee7fc3105c717e92875ee3d61477bd41533, path plotter/hysplit_reader_long.py, licenses ["MIT"], forks null
content:
import pandas as pd
import numpy as np
import datetime
import pytz
from pathlib import Path
import warnings
from io import IOBase
from . import calpost_reader
calpost_cat = calpost_reader.calpost_cat
def hysplit_reader_long(f, tslice=slice(None, None), x=None, y=None, z=None,
rdx_map=None):
"""reads hysplit output file, returns dict of numpy arrays
:param FileIO f: either (1)opened hysplit output file, (2) hysplit output filename or (3) list of (1) or (2)
:param slice tslice: slice of time index
:param list x: list of x coords
:param list y: list of y coords
:return: dict, with ['v'] has data as 3d array (t, y, x)
:rtype: dict
"""
print(type(f))
if isinstance(f, IOBase):
        raise ValueError('please pass filename, not FileIO...')
# assume file name passed if 'f' is string
if isinstance(f, (str, Path)):
df = pd.read_csv(f, sep=r'\s+')
return hysplit_reader_long(df, tslice, x, y, z, rdx_map)
    # A list of files may cover different time periods and locations, so they
    # are first grouped by time period and each chunk is read separately; the
    # chunks are then joined by the time-stitching routine, which is aware of
    # the spin-up time.
if isinstance(f, list):
lines = [next(pd.read_csv(fn, sep=r'\s+', nrows=1).itertuples()) for fn in f]
# Pandas(Index=0, JDAY=268.208, YR1=19, MO1=9, DA1=25, HR1=5, MN1=0,
# YR2=19, MO2=9, DA2=25, HR2=5, MN2=1, Pol=1, Lev=1, Station=1,
# Value=0.0)
print(lines)
dtes = [datetime.datetime(_.YR1, _.MO1, _.DA1, _.HR1,
_.MN1).replace(tzinfo=pytz.utc).astimezone(pytz.timezone('Etc/GMT+6'))
for _ in lines]
df_fnames = pd.DataFrame({'fname': f, 'datetime': dtes})
df_fnames.to_csv('fnames.csv')
# group the file names by the datetime
dct_fnames = {}
for fn,dte in zip(f, dtes):
dct_fnames.setdefault(dte, []).append(fn)
file_dates = list(dct_fnames.keys())
dat = []
for dte,fnames in dct_fnames.items():
dfs = [pd.read_csv(fn, sep=r'\s+') for fn in fnames]
df = pd.concat(dfs)
dat.append( hysplit_reader_long(df, tslice, x, y, z, rdx_map) )
dat = calpost_cat(dat, use_later_files=True)
dat['ts'] = dat['ts'][tslice]
dat['v'] = dat['v'][tslice]
return dat
# now i should be getting dataframe
df = f
units = '???'
print('dt')
# extremely slow!
#df['Datetime'] = [datetime.datetime(_.YR1, _.MO1, _.DA1, _.HR1,
# _.MN1).replace(tzinfo=pytz.utc).astimezone(pytz.timezone('Etc/GMT+6'))
# for _ in df.itertuples()]
df['Datetime'] = pd.to_datetime(df[['YR1', 'MO1', 'DA1', 'HR1', 'MN1']].assign(
YR1= lambda df: df['YR1'] + 2000).rename(
columns={'YR1':'year', 'MO1':'month', 'DA1': 'day', 'HR1': 'hour', 'MN1': 'minute'}),
utc=True).dt.tz_convert('Etc/GMT+6')
# bad idea!
#df['Datetime_tup'] = [_ for _ in df[['YR1', 'MO1', 'DA1', 'HR1',
# 'MN1']].itertuples(index=False)]
df = df[['Datetime', 'Lev', 'Station', 'Value']]
#grouped = df.groupby(['Datetime', 'Lev', 'Station'])
nrec = len(df.index)
print('set_index')
df = df[['Datetime', 'Lev', 'Station', 'Value']].set_index(
['Datetime', 'Station', 'Lev'] )
print('dt')
ts = df.index.levels[0]
#xxx = pd.DataFrame(ts, columns=('year', 'month', 'day', 'hour',
# 'minute'))
#print(xxx)
#xxx = xxx.assign(year=lambda x: x['year']+2000)
#print(xxx)
#
#ts = pd.to_datetime(
# pd.DataFrame(
# ts,
# columns=('year', 'month', 'day', 'hour', 'minute')
# ).assign(
# year=lambda x: x['year']+2000
# ))
#print(ts)
print('cont')
stations = df.index.levels[1]
nz = len(df.index.levels[2])
nsta = len(df.index.levels[1])
nt = len(df.index.levels[0])
print('nt,nz,nsta,nrec=', nt, nz, nsta, nrec)
# ........ bad idea
#assert nt * nz * nsta == nrec
if not nt * nz * nsta == nrec:
print(f'expected {nt*nz*nsta} rec, got {nrec}, short by {nt*nz*nsta-nrec}')
print(' f:', f)
print(' rng:', df.index.levels[0][0], df.index.levels[0][-1])
print('unstack')
df = df.unstack().unstack()
df.columns = df.columns.droplevel()
if rdx_map:
x = rdx_map.x
y = rdx_map.y
nx = len(x)
ny = len(y)
grid = rdx_map.grid
v = df.to_numpy()
if rdx_map.coverage == 'full, c-order' and nsta==nx*ny:
v = v.reshape(nt, nz, ny, nx)
elif rdx_map.coverage == 'full, f-order' and nsta==nx*ny:
raise NotImplementedError(
'qa first! receptor def = "{}", '.format(rdx_map.coverage))
v = v.reshape(nt, nz, nx, ny)
v = np.swapaxes(v, -1, -2)
elif rdx_map.coverage in ('full, c-order', 'full, f-order', 'full, random', 'patial, random'):
rdx = np.arange(nt*nz) + 1
mymap = rdx_map.get_index(stations).to_numpy()
mymap = mymap[:, ::-1]
vv = np.empty((nt, nz, ny, nx))
vv[...] = np.nan
v = v.reshape(nt , nz, -1)
for tt,t in zip(vv, v):
for zz, z in zip(tt, t):
for ji,p in zip(mymap,z):
zz[tuple(ji)] = p
v = vv
else:
raise ValueError('rdx_map is mandatory for now')
#dct = {'v': v, 'ts': ts, 'units': units, 'df': f, 'name': None}
dct = {'v': v, 'ts': ts, 'units': units, 'name': None}
dct.update( {'x': x, 'y': y, 'grid': grid, })
del df
return dct
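
The commented-out per-row loop above was replaced by a vectorized pd.to_datetime over the YR1/MO1/DA1/HR1/MN1 columns; a small self-contained sketch of that conversion on toy data:

import pandas as pd

# Toy frame shaped like the HYSPLIT long-format time columns used above.
toy = pd.DataFrame({'YR1': [19, 19], 'MO1': [9, 9], 'DA1': [25, 25],
                    'HR1': [5, 6], 'MN1': [0, 0]})
stamps = pd.to_datetime(
    toy.assign(YR1=lambda d: d['YR1'] + 2000)
       .rename(columns={'YR1': 'year', 'MO1': 'month', 'DA1': 'day',
                        'HR1': 'hour', 'MN1': 'minute'}),
    utc=True).dt.tz_convert('Etc/GMT+6')
print(stamps)  # tz-aware timestamps, e.g. 2019-09-24 23:00:00-06:00
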
avg_line_length: 33.078652 | max_line_length: 112 | alphanum_fraction: 0.529552
count_classes: 0 (score 0) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 2,356 (score 0.400136)

hexsha: e9fce58b8db982ac1059efc2000a44b8a6f0d6b6 | size: 1,094 | ext: py | lang: Python
max_stars: repo brijeshb42/flask-web @ a859fb68fe0eedf5ee872767d107f95a4e6f4856, path tests/UserTest/test_user_db.py, licenses ["MIT"], stars 14 (first 2015-02-20T18:31:33.000Z, last 2020-12-23T02:33:05.000Z)
max_issues: repo brijeshb42/flask-web @ a859fb68fe0eedf5ee872767d107f95a4e6f4856, path tests/UserTest/test_user_db.py, licenses ["MIT"], issues 2 (first 2015-02-21T18:49:12.000Z, last 2015-10-06T18:10:30.000Z)
max_forks: repo brijeshb42/yapper @ a859fb68fe0eedf5ee872767d107f95a4e6f4856, path tests/UserTest/test_user_db.py, licenses ["MIT"], forks 10 (first 2015-02-21T11:06:57.000Z, last 2022-02-21T01:25:34.000Z)
content:
import unittest
from yapper import create_app, db
from yapper.blueprints.user.models import User, Role
class TestUserAddToDb(unittest.TestCase):
def setUp(self):
self.app = create_app('test')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_role_gets_id(self):
role = Role(name='admin')
self.assertTrue(role.id is None)
db.session.add(role)
db.session.commit()
self.assertFalse(role.id is None)
def test_user_gets_role_and_id(self):
role = Role(name='administrator')
self.assertTrue(role.id is None)
user = User(email='b2@gmail.com', password='1234', role=role)
self.assertTrue(user.id is None)
db.session.add(user)
db.session.commit()
self.assertFalse(role.id is None)
self.assertFalse(user.id is None)
self.assertTrue(user.role_id == role.id)
self.assertTrue(user.is_admin())
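
A short usage note: the suite runs with the standard unittest runner (for example `python -m unittest tests.UserTest.test_user_db`, assuming the test directories are packages), or the module can be given the conventional direct-run entry point sketched below:

if __name__ == '__main__':
    unittest.main()  # conventional direct-run entry point for the test module
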
avg_line_length: 30.388889 | max_line_length: 69 | alphanum_fraction: 0.637112
count_classes: 988 (score 0.903108) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 48 (score 0.043876)

hexsha: e9fd5e9401ba6d04c5d4bf4d42d343bc34357a32 | size: 2,880 | ext: py | lang: Python
max_stars: repo MaximeBaudette/PyCIM @ d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14, path CIM16/IEC61970/Generation/Production/StartIgnFuelCurve.py, licenses ["MIT"], stars null
max_issues: repo MaximeBaudette/PyCIM @ d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14, path CIM16/IEC61970/Generation/Production/StartIgnFuelCurve.py, licenses ["MIT"], issues null
max_forks: repo MaximeBaudette/PyCIM @ d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14, path CIM16/IEC61970/Generation/Production/StartIgnFuelCurve.py, licenses ["MIT"], forks 1 (first 2021-04-02T18:04:49.000Z, last 2021-04-02T18:04:49.000Z)
content:
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM16.IEC61970.Core.Curve import Curve
class StartIgnFuelCurve(Curve):
"""The quantity of ignition fuel (Y-axis) used to restart and repay the auxiliary power consumed versus the number of hours (X-axis) the unit was off lineThe quantity of ignition fuel (Y-axis) used to restart and repay the auxiliary power consumed versus the number of hours (X-axis) the unit was off line
"""
def __init__(self, ignitionFuelType="oil", StartupModel=None, *args, **kw_args):
"""Initialises a new 'StartIgnFuelCurve' instance.
@param ignitionFuelType: Type of ignition fuel Values are: "oil", "coal", "lignite", "gas"
@param StartupModel: The unit's startup model may have a startup ignition fuel curve
"""
#: Type of ignition fuel Values are: "oil", "coal", "lignite", "gas"
self.ignitionFuelType = ignitionFuelType
self._StartupModel = None
self.StartupModel = StartupModel
super(StartIgnFuelCurve, self).__init__(*args, **kw_args)
_attrs = ["ignitionFuelType"]
_attr_types = {"ignitionFuelType": str}
_defaults = {"ignitionFuelType": "oil"}
_enums = {"ignitionFuelType": "FuelType"}
_refs = ["StartupModel"]
_many_refs = []
def getStartupModel(self):
"""The unit's startup model may have a startup ignition fuel curve
"""
return self._StartupModel
def setStartupModel(self, value):
if self._StartupModel is not None:
self._StartupModel._StartIgnFuelCurve = None
self._StartupModel = value
if self._StartupModel is not None:
self._StartupModel.StartIgnFuelCurve = None
self._StartupModel._StartIgnFuelCurve = self
StartupModel = property(getStartupModel, setStartupModel)
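
A small sketch of the two-way wiring the StartupModel setter performs, assuming PyCIM is importable and using a hypothetical stub in place of the real StartupModel class:

class _StubStartupModel:
    """Hypothetical stand-in; only the attributes the setter touches are modelled."""
    StartIgnFuelCurve = None
    _StartIgnFuelCurve = None

curve = StartIgnFuelCurve(ignitionFuelType="gas")
model = _StubStartupModel()
curve.StartupModel = model                # property setter wires the back-reference
assert model._StartIgnFuelCurve is curve
assert curve.StartupModel is model
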
avg_line_length: 45 | max_line_length: 309 | alphanum_fraction: 0.720833
count_classes: 1,733 (score 0.601736) | count_generators: 0 (score 0) | count_decorators: 0 (score 0) | count_async_functions: 0 (score 0) | count_documentation: 1,900 (score 0.659722)

hexsha: e9fe99f79d22866cd1c3d457b72379bf7128ed8c | size: 122,030 | ext: py | lang: Python
max_stars: repo Hansz00/pyecharts-gallery @ a0a16d980e9d4f7d355c5ada938614579ee8d461, path Line/beijing_aqi.py, licenses ["MIT"], stars 1 (first 2020-08-25T07:41:10.000Z, last 2020-08-25T07:41:10.000Z)
max_issues: repo Hansz00/pyecharts-gallery @ a0a16d980e9d4f7d355c5ada938614579ee8d461, path Line/beijing_aqi.py, licenses ["MIT"], issues null
max_forks: repo Hansz00/pyecharts-gallery @ a0a16d980e9d4f7d355c5ada938614579ee8d461, path Line/beijing_aqi.py, licenses ["MIT"], forks 1 (first 2022-03-10T09:05:44.000Z, last 2022-03-10T09:05:44.000Z)
content:
import pyecharts.options as opts
from pyecharts.charts import Line
"""
Gallery 使用 pyecharts 1.1.0
参考地址: https://echarts.baidu.com/examples/editor.html?c=line-aqi
目前无法实现的功能:
1、dataZoom 放大的时候无法固定 Y 轴的上下限
"""
all_data = [
["2000-06-05", 116],
["2000-06-06", 129],
["2000-06-07", 135],
["2000-06-08", 86],
["2000-06-09", 73],
["2000-06-10", 85],
["2000-06-11", 73],
["2000-06-12", 68],
["2000-06-13", 92],
["2000-06-14", 130],
["2000-06-15", 245],
["2000-06-16", 139],
["2000-06-17", 115],
["2000-06-18", 111],
["2000-06-19", 309],
["2000-06-20", 206],
["2000-06-21", 137],
["2000-06-22", 128],
["2000-06-23", 85],
["2000-06-24", 94],
["2000-06-25", 71],
["2000-06-26", 106],
["2000-06-27", 84],
["2000-06-28", 93],
["2000-06-29", 85],
["2000-06-30", 73],
["2000-07-01", 83],
["2000-07-02", 125],
["2000-07-03", 107],
["2000-07-04", 82],
["2000-07-05", 44],
["2000-07-06", 72],
["2000-07-07", 106],
["2000-07-08", 107],
["2000-07-09", 66],
["2000-07-10", 91],
["2000-07-11", 92],
["2000-07-12", 113],
["2000-07-13", 107],
["2000-07-14", 131],
["2000-07-15", 111],
["2000-07-16", 64],
["2000-07-17", 69],
["2000-07-18", 88],
["2000-07-19", 77],
["2000-07-20", 83],
["2000-07-21", 111],
["2000-07-22", 57],
["2000-07-23", 55],
["2000-07-24", 60],
["2000-07-25", 44],
["2000-07-26", 127],
["2000-07-27", 114],
["2000-07-28", 86],
["2000-07-29", 73],
["2000-07-30", 52],
["2000-07-31", 69],
["2000-08-01", 86],
["2000-08-02", 118],
["2000-08-03", 56],
["2000-08-04", 91],
["2000-08-05", 121],
["2000-08-06", 127],
["2000-08-07", 78],
["2000-08-08", 79],
["2000-08-09", 46],
["2000-08-10", 108],
["2000-08-11", 80],
["2000-08-12", 79],
["2000-08-13", 69],
["2000-08-14", 80],
["2000-08-15", 105],
["2000-08-16", 119],
["2000-08-17", 105],
["2000-08-18", 55],
["2000-08-19", 74],
["2000-08-20", 41],
["2000-08-21", 62],
["2000-08-22", 104],
["2000-08-23", 118],
["2000-08-24", 121],
["2000-08-25", 126],
["2000-08-26", 99],
["2000-08-27", 92],
["2000-08-28", 75],
["2000-08-29", 91],
["2000-08-30", 94],
["2000-08-31", 69],
["2000-09-01", 93],
["2000-09-02", 124],
["2000-09-03", 120],
["2000-09-04", 93],
["2000-09-05", 26],
["2000-09-06", 32],
["2000-09-07", 70],
["2000-09-08", 89],
["2000-09-10", 117],
["2000-09-11", 144],
["2000-09-12", 111],
["2000-09-13", 120],
["2000-09-14", 97],
["2000-09-15", 108],
["2000-09-17", 74],
["2000-09-18", 105],
["2000-09-19", 127],
["2000-09-20", 143],
["2000-09-21", 62],
["2000-09-22", 80],
["2000-09-23", 136],
["2000-09-24", 29],
["2000-09-25", 91],
["2000-09-26", 93],
["2000-09-27", 114],
["2000-09-28", 45],
["2000-09-29", 102],
["2000-09-30", 111],
["2000-10-01", 93],
["2000-10-02", 117],
["2000-10-03", 78],
["2000-10-04", 76],
["2000-10-05", 100],
["2000-10-06", 75],
["2000-10-07", 169],
["2000-10-08", 59],
["2000-10-09", 89],
["2000-10-10", 91],
["2000-10-11", 75],
["2000-10-12", 28],
["2000-10-13", 47],
["2000-10-14", 92],
["2000-10-16", 72],
["2000-10-17", 149],
["2000-10-18", 86],
["2000-10-19", 88],
["2000-10-20", 104],
["2000-10-21", 91],
["2000-10-22", 88],
["2000-10-23", 55],
["2000-10-24", 63],
["2000-10-25", 41],
["2000-10-26", 85],
["2000-10-27", 99],
["2000-10-28", 121],
["2000-10-29", 96],
["2000-10-30", 90],
["2000-11-01", 80],
["2000-11-02", 116],
["2000-11-03", 207],
["2000-11-04", 306],
["2000-11-05", 283],
["2000-11-06", 200],
["2000-11-07", 93],
["2000-11-08", 49],
["2000-11-09", 78],
["2000-11-10", 40],
["2000-11-11", 74],
["2000-11-12", 67],
["2000-11-13", 118],
["2000-11-14", 196],
["2000-11-15", 101],
["2000-11-16", 59],
["2000-11-17", 83],
["2000-11-18", 83],
["2000-11-19", 124],
["2000-11-20", 57],
["2000-11-21", 78],
["2000-11-22", 113],
["2000-11-23", 172],
["2000-11-24", 129],
["2000-11-25", 103],
["2000-11-26", 75],
["2000-11-27", 125],
["2000-11-28", 121],
["2000-11-29", 204],
["2000-11-30", 141],
["2000-12-01", 106],
["2000-12-02", 146],
["2000-12-03", 95],
["2000-12-04", 149],
["2000-12-05", 71],
["2000-12-07", 157],
["2000-12-08", 141],
["2000-12-09", 197],
["2000-12-10", 43],
["2000-12-11", 81],
["2000-12-12", 109],
["2000-12-13", 118],
["2000-12-15", 115],
["2000-12-16", 92],
["2000-12-17", 123],
["2000-12-18", 147],
["2000-12-19", 59],
["2000-12-20", 103],
["2000-12-21", 146],
["2000-12-22", 137],
["2000-12-23", 74],
["2000-12-24", 64],
["2000-12-25", 67],
["2000-12-26", 107],
["2000-12-27", 101],
["2000-12-28", 79],
["2000-12-29", 137],
["2000-12-30", 165],
["2000-12-31", 81],
["2001-01-01", 100],
["2001-01-02", 126],
["2001-01-03", 56],
["2001-01-05", 108],
["2001-01-06", 88],
["2001-01-07", 78],
["2001-01-08", 105],
["2001-01-09", 77],
["2001-01-10", 105],
["2001-01-11", 93],
["2001-01-12", 107],
["2001-01-13", 128],
["2001-01-14", 53],
["2001-01-15", 81],
["2001-01-16", 128],
["2001-01-17", 179],
["2001-01-18", 225],
["2001-01-19", 116],
["2001-01-20", 153],
["2001-01-21", 161],
["2001-01-22", 149],
["2001-01-23", 115],
["2001-01-24", 136],
["2001-01-25", 101],
["2001-01-26", 109],
["2001-01-27", 108],
["2001-01-28", 86],
["2001-01-29", 101],
["2001-01-30", 109],
["2001-01-31", 139],
["2001-02-01", 110],
["2001-02-02", 113],
["2001-02-03", 130],
["2001-02-04", 62],
["2001-02-05", 88],
["2001-02-06", 105],
["2001-02-07", 87],
["2001-02-08", 140],
["2001-02-09", 116],
["2001-02-10", 100],
["2001-02-11", 83],
["2001-02-12", 102],
["2001-02-13", 106],
["2001-02-14", 157],
["2001-02-15", 131],
["2001-02-16", 77],
["2001-02-17", 101],
["2001-02-18", 148],
["2001-02-19", 227],
["2001-02-20", 105],
["2001-02-21", 155],
["2001-02-22", 293],
["2001-02-23", 99],
["2001-02-24", 57],
["2001-02-25", 97],
["2001-02-26", 104],
["2001-02-27", 117],
["2001-02-28", 125],
["2001-03-01", 216],
["2001-03-02", 149],
["2001-03-03", 256],
["2001-03-04", 172],
["2001-03-05", 113],
["2001-03-06", 338],
["2001-03-07", 57],
["2001-03-08", 48],
["2001-03-10", 111],
["2001-03-11", 87],
["2001-03-12", 175],
["2001-03-13", 186],
["2001-03-14", 201],
["2001-03-15", 76],
["2001-03-16", 131],
["2001-03-17", 127],
["2001-03-18", 128],
["2001-03-19", 152],
["2001-03-20", 144],
["2001-03-21", 162],
["2001-03-22", 500],
["2001-03-24", 358],
["2001-03-25", 128],
["2001-03-26", 54],
["2001-03-27", 57],
["2001-03-28", 54],
["2001-03-29", 80],
["2001-03-30", 71],
["2001-03-31", 73],
["2001-04-01", 139],
["2001-04-02", 224],
["2001-04-03", 107],
["2001-04-04", 150],
["2001-04-05", 180],
["2001-04-06", 77],
["2001-04-07", 95],
["2001-04-08", 194],
["2001-04-09", 143],
["2001-04-10", 205],
["2001-04-11", 129],
["2001-04-12", 64],
["2001-04-13", 61],
["2001-04-14", 79],
["2001-04-15", 121],
["2001-04-16", 130],
["2001-04-17", 150],
["2001-04-18", 205],
["2001-04-19", 154],
["2001-04-20", 81],
["2001-04-21", 140],
["2001-04-22", 119],
["2001-04-23", 156],
["2001-04-24", 72],
["2001-04-25", 108],
["2001-04-26", 124],
["2001-04-27", 94],
["2001-04-28", 157],
["2001-04-29", 100],
["2001-04-30", 158],
["2001-05-01", 277],
["2001-05-02", 332],
["2001-05-03", 303],
["2001-05-04", 238],
["2001-05-05", 500],
["2001-05-06", 99],
["2001-05-07", 93],
["2001-05-08", 104],
["2001-05-09", 74],
["2001-05-10", 68],
["2001-05-11", 90],
["2001-05-12", 114],
["2001-05-13", 142],
["2001-05-14", 126],
["2001-05-15", 185],
["2001-05-16", 402],
["2001-05-17", 189],
["2001-05-17", 189],
["2001-05-17", 189],
["2001-05-18", 112],
["2001-05-19", 137],
["2001-05-20", 158],
["2001-05-21", 158],
["2001-05-22", 116],
["2001-05-23", 132],
["2001-05-24", 110],
["2001-05-25", 82],
["2001-05-26", 56],
["2001-05-27", 54],
["2001-05-28", 71],
["2001-05-29", 101],
["2001-05-30", 57],
["2001-05-31", 88],
["2001-06-01", 99],
["2001-06-02", 84],
["2001-06-03", 139],
["2001-06-04", 132],
["2001-06-05", 141],
["2001-06-07", 159],
["2001-06-08", 131],
["2001-06-09", 180],
["2001-06-10", 164],
["2001-06-11", 134],
["2001-06-12", 163],
["2001-06-13", 105],
["2001-06-14", 74],
["2001-06-15", 50],
["2001-06-16", 60],
["2001-06-17", 82],
["2001-06-18", 111],
["2001-06-19", 89],
["2001-06-20", 81],
["2001-06-21", 76],
["2001-06-22", 70],
["2001-06-23", 74],
["2001-06-24", 99],
["2001-06-25", 91],
["2001-06-26", 113],
["2001-06-27", 93],
["2001-06-28", 69],
["2001-06-29", 74],
["2001-06-30", 75],
["2001-07-01", 108],
["2001-07-02", 115],
["2001-07-03", 86],
["2001-07-04", 67],
["2001-07-05", 68],
["2001-07-06", 74],
["2001-07-07", 69],
["2001-07-08", 95],
["2001-07-09", 99],
["2001-07-10", 92],
["2001-07-11", 84],
["2001-07-12", 77],
["2001-07-13", 69],
["2001-07-14", 62],
["2001-07-15", 83],
["2001-07-16", 101],
["2001-07-17", 98],
["2001-07-18", 89],
["2001-07-19", 82],
["2001-07-20", 105],
["2001-07-21", 79],
["2001-07-22", 48],
["2001-07-23", 119],
["2001-07-24", 126],
["2001-07-25", 44],
["2001-07-26", 42],
["2001-07-27", 86],
["2001-07-28", 68],
["2001-07-29", 93],
["2001-07-30", 89],
["2001-07-31", 76],
["2001-08-01", 54],
["2001-08-02", 53],
["2001-08-03", 35],
["2001-08-04", 65],
["2001-08-05", 108],
["2001-08-06", 114],
["2001-08-07", 90],
["2001-08-08", 63],
["2001-08-09", 79],
["2001-08-10", 102],
["2001-08-11", 100],
["2001-08-12", 107],
["2001-08-13", 81],
["2001-08-14", 79],
["2001-08-15", 116],
["2001-08-16", 98],
["2001-08-17", 96],
["2001-08-18", 94],
["2001-08-19", 63],
["2001-08-20", 39],
["2001-08-21", 81],
["2001-08-22", 73],
["2001-08-23", 66],
["2001-08-24", 52],
["2001-08-25", 64],
["2001-08-26", 61],
["2001-08-27", 83],
["2001-08-28", 85],
["2001-08-29", 99],
["2001-08-30", 97],
["2001-08-31", 93],
["2001-09-01", 86],
["2001-09-02", 105],
["2001-09-03", 98],
["2001-09-04", 109],
["2001-09-05", 92],
["2001-09-06", 68],
["2001-09-07", 92],
["2001-09-08", 72],
["2001-09-09", 64],
["2001-09-10", 88],
["2001-09-11", 97],
["2001-09-12", 102],
["2001-09-13", 103],
["2001-09-14", 120],
["2001-09-15", 94],
["2001-09-16", 95],
["2001-09-17", 93],
["2001-09-18", 56],
["2001-09-19", 98],
["2001-09-20", 81],
["2001-09-21", 100],
["2001-09-22", 75],
["2001-09-23", 84],
["2001-09-24", 91],
["2001-09-25", 70],
["2001-09-26", 96],
["2001-09-27", 128],
["2001-09-28", 92],
["2001-09-29", 107],
["2001-09-30", 95],
["2001-10-01", 63],
["2001-10-02", 115],
["2001-10-03", 69],
["2001-10-04", 47],
["2001-10-05", 86],
["2001-10-06", 122],
["2001-10-07", 104],
["2001-10-08", 122],
["2001-10-09", 49],
["2001-10-10", 36],
["2001-10-11", 83],
["2001-10-12", 107],
["2001-10-13", 126],
["2001-10-14", 126],
["2001-10-15", 78],
["2001-10-16", 72],
["2001-10-17", 76],
["2001-10-18", 87],
["2001-10-19", 143],
["2001-10-20", 259],
["2001-10-21", 183],
["2001-10-22", 276],
["2001-10-23", 232],
["2001-10-24", 167],
["2001-10-25", 105],
["2001-10-26", 129],
["2001-10-27", 140],
["2001-10-28", 61],
["2001-10-29", 85],
["2001-10-30", 155],
["2001-11-01", 38],
["2001-11-02", 106],
["2001-11-03", 134],
["2001-11-04", 57],
["2001-11-05", 51],
["2001-11-06", 68],
["2001-11-07", 129],
["2001-11-08", 158],
["2001-11-09", 85],
["2001-11-10", 121],
["2001-11-11", 161],
["2001-11-12", 94],
["2001-11-13", 58],
["2001-11-14", 57],
["2001-11-15", 71],
["2001-11-16", 105],
["2001-11-17", 66],
["2001-11-18", 117],
["2001-11-19", 87],
["2001-11-20", 88],
["2001-11-21", 131],
["2001-11-22", 151],
["2001-11-23", 310],
["2001-11-24", 161],
["2001-11-25", 23],
["2001-11-26", 52],
["2001-11-27", 82],
["2001-11-28", 128],
["2001-11-29", 115],
["2001-11-30", 63],
["2001-12-02", 102],
["2001-12-03", 96],
["2001-12-04", 107],
["2001-12-05", 89],
["2001-12-06", 59],
["2001-12-07", 100],
["2001-12-08", 136],
["2001-12-09", 137],
["2001-12-10", 119],
["2001-12-11", 112],
["2001-12-12", 186],
["2001-12-13", 192],
["2001-12-14", 83],
["2001-12-15", 97],
["2001-12-16", 113],
["2001-12-18", 89],
["2001-12-19", 106],
["2001-12-20", 119],
["2001-12-21", 62],
["2001-12-22", 79],
["2001-12-23", 58],
["2001-12-24", 61],
["2001-12-25", 64],
["2001-12-26", 108],
["2001-12-27", 101],
["2001-12-28", 82],
["2001-12-29", 85],
["2001-12-30", 98],
["2001-12-31", 132],
["2002-01-01", 88],
["2002-01-02", 97],
["2002-01-03", 116],
["2002-01-04", 111],
["2002-01-05", 81],
["2002-01-06", 78],
["2002-01-07", 138],
["2002-01-08", 100],
["2002-01-09", 157],
["2002-01-10", 349],
["2002-01-11", 196],
["2002-01-12", 190],
["2002-01-13", 100],
["2002-01-14", 103],
["2002-01-15", 160],
["2002-01-16", 97],
["2002-01-17", 103],
["2002-01-18", 123],
["2002-01-19", 137],
["2002-01-20", 268],
["2002-01-21", 52],
["2002-01-22", 44],
["2002-01-23", 66],
["2002-01-24", 106],
["2002-01-25", 94],
["2002-01-26", 96],
["2002-01-27", 58],
["2002-01-28", 62],
["2002-01-29", 56],
["2002-01-30", 62],
["2002-01-31", 109],
["2002-02-01", 96],
["2002-02-02", 95],
["2002-02-03", 126],
["2002-02-04", 161],
["2002-02-05", 138],
["2002-02-06", 106],
["2002-02-07", 99],
["2002-02-08", 113],
["2002-02-09", 80],
["2002-02-10", 90],
["2002-02-11", 86],
["2002-02-12", 142],
["2002-02-13", 93],
["2002-02-14", 125],
["2002-02-15", 135],
["2002-02-16", 138],
["2002-02-17", 111],
["2002-02-18", 70],
["2002-02-19", 101],
["2002-02-20", 153],
["2002-02-21", 146],
["2002-02-22", 97],
["2002-02-23", 82],
["2002-02-24", 99],
["2002-02-25", 131],
["2002-02-26", 88],
["2002-02-27", 74],
["2002-02-28", 96],
["2002-03-01", 133],
["2002-03-02", 105],
["2002-03-03", 86],
["2002-03-04", 105],
["2002-03-05", 89],
["2002-03-06", 70],
["2002-03-07", 87],
["2002-03-08", 109],
["2002-03-09", 161],
["2002-03-10", 83],
["2002-03-11", 129],
["2002-03-12", 107],
["2002-03-13", 89],
["2002-03-14", 186],
["2002-03-15", 108],
["2002-03-16", 500],
["2002-03-17", 188],
["2002-03-18", 102],
["2002-03-19", 139],
["2002-03-20", 155],
["2002-03-21", 500],
["2002-03-22", 370],
["2002-03-23", 164],
["2002-03-24", 105],
["2002-03-25", 156],
["2002-03-26", 180],
["2002-03-27", 105],
["2002-03-28", 126],
["2002-03-29", 120],
["2002-03-30", 122],
["2002-03-31", 118],
["2002-04-01", 188],
["2002-04-02", 260],
["2002-04-03", 296],
["2002-04-04", 118],
["2002-04-05", 132],
["2002-04-06", 80],
["2002-04-07", 500],
["2002-04-08", 500],
["2002-04-09", 253],
["2002-04-10", 67],
["2002-04-11", 110],
["2002-04-13", 133],
["2002-04-14", 246],
["2002-04-15", 324],
["2002-04-16", 225],
["2002-04-17", 120],
["2002-04-18", 121],
["2002-04-19", 131],
["2002-04-20", 148],
["2002-04-21", 174],
["2002-04-22", 106],
["2002-04-23", 32],
["2002-04-24", 86],
["2002-04-25", 92],
["2002-04-26", 117],
["2002-04-27", 110],
["2002-04-28", 90],
["2002-04-29", 86],
["2002-04-30", 106],
["2002-05-01", 84],
["2002-05-02", 76],
["2002-05-03", 92],
["2002-05-04", 85],
["2002-05-05", 79],
["2002-05-07", 92],
["2002-05-08", 99],
["2002-05-09", 105],
["2002-05-10", 105],
["2002-05-11", 78],
["2002-05-12", 125],
["2002-05-13", 113],
["2002-05-14", 90],
["2002-05-15", 89],
["2002-05-16", 99],
["2002-05-17", 94],
["2002-05-18", 109],
["2002-05-19", 105],
["2002-05-20", 115],
["2002-05-21", 110],
["2002-05-22", 54],
["2002-05-23", 76],
["2002-05-24", 83],
["2002-05-25", 75],
["2002-05-26", 89],
["2002-05-27", 97],
["2002-05-28", 113],
["2002-05-29", 106],
["2002-05-30", 86],
["2002-05-31", 108],
["2002-06-01", 115],
["2002-06-02", 106],
["2002-06-03", 99],
["2002-06-04", 151],
["2002-06-05", 118],
["2002-06-06", 139],
["2002-06-07", 161],
["2002-06-08", 77],
["2002-06-09", 72],
["2002-06-10", 36],
["2002-06-11", 81],
["2002-06-12", 67],
["2002-06-13", 56],
["2002-06-14", 73],
["2002-06-15", 75],
["2002-06-16", 80],
["2002-06-17", 122],
["2002-06-19", 142],
["2002-06-20", 77],
["2002-06-21", 68],
["2002-06-22", 77],
["2002-06-23", 50],
["2002-06-24", 51],
["2002-06-25", 40],
["2002-06-26", 46],
["2002-06-27", 65],
["2002-06-28", 110],
["2002-06-29", 104],
["2002-06-30", 85],
["2002-07-01", 126],
["2002-07-02", 88],
["2002-07-03", 112],
["2002-07-04", 108],
["2002-07-05", 98],
["2002-07-06", 88],
["2002-07-07", 68],
["2002-07-08", 87],
["2002-07-09", 83],
["2002-07-10", 87],
["2002-07-11", 127],
["2002-07-12", 111],
["2002-07-13", 108],
["2002-07-14", 91],
["2002-07-15", 89],
["2002-07-16", 75],
["2002-07-17", 88],
["2002-07-18", 76],
["2002-07-19", 62],
["2002-07-20", 55],
["2002-07-21", 66],
["2002-07-22", 67],
["2002-07-23", 62],
["2002-07-24", 113],
["2002-07-25", 81],
["2002-07-26", 66],
["2002-07-27", 86],
["2002-07-28", 47],
["2002-07-29", 44],
["2002-07-30", 79],
["2002-07-31", 137],
["2002-08-01", 160],
["2002-08-02", 89],
["2002-08-03", 96],
["2002-08-04", 63],
["2002-08-05", 53],
["2002-08-06", 50],
["2002-08-07", 44],
["2002-08-08", 74],
["2002-08-09", 64],
["2002-08-10", 72],
["2002-08-11", 94],
["2002-08-12", 71],
["2002-08-13", 124],
["2002-08-14", 129],
["2002-08-15", 155],
["2002-08-16", 156],
["2002-08-17", 125],
["2002-08-18", 130],
["2002-08-19", 66],
["2002-08-20", 91],
["2002-08-21", 114],
["2002-08-22", 112],
["2002-08-23", 102],
["2002-08-24", 72],
["2002-08-25", 76],
["2002-08-26", 77],
["2002-08-27", 86],
["2002-08-28", 92],
["2002-08-29", 108],
["2002-08-30", 100],
["2002-08-31", 122],
["2002-09-01", 164],
["2002-09-02", 111],
["2002-09-03", 52],
["2002-09-04", 70],
["2002-09-05", 59],
["2002-09-06", 82],
["2002-09-07", 96],
["2002-09-08", 92],
["2002-09-09", 124],
["2002-09-10", 98],
["2002-09-11", 45],
["2002-09-12", 37],
["2002-09-13", 81],
["2002-09-14", 90],
["2002-09-15", 98],
["2002-09-16", 97],
["2002-09-17", 111],
["2002-09-18", 125],
["2002-09-19", 83],
["2002-09-20", 41],
["2002-09-21", 87],
["2002-09-22", 56],
["2002-09-23", 72],
["2002-09-25", 182],
["2002-09-26", 183],
["2002-09-27", 70],
["2002-09-28", 44],
["2002-09-29", 62],
["2002-09-30", 100],
["2002-10-01", 121],
["2002-10-02", 62],
["2002-10-03", 70],
["2002-10-04", 99],
["2002-10-05", 89],
["2002-10-06", 52],
["2002-10-07", 37],
["2002-10-08", 64],
["2002-10-09", 135],
["2002-10-10", 232],
["2002-10-11", 365],
["2002-10-12", 198],
["2002-10-13", 53],
["2002-10-14", 121],
["2002-10-15", 83],
["2002-10-16", 100],
["2002-10-17", 169],
["2002-10-18", 75],
["2002-10-20", 72],
["2002-10-21", 51],
["2002-10-22", 50],
["2002-10-23", 95],
["2002-10-24", 88],
["2002-10-26", 59],
["2002-10-27", 30],
["2002-10-28", 48],
["2002-10-29", 109],
["2002-10-30", 146],
["2002-10-31", 76],
["2002-11-01", 33],
["2002-11-02", 52],
["2002-11-03", 54],
["2002-11-04", 70],
["2002-11-05", 107],
["2002-11-06", 96],
["2002-11-07", 76],
["2002-11-08", 37],
["2002-11-09", 94],
["2002-11-10", 182],
["2002-11-11", 452],
["2002-11-12", 66],
["2002-11-13", 56],
["2002-11-14", 80],
["2002-11-15", 85],
["2002-11-16", 104],
["2002-11-17", 43],
["2002-11-18", 52],
["2002-11-19", 115],
["2002-11-20", 143],
["2002-11-21", 75],
["2002-11-22", 110],
["2002-11-23", 134],
["2002-11-24", 129],
["2002-11-25", 153],
["2002-11-26", 54],
["2002-11-27", 114],
["2002-11-28", 145],
["2002-11-29", 87],
["2002-11-30", 138],
["2002-12-01", 198],
["2002-12-02", 273],
["2002-12-03", 395],
["2002-12-04", 498],
["2002-12-05", 97],
["2002-12-06", 112],
["2002-12-07", 97],
["2002-12-08", 86],
["2002-12-09", 97],
["2002-12-10", 99],
["2002-12-12", 151],
["2002-12-13", 135],
["2002-12-14", 193],
["2002-12-15", 153],
["2002-12-16", 95],
["2002-12-17", 91],
["2002-12-18", 137],
["2002-12-19", 98],
["2002-12-20", 77],
["2002-12-21", 95],
["2002-12-22", 96],
["2002-12-23", 83],
["2002-12-24", 71],
["2002-12-25", 53],
["2002-12-26", 69],
["2002-12-27", 75],
["2002-12-28", 106],
["2002-12-29", 90],
["2002-12-30", 106],
["2002-12-31", 64],
["2003-01-01", 105],
["2003-01-02", 100],
["2003-01-03", 69],
["2003-01-04", 55],
["2003-01-05", 65],
["2003-01-06", 112],
["2003-01-07", 83],
["2003-01-08", 131],
["2003-01-09", 151],
["2003-01-10", 93],
["2003-01-11", 97],
["2003-01-12", 104],
["2003-01-13", 92],
["2003-01-14", 53],
["2003-01-15", 105],
["2003-01-16", 159],
["2003-01-17", 106],
["2003-01-18", 89],
["2003-01-19", 88],
["2003-01-20", 87],
["2003-01-21", 99],
["2003-01-22", 117],
["2003-01-23", 72],
["2003-01-24", 109],
["2003-01-25", 91],
["2003-01-26", 100],
["2003-01-27", 48],
["2003-01-28", 58],
["2003-01-29", 65],
["2003-01-30", 105],
["2003-01-31", 87],
["2003-02-01", 148],
["2003-02-02", 109],
["2003-02-03", 96],
["2003-02-04", 87],
["2003-02-05", 56],
["2003-02-06", 105],
["2003-02-07", 126],
["2003-02-08", 164],
["2003-02-09", 113],
["2003-02-10", 54],
["2003-02-11", 47],
["2003-02-12", 93],
["2003-02-13", 83],
["2003-02-14", 91],
["2003-02-15", 135],
["2003-02-16", 65],
["2003-02-17", 100],
["2003-02-18", 147],
["2003-02-19", 56],
["2003-02-20", 89],
["2003-02-21", 107],
["2003-02-22", 99],
["2003-02-23", 124],
["2003-02-24", 152],
["2003-02-25", 115],
["2003-02-26", 87],
["2003-02-27", 76],
["2003-02-28", 93],
["2003-03-01", 172],
["2003-03-02", 235],
["2003-03-03", 65],
["2003-03-04", 55],
["2003-03-05", 93],
["2003-03-06", 96],
["2003-03-07", 127],
["2003-03-08", 71],
["2003-03-09", 88],
["2003-03-10", 81],
["2003-03-11", 115],
["2003-03-12", 54],
["2003-03-13", 94],
["2003-03-14", 92],
["2003-03-15", 98],
["2003-03-17", 73],
["2003-03-18", 69],
["2003-03-19", 156],
["2003-03-20", 93],
["2003-03-21", 37],
["2003-03-22", 92],
["2003-03-23", 114],
["2003-03-24", 124],
["2003-03-25", 108],
["2003-03-26", 106],
["2003-03-27", 39],
["2003-03-28", 66],
["2003-03-29", 126],
["2003-03-30", 282],
["2003-03-31", 136],
["2003-04-01", 92],
["2003-04-02", 54],
["2003-04-03", 81],
["2003-04-04", 89],
["2003-04-05", 115],
["2003-04-06", 108],
["2003-04-07", 100],
["2003-04-08", 55],
["2003-04-09", 75],
["2003-04-10", 88],
["2003-04-11", 94],
["2003-04-12", 143],
["2003-04-13", 62],
["2003-04-14", 138],
["2003-04-15", 187],
["2003-04-16", 157],
["2003-04-17", 154],
["2003-04-18", 56],
["2003-04-19", 54],
["2003-04-20", 57],
["2003-04-21", 46],
["2003-04-22", 82],
["2003-04-24", 179],
["2003-04-25", 138],
["2003-04-26", 147],
["2003-04-28", 147],
["2003-04-29", 106],
["2003-04-30", 95],
["2003-05-01", 107],
["2003-05-02", 102],
["2003-05-03", 120],
["2003-05-04", 117],
["2003-05-05", 87],
["2003-05-06", 71],
["2003-05-07", 58],
["2003-05-08", 95],
["2003-05-09", 117],
["2003-05-10", 142],
["2003-05-11", 104],
["2003-05-12", 124],
["2003-05-13", 100],
["2003-05-14", 82],
["2003-05-15", 77],
["2003-05-16", 70],
["2003-05-17", 34],
["2003-05-18", 60],
["2003-05-19", 83],
["2003-05-20", 107],
["2003-05-21", 126],
["2003-05-22", 93],
["2003-05-23", 100],
["2003-05-24", 96],
["2003-05-25", 87],
["2003-05-26", 116],
["2003-05-27", 111],
["2003-05-28", 90],
["2003-05-29", 68],
["2003-05-30", 96],
["2003-05-31", 86],
["2003-06-01", 131],
["2003-06-02", 110],
["2003-06-03", 119],
["2003-06-04", 126],
["2003-06-05", 67],
["2003-06-06", 86],
["2003-06-07", 81],
["2003-06-08", 104],
["2003-06-09", 71],
["2003-06-10", 35],
["2003-06-11", 57],
["2003-06-12", 56],
["2003-06-13", 57],
["2003-06-14", 40],
["2003-06-15", 72],
["2003-06-16", 96],
["2003-06-17", 137],
["2003-06-18", 180],
["2003-06-19", 171],
["2003-06-20", 167],
["2003-06-21", 173],
["2003-06-22", 124],
["2003-06-23", 79],
["2003-06-24", 29],
["2003-06-25", 76],
["2003-06-26", 96],
["2003-06-27", 89],
["2003-06-28", 67],
["2003-06-29", 51],
["2003-06-30", 92],
["2003-07-01", 94],
["2003-07-02", 100],
["2003-07-03", 129],
["2003-07-04", 128],
["2003-07-05", 44],
["2003-07-06", 64],
["2003-07-07", 59],
["2003-07-08", 75],
["2003-07-09", 41],
["2003-07-10", 85],
["2003-07-11", 91],
["2003-07-12", 125],
["2003-07-13", 108],
["2003-07-14", 116],
["2003-07-15", 135],
["2003-07-16", 111],
["2003-07-17", 95],
["2003-07-18", 79],
["2003-07-19", 75],
["2003-07-20", 104],
["2003-07-21", 82],
["2003-07-22", 80],
["2003-07-23", 99],
["2003-07-24", 110],
["2003-07-25", 96],
["2003-07-26", 163],
["2003-07-27", 126],
["2003-07-28", 69],
["2003-07-29", 98],
["2003-07-30", 68],
["2003-07-31", 75],
["2003-08-01", 109],
["2003-08-02", 75],
["2003-08-03", 102],
["2003-08-04", 115],
["2003-08-05", 110],
["2003-08-06", 93],
["2003-08-07", 80],
["2003-08-08", 65],
["2003-08-09", 64],
["2003-08-10", 64],
["2003-08-11", 58],
["2003-08-13", 123],
["2003-08-14", 87],
["2003-08-15", 88],
["2003-08-16", 89],
["2003-08-17", 86],
["2003-08-18", 91],
["2003-08-19", 132],
["2003-08-20", 85],
["2003-08-21", 96],
["2003-08-22", 90],
["2003-08-23", 78],
["2003-08-24", 79],
["2003-08-25", 76],
["2003-08-26", 84],
["2003-08-27", 88],
["2003-08-28", 57],
["2003-08-29", 44],
["2003-08-30", 78],
["2003-08-31", 95],
["2003-09-01", 93],
["2003-09-02", 86],
["2003-09-03", 108],
["2003-09-04", 124],
["2003-09-05", 70],
["2003-09-06", 113],
["2003-09-07", 82],
["2003-09-08", 111],
["2003-09-09", 59],
["2003-09-10", 60],
["2003-09-11", 89],
["2003-09-12", 132],
["2003-09-13", 133],
["2003-09-14", 112],
["2003-09-15", 69],
["2003-09-16", 132],
["2003-09-17", 75],
["2003-09-18", 37],
["2003-09-19", 37],
["2003-09-20", 79],
["2003-09-21", 89],
["2003-09-22", 121],
["2003-09-23", 74],
["2003-09-24", 88],
["2003-09-26", 66],
["2003-09-27", 32],
["2003-09-28", 73],
["2003-09-29", 92],
["2003-09-30", 57],
["2003-10-01", 67],
["2003-10-02", 34],
["2003-10-03", 45],
["2003-10-05", 115],
["2003-10-06", 153],
["2003-10-07", 127],
["2003-10-08", 116],
["2003-10-09", 152],
["2003-10-10", 130],
["2003-10-11", 24],
["2003-10-12", 17],
["2003-10-13", 60],
["2003-10-14", 56],
["2003-10-15", 51],
["2003-10-16", 56],
["2003-10-17", 80],
["2003-10-18", 56],
["2003-10-19", 98],
["2003-10-20", 145],
["2003-10-21", 121],
["2003-10-22", 41],
["2003-10-23", 86],
["2003-10-24", 121],
["2003-10-25", 69],
["2003-10-26", 116],
["2003-10-27", 165],
["2003-10-29", 120],
["2003-10-30", 171],
["2003-10-31", 289],
["2003-11-01", 500],
["2003-11-02", 181],
["2003-11-03", 28],
["2003-11-04", 92],
["2003-11-05", 146],
["2003-11-06", 44],
["2003-11-07", 22],
["2003-11-08", 25],
["2003-11-09", 51],
["2003-11-10", 74],
["2003-11-11", 51],
["2003-11-12", 106],
["2003-11-13", 149],
["2003-11-14", 213],
["2003-11-15", 130],
["2003-11-16", 32],
["2003-11-17", 116],
["2003-11-18", 162],
["2003-11-19", 173],
["2003-11-20", 118],
["2003-11-21", 20],
["2003-11-22", 85],
["2003-11-23", 161],
["2003-11-24", 186],
["2003-11-25", 147],
["2003-11-26", 57],
["2003-11-27", 88],
["2003-11-28", 107],
["2003-11-29", 159],
["2003-11-30", 147],
["2003-12-01", 153],
["2003-12-02", 135],
["2003-12-03", 99],
["2003-12-04", 92],
["2003-12-05", 109],
["2003-12-06", 99],
["2003-12-07", 57],
["2003-12-08", 64],
["2003-12-09", 79],
["2003-12-10", 143],
["2003-12-11", 93],
["2003-12-12", 52],
["2003-12-13", 95],
["2003-12-14", 141],
["2003-12-15", 59],
["2003-12-16", 109],
["2003-12-17", 58],
["2003-12-18", 60],
["2003-12-19", 52],
["2003-12-20", 71],
["2003-12-21", 110],
["2003-12-22", 107],
["2003-12-23", 114],
["2003-12-24", 98],
["2003-12-25", 96],
["2003-12-26", 48],
["2003-12-27", 89],
["2003-12-28", 130],
["2003-12-29", 90],
["2003-12-30", 106],
["2003-12-31", 111],
["2004-01-01", 128],
["2004-01-02", 83],
["2004-01-03", 60],
["2004-01-04", 109],
["2004-01-05", 137],
["2004-01-06", 147],
["2004-01-07", 99],
["2004-01-08", 73],
["2004-01-09", 126],
["2004-01-10", 73],
["2004-01-11", 72],
["2004-01-12", 87],
["2004-01-13", 85],
["2004-01-14", 115],
["2004-01-15", 121],
["2004-01-16", 97],
["2004-01-17", 109],
["2004-01-18", 74],
["2004-01-19", 52],
["2004-01-20", 49],
["2004-01-21", 41],
["2004-01-22", 64],
["2004-01-23", 80],
["2004-01-24", 38],
["2004-01-25", 58],
["2004-01-26", 106],
["2004-01-27", 57],
["2004-01-28", 106],
["2004-01-29", 111],
["2004-01-31", 118],
["2004-02-01", 109],
["2004-02-02", 53],
["2004-02-03", 50],
["2004-02-04", 59],
["2004-02-06", 56],
["2004-02-07", 68],
["2004-02-08", 52],
["2004-02-09", 68],
["2004-02-10", 130],
["2004-02-11", 95],
["2004-02-12", 103],
["2004-02-13", 124],
["2004-02-14", 95],
["2004-02-15", 92],
["2004-02-16", 95],
["2004-02-17", 135],
["2004-02-18", 242],
["2004-02-19", 451],
["2004-02-20", 140],
["2004-02-21", 109],
["2004-02-23", 88],
["2004-02-24", 164],
["2004-02-25", 145],
["2004-02-26", 46],
["2004-02-27", 85],
["2004-02-28", 125],
["2004-02-29", 54],
["2004-03-01", 83],
["2004-03-02", 73],
["2004-03-03", 60],
["2004-03-04", 85],
["2004-03-05", 73],
["2004-03-06", 51],
["2004-03-07", 56],
["2004-03-08", 108],
["2004-03-09", 179],
["2004-03-10", 446],
["2004-03-11", 84],
["2004-03-13", 104],
["2004-03-14", 87],
["2004-03-15", 143],
["2004-03-16", 206],
["2004-03-17", 77],
["2004-03-19", 114],
["2004-03-20", 87],
["2004-03-21", 92],
["2004-03-22", 165],
["2004-03-23", 104],
["2004-03-24", 33],
["2004-03-25", 88],
["2004-03-26", 137],
["2004-03-27", 151],
["2004-03-28", 338],
["2004-03-29", 239],
["2004-03-30", 139],
["2004-03-31", 79],
["2004-04-01", 123],
["2004-04-02", 64],
["2004-04-03", 51],
["2004-04-05", 133],
["2004-04-06", 93],
["2004-04-07", 39],
["2004-04-08", 111],
["2004-04-09", 145],
["2004-04-10", 193],
["2004-04-11", 131],
["2004-04-12", 131],
["2004-04-13", 108],
["2004-04-14", 95],
["2004-04-15", 141],
["2004-04-16", 186],
["2004-04-17", 156],
["2004-04-18", 260],
["2004-04-19", 138],
["2004-04-20", 133],
["2004-04-21", 107],
["2004-04-22", 143],
["2004-04-23", 61],
["2004-04-24", 109],
["2004-04-25", 151],
["2004-04-26", 63],
["2004-04-27", 63],
["2004-04-28", 79],
["2004-04-29", 138],
["2004-04-30", 47],
["2004-05-01", 67],
["2004-05-02", 84],
["2004-05-03", 95],
["2004-05-04", 73],
["2004-05-05", 89],
["2004-05-06", 91],
["2004-05-07", 152],
["2004-05-08", 189],
["2004-05-09", 92],
["2004-05-10", 97],
["2004-05-11", 107],
["2004-05-12", 81],
["2004-05-13", 89],
["2004-05-14", 93],
["2004-05-15", 92],
["2004-05-16", 50],
["2004-05-17", 61],
["2004-05-18", 66],
["2004-05-19", 77],
["2004-05-21", 56],
["2004-05-22", 65],
["2004-05-23", 86],
["2004-05-24", 134],
["2004-05-25", 141],
["2004-05-26", 30],
["2004-05-27", 83],
["2004-05-28", 111],
["2004-05-29", 56],
["2004-05-30", 66],
["2004-05-31", 56],
["2004-06-01", 100],
["2004-06-02", 109],
["2004-06-03", 118],
["2004-06-04", 107],
["2004-06-05", 74],
["2004-06-06", 58],
["2004-06-07", 88],
["2004-06-08", 100],
["2004-06-09", 109],
["2004-06-10", 125],
["2004-06-11", 114],
["2004-06-12", 110],
["2004-06-13", 118],
["2004-06-14", 135],
["2004-06-15", 147],
["2004-06-16", 99],
["2004-06-17", 29],
["2004-06-18", 75],
["2004-06-19", 73],
["2004-06-20", 97],
["2004-06-21", 102],
["2004-06-22", 93],
["2004-06-23", 78],
["2004-06-24", 58],
["2004-06-25", 61],
["2004-06-26", 100],
["2004-06-27", 106],
["2004-06-28", 139],
["2004-06-29", 152],
["2004-06-30", 49],
["2004-07-01", 46],
["2004-07-02", 85],
["2004-07-03", 97],
["2004-07-04", 58],
["2004-07-05", 56],
["2004-07-06", 59],
["2004-07-07", 74],
["2004-07-08", 63],
["2004-07-09", 59],
["2004-07-10", 91],
["2004-07-11", 70],
["2004-07-12", 53],
["2004-07-13", 55],
["2004-07-14", 67],
["2004-07-15", 97],
["2004-07-16", 123],
["2004-07-17", 118],
["2004-07-18", 100],
["2004-07-19", 80],
["2004-07-20", 135],
["2004-07-21", 67],
["2004-07-22", 70],
["2004-07-23", 105],
["2004-07-24", 55],
["2004-07-25", 78],
["2004-07-26", 78],
["2004-07-27", 59],
["2004-07-28", 111],
["2004-07-29", 78],
["2004-07-30", 30],
["2004-07-31", 78],
["2004-08-01", 91],
["2004-08-02", 119],
["2004-08-03", 95],
["2004-08-04", 73],
["2004-08-05", 76],
["2004-08-06", 89],
["2004-08-07", 117],
["2004-08-08", 145],
["2004-08-09", 143],
["2004-08-10", 84],
["2004-08-11", 84],
["2004-08-12", 51],
["2004-08-13", 31],
["2004-08-14", 83],
["2004-08-15", 76],
["2004-08-16", 51],
["2004-08-17", 67],
["2004-08-18", 75],
["2004-08-19", 68],
["2004-08-20", 80],
["2004-08-21", 99],
["2004-08-22", 70],
["2004-08-23", 60],
["2004-08-24", 105],
["2004-08-25", 122],
["2004-08-26", 100],
["2004-08-27", 125],
["2004-08-28", 70],
["2004-08-29", 57],
["2004-08-30", 79],
["2004-08-31", 68],
["2004-09-01", 61],
["2004-09-02", 67],
["2004-09-03", 77],
["2004-09-04", 64],
["2004-09-05", 96],
["2004-09-06", 101],
["2004-09-07", 24],
["2004-09-08", 61],
["2004-09-09", 80],
["2004-09-10", 85],
["2004-09-11", 88],
["2004-09-12", 95],
["2004-09-13", 101],
["2004-09-14", 140],
["2004-09-15", 34],
["2004-09-16", 81],
["2004-09-17", 89],
["2004-09-18", 86],
["2004-09-19", 71],
["2004-09-20", 94],
["2004-09-21", 40],
["2004-09-22", 84],
["2004-09-23", 122],
["2004-09-24", 197],
["2004-09-25", 179],
["2004-09-26", 111],
["2004-09-27", 114],
["2004-09-29", 134],
["2004-09-30", 141],
["2004-10-01", 17],
["2004-10-02", 59],
["2004-10-03", 83],
["2004-10-04", 118],
["2004-10-05", 153],
["2004-10-06", 166],
["2004-10-07", 325],
["2004-10-08", 402],
["2004-10-09", 263],
["2004-10-10", 374],
["2004-10-11", 127],
["2004-10-12", 37],
["2004-10-13", 62],
["2004-10-14", 67],
["2004-10-15", 99],
["2004-10-16", 116],
["2004-10-17", 110],
["2004-10-18", 126],
["2004-10-19", 149],
["2004-10-20", 110],
["2004-10-21", 56],
["2004-10-22", 59],
["2004-10-23", 97],
["2004-10-24", 146],
["2004-10-25", 142],
["2004-10-26", 34],
["2004-10-27", 79],
["2004-10-28", 154],
["2004-10-29", 191],
["2004-10-30", 219],
["2004-10-31", 157],
["2004-11-01", 35],
["2004-11-02", 39],
["2004-11-03", 124],
["2004-11-04", 164],
["2004-11-05", 56],
["2004-11-06", 92],
["2004-11-07", 133],
["2004-11-08", 173],
["2004-11-09", 86],
["2004-11-10", 77],
["2004-11-11", 62],
["2004-11-12", 45],
["2004-11-13", 93],
["2004-11-14", 160],
["2004-11-15", 54],
["2004-11-16", 67],
["2004-11-17", 65],
["2004-11-18", 99],
["2004-11-19", 97],
["2004-11-20", 47],
["2004-11-21", 93],
["2004-11-22", 165],
["2004-11-23", 156],
["2004-11-24", 89],
["2004-11-25", 41],
["2004-11-26", 53],
["2004-11-27", 89],
["2004-11-28", 99],
["2004-11-29", 81],
["2004-11-30", 139],
["2004-12-01", 275],
["2004-12-02", 270],
["2004-12-03", 330],
["2004-12-04", 97],
["2004-12-05", 37],
["2004-12-06", 97],
["2004-12-07", 89],
["2004-12-08", 170],
["2004-12-09", 248],
["2004-12-10", 97],
["2004-12-11", 181],
["2004-12-12", 123],
["2004-12-13", 89],
["2004-12-14", 198],
["2004-12-15", 305],
["2004-12-16", 86],
["2004-12-17", 92],
["2004-12-18", 143],
["2004-12-19", 82],
["2004-12-20", 23],
["2004-12-21", 81],
["2004-12-22", 88],
["2004-12-23", 75],
["2004-12-24", 99],
["2004-12-25", 150],
["2004-12-26", 97],
["2004-12-27", 44],
["2004-12-28", 49],
["2004-12-29", 61],
["2004-12-30", 80],
["2004-12-31", 45],
["2005-01-01", 63],
["2005-01-02", 118],
["2005-01-03", 100],
["2005-01-04", 52],
["2005-01-05", 104],
["2005-01-06", 147],
["2005-01-07", 48],
["2005-01-08", 56],
["2005-01-09", 44],
["2005-01-10", 96],
["2005-01-11", 67],
["2005-01-12", 52],
["2005-01-13", 83],
["2005-01-14", 65],
["2005-01-15", 67],
["2005-01-16", 87],
["2005-01-17", 111],
["2005-01-18", 47],
["2005-01-19", 55],
["2005-01-20", 57],
["2005-01-21", 85],
["2005-01-22", 119],
["2005-01-23", 174],
["2005-01-24", 143],
["2005-01-25", 95],
["2005-01-26", 115],
["2005-01-27", 173],
["2005-01-28", 163],
["2005-01-29", 95],
["2005-01-30", 50],
["2005-01-31", 69],
["2005-02-01", 69],
["2005-02-02", 47],
["2005-02-03", 96],
["2005-02-04", 79],
["2005-02-05", 46],
["2005-02-06", 68],
["2005-02-07", 71],
["2005-02-08", 68],
["2005-02-09", 84],
["2005-02-10", 38],
["2005-02-11", 71],
["2005-02-12", 102],
["2005-02-13", 122],
["2005-02-14", 153],
["2005-02-15", 150],
["2005-02-16", 69],
["2005-02-17", 105],
["2005-02-18", 60],
["2005-02-19", 42],
["2005-02-20", 47],
["2005-02-21", 87],
["2005-02-22", 102],
["2005-02-23", 30],
["2005-02-24", 55],
["2005-02-25", 46],
["2005-02-26", 64],
["2005-02-27", 95],
["2005-02-28", 61],
["2005-03-01", 64],
["2005-03-02", 74],
["2005-03-03", 57],
["2005-03-04", 46],
["2005-03-05", 58],
["2005-03-06", 114],
["2005-03-07", 108],
["2005-03-08", 82],
["2005-03-09", 80],
["2005-03-10", 110],
["2005-03-11", 67],
["2005-03-12", 59],
["2005-03-13", 36],
["2005-03-14", 69],
["2005-03-15", 99],
["2005-03-16", 120],
["2005-03-17", 109],
["2005-03-18", 52],
["2005-03-19", 96],
["2005-03-20", 119],
["2005-03-21", 94],
["2005-03-22", 151],
["2005-03-23", 90],
["2005-03-24", 63],
["2005-03-25", 99],
["2005-03-26", 133],
["2005-03-27", 161],
["2005-03-28", 141],
["2005-03-29", 48],
["2005-03-30", 122],
["2005-03-31", 113],
["2005-04-01", 83],
["2005-04-02", 82],
["2005-04-03", 82],
["2005-04-04", 116],
["2005-04-05", 332],
["2005-04-06", 352],
["2005-04-07", 156],
["2005-04-08", 100],
["2005-04-09", 64],
["2005-04-10", 64],
["2005-04-11", 95],
["2005-04-12", 92],
["2005-04-13", 90],
["2005-04-14", 179],
["2005-04-15", 88],
["2005-04-16", 213],
["2005-04-17", 143],
["2005-04-18", 159],
["2005-04-19", 132],
["2005-04-20", 173],
["2005-04-21", 69],
["2005-04-22", 58],
["2005-04-23", 107],
["2005-04-24", 106],
["2005-04-25", 73],
["2005-04-26", 115],
["2005-04-27", 122],
["2005-04-28", 418],
["2005-04-29", 98],
["2005-04-30", 138],
["2005-05-01", 183],
["2005-05-02", 122],
["2005-05-03", 139],
["2005-05-04", 160],
["2005-05-05", 97],
["2005-05-06", 48],
["2005-05-07", 80],
["2005-05-08", 130],
["2005-05-09", 63],
["2005-05-10", 62],
["2005-05-11", 86],
["2005-05-12", 110],
["2005-05-13", 81],
["2005-05-14", 85],
["2005-05-15", 113],
["2005-05-16", 83],
["2005-05-17", 49],
["2005-05-18", 51],
["2005-05-19", 53],
["2005-05-20", 80],
["2005-05-21", 120],
["2005-05-22", 46],
["2005-05-23", 59],
["2005-05-24", 82],
["2005-05-25", 88],
["2005-05-26", 107],
["2005-05-27", 83],
["2005-05-28", 120],
["2005-05-29", 100],
["2005-05-30", 109],
["2005-05-31", 95],
["2005-06-01", 93],
["2005-06-02", 54],
["2005-06-03", 58],
["2005-06-04", 77],
["2005-06-05", 75],
["2005-06-06", 53],
["2005-06-07", 86],
["2005-06-08", 96],
["2005-06-09", 81],
["2005-06-10", 85],
["2005-06-11", 136],
["2005-06-12", 106],
["2005-06-13", 94],
["2005-06-14", 69],
["2005-06-15", 56],
["2005-06-16", 83],
["2005-06-17", 79],
["2005-06-18", 92],
["2005-06-19", 116],
["2005-06-20", 131],
["2005-06-21", 113],
["2005-06-22", 116],
["2005-06-23", 120],
["2005-06-24", 148],
["2005-06-25", 141],
["2005-06-26", 79],
["2005-06-27", 52],
["2005-06-28", 84],
["2005-06-29", 86],
["2005-06-30", 100],
["2005-07-01", 97],
["2005-07-02", 76],
["2005-07-03", 87],
["2005-07-04", 64],
["2005-07-05", 63],
["2005-07-06", 70],
["2005-07-07", 89],
["2005-07-08", 98],
["2005-07-09", 91],
["2005-07-10", 79],
["2005-07-11", 69],
["2005-07-12", 81],
["2005-07-13", 93],
["2005-07-14", 93],
["2005-07-15", 97],
["2005-07-17", 150],
["2005-07-18", 103],
["2005-07-19", 114],
["2005-07-20", 125],
["2005-07-21", 104],
["2005-07-22", 79],
["2005-07-23", 51],
["2005-07-24", 23],
["2005-07-25", 75],
["2005-07-26", 109],
["2005-07-27", 73],
["2005-07-28", 63],
["2005-07-29", 57],
["2005-07-30", 95],
["2005-07-31", 79],
["2005-08-01", 81],
["2005-08-02", 68],
["2005-08-03", 72],
["2005-08-04", 46],
["2005-08-05", 63],
["2005-08-06", 86],
["2005-08-07", 71],
["2005-08-08", 72],
["2005-08-09", 62],
["2005-08-10", 60],
["2005-08-11", 146],
["2005-08-12", 141],
["2005-08-13", 63],
["2005-08-14", 98],
["2005-08-15", 100],
["2005-08-16", 46],
["2005-08-17", 26],
["2005-08-18", 53],
["2005-08-19", 59],
["2005-08-20", 79],
["2005-08-21", 110],
["2005-08-22", 91],
["2005-08-23", 97],
["2005-08-24", 90],
["2005-08-25", 85],
["2005-08-26", 110],
["2005-08-27", 94],
["2005-08-28", 154],
["2005-08-29", 136],
["2005-08-30", 113],
["2005-08-31", 152],
["2005-09-01", 118],
["2005-09-02", 42],
["2005-09-03", 68],
["2005-09-04", 80],
["2005-09-05", 90],
["2005-09-06", 99],
["2005-09-07", 98],
["2005-09-08", 83],
["2005-09-09", 141],
["2005-09-10", 164],
["2005-09-11", 182],
["2005-09-12", 107],
["2005-09-13", 76],
["2005-09-14", 62],
["2005-09-15", 104],
["2005-09-16", 78],
["2005-09-17", 73],
["2005-09-18", 66],
["2005-09-19", 99],
["2005-09-20", 92],
["2005-09-21", 71],
["2005-09-22", 60],
["2005-09-23", 110],
["2005-09-24", 112],
["2005-09-25", 134],
["2005-09-26", 168],
["2005-09-27", 97],
["2005-09-28", 115],
["2005-09-29", 100],
["2005-09-30", 47],
["2005-10-01", 88],
["2005-10-02", 72],
["2005-10-03", 70],
["2005-10-04", 77],
["2005-10-05", 103],
["2005-10-06", 136],
["2005-10-07", 82],
["2005-10-08", 42],
["2005-10-09", 93],
["2005-10-10", 167],
["2005-10-11", 152],
["2005-10-12", 183],
["2005-10-13", 155],
["2005-10-14", 50],
["2005-10-15", 73],
["2005-10-16", 120],
["2005-10-17", 57],
["2005-10-18", 96],
["2005-10-19", 94],
["2005-10-20", 151],
["2005-10-21", 96],
["2005-10-22", 92],
["2005-10-23", 135],
["2005-10-24", 139],
["2005-10-25", 99],
["2005-10-26", 176],
["2005-10-27", 156],
["2005-10-28", 24],
["2005-10-29", 48],
["2005-10-30", 54],
["2005-10-31", 97],
["2005-11-01", 134],
["2005-11-02", 252],
["2005-11-03", 334],
["2005-11-04", 330],
["2005-11-05", 472],
["2005-11-06", 191],
["2005-11-07", 141],
["2005-11-08", 45],
["2005-11-09", 104],
["2005-11-10", 156],
["2005-11-11", 79],
["2005-11-12", 95],
["2005-11-13", 70],
["2005-11-14", 80],
["2005-11-15", 60],
["2005-11-16", 104],
["2005-11-17", 160],
["2005-11-18", 184],
["2005-11-19", 126],
["2005-11-20", 91],
["2005-11-21", 73],
["2005-11-22", 134],
["2005-11-23", 76],
["2005-11-24", 108],
["2005-11-25", 127],
["2005-11-26", 131],
["2005-11-27", 163],
["2005-11-28", 220],
["2005-11-29", 73],
["2005-11-30", 154],
["2005-12-01", 97],
["2005-12-02", 58],
["2005-12-03", 99],
["2005-12-04", 61],
["2005-12-05", 60],
["2005-12-06", 37],
["2005-12-07", 39],
["2005-12-08", 72],
["2005-12-09", 121],
["2005-12-10", 99],
["2005-12-11", 44],
["2005-12-12", 49],
["2005-12-13", 40],
["2005-12-14", 53],
["2005-12-15", 50],
["2005-12-16", 49],
["2005-12-17", 44],
["2005-12-18", 77],
["2005-12-19", 129],
["2005-12-20", 114],
["2005-12-21", 57],
["2005-12-22", 86],
["2005-12-23", 120],
["2005-12-24", 102],
["2005-12-25", 146],
["2005-12-26", 61],
["2005-12-27", 57],
["2005-12-28", 122],
["2005-12-29", 113],
["2005-12-30", 157],
["2005-12-31", 76],
["2006-01-01", 108],
["2006-01-02", 100],
["2006-01-03", 119],
["2006-01-04", 69],
["2006-01-05", 53],
["2006-01-06", 54],
["2006-01-07", 62],
["2006-01-08", 100],
["2006-01-09", 103],
["2006-01-10", 147],
["2006-01-11", 100],
["2006-01-12", 110],
["2006-01-13", 98],
["2006-01-14", 107],
["2006-01-15", 252],
["2006-01-16", 243],
["2006-01-17", 116],
["2006-01-18", 110],
["2006-01-19", 181],
["2006-01-20", 273],
["2006-01-21", 310],
["2006-01-22", 136],
["2006-01-23", 110],
["2006-01-24", 146],
["2006-01-25", 119],
["2006-01-26", 157],
["2006-01-27", 153],
["2006-01-28", 69],
["2006-01-29", 143],
["2006-01-30", 54],
["2006-01-31", 65],
["2006-02-01", 83],
["2006-02-02", 75],
["2006-02-03", 56],
["2006-02-04", 73],
["2006-02-05", 114],
["2006-02-06", 138],
["2006-02-07", 61],
["2006-02-08", 34],
["2006-02-09", 70],
["2006-02-10", 93],
["2006-02-11", 99],
["2006-02-12", 110],
["2006-02-13", 228],
["2006-02-14", 178],
["2006-02-15", 64],
["2006-02-16", 93],
["2006-02-17", 59],
["2006-02-18", 87],
["2006-02-19", 95],
["2006-02-20", 133],
["2006-02-21", 215],
["2006-02-22", 75],
["2006-02-23", 93],
["2006-02-24", 74],
["2006-02-25", 112],
["2006-02-26", 54],
["2006-02-27", 81],
["2006-02-28", 83],
["2006-03-01", 62],
["2006-03-02", 49],
["2006-03-03", 89],
["2006-03-04", 154],
["2006-03-05", 99],
["2006-03-06", 80],
["2006-03-07", 90],
["2006-03-08", 98],
["2006-03-09", 91],
["2006-03-10", 408],
["2006-03-11", 95],
["2006-03-12", 85],
["2006-03-13", 90],
["2006-03-14", 88],
["2006-03-15", 109],
["2006-03-16", 91],
["2006-03-17", 135],
["2006-03-18", 256],
["2006-03-19", 84],
["2006-03-20", 226],
["2006-03-21", 197],
["2006-03-22", 181],
["2006-03-23", 66],
["2006-03-24", 97],
["2006-03-25", 206],
["2006-03-26", 99],
["2006-03-27", 347],
["2006-03-28", 98],
["2006-03-29", 124],
["2006-03-30", 92],
["2006-03-31", 96],
["2006-04-01", 183],
["2006-04-02", 122],
["2006-04-03", 187],
["2006-04-04", 162],
["2006-04-05", 99],
["2006-04-06", 78],
["2006-04-07", 158],
["2006-04-08", 186],
["2006-04-09", 500],
["2006-04-10", 500],
["2006-04-11", 166],
["2006-04-12", 95],
["2006-04-13", 60],
["2006-04-14", 149],
["2006-04-15", 128],
["2006-04-16", 84],
["2006-04-17", 500],
["2006-04-18", 168],
["2006-04-19", 319],
["2006-04-20", 79],
["2006-04-21", 123],
["2006-04-22", 145],
["2006-04-23", 203],
["2006-04-24", 94],
["2006-04-25", 128],
["2006-04-26", 210],
["2006-04-27", 98],
["2006-04-28", 99],
["2006-04-29", 131],
["2006-04-30", 165],
["2006-05-01", 432],
["2006-05-02", 94],
["2006-05-03", 92],
["2006-05-04", 147],
["2006-05-05", 95],
["2006-05-06", 93],
["2006-05-07", 138],
["2006-05-08", 123],
["2006-05-09", 79],
["2006-05-10", 71],
["2006-05-11", 61],
["2006-05-12", 63],
["2006-05-13", 44],
["2006-05-14", 93],
["2006-05-15", 95],
["2006-05-16", 98],
["2006-05-17", 500],
["2006-05-18", 168],
["2006-05-19", 240],
["2006-05-20", 82],
["2006-05-21", 96],
["2006-05-22", 96],
["2006-05-23", 95],
["2006-05-24", 84],
["2006-05-25", 91],
["2006-05-26", 78],
["2006-05-27", 32],
["2006-05-28", 51],
["2006-05-29", 84],
["2006-05-30", 98],
["2006-05-31", 118],
["2006-06-01", 96],
["2006-06-02", 112],
["2006-06-03", 69],
["2006-06-04", 100],
["2006-06-05", 137],
["2006-06-06", 147],
["2006-06-07", 86],
["2006-06-08", 65],
["2006-06-09", 92],
["2006-06-10", 39],
["2006-06-11", 61],
["2006-06-12", 96],
["2006-06-13", 77],
["2006-06-14", 43],
["2006-06-15", 78],
["2006-06-16", 86],
["2006-06-17", 50],
["2006-06-18", 68],
["2006-06-19", 97],
["2006-06-20", 84],
["2006-06-21", 152],
["2006-06-22", 118],
["2006-06-23", 123],
["2006-06-24", 76],
["2006-06-25", 68],
["2006-06-26", 84],
["2006-06-27", 75],
["2006-06-28", 90],
["2006-06-29", 66],
["2006-06-30", 42],
["2006-07-01", 57],
["2006-07-02", 52],
["2006-07-03", 81],
["2006-07-04", 75],
["2006-07-05", 97],
["2006-07-06", 60],
["2006-07-07", 65],
["2006-07-08", 67],
["2006-07-09", 82],
["2006-07-10", 99],
["2006-07-11", 66],
["2006-07-12", 72],
["2006-07-13", 44],
["2006-07-14", 78],
["2006-07-15", 70],
["2006-07-16", 69],
["2006-07-17", 58],
["2006-07-18", 43],
["2006-07-19", 55],
["2006-07-20", 74],
["2006-07-21", 76],
["2006-07-22", 36],
["2006-07-23", 72],
["2006-07-24", 61],
["2006-07-25", 46],
["2006-07-26", 50],
["2006-07-27", 65],
["2006-07-28", 98],
["2006-07-29", 115],
["2006-07-30", 138],
["2006-07-31", 88],
["2006-08-01", 47],
["2006-08-02", 39],
["2006-08-03", 61],
["2006-08-04", 64],
["2006-08-05", 74],
["2006-08-06", 100],
["2006-08-07", 82],
["2006-08-08", 84],
["2006-08-09", 64],
["2006-08-10", 89],
["2006-08-11", 75],
["2006-08-12", 98],
["2006-08-13", 69],
["2006-08-14", 27],
["2006-08-15", 70],
["2006-08-16", 84],
["2006-08-17", 91],
["2006-08-18", 85],
["2006-08-19", 97],
["2006-08-20", 77],
["2006-08-21", 45],
["2006-08-22", 69],
["2006-08-23", 67],
["2006-08-24", 99],
["2006-08-25", 131],
["2006-08-26", 69],
["2006-08-27", 66],
["2006-08-28", 93],
["2006-08-29", 62],
["2006-08-30", 59],
["2006-08-31", 64],
["2006-09-01", 89],
["2006-09-02", 100],
["2006-09-03", 109],
["2006-09-04", 28],
["2006-09-05", 71],
["2006-09-06", 87],
["2006-09-07", 112],
["2006-09-08", 71],
["2006-09-09", 37],
["2006-09-10", 67],
["2006-09-11", 86],
["2006-09-12", 89],
["2006-09-13", 100],
["2006-09-14", 107],
["2006-09-15", 109],
["2006-09-16", 116],
["2006-09-17", 134],
["2006-09-18", 100],
["2006-09-19", 132],
["2006-09-20", 151],
["2006-09-21", 99],
["2006-09-22", 95],
["2006-09-23", 118],
["2006-09-24", 121],
["2006-09-25", 119],
["2006-09-26", 49],
["2006-09-27", 91],
["2006-09-28", 98],
["2006-09-29", 70],
["2006-09-30", 100],
["2006-10-01", 139],
["2006-10-02", 152],
["2006-10-03", 143],
["2006-10-04", 73],
["2006-10-05", 99],
["2006-10-06", 194],
["2006-10-07", 100],
["2006-10-08", 91],
["2006-10-09", 77],
["2006-10-10", 131],
["2006-10-11", 65],
["2006-10-12", 73],
["2006-10-13", 121],
["2006-10-14", 135],
["2006-10-15", 100],
["2006-10-16", 146],
["2006-10-17", 59],
["2006-10-18", 86],
["2006-10-19", 121],
["2006-10-20", 71],
["2006-10-21", 97],
["2006-10-22", 87],
["2006-10-23", 46],
["2006-10-24", 99],
["2006-10-25", 139],
["2006-10-26", 64],
["2006-10-27", 99],
["2006-10-28", 163],
["2006-10-29", 77],
["2006-10-30", 130],
["2006-10-31", 154],
["2006-11-01", 96],
["2006-11-02", 98],
["2006-11-03", 140],
["2006-11-04", 180],
["2006-11-05", 48],
["2006-11-06", 93],
["2006-11-07", 94],
["2006-11-08", 148],
["2006-11-09", 61],
["2006-11-10", 89],
["2006-11-11", 72],
["2006-11-12", 136],
["2006-11-13", 98],
["2006-11-14", 37],
["2006-11-15", 78],
["2006-11-16", 99],
["2006-11-17", 100],
["2006-11-18", 129],
["2006-11-19", 147],
["2006-11-20", 249],
["2006-11-21", 414],
["2006-11-22", 97],
["2006-11-23", 74],
["2006-11-24", 153],
["2006-11-25", 124],
["2006-11-26", 129],
["2006-11-27", 47],
["2006-11-28", 58],
["2006-11-29", 61],
["2006-11-30", 96],
["2006-12-01", 88],
["2006-12-02", 49],
["2006-12-03", 66],
["2006-12-04", 111],
["2006-12-05", 94],
["2006-12-06", 78],
["2006-12-07", 86],
["2006-12-08", 97],
["2006-12-09", 81],
["2006-12-10", 105],
["2006-12-11", 256],
["2006-12-12", 500],
["2006-12-13", 88],
["2006-12-14", 118],
["2006-12-15", 94],
["2006-12-16", 76],
["2006-12-17", 52],
["2006-12-18", 100],
["2006-12-19", 140],
["2006-12-20", 180],
["2006-12-21", 180],
["2006-12-22", 88],
["2006-12-23", 95],
["2006-12-24", 85],
["2006-12-25", 136],
["2006-12-26", 160],
["2006-12-27", 80],
["2006-12-28", 47],
["2006-12-29", 90],
["2006-12-30", 157],
["2006-12-31", 139],
["2007-01-01", 158],
["2007-01-02", 150],
["2007-01-03", 133],
["2007-01-04", 170],
["2007-01-05", 322],
["2007-01-06", 73],
["2007-01-07", 47],
["2007-01-08", 45],
["2007-01-09", 86],
["2007-01-10", 98],
["2007-01-11", 75],
["2007-01-12", 56],
["2007-01-13", 74],
["2007-01-14", 102],
["2007-01-15", 170],
["2007-01-16", 64],
["2007-01-17", 84],
["2007-01-18", 52],
["2007-01-19", 93],
["2007-01-20", 147],
["2007-01-21", 98],
["2007-01-22", 58],
["2007-01-23", 96],
["2007-01-24", 118],
["2007-01-25", 140],
["2007-01-26", 68],
["2007-01-27", 55],
["2007-01-28", 55],
["2007-01-29", 114],
["2007-01-30", 85],
["2007-01-31", 76],
["2007-02-01", 50],
["2007-02-02", 100],
["2007-02-03", 115],
["2007-02-04", 93],
["2007-02-05", 175],
["2007-02-06", 67],
["2007-02-07", 110],
["2007-02-08", 99],
["2007-02-09", 67],
["2007-02-10", 61],
["2007-02-11", 55],
["2007-02-12", 103],
["2007-02-13", 181],
["2007-02-14", 74],
["2007-02-15", 75],
["2007-02-16", 97],
["2007-02-17", 98],
["2007-02-18", 115],
["2007-02-19", 99],
["2007-02-20", 160],
["2007-02-21", 200],
["2007-02-22", 173],
["2007-02-23", 78],
["2007-02-24", 75],
["2007-02-25", 123],
["2007-02-26", 169],
["2007-02-27", 172],
["2007-02-28", 108],
["2007-03-01", 98],
["2007-03-02", 85],
["2007-03-03", 87],
["2007-03-04", 28],
["2007-03-05", 34],
["2007-03-06", 35],
["2007-03-07", 51],
["2007-03-08", 54],
["2007-03-09", 105],
["2007-03-10", 75],
["2007-03-11", 34],
["2007-03-12", 68],
["2007-03-13", 133],
["2007-03-14", 157],
["2007-03-15", 106],
["2007-03-16", 78],
["2007-03-17", 100],
["2007-03-18", 121],
["2007-03-19", 119],
["2007-03-21", 138],
["2007-03-22", 145],
["2007-03-23", 202],
["2007-03-24", 192],
["2007-03-25", 79],
["2007-03-26", 78],
["2007-03-27", 84],
["2007-03-28", 98],
["2007-03-29", 99],
["2007-03-30", 66],
["2007-03-31", 103],
["2007-04-01", 63],
["2007-04-02", 48],
["2007-04-03", 40],
["2007-04-04", 95],
["2007-04-05", 110],
["2007-04-06", 148],
["2007-04-07", 46],
["2007-04-08", 43],
["2007-04-09", 96],
["2007-04-10", 133],
["2007-04-11", 88],
["2007-04-12", 107],
["2007-04-13", 55],
["2007-04-14", 74],
["2007-04-15", 72],
["2007-04-16", 81],
["2007-04-17", 74],
["2007-04-18", 100],
["2007-04-19", 173],
["2007-04-20", 155],
["2007-04-21", 62],
["2007-04-22", 58],
["2007-04-23", 81],
["2007-04-24", 78],
["2007-04-25", 72],
["2007-04-26", 90],
["2007-04-27", 113],
["2007-04-28", 115],
["2007-04-29", 190],
["2007-04-30", 151],
["2007-05-01", 61],
["2007-05-02", 87],
["2007-05-03", 96],
["2007-05-04", 97],
["2007-05-05", 123],
["2007-05-06", 91],
["2007-05-07", 139],
["2007-05-08", 147],
["2007-05-09", 98],
["2007-05-10", 116],
["2007-05-11", 116],
["2007-05-12", 99],
["2007-05-13", 100],
["2007-05-14", 72],
["2007-05-15", 97],
["2007-05-16", 100],
["2007-05-17", 84],
["2007-05-18", 58],
["2007-05-19", 60],
["2007-05-20", 98],
["2007-05-21", 82],
["2007-05-22", 116],
["2007-05-23", 60],
["2007-05-24", 169],
["2007-05-25", 250],
["2007-05-26", 98],
["2007-05-27", 118],
["2007-05-28", 96],
["2007-05-29", 98],
["2007-05-30", 126],
["2007-05-31", 119],
["2007-06-01", 75],
["2007-06-02", 107],
["2007-06-03", 99],
["2007-06-04", 84],
["2007-06-05", 99],
["2007-06-06", 146],
["2007-06-07", 195],
["2007-06-08", 194],
["2007-06-09", 134],
["2007-06-10", 97],
["2007-06-11", 136],
["2007-06-12", 168],
["2007-06-13", 142],
["2007-06-14", 52],
["2007-06-15", 91],
["2007-06-16", 98],
["2007-06-17", 123],
["2007-06-18", 138],
["2007-06-19", 202],
["2007-06-20", 151],
["2007-06-21", 123],
["2007-06-22", 85],
["2007-06-23", 121],
["2007-06-24", 97],
["2007-06-25", 72],
["2007-06-26", 98],
["2007-06-27", 135],
["2007-06-28", 52],
["2007-06-29", 95],
["2007-06-30", 87],
["2007-07-01", 28],
["2007-07-02", 77],
["2007-07-03", 99],
["2007-07-04", 82],
["2007-07-06", 145],
["2007-07-07", 80],
["2007-07-08", 75],
["2007-07-09", 115],
["2007-07-10", 58],
["2007-07-11", 65],
["2007-07-12", 78],
["2007-07-13", 74],
["2007-07-14", 83],
["2007-07-15", 93],
["2007-07-16", 96],
["2007-07-17", 169],
["2007-07-18", 98],
["2007-07-19", 47],
["2007-07-20", 76],
["2007-07-21", 98],
["2007-07-22", 99],
["2007-07-23", 117],
["2007-07-24", 99],
["2007-07-25", 119],
["2007-07-26", 151],
["2007-07-27", 150],
["2007-07-28", 98],
["2007-07-29", 80],
["2007-07-30", 138],
["2007-07-31", 26],
["2007-08-01", 52],
["2007-08-02", 42],
["2007-08-03", 70],
["2007-08-04", 85],
["2007-08-05", 98],
["2007-08-06", 107],
["2007-08-07", 93],
["2007-08-08", 88],
["2007-08-09", 86],
["2007-08-10", 79],
["2007-08-11", 74],
["2007-08-12", 66],
["2007-08-13", 56],
["2007-08-14", 76],
["2007-08-15", 86],
["2007-08-16", 115],
["2007-08-17", 91],
["2007-08-18", 93],
["2007-08-19", 95],
["2007-08-20", 95],
["2007-08-21", 116],
["2007-08-22", 88],
["2007-08-23", 77],
["2007-08-24", 83],
["2007-08-25", 95],
["2007-08-26", 78],
["2007-08-27", 49],
["2007-08-28", 78],
["2007-08-29", 64],
["2007-08-30", 75],
["2007-08-31", 98],
["2007-09-01", 108],
["2007-09-02", 95],
["2007-09-03", 73],
["2007-09-04", 77],
["2007-09-05", 94],
["2007-09-06", 100],
["2007-09-07", 98],
["2007-09-08", 94],
["2007-09-09", 98],
["2007-09-10", 142],
["2007-09-11", 171],
["2007-09-12", 133],
["2007-09-13", 97],
["2007-09-14", 58],
["2007-09-15", 66],
["2007-09-16", 99],
["2007-09-17", 138],
["2007-09-18", 60],
["2007-09-19", 24],
["2007-09-20", 62],
["2007-09-21", 79],
["2007-09-22", 99],
["2007-09-23", 97],
["2007-09-24", 98],
["2007-09-25", 95],
["2007-09-26", 80],
["2007-09-27", 40],
["2007-09-28", 63],
["2007-09-29", 80],
["2007-09-30", 64],
["2007-10-01", 75],
["2007-10-02", 52],
["2007-10-03", 78],
["2007-10-04", 94],
["2007-10-05", 34],
["2007-10-06", 48],
["2007-10-07", 28],
["2007-10-08", 22],
["2007-10-09", 44],
["2007-10-10", 69],
["2007-10-11", 88],
["2007-10-12", 119],
["2007-10-13", 95],
["2007-10-14", 35],
["2007-10-15", 53],
["2007-10-16", 66],
["2007-10-17", 95],
["2007-10-18", 82],
["2007-10-19", 49],
["2007-10-20", 60],
["2007-10-21", 98],
["2007-10-22", 100],
["2007-10-23", 89],
["2007-10-24", 96],
["2007-10-25", 143],
["2007-10-26", 184],
["2007-10-27", 179],
["2007-10-28", 27],
["2007-10-29", 47],
["2007-10-30", 121],
["2007-10-31", 95],
["2007-11-01", 18],
["2007-11-02", 83],
["2007-11-03", 57],
["2007-11-04", 76],
["2007-11-05", 119],
["2007-11-06", 172],
["2007-11-07", 253],
["2007-11-08", 186],
["2007-11-09", 35],
["2007-11-10", 54],
["2007-11-11", 90],
["2007-11-12", 159],
["2007-11-13", 153],
["2007-11-14", 58],
["2007-11-15", 31],
["2007-11-16", 76],
["2007-11-17", 112],
["2007-11-18", 45],
["2007-11-19", 88],
["2007-11-20", 83],
["2007-11-21", 98],
["2007-11-22", 144],
["2007-11-23", 119],
["2007-11-24", 117],
["2007-11-25", 269],
["2007-11-26", 55],
["2007-11-27", 85],
["2007-11-28", 100],
["2007-11-29", 81],
["2007-11-30", 78],
["2007-12-01", 136],
["2007-12-02", 96],
["2007-12-03", 71],
["2007-12-04", 68],
["2007-12-05", 88],
["2007-12-06", 129],
["2007-12-07", 54],
["2007-12-08", 77],
["2007-12-09", 118],
["2007-12-11", 110],
["2007-12-12", 46],
["2007-12-13", 56],
["2007-12-14", 91],
["2007-12-15", 59],
["2007-12-16", 89],
["2007-12-17", 78],
["2007-12-18", 104],
["2007-12-19", 155],
["2007-12-20", 153],
["2007-12-21", 114],
["2007-12-22", 166],
["2007-12-23", 98],
["2007-12-24", 124],
["2007-12-25", 280],
["2007-12-26", 269],
["2007-12-27", 421],
["2007-12-28", 500],
["2007-12-29", 156],
["2007-12-30", 72],
["2007-12-31", 58],
["2008-01-01", 32],
["2008-01-02", 57],
["2008-01-03", 75],
["2008-01-04", 90],
["2008-01-05", 147],
["2008-01-06", 146],
["2008-01-07", 115],
["2008-01-08", 121],
["2008-01-09", 94],
["2008-01-10", 95],
["2008-01-11", 113],
["2008-01-12", 46],
["2008-01-13", 39],
["2008-01-14", 87],
["2008-01-15", 119],
["2008-01-16", 72],
["2008-01-17", 80],
["2008-01-18", 122],
["2008-01-19", 149],
["2008-01-20", 134],
["2008-01-21", 66],
["2008-01-22", 79],
["2008-01-23", 51],
["2008-01-24", 50],
["2008-01-25", 54],
["2008-01-26", 67],
["2008-01-27", 70],
["2008-01-28", 77],
["2008-01-29", 48],
["2008-01-30", 44],
["2008-01-31", 45],
["2008-02-01", 57],
["2008-02-02", 64],
["2008-02-03", 52],
["2008-02-04", 65],
["2008-02-05", 83],
["2008-02-06", 35],
["2008-02-08", 37],
["2008-02-09", 38],
["2008-02-10", 64],
["2008-02-11", 61],
["2008-02-12", 64],
["2008-02-13", 55],
["2008-02-14", 55],
["2008-02-15", 68],
["2008-02-16", 69],
["2008-02-17", 70],
["2008-02-18", 72],
["2008-02-19", 111],
["2008-02-20", 88],
["2008-02-21", 152],
["2008-02-22", 160],
["2008-02-23", 85],
["2008-02-25", 65],
["2008-02-26", 78],
["2008-02-27", 75],
["2008-02-28", 84],
["2008-02-29", 82],
["2008-03-01", 82],
["2008-03-02", 126],
["2008-03-03", 46],
["2008-03-04", 55],
["2008-03-05", 86],
["2008-03-06", 80],
["2008-03-08", 129],
["2008-03-09", 158],
["2008-03-10", 238],
["2008-03-11", 174],
["2008-03-12", 128],
["2008-03-13", 99],
["2008-03-14", 82],
["2008-03-15", 110],
["2008-03-16", 72],
["2008-03-17", 126],
["2008-03-18", 304],
["2008-03-19", 286],
["2008-03-20", 147],
["2008-03-21", 98],
["2008-03-22", 120],
["2008-03-23", 69],
["2008-03-24", 76],
["2008-03-25", 52],
["2008-03-26", 46],
["2008-03-27", 55],
["2008-03-28", 74],
["2008-03-29", 59],
["2008-03-30", 81],
["2008-03-31", 53],
["2008-04-01", 90],
["2008-04-02", 63],
["2008-04-03", 55],
["2008-04-04", 88],
["2008-04-05", 145],
["2008-04-06", 161],
["2008-04-07", 131],
["2008-04-08", 177],
["2008-04-09", 93],
["2008-04-10", 94],
["2008-04-11", 65],
["2008-04-12", 79],
["2008-04-13", 71],
["2008-04-14", 98],
["2008-04-15", 129],
["2008-04-16", 173],
["2008-04-17", 159],
["2008-04-18", 139],
["2008-04-19", 138],
["2008-04-20", 97],
["2008-04-21", 19],
["2008-04-22", 32],
["2008-04-23", 43],
["2008-04-24", 76],
["2008-04-25", 100],
["2008-04-26", 72],
["2008-04-27", 79],
["2008-04-28", 94],
["2008-04-29", 176],
["2008-04-30", 155],
["2008-05-01", 140],
["2008-05-02", 144],
["2008-05-03", 185],
["2008-05-04", 32],
["2008-05-05", 81],
["2008-05-06", 134],
["2008-05-07", 138],
["2008-05-08", 95],
["2008-05-09", 89],
["2008-05-10", 62],
["2008-05-11", 54],
["2008-05-12", 24],
["2008-05-13", 57],
["2008-05-14", 87],
["2008-05-15", 77],
["2008-05-16", 107],
["2008-05-17", 117],
["2008-05-18", 91],
["2008-05-19", 83],
["2008-05-20", 112],
["2008-05-21", 408],
["2008-05-22", 153],
["2008-05-23", 186],
["2008-05-24", 161],
["2008-05-25", 121],
["2008-05-26", 138],
["2008-05-27", 463],
["2008-05-28", 253],
["2008-05-29", 395],
["2008-05-30", 95],
["2008-05-31", 115],
["2008-06-01", 92],
["2008-06-02", 50],
["2008-06-03", 74],
["2008-06-05", 78],
["2008-06-06", 94],
["2008-06-07", 81],
["2008-06-08", 126],
["2008-06-09", 97],
["2008-06-10", 100],
["2008-06-11", 80],
["2008-06-12", 89],
["2008-06-13", 105],
["2008-06-14", 96],
["2008-06-15", 93],
["2008-06-16", 84],
["2008-06-17", 55],
["2008-06-18", 61],
["2008-06-19", 120],
["2008-06-20", 165],
["2008-06-21", 81],
["2008-06-22", 125],
["2008-06-23", 81],
["2008-06-24", 75],
["2008-06-25", 109],
["2008-06-26", 87],
["2008-06-27", 88],
["2008-06-28", 89],
["2008-06-30", 98],
["2008-07-01", 72],
["2008-07-02", 61],
["2008-07-03", 92],
["2008-07-04", 100],
["2008-07-05", 66],
["2008-07-06", 39],
["2008-07-07", 69],
["2008-07-08", 98],
["2008-07-09", 62],
["2008-07-10", 85],
["2008-07-11", 112],
["2008-07-12", 74],
["2008-07-13", 59],
["2008-07-14", 84],
["2008-07-15", 31],
["2008-07-16", 66],
["2008-07-17", 77],
["2008-07-18", 66],
["2008-07-19", 64],
["2008-07-20", 55],
["2008-07-21", 64],
["2008-07-22", 66],
["2008-07-23", 89],
["2008-07-24", 113],
["2008-07-25", 109],
["2008-07-26", 118],
["2008-07-27", 113],
["2008-07-28", 96],
["2008-07-29", 90],
["2008-07-30", 43],
["2008-07-31", 69],
["2008-08-01", 27],
["2008-08-02", 34],
["2008-08-03", 35],
["2008-08-04", 83],
["2008-08-05", 88],
["2008-08-06", 85],
["2008-08-07", 95],
["2008-08-08", 94],
["2008-08-09", 78],
["2008-08-10", 82],
["2008-08-11", 37],
["2008-08-12", 32],
["2008-08-13", 60],
["2008-08-14", 61],
["2008-08-15", 17],
["2008-08-16", 23],
["2008-08-16", 84],
["2008-08-17", 42],
["2008-08-18", 25],
["2008-08-19", 42],
["2008-08-20", 53],
["2008-08-21", 60],
["2008-08-22", 36],
["2008-08-23", 41],
["2008-08-24", 45],
["2008-08-25", 67],
["2008-08-26", 64],
["2008-08-27", 56],
["2008-08-28", 79],
["2008-08-29", 110],
["2008-08-30", 64],
["2008-08-31", 24],
["2008-09-01", 25],
["2008-09-02", 37],
["2008-09-03", 72],
["2008-09-04", 57],
["2008-09-05", 58],
["2008-09-06", 59],
["2008-09-07", 86],
["2008-09-08", 49],
["2008-09-09", 64],
["2008-09-10", 51],
["2008-09-11", 46],
["2008-09-12", 58],
["2008-09-13", 57],
["2008-09-14", 56],
["2008-09-15", 58],
["2008-09-16", 63],
["2008-09-17", 62],
["2008-09-19", 66],
["2008-09-20", 59],
["2008-09-21", 88],
["2008-09-22", 59],
["2008-09-23", 12],
["2008-09-24", 26],
["2008-09-25", 30],
["2008-09-26", 17],
["2008-09-28", 71],
["2008-09-29", 83],
["2008-09-30", 106],
["2008-10-01", 104],
["2008-10-02", 126],
["2008-10-03", 108],
["2008-10-04", 63],
["2008-10-05", 49],
["2008-10-06", 25],
["2008-10-07", 58],
["2008-10-08", 75],
["2008-10-09", 47],
["2008-10-10", 58],
["2008-10-11", 44],
["2008-10-12", 59],
["2008-10-13", 92],
["2008-10-14", 114],
["2008-10-15", 85],
["2008-10-16", 61],
["2008-10-17", 93],
["2008-10-18", 174],
["2008-10-19", 86],
["2008-10-20", 86],
["2008-10-21", 134],
["2008-10-22", 111],
["2008-10-23", 43],
["2008-10-24", 14],
["2008-10-25", 58],
["2008-10-26", 32],
["2008-10-27", 32],
["2008-10-28", 67],
["2008-10-29", 80],
["2008-10-30", 58],
["2008-10-31", 79],
["2008-11-01", 71],
["2008-11-02", 60],
["2008-11-03", 54],
["2008-11-04", 68],
["2008-11-05", 109],
["2008-11-06", 97],
["2008-11-07", 55],
["2008-11-08", 65],
["2008-11-09", 86],
["2008-11-10", 94],
["2008-11-11", 131],
["2008-11-12", 186],
["2008-11-13", 161],
["2008-11-14", 34],
["2008-11-15", 120],
["2008-11-16", 54],
["2008-11-17", 46],
["2008-11-18", 28],
["2008-11-19", 40],
["2008-11-20", 103],
["2008-11-21", 52],
["2008-11-22", 91],
["2008-11-23", 95],
["2008-11-24", 97],
["2008-11-25", 59],
["2008-11-26", 89],
["2008-11-27", 40],
["2008-11-28", 77],
["2008-11-29", 53],
["2008-11-30", 84],
["2008-12-01", 146],
["2008-12-02", 87],
["2008-12-03", 144],
["2008-12-04", 51],
["2008-12-05", 59],
["2008-12-06", 51],
["2008-12-07", 112],
["2008-12-08", 169],
["2008-12-09", 246],
["2008-12-10", 162],
["2008-12-11", 96],
["2008-12-12", 154],
["2008-12-13", 57],
["2008-12-14", 86],
["2008-12-15", 109],
["2008-12-16", 135],
["2008-12-17", 134],
["2008-12-18", 46],
["2008-12-19", 98],
["2008-12-20", 45],
["2008-12-21", 67],
["2008-12-22", 49],
["2008-12-23", 89],
["2008-12-24", 115],
["2008-12-25", 55],
["2008-12-26", 66],
["2008-12-27", 129],
["2008-12-28", 134],
["2008-12-29", 69],
["2008-12-30", 36],
["2008-12-31", 29],
["2009-01-01", 42],
["2009-01-02", 79],
["2009-01-03", 90],
["2009-01-04", 69],
["2009-01-05", 64],
["2009-01-06", 71],
["2009-01-07", 56],
["2009-01-08", 100],
["2009-01-09", 32],
["2009-01-10", 54],
["2009-01-11", 51],
["2009-01-12", 36],
["2009-01-13", 59],
["2009-01-14", 43],
["2009-01-15", 72],
["2009-01-16", 90],
["2009-01-17", 74],
["2009-01-18", 97],
["2009-01-19", 76],
["2009-01-20", 137],
["2009-01-21", 109],
["2009-01-22", 117],
["2009-01-23", 97],
["2009-01-24", 67],
["2009-01-25", 48],
["2009-01-26", 88],
["2009-01-27", 95],
["2009-01-28", 129],
["2009-01-29", 135],
["2009-01-30", 131],
["2009-01-31", 133],
["2009-02-01", 91],
["2009-02-02", 107],
["2009-02-03", 87],
["2009-02-04", 80],
["2009-02-05", 98],
["2009-02-06", 78],
["2009-02-07", 90],
["2009-02-08", 71],
["2009-02-09", 112],
["2009-02-10", 307],
["2009-02-11", 89],
["2009-02-12", 139],
["2009-02-13", 82],
["2009-02-14", 72],
["2009-02-15", 53],
["2009-02-16", 55],
["2009-02-17", 56],
["2009-02-19", 64],
["2009-02-20", 99],
["2009-02-21", 86],
["2009-02-22", 80],
["2009-02-23", 59],
["2009-02-24", 84],
["2009-02-25", 36],
["2009-02-26", 68],
["2009-02-27", 96],
["2009-02-28", 67],
["2009-03-01", 93],
["2009-03-02", 59],
["2009-03-03", 98],
["2009-03-04", 161],
["2009-03-05", 96],
["2009-03-06", 19],
["2009-03-07", 73],
["2009-03-08", 119],
["2009-03-09", 64],
["2009-03-10", 74],
["2009-03-11", 85],
["2009-03-12", 88],
["2009-03-13", 99],
["2009-03-14", 81],
["2009-03-15", 119],
["2009-03-16", 100],
["2009-03-17", 169],
["2009-03-18", 268],
["2009-03-19", 195],
["2009-03-20", 80],
["2009-03-21", 82],
["2009-03-22", 77],
["2009-03-23", 64],
["2009-03-24", 59],
["2009-03-25", 44],
["2009-03-26", 58],
["2009-03-27", 79],
["2009-03-28", 69],
["2009-03-29", 69],
["2009-03-30", 71],
["2009-03-31", 51],
["2009-04-01", 27],
["2009-04-02", 72],
["2009-04-03", 91],
["2009-04-04", 96],
["2009-04-05", 72],
["2009-04-06", 53],
["2009-04-07", 94],
["2009-04-08", 140],
["2009-04-09", 117],
["2009-04-10", 115],
["2009-04-11", 113],
["2009-04-12", 122],
["2009-04-13", 148],
["2009-04-14", 75],
["2009-04-15", 81],
["2009-04-16", 69],
["2009-04-17", 84],
["2009-04-18", 116],
["2009-04-19", 97],
["2009-04-20", 63],
["2009-04-21", 34],
["2009-04-22", 59],
["2009-04-23", 70],
["2009-04-24", 77],
["2009-04-25", 54],
["2009-04-26", 34],
["2009-04-27", 57],
["2009-04-28", 78],
["2009-04-29", 73],
["2009-04-30", 95],
["2009-05-01", 95],
["2009-05-02", 54],
["2009-05-03", 82],
["2009-05-04", 96],
["2009-05-05", 106],
["2009-05-06", 100],
["2009-05-07", 109],
["2009-05-08", 125],
["2009-05-09", 106],
["2009-05-10", 57],
["2009-05-11", 72],
["2009-05-12", 75],
["2009-05-13", 63],
["2009-05-14", 91],
["2009-05-15", 64],
["2009-05-16", 81],
["2009-05-17", 78],
["2009-05-18", 90],
["2009-05-19", 97],
["2009-05-20", 98],
["2009-05-21", 85],
["2009-05-22", 27],
["2009-05-23", 65],
["2009-05-24", 95],
["2009-05-25", 128],
["2009-05-26", 81],
["2009-05-27", 105],
["2009-05-28", 94],
["2009-05-29", 59],
["2009-05-30", 45],
["2009-05-31", 56],
["2009-06-01", 79],
["2009-06-02", 55],
["2009-06-03", 61],
["2009-06-04", 71],
["2009-06-05", 68],
["2009-06-06", 67],
["2009-06-07", 63],
["2009-06-08", 77],
["2009-06-09", 34],
["2009-06-10", 21],
["2009-06-11", 66],
["2009-06-12", 60],
["2009-06-13", 58],
["2009-06-14", 61],
["2009-06-15", 70],
["2009-06-16", 89],
["2009-06-17", 75],
["2009-06-18", 104],
["2009-06-20", 165],
["2009-06-21", 98],
["2009-06-22", 42],
["2009-06-23", 60],
["2009-06-24", 67],
["2009-06-25", 81],
["2009-06-26", 104],
["2009-06-27", 116],
["2009-06-28", 96],
["2009-06-29", 90],
["2009-06-30", 48],
["2009-07-01", 30],
["2009-07-02", 51],
["2009-07-03", 73],
["2009-07-04", 103],
["2009-07-05", 110],
["2009-07-06", 70],
["2009-07-07", 93],
["2009-07-08", 85],
["2009-07-09", 48],
["2009-07-10", 79],
["2009-07-11", 94],
["2009-07-12", 72],
["2009-07-13", 104],
["2009-07-14", 57],
["2009-07-15", 71],
["2009-07-16", 100],
["2009-07-17", 60],
["2009-07-18", 45],
["2009-07-19", 74],
["2009-07-20", 69],
["2009-07-21", 60],
["2009-07-22", 101],
["2009-07-23", 64],
["2009-07-24", 36],
["2009-07-25", 29],
["2009-07-26", 59],
["2009-07-27", 81],
["2009-07-28", 79],
["2009-07-29", 107],
["2009-07-30", 109],
["2009-07-31", 71],
["2009-08-01", 89],
["2009-08-02", 59],
["2009-08-03", 75],
["2009-08-04", 97],
["2009-08-05", 74],
["2009-08-06", 58],
["2009-08-07", 74],
["2009-08-08", 75],
["2009-08-09", 81],
["2009-08-10", 60],
["2009-08-11", 75],
["2009-08-12", 68],
["2009-08-13", 82],
["2009-08-14", 123],
["2009-08-15", 115],
["2009-08-16", 113],
["2009-08-17", 63],
["2009-08-18", 76],
["2009-08-19", 77],
["2009-08-20", 38],
["2009-08-21", 62],
["2009-08-22", 58],
["2009-08-23", 71],
["2009-08-24", 97],
["2009-08-25", 90],
["2009-08-26", 97],
["2009-08-27", 69],
["2009-08-28", 36],
["2009-08-29", 61],
["2009-08-30", 69],
["2009-08-31", 78],
["2009-09-01", 88],
["2009-09-02", 98],
["2009-09-03", 109],
["2009-09-04", 99],
["2009-09-05", 92],
["2009-09-06", 32],
["2009-09-07", 20],
["2009-09-08", 51],
["2009-09-09", 66],
["2009-09-10", 77],
["2009-09-11", 69],
["2009-09-12", 42],
["2009-09-13", 65],
["2009-09-14", 91],
["2009-09-15", 72],
["2009-09-16", 93],
["2009-09-17", 117],
["2009-09-18", 121],
["2009-09-19", 75],
["2009-09-20", 101],
["2009-09-21", 111],
["2009-09-22", 79],
["2009-09-23", 90],
["2009-09-24", 108],
["2009-09-25", 130],
["2009-09-26", 98],
["2009-09-27", 66],
["2009-09-28", 74],
["2009-09-29", 97],
["2009-09-30", 112],
["2009-10-01", 88],
["2009-10-02", 22],
["2009-10-03", 29],
["2009-10-04", 44],
["2009-10-05", 69],
["2009-10-06", 83],
["2009-10-07", 74],
["2009-10-08", 72],
["2009-10-09", 88],
["2009-10-10", 73],
["2009-10-11", 94],
["2009-10-12", 108],
["2009-10-13", 37],
["2009-10-14", 42],
["2009-10-15", 72],
["2009-10-16", 114],
["2009-10-17", 57],
["2009-10-18", 92],
["2009-10-19", 90],
["2009-10-20", 77],
["2009-10-21", 76],
["2009-10-22", 100],
["2009-10-23", 111],
["2009-10-24", 141],
["2009-10-25", 147],
["2009-10-26", 77],
["2009-10-27", 68],
["2009-10-28", 100],
["2009-10-29", 137],
["2009-10-30", 120],
["2009-10-31", 51],
["2009-11-01", 48],
["2009-11-02", 12],
["2009-11-03", 66],
["2009-11-04", 111],
["2009-11-05", 136],
["2009-11-06", 186],
["2009-11-07", 276],
["2009-11-08", 259],
["2009-11-09", 84],
["2009-11-10", 20],
["2009-11-11", 34],
["2009-11-12", 53],
["2009-11-13", 59],
["2009-11-14", 53],
["2009-11-15", 26],
["2009-11-16", 29],
["2009-11-17", 35],
["2009-11-18", 66],
["2009-11-19", 47],
["2009-11-20", 74],
["2009-11-21", 63],
["2009-11-22", 121],
["2009-11-23", 149],
["2009-11-24", 184],
["2009-11-25", 79],
["2009-11-26", 107],
["2009-11-27", 132],
["2009-11-28", 99],
["2009-11-29", 167],
["2009-11-30", 117],
["2009-12-01", 86],
["2009-12-02", 133],
["2009-12-03", 36],
["2009-12-04", 99],
["2009-12-05", 62],
["2009-12-06", 94],
["2009-12-07", 141],
["2009-12-08", 186],
["2009-12-10", 167],
["2009-12-11", 147],
["2009-12-12", 31],
["2009-12-13", 80],
["2009-12-14", 96],
["2009-12-15", 49],
["2009-12-16", 55],
["2009-12-17", 45],
["2009-12-18", 42],
["2009-12-19", 44],
["2009-12-20", 48],
["2009-12-21", 63],
["2009-12-22", 94],
["2009-12-23", 93],
["2009-12-24", 133],
["2009-12-25", 500],
["2009-12-26", 96],
["2009-12-27", 94],
["2009-12-28", 89],
["2009-12-29", 160],
["2009-12-30", 55],
["2009-12-31", 55],
["2010-01-01", 91],
["2010-01-02", 105],
["2010-01-03", 90],
["2010-01-04", 49],
["2010-01-05", 47],
["2010-01-06", 59],
["2010-01-07", 64],
["2010-01-08", 80],
["2010-01-09", 100],
["2010-01-10", 60],
["2010-01-11", 52],
["2010-01-12", 30],
["2010-01-13", 54],
["2010-01-14", 76],
["2010-01-15", 58],
["2010-01-16", 85],
["2010-01-17", 124],
["2010-01-18", 143],
["2010-01-19", 183],
["2010-01-20", 140],
["2010-01-21", 24],
["2010-01-22", 57],
["2010-01-23", 78],
["2010-01-24", 66],
["2010-01-25", 99],
["2010-01-26", 76],
["2010-01-27", 128],
["2010-01-28", 63],
["2010-01-29", 43],
["2010-01-30", 58],
["2010-01-31", 56],
["2010-02-01", 65],
["2010-02-02", 61],
["2010-02-03", 54],
["2010-02-04", 54],
["2010-02-05", 63],
["2010-02-06", 70],
["2010-02-07", 61],
["2010-02-08", 87],
["2010-02-09", 109],
["2010-02-10", 50],
["2010-02-11", 23],
["2010-02-12", 31],
["2010-02-14", 137],
["2010-02-15", 38],
["2010-02-16", 52],
["2010-02-17", 94],
["2010-02-18", 58],
["2010-02-19", 98],
["2010-02-20", 87],
["2010-02-21", 118],
["2010-02-22", 82],
["2010-02-23", 92],
["2010-02-24", 152],
["2010-02-25", 153],
["2010-02-26", 76],
["2010-02-27", 65],
["2010-02-28", 80],
["2010-03-01", 56],
["2010-03-02", 72],
["2010-03-03", 113],
["2010-03-04", 140],
["2010-03-05", 97],
["2010-03-06", 27],
["2010-03-07", 71],
["2010-03-08", 68],
["2010-03-09", 25],
["2010-03-10", 56],
["2010-03-11", 89],
["2010-03-12", 98],
["2010-03-13", 76],
["2010-03-14", 90],
["2010-03-16", 77],
["2010-03-17", 66],
["2010-03-18", 76],
["2010-03-19", 145],
["2010-03-20", 500],
["2010-03-21", 136],
["2010-03-22", 245],
["2010-03-23", 157],
["2010-03-24", 92],
["2010-03-25", 60],
["2010-03-26", 83],
["2010-03-27", 110],
["2010-03-28", 82],
["2010-03-29", 100],
["2010-03-30", 159],
["2010-03-31", 94],
["2010-04-01", 99],
["2010-04-02", 63],
["2010-04-03", 73],
["2010-04-04", 147],
["2010-04-05", 125],
["2010-04-06", 56],
["2010-04-07", 77],
["2010-04-08", 147],
["2010-04-09", 163],
["2010-04-10", 69],
["2010-04-11", 77],
["2010-04-12", 66],
["2010-04-13", 61],
["2010-04-14", 59],
["2010-04-15", 93],
["2010-04-16", 147],
["2010-04-17", 94],
["2010-04-18", 109],
["2010-04-19", 150],
["2010-04-20", 74],
["2010-04-21", 60],
["2010-04-22", 31],
["2010-04-23", 40],
["2010-04-24", 72],
["2010-04-25", 100],
["2010-04-26", 45],
["2010-04-27", 50],
["2010-04-28", 52],
["2010-04-29", 46],
["2010-04-30", 54],
["2010-05-01", 90],
["2010-05-02", 116],
["2010-05-03", 97],
["2010-05-04", 149],
["2010-05-05", 119],
["2010-05-06", 17],
["2010-05-07", 86],
["2010-05-08", 145],
["2010-05-09", 144],
["2010-05-10", 146],
["2010-05-11", 58],
["2010-05-12", 59],
["2010-05-13", 78],
["2010-05-14", 95],
["2010-05-15", 133],
["2010-05-16", 121],
["2010-05-17", 52],
["2010-05-18", 53],
["2010-05-19", 61],
["2010-05-20", 75],
["2010-05-22", 127],
["2010-05-23", 122],
["2010-05-24", 91],
["2010-05-25", 46],
["2010-05-26", 76],
["2010-05-27", 82],
["2010-05-28", 63],
["2010-05-29", 84],
["2010-05-30", 39],
["2010-05-31", 58],
["2010-06-01", 69],
["2010-06-02", 68],
["2010-06-03", 83],
["2010-06-04", 88],
["2010-06-05", 96],
["2010-06-06", 114],
["2010-06-07", 118],
["2010-06-08", 98],
["2010-06-09", 86],
["2010-06-10", 64],
["2010-06-11", 58],
["2010-06-12", 81],
["2010-06-13", 82],
["2010-06-14", 66],
["2010-06-15", 95],
["2010-06-16", 77],
["2010-06-17", 56],
["2010-06-18", 47],
["2010-06-19", 77],
["2010-06-20", 71],
["2010-06-21", 71],
["2010-06-23", 77],
["2010-06-24", 83],
["2010-06-25", 99],
["2010-06-26", 112],
["2010-06-27", 93],
["2010-06-28", 94],
["2010-06-29", 123],
["2010-06-30", 100],
["2010-07-01", 118],
["2010-07-02", 40],
["2010-07-03", 63],
["2010-07-04", 86],
["2010-07-05", 66],
["2010-07-06", 54],
["2010-07-07", 73],
["2010-07-08", 80],
["2010-07-09", 74],
["2010-07-10", 59],
["2010-07-11", 68],
["2010-07-12", 73],
["2010-07-13", 84],
["2010-07-14", 78],
["2010-07-15", 89],
["2010-07-16", 115],
["2010-07-17", 84],
["2010-07-18", 87],
["2010-07-19", 121],
["2010-07-21", 63],
["2010-07-22", 90],
["2010-07-23", 123],
["2010-07-24", 88],
["2010-07-25", 100],
["2010-07-26", 121],
["2010-07-27", 139],
["2010-07-28", 100],
["2010-07-29", 119],
["2010-07-30", 113],
["2010-07-31", 92],
["2010-08-01", 48],
["2010-08-02", 68],
["2010-08-03", 83],
["2010-08-04", 98],
["2010-08-05", 26],
["2010-08-06", 31],
["2010-08-07", 71],
["2010-08-08", 57],
["2010-08-09", 94],
["2010-08-10", 90],
["2010-08-11", 94],
["2010-08-12", 64],
["2010-08-13", 83],
["2010-08-13", 83],
["2010-08-14", 84],
["2010-08-15", 57],
["2010-08-16", 66],
["2010-08-17", 94],
["2010-08-18", 137],
["2010-08-19", 73],
["2010-08-20", 76],
["2010-08-21", 56],
["2010-08-22", 23],
["2010-08-23", 54],
["2010-08-24", 87],
["2010-08-25", 65],
["2010-08-26", 66],
["2010-08-27", 52],
["2010-08-28", 55],
["2010-08-29", 76],
["2010-08-30", 79],
["2010-08-31", 78],
["2010-09-01", 67],
["2010-09-02", 54],
["2010-09-03", 73],
["2010-09-04", 64],
["2010-09-05", 80],
["2010-09-06", 87],
["2010-09-07", 95],
["2010-09-08", 67],
["2010-09-09", 89],
["2010-09-10", 75],
["2010-09-11", 49],
["2010-09-12", 67],
["2010-09-13", 84],
["2010-09-14", 97],
["2010-09-15", 134],
["2010-09-16", 122],
["2010-09-17", 62],
["2010-09-18", 19],
["2010-09-19", 50],
["2010-09-20", 60],
["2010-09-21", 23],
["2010-09-22", 24],
["2010-09-23", 52],
["2010-09-24", 72],
["2010-09-25", 93],
["2010-09-26", 84],
["2010-09-27", 57],
["2010-09-28", 32],
["2010-09-29", 65],
["2010-09-30", 92],
["2010-10-01", 125],
["2010-10-02", 88],
["2010-10-03", 17],
["2010-10-04", 36],
["2010-10-05", 63],
["2010-10-06", 95],
["2010-10-07", 186],
["2010-10-08", 192],
["2010-10-09", 177],
["2010-10-10", 202],
["2010-10-11", 70],
["2010-10-12", 27],
["2010-10-13", 65],
["2010-10-14", 58],
["2010-10-15", 30],
["2010-10-16", 80],
["2010-10-17", 65],
["2010-10-18", 80],
["2010-10-19", 50],
["2010-10-20", 66],
["2010-10-21", 83],
["2010-10-22", 95],
["2010-10-23", 103],
["2010-10-24", 96],
["2010-10-25", 17],
["2010-10-26", 15],
["2010-10-27", 63],
["2010-10-28", 92],
["2010-10-29", 67],
["2010-10-30", 62],
["2010-10-31", 70],
["2010-11-01", 65],
["2010-11-02", 36],
["2010-11-03", 86],
["2010-11-04", 81],
["2010-11-05", 86],
["2010-11-06", 107],
["2010-11-07", 142],
["2010-11-08", 34],
["2010-11-09", 34],
["2010-11-10", 85],
["2010-11-11", 139],
["2010-11-12", 51],
["2010-11-13", 66],
["2010-11-14", 39],
["2010-11-15", 34],
["2010-11-16", 96],
["2010-11-17", 122],
["2010-11-18", 243],
["2010-11-19", 313],
["2010-11-20", 165],
["2010-11-21", 192],
["2010-11-22", 37],
["2010-11-23", 100],
["2010-11-24", 141],
["2010-11-25", 42],
["2010-11-26", 88],
["2010-11-27", 130],
["2010-11-28", 72],
["2010-11-29", 143],
["2010-11-30", 132],
["2010-12-01", 177],
["2010-12-02", 199],
["2010-12-03", 52],
["2010-12-04", 97],
["2010-12-05", 125],
["2010-12-06", 37],
["2010-12-07", 65],
["2010-12-08", 81],
["2010-12-09", 97],
["2010-12-10", 176],
["2010-12-11", 50],
["2010-12-12", 85],
["2010-12-13", 72],
["2010-12-14", 31],
["2010-12-15", 53],
["2010-12-16", 92],
["2010-12-17", 105],
["2010-12-18", 156],
["2010-12-19", 182],
["2010-12-20", 100],
["2010-12-21", 165],
["2010-12-22", 222],
["2010-12-23", 30],
["2010-12-24", 40],
["2010-12-25", 57],
["2010-12-26", 66],
["2010-12-27", 82],
["2010-12-28", 70],
["2010-12-29", 63],
["2010-12-30", 67],
["2010-12-31", 47],
["2011-01-01", 34],
["2011-01-02", 41],
["2011-01-03", 82],
["2011-01-04", 96],
["2011-01-05", 55],
["2011-01-06", 35],
["2011-01-07", 36],
["2011-01-08", 78],
["2011-01-09", 35],
["2011-01-10", 34],
["2011-01-11", 67],
["2011-01-12", 49],
["2011-01-13", 90],
["2011-01-14", 73],
["2011-01-16", 35],
["2011-01-17", 62],
["2011-01-18", 30],
["2011-01-19", 39],
["2011-01-20", 36],
["2011-01-21", 61],
["2011-01-22", 76],
["2011-01-23", 50],
["2011-01-24", 35],
["2011-01-25", 61],
["2011-01-26", 41],
["2011-01-27", 59],
["2011-01-28", 41],
["2011-01-29", 30],
["2011-01-30", 25],
["2011-01-31", 48],
["2011-02-01", 53],
["2011-02-02", 58],
["2011-02-03", 83],
["2011-02-04", 111],
["2011-02-05", 75],
["2011-02-06", 84],
["2011-02-07", 77],
["2011-02-09", 83],
["2011-02-10", 58],
["2011-02-11", 58],
["2011-02-12", 21],
["2011-02-13", 53],
["2011-02-14", 41],
["2011-02-15", 74],
["2011-02-16", 146],
["2011-02-17", 132],
["2011-02-18", 115],
["2011-02-19", 112],
["2011-02-20", 100],
["2011-02-21", 333],
["2011-02-22", 270],
["2011-02-23", 208],
["2011-02-25", 56],
["2011-02-26", 56],
["2011-02-27", 60],
["2011-02-28", 30],
["2011-03-01", 21],
["2011-03-02", 33],
["2011-03-03", 34],
["2011-03-04", 59],
["2011-03-05", 77],
["2011-03-06", 65],
["2011-03-07", 26],
["2011-03-08", 41],
["2011-03-09", 33],
["2011-03-10", 64],
["2011-03-11", 58],
["2011-03-12", 135],
["2011-03-13", 197],
["2011-03-14", 54],
["2011-03-15", 56],
["2011-03-16", 72],
["2011-03-17", 98],
["2011-03-18", 161],
["2011-03-19", 123],
["2011-03-20", 250],
["2011-03-21", 121],
["2011-03-22", 67],
["2011-03-23", 51],
["2011-03-24", 51],
["2011-03-25", 48],
["2011-03-26", 78],
["2011-03-27", 41],
["2011-03-28", 71],
["2011-03-29", 86],
["2011-03-30", 98],
["2011-03-31", 140],
["2011-04-01", 137],
["2011-04-02", 38],
["2011-04-03", 59],
["2011-04-04", 67],
["2011-04-05", 88],
["2011-04-06", 95],
["2011-04-07", 96],
["2011-04-08", 70],
["2011-04-09", 108],
["2011-04-10", 142],
["2011-04-11", 53],
["2011-04-12", 88],
["2011-04-13", 157],
["2011-04-14", 138],
["2011-04-15", 98],
["2011-04-16", 128],
["2011-04-17", 164],
["2011-04-18", 99],
["2011-04-19", 83],
["2011-04-20", 127],
["2011-04-21", 154],
["2011-04-22", 44],
["2011-04-23", 49],
["2011-04-24", 26],
["2011-04-25", 76],
["2011-04-26", 111],
["2011-04-27", 60],
["2011-04-28", 76],
["2011-04-29", 119],
["2011-04-30", 141],
["2011-05-01", 500],
["2011-05-02", 85],
["2011-05-03", 60],
["2011-05-04", 79],
["2011-05-05", 87],
["2011-05-06", 99],
["2011-05-07", 57],
["2011-05-08", 74],
["2011-05-09", 53],
["2011-05-10", 50],
["2011-05-11", 80],
["2011-05-12", 197],
["2011-05-13", 52],
["2011-05-14", 70],
["2011-05-15", 76],
["2011-05-16", 90],
["2011-05-17", 91],
["2011-05-18", 155],
["2011-05-19", 64],
["2011-05-20", 59],
["2011-05-21", 54],
["2011-05-22", 83],
["2011-05-23", 98],
["2011-05-24", 94],
["2011-05-25", 75],
["2011-05-26", 86],
["2011-05-27", 65],
["2011-05-28", 102],
["2011-05-29", 98],
["2011-05-30", 75],
["2011-05-31", 47],
["2011-06-01", 28],
["2011-06-02", 75],
["2011-06-03", 75],
["2011-06-04", 66],
["2011-06-05", 79],
["2011-06-06", 83],
["2011-06-07", 98],
["2011-06-08", 51],
["2011-06-10", 105],
["2011-06-11", 75],
["2011-06-12", 28],
["2011-06-13", 71],
["2011-06-14", 99],
["2011-06-15", 107],
["2011-06-16", 77],
["2011-06-17", 81],
["2011-06-18", 97],
["2011-06-19", 119],
["2011-06-20", 122],
["2011-06-21", 130],
["2011-06-22", 128],
["2011-06-23", 123],
["2011-06-24", 24],
["2011-06-25", 38],
["2011-06-26", 57],
["2011-06-27", 56],
["2011-06-28", 90],
["2011-06-29", 129],
["2011-06-30", 99],
["2011-07-01", 94],
["2011-07-02", 71],
["2011-07-03", 71],
["2011-07-04", 55],
["2011-07-05", 80],
["2011-07-06", 115],
["2011-07-07", 73],
["2011-07-08", 42],
["2011-07-09", 37],
["2011-07-10", 75],
["2011-07-11", 112],
["2011-07-12", 88],
["2011-07-13", 83],
["2011-07-14", 83],
["2011-07-15", 65],
["2011-07-16", 65],
["2011-07-17", 67],
["2011-07-18", 65],
["2011-07-19", 83],
["2011-07-20", 42],
["2011-07-21", 53],
["2011-07-22", 71],
["2011-07-23", 148],
["2011-07-24", 159],
["2011-07-25", 19],
["2011-07-26", 28],
["2011-07-27", 52],
["2011-07-28", 92],
["2011-07-29", 113],
["2011-07-30", 21],
["2011-07-31", 54],
["2011-08-01", 78],
["2011-08-02", 94],
["2011-08-03", 69],
["2011-08-04", 82],
["2011-08-05", 98],
["2011-08-06", 91],
["2011-08-07", 74],
["2011-08-08", 77],
["2011-08-09", 108],
["2011-08-10", 58],
["2011-08-11", 68],
["2011-08-12", 90],
["2011-08-13", 93],
["2011-08-14", 78],
["2011-08-15", 73],
["2011-08-16", 29],
["2011-08-17", 58],
["2011-08-18", 28],
["2011-08-19", 65],
["2011-08-20", 72],
["2011-08-21", 80],
["2011-08-22", 78],
["2011-08-23", 88],
["2011-08-24", 95],
["2011-08-25", 80],
["2011-08-26", 61],
["2011-08-27", 63],
["2011-08-28", 65],
["2011-08-29", 80],
["2011-08-30", 99],
["2011-08-31", 117],
["2011-09-01", 89],
["2011-09-02", 54],
["2011-09-03", 69],
["2011-09-04", 77],
["2011-09-05", 76],
["2011-09-06", 76],
["2011-09-07", 126],
["2011-09-08", 48],
["2011-09-09", 39],
["2011-09-10", 35],
["2011-09-11", 24],
["2011-09-12", 61],
["2011-09-13", 81],
["2011-09-14", 87],
["2011-09-15", 93],
["2011-09-16", 52],
["2011-09-17", 22],
["2011-09-18", 35],
["2011-09-19", 45],
["2011-09-20", 50],
["2011-09-21", 52],
["2011-09-22", 58],
["2011-09-24", 96],
["2011-09-25", 125],
["2011-09-26", 160],
["2011-09-27", 121],
["2011-09-28", 128],
["2011-09-29", 94],
["2011-09-30", 30],
["2011-10-01", 56],
["2011-10-02", 33],
["2011-10-03", 47],
["2011-10-04", 79],
["2011-10-05", 157],
["2011-10-06", 61],
["2011-10-07", 84],
["2011-10-08", 106],
["2011-10-09", 159],
["2011-10-10", 137],
["2011-10-11", 87],
["2011-10-12", 130],
["2011-10-13", 98],
["2011-10-14", 32],
["2011-10-15", 33],
["2011-10-16", 31],
["2011-10-17", 35],
["2011-10-18", 72],
["2011-10-19", 87],
["2011-10-20", 149],
["2011-10-21", 146],
["2011-10-22", 139],
["2011-10-23", 155],
["2011-10-24", 19],
["2011-10-25", 28],
["2011-10-26", 78],
["2011-10-27", 129],
["2011-10-29", 97],
["2011-10-30", 147],
["2011-10-31", 131],
["2011-11-01", 128],
["2011-11-02", 53],
["2011-11-03", 68],
["2011-11-04", 82],
["2011-11-05", 60],
["2011-11-06", 52],
["2011-11-07", 63],
["2011-11-08", 73],
["2011-11-09", 49],
["2011-11-10", 60],
["2011-11-11", 84],
["2011-11-12", 99],
["2011-11-13", 65],
["2011-11-14", 73],
["2011-11-15", 124],
["2011-11-16", 128],
["2011-11-17", 97],
["2011-11-18", 62],
["2011-11-19", 36],
["2011-11-20", 27],
["2011-11-21", 80],
["2011-11-22", 131],
["2011-11-23", 40],
["2011-11-24", 68],
["2011-11-25", 120],
["2011-11-26", 142],
["2011-11-27", 135],
["2011-11-28", 109],
["2011-11-29", 66],
["2011-11-30", 81],
["2011-12-01", 71],
["2011-12-02", 144],
["2011-12-03", 97],
["2011-12-04", 80],
["2011-12-05", 193],
["2011-12-06", 131],
["2011-12-07", 111],
["2011-12-08", 17],
["2011-12-09", 19],
["2011-12-10", 23],
["2011-12-11", 77],
["2011-12-12", 56],
["2011-12-13", 76],
["2011-12-14", 84],
["2011-12-15", 19],
["2011-12-16", 27],
["2011-12-17", 63],
["2011-12-18", 63],
["2011-12-19", 53],
["2011-12-20", 70],
["2011-12-21", 67],
["2011-12-22", 31],
["2011-12-23", 61],
["2011-12-24", 27],
["2011-12-25", 59],
["2011-12-26", 69],
["2011-12-27", 100],
["2011-12-28", 114],
["2011-12-29", 81],
["2011-12-30", 75],
["2011-12-31", 109],
["2012-01-01", 81],
["2012-01-02", 74],
["2012-01-03", 35],
["2012-01-04", 30],
["2012-01-05", 63],
["2012-01-06", 95],
["2012-01-07", 65],
["2012-01-08", 89],
["2012-01-09", 102],
["2012-01-10", 161],
["2012-01-11", 25],
["2012-01-12", 86],
["2012-01-13", 79],
["2012-01-14", 60],
["2012-01-15", 70],
["2012-01-16", 106],
["2012-01-17", 111],
["2012-01-18", 193],
["2012-01-19", 269],
["2012-01-20", 131],
["2012-01-21", 21],
["2012-01-22", 23],
["2012-01-23", 149],
["2012-01-24", 49],
["2012-01-25", 45],
["2012-01-26", 78],
["2012-01-27", 67],
["2012-01-28", 74],
["2012-01-29", 62],
["2012-01-30", 66],
["2012-01-31", 92],
["2012-02-01", 30],
["2012-02-02", 26],
["2012-02-03", 60],
["2012-02-04", 52],
["2012-02-05", 84],
["2012-02-06", 112],
["2012-02-07", 64],
["2012-02-08", 34],
["2012-02-09", 58],
["2012-02-10", 49],
["2012-02-11", 73],
["2012-02-12", 75],
["2012-02-13", 100],
["2012-02-14", 125],
["2012-02-15", 62],
["2012-02-16", 61],
["2012-02-17", 34],
["2012-02-18", 29],
["2012-02-19", 68],
["2012-02-20", 73],
["2012-02-21", 118],
["2012-02-22", 118],
["2012-02-23", 73],
["2012-02-24", 73],
["2012-02-25", 57],
["2012-02-26", 57],
["2012-02-27", 95],
["2012-02-28", 152],
["2012-02-29", 118],
["2012-03-01", 142],
["2012-03-02", 111],
["2012-03-03", 68],
["2012-03-04", 90],
["2012-03-05", 97],
["2012-03-06", 63],
["2012-03-07", 38],
["2012-03-08", 31],
["2012-03-09", 65],
["2012-03-10", 78],
["2012-03-11", 36],
["2012-03-12", 62],
["2012-03-13", 104],
["2012-03-14", 57],
["2012-03-15", 64],
["2012-03-16", 109],
["2012-03-17", 144],
["2012-03-18", 61],
["2012-03-19", 57],
["2012-03-20", 81],
["2012-03-21", 105],
["2012-03-22", 146],
["2012-03-23", 55],
["2012-03-24", 56],
["2012-03-25", 30],
["2012-03-26", 90],
["2012-03-27", 112],
["2012-03-28", 65],
["2012-03-29", 90],
["2012-03-30", 76],
["2012-03-31", 159],
["2012-04-01", 78],
["2012-04-02", 103],
["2012-04-03", 73],
["2012-04-03", 73],
["2012-04-04", 73],
["2012-04-05", 64],
["2012-04-06", 70],
["2012-04-07", 71],
["2012-04-08", 119],
["2012-04-09", 118],
["2012-04-10", 138],
["2012-04-11", 41],
["2012-04-12", 69],
["2012-04-13", 81],
["2012-04-14", 100],
["2012-04-15", 109],
["2012-04-16", 84],
["2012-04-17", 100],
["2012-04-18", 140],
["2012-04-19", 98],
["2012-04-20", 133],
["2012-04-21", 81],
["2012-04-22", 102],
["2012-04-23", 140],
["2012-04-24", 133],
["2012-04-25", 32],
["2012-04-26", 60],
["2012-04-27", 147],
["2012-04-28", 164],
["2012-04-29", 473],
["2012-04-30", 268],
["2012-05-01", 208],
["2012-05-02", 111],
["2012-05-03", 106],
["2012-05-04", 100],
["2012-05-05", 99],
["2012-05-06", 100],
["2012-05-07", 100],
["2012-05-08", 111],
["2012-05-09", 107],
["2012-05-10", 129],
["2012-05-11", 133],
["2012-05-12", 90],
["2012-05-13", 96],
["2012-05-14", 64],
["2012-05-15", 58],
["2012-05-16", 58],
["2012-05-17", 78],
["2012-05-18", 84],
["2012-05-19", 143],
["2012-05-20", 85],
["2012-05-21", 97],
["2012-05-22", 109],
["2012-05-23", 64],
["2012-05-24", 69],
["2012-05-25", 63],
["2012-05-26", 90],
["2012-05-27", 88],
["2012-05-28", 133],
["2012-05-29", 116],
["2012-05-30", 29],
["2012-05-31", 64],
["2012-06-01", 54],
["2012-06-02", 90],
["2012-06-03", 112],
["2012-06-04", 80],
["2012-06-05", 65],
["2012-06-06", 98],
["2012-06-07", 71],
["2012-06-08", 77],
["2012-06-09", 91],
["2012-06-10", 32],
["2012-06-11", 50],
["2012-06-12", 58],
["2012-06-13", 62],
["2012-06-14", 50],
["2012-06-15", 22],
["2012-06-16", 33],
["2012-06-17", 69],
["2012-06-18", 137],
["2012-06-19", 132],
["2012-06-20", 105],
["2012-06-21", 112],
["2012-06-22", 84],
["2012-06-23", 81],
["2012-06-24", 95],
["2012-06-25", 49],
["2012-06-26", 65],
["2012-06-27", 55],
["2012-06-28", 54],
["2012-06-29", 60],
["2012-06-30", 46],
["2012-07-01", 70],
["2012-07-02", 69],
["2012-07-03", 59],
["2012-07-04", 71],
["2012-07-05", 70],
["2012-07-06", 59],
["2012-07-07", 86],
["2012-07-08", 84],
["2012-07-09", 64],
["2012-07-10", 50],
["2012-07-11", 44],
["2012-07-12", 46],
["2012-07-13", 31],
["2012-07-14", 48],
["2012-07-15", 53],
["2012-07-16", 70],
["2012-07-17", 78],
["2012-07-18", 71],
["2012-07-19", 82],
["2012-07-20", 111],
["2012-07-21", 131],
["2012-07-22", 15],
["2012-07-24", 60],
["2012-07-25", 72],
["2012-07-26", 55],
["2012-07-26", 55],
["2012-07-27", 50],
["2012-07-28", 56],
["2012-07-29", 57],
["2012-07-30", 30],
["2012-07-31", 28],
["2012-08-01", 20],
["2012-08-02", 17],
["2012-08-03", 53],
["2012-08-04", 40],
["2012-08-05", 48],
["2012-08-06", 60],
["2012-08-07", 59],
["2012-08-08", 68],
["2012-08-09", 43],
["2012-08-10", 72],
["2012-08-11", 80],
["2012-08-12", 41],
["2012-08-13", 36],
["2012-08-14", 62],
["2012-08-15", 60],
["2012-08-16", 68],
["2012-08-17", 83],
["2012-08-18", 110],
["2012-08-19", 84],
["2012-08-20", 92],
["2012-08-21", 25],
["2012-08-22", 40],
["2012-08-23", 74],
["2012-08-24", 94],
["2012-08-25", 92],
["2012-08-26", 117],
["2012-08-27", 100],
["2012-08-28", 59],
["2012-08-29", 84],
["2012-08-30", 135],
["2012-08-31", 150],
["2012-09-01", 128],
["2012-09-02", 52],
["2012-09-03", 15],
["2012-09-04", 22],
["2012-09-05", 50],
["2012-09-06", 70],
["2012-09-07", 77],
["2012-09-08", 40],
["2012-09-09", 79],
["2012-09-10", 96],
["2012-09-11", 93],
["2012-09-12", 44],
["2012-09-13", 28],
["2012-09-14", 31],
["2012-09-15", 50],
["2012-09-16", 65],
["2012-09-17", 63],
["2012-09-18", 61],
["2012-09-19", 56],
["2012-09-21", 128],
["2012-09-22", 93],
["2012-09-23", 85],
["2012-09-24", 74],
["2012-09-25", 78],
["2012-09-26", 26],
["2012-09-27", 65],
["2012-09-28", 15],
["2012-09-29", 24],
["2012-09-30", 38],
["2012-10-01", 52],
["2012-10-02", 78],
["2012-10-03", 108],
["2012-10-04", 28],
["2012-10-05", 41],
["2012-10-06", 74],
["2012-10-07", 83],
["2012-10-08", 123],
["2012-10-09", 140],
["2012-10-10", 18],
["2012-10-11", 73],
["2012-10-12", 121],
["2012-10-13", 97],
["2012-10-14", 40],
["2012-10-15", 83],
["2012-10-16", 78],
["2012-10-17", 23],
["2012-10-18", 65],
["2012-10-19", 79],
["2012-10-20", 139],
["2012-10-21", 81],
["2012-10-22", 26],
["2012-10-23", 54],
["2012-10-24", 89],
["2012-10-25", 90],
["2012-10-26", 163],
["2012-10-27", 154],
["2012-10-28", 22],
["2012-10-29", 59],
["2012-10-30", 36],
["2012-10-31", 51],
["2012-11-01", 67],
["2012-11-02", 103],
["2012-11-03", 135],
["2012-11-04", 20],
["2012-11-05", 16],
["2012-11-06", 48],
["2012-11-07", 80],
["2012-11-08", 62],
["2012-11-09", 93],
["2012-11-10", 82],
["2012-11-11", 17],
["2012-11-12", 27],
["2012-11-13", 30],
["2012-11-14", 26],
["2012-11-15", 71],
["2012-11-16", 92],
["2012-11-17", 47],
["2012-11-18", 96],
["2012-11-19", 55],
["2012-11-20", 74],
["2012-11-21", 123],
["2012-11-22", 156],
["2012-11-23", 22],
["2012-11-24", 80],
["2012-11-25", 133],
["2012-11-26", 44],
["2012-11-27", 105],
["2012-11-28", 151],
["2012-11-29", 54],
["2012-12-01", 50],
["2012-12-02", 96],
["2012-12-03", 123],
["2012-12-04", 50],
["2012-12-05", 64],
["2012-12-06", 50],
["2012-12-07", 73],
["2012-12-08", 53],
["2012-12-09", 38],
["2012-12-10", 53],
["2012-12-11", 86],
["2012-12-12", 103],
["2012-12-13", 130],
["2012-12-14", 107],
["2012-12-15", 114],
["2012-12-16", 108],
["2012-12-17", 45],
["2012-12-18", 22],
["2012-12-19", 72],
["2012-12-20", 121],
["2012-12-21", 120],
["2012-12-22", 24],
["2012-12-23", 36],
["2012-12-24", 53],
["2012-12-25", 58],
["2012-12-26", 67],
["2012-12-28", 137],
["2012-12-29", 94],
["2012-12-30", 38],
["2012-12-31", 57],
["2013-01-01", 71],
["2013-01-02", 27],
["2013-01-03", 35],
["2013-01-04", 57],
["2013-01-05", 79],
["2013-01-06", 58],
["2013-01-07", 105],
["2013-01-08", 124],
["2013-01-09", 32],
["2013-01-10", 87],
["2013-01-11", 232],
["2013-01-12", 174],
["2013-01-13", 498],
["2013-01-14", 184],
["2014-01-01", 85],
["2014-01-02", 158],
["2014-01-03", 74],
["2014-01-04", 165],
["2014-01-05", 113],
["2014-01-06", 190],
["2014-01-07", 122],
["2014-01-10", 95],
["2014-01-11", 159],
["2014-01-12", 52],
["2014-01-13", 117],
["2014-01-14", 113],
["2014-01-15", 180],
["2014-01-16", 403],
["2014-01-17", 209],
["2014-01-18", 113],
["2014-01-19", 149],
["2014-01-21", 68],
["2014-01-22", 162],
["2014-01-23", 276],
["2014-01-24", 195],
["2014-01-26", 77],
["2014-01-27", 114],
["2014-01-28", 67],
["2014-01-29", 165],
["2014-01-30", 93],
["2014-01-31", 188],
["2014-02-01", 178],
["2014-02-02", 85],
["2014-02-05", 119],
["2014-02-06", 158],
["2014-02-07", 124],
["2014-02-08", 84],
["2014-02-10", 53],
["2014-02-11", 142],
["2014-02-12", 150],
["2014-02-13", 242],
["2014-02-14", 329],
["2014-02-15", 429],
["2014-02-16", 348],
["2014-02-17", 118],
["2014-02-18", 98],
["2014-02-19", 92],
["2014-02-20", 270],
["2014-02-21", 311],
["2014-02-22", 311],
["2014-02-23", 255],
["2014-02-24", 313],
["2014-02-25", 404],
["2014-02-28", 113],
["2014-03-01", 68],
["2014-03-02", 189],
["2014-03-03", 268],
["2014-03-04", 67],
["2014-03-07", 70],
["2014-03-08", 179],
["2014-03-09", 127],
["2014-03-10", 110],
["2014-03-11", 195],
["2014-03-13", 69],
["2014-03-14", 64],
["2014-03-15", 133],
["2014-03-16", 145],
["2014-03-17", 142],
["2014-03-18", 85],
["2014-03-19", 73],
["2014-03-21", 62],
["2014-03-22", 86],
["2014-03-23", 186],
["2014-03-24", 271],
["2014-03-25", 255],
["2014-03-26", 331],
["2014-03-27", 285],
["2014-03-28", 169],
["2014-03-29", 63],
["2014-03-30", 77],
["2014-03-31", 183],
["2014-04-01", 147],
["2014-04-02", 133],
["2014-04-03", 66],
["2014-04-04", 91],
["2014-04-05", 68],
["2014-04-06", 98],
["2014-04-07", 135],
["2014-04-08", 223],
["2014-04-09", 156],
["2014-04-10", 246],
["2014-04-11", 83],
["2014-04-12", 133],
["2014-04-13", 212],
["2014-04-14", 270],
["2014-04-15", 109],
["2014-04-16", 90],
["2014-04-17", 124],
["2014-04-18", 182],
["2014-04-19", 84],
["2014-04-20", 84],
["2014-04-21", 73],
["2014-04-22", 85],
["2014-04-23", 156],
["2014-04-24", 156],
["2014-04-25", 163],
["2014-04-26", 69],
["2014-04-27", 74],
["2014-04-28", 83],
["2014-04-29", 122],
["2014-04-30", 139],
["2014-05-01", 156],
["2014-05-03", 93],
["2014-05-04", 57],
["2014-05-05", 54],
["2014-05-06", 105],
["2014-05-07", 82],
["2014-05-08", 104],
["2014-05-09", 84],
["2014-05-10", 69],
["2014-05-12", 74],
["2014-05-13", 86],
["2014-05-14", 59],
["2014-05-15", 122],
["2014-05-16", 92],
["2014-05-17", 124],
["2014-05-18", 171],
["2014-05-19", 146],
["2014-05-20", 113],
["2014-05-21", 170],
["2014-05-22", 183],
["2014-05-23", 140],
["2014-05-24", 104],
["2014-05-25", 91],
["2014-05-26", 77],
["2014-05-27", 107],
["2014-05-28", 121],
["2014-05-29", 120],
["2014-05-30", 192],
["2014-05-31", 177],
["2014-06-01", 130],
["2014-06-02", 90],
["2014-06-03", 117],
["2014-06-04", 124],
["2014-06-05", 157],
["2014-06-06", 103],
["2014-06-07", 51],
["2014-06-08", 70],
["2014-06-09", 87],
["2014-06-10", 95],
["2014-06-11", 74],
["2014-06-12", 90],
["2014-06-13", 116],
["2014-06-14", 165],
["2014-06-15", 178],
["2014-06-16", 178],
["2014-06-17", 104],
["2014-06-18", 116],
["2014-06-19", 116],
["2014-06-20", 84],
["2014-06-21", 96],
["2014-06-22", 91],
["2014-06-23", 115],
["2014-06-24", 161],
["2014-06-25", 138],
["2014-06-26", 163],
["2014-06-27", 68],
["2014-06-28", 77],
["2014-06-29", 161],
["2014-06-30", 185],
["2014-07-01", 172],
["2014-07-02", 80],
["2014-07-03", 248],
["2014-07-04", 237],
["2014-07-05", 165],
["2014-07-06", 256],
["2014-07-07", 216],
["2014-07-08", 134],
["2014-07-09", 63],
["2014-07-10", 114],
["2014-07-11", 77],
["2014-07-12", 80],
["2014-07-13", 64],
["2014-07-14", 156],
["2014-07-15", 140],
["2014-07-16", 133],
["2014-07-17", 186],
["2014-07-18", 182],
["2014-07-19", 106],
["2014-07-20", 119],
["2014-07-21", 68],
["2014-07-22", 54],
["2014-07-23", 82],
["2014-07-24", 90],
["2014-07-25", 134],
["2014-07-26", 188],
["2014-07-27", 194],
["2014-07-28", 159],
["2014-07-29", 159],
["2014-07-30", 169],
["2014-07-31", 244],
["2014-08-01", 199],
["2014-08-02", 163],
["2014-08-03", 149],
["2014-08-05", 80],
["2014-08-06", 67],
["2014-08-07", 162],
["2014-08-08", 140],
["2014-08-09", 143],
["2014-08-10", 125],
["2014-08-11", 76],
["2014-08-12", 119],
["2014-08-13", 70],
["2014-08-14", 104],
["2014-08-15", 109],
["2014-08-16", 159],
["2014-08-17", 124],
["2014-08-18", 135],
["2014-08-19", 150],
["2014-08-20", 164],
["2014-08-21", 169],
["2014-08-22", 83],
["2014-08-23", 155],
["2014-08-24", 75],
["2014-08-25", 59],
["2014-08-26", 78],
["2014-08-27", 136],
["2014-08-28", 103],
["2014-08-29", 104],
["2014-08-30", 176],
["2014-08-31", 89],
["2014-09-01", 127],
["2014-09-03", 54],
["2014-09-04", 100],
["2014-09-05", 140],
["2014-09-06", 186],
["2014-09-07", 200],
["2014-09-08", 61],
["2014-09-09", 109],
["2014-09-10", 111],
["2014-09-11", 114],
["2014-09-12", 97],
["2014-09-13", 94],
["2014-09-14", 66],
["2014-09-15", 54],
["2014-09-16", 87],
["2014-09-17", 80],
["2014-09-18", 84],
["2014-09-19", 117],
["2014-09-20", 168],
["2014-09-21", 129],
["2014-09-22", 127],
["2014-09-23", 64],
["2014-09-24", 60],
["2014-09-25", 144],
["2014-09-26", 170],
["2014-09-27", 58],
["2014-09-28", 87],
["2014-09-29", 70],
["2014-09-30", 53],
["2014-10-01", 92],
["2014-10-02", 78],
["2014-10-03", 123],
["2014-10-04", 95],
["2014-10-05", 54],
["2014-10-06", 68],
["2014-10-07", 200],
["2014-10-08", 314],
["2014-10-09", 379],
["2014-10-10", 346],
["2014-10-11", 233],
["2014-10-14", 80],
["2014-10-15", 73],
["2014-10-16", 76],
["2014-10-17", 132],
["2014-10-18", 211],
["2014-10-19", 289],
["2014-10-20", 250],
["2014-10-21", 82],
["2014-10-22", 99],
["2014-10-23", 163],
["2014-10-24", 267],
["2014-10-25", 353],
["2014-10-26", 78],
["2014-10-27", 72],
["2014-10-28", 88],
["2014-10-29", 140],
["2014-10-30", 206],
["2014-10-31", 204],
["2014-11-01", 65],
["2014-11-03", 59],
["2014-11-04", 150],
["2014-11-05", 79],
["2014-11-07", 63],
["2014-11-08", 93],
["2014-11-09", 80],
["2014-11-10", 95],
["2014-11-11", 59],
["2014-11-13", 65],
["2014-11-14", 77],
["2014-11-15", 143],
["2014-11-16", 98],
["2014-11-17", 64],
["2014-11-18", 93],
["2014-11-19", 282],
["2014-11-23", 155],
["2014-11-24", 94],
["2014-11-25", 196],
["2014-11-26", 293],
["2014-11-27", 83],
["2014-11-28", 114],
["2014-11-29", 276],
["2014-12-01", 54],
["2014-12-02", 65],
["2014-12-03", 51],
["2014-12-05", 62],
["2014-12-06", 89],
["2014-12-07", 65],
["2014-12-08", 82],
["2014-12-09", 276],
["2014-12-10", 153],
["2014-12-11", 52],
["2014-12-13", 69],
["2014-12-14", 113],
["2014-12-15", 82],
["2014-12-17", 99],
["2014-12-19", 53],
["2014-12-22", 103],
["2014-12-23", 100],
["2014-12-25", 73],
["2014-12-26", 155],
["2014-12-27", 243],
["2014-12-28", 155],
["2014-12-29", 125],
["2014-12-30", 65],
["2015-01-01", 65],
["2015-01-02", 79],
["2015-01-03", 200],
["2015-01-04", 226],
["2015-01-05", 122],
["2015-01-06", 60],
["2015-01-07", 85],
["2015-01-08", 190],
["2015-01-09", 105],
["2015-01-10", 208],
["2015-01-11", 59],
["2015-01-12", 160],
["2015-01-13", 211],
["2015-01-14", 265],
["2015-01-15", 386],
["2015-01-16", 118],
["2015-01-17", 89],
["2015-01-18", 94],
["2015-01-19", 77],
["2015-01-20", 113],
["2015-01-22", 143],
["2015-01-23", 257],
["2015-01-24", 117],
["2015-01-25", 185],
["2015-01-26", 119],
["2015-01-28", 65],
["2015-01-29", 87],
["2015-01-31", 60],
["2015-02-01", 108],
["2015-02-02", 188],
["2015-02-03", 143],
["2015-02-05", 62],
["2015-02-06", 100],
["2015-02-09", 152],
["2015-02-10", 166],
["2015-02-11", 55],
["2015-02-12", 59],
["2015-02-13", 175],
["2015-02-14", 293],
["2015-02-15", 326],
["2015-02-16", 153],
["2015-02-18", 73],
["2015-02-19", 267],
["2015-02-20", 183],
["2015-02-21", 394],
["2015-02-22", 158],
["2015-02-23", 86],
["2015-02-24", 207],
]
(
Line()
.add_xaxis(xaxis_data=[item[0] for item in all_data])
.add_yaxis(
series_name="",
y_axis=[item[1] for item in all_data],
yaxis_index=0,
is_smooth=True,
is_symbol_show=False,
)
.set_global_opts(
title_opts=opts.TitleOpts(title="Beijing AQI"),
tooltip_opts=opts.TooltipOpts(trigger="axis"),
datazoom_opts=[
opts.DataZoomOpts(yaxis_index=0),
opts.DataZoomOpts(type_="inside", yaxis_index=0),
],
visualmap_opts=opts.VisualMapOpts(
pos_top="10",
pos_right="10",
is_piecewise=True,
pieces=[
{"gt": 0, "lte": 50, "color": "#096"},
{"gt": 50, "lte": 100, "color": "#ffde33"},
{"gt": 100, "lte": 150, "color": "#ff9933"},
{"gt": 150, "lte": 200, "color": "#cc0033"},
{"gt": 200, "lte": 300, "color": "#660099"},
{"gt": 300, "color": "#7e0023"},
],
out_of_range={"color": "#999"},
),
xaxis_opts=opts.AxisOpts(type_="category"),
yaxis_opts=opts.AxisOpts(
type_="value",
name_location="start",
min_=0,
max_=500,
is_scale=True,
axistick_opts=opts.AxisTickOpts(is_inside=False),
),
)
.set_series_opts(
markline_opts=opts.MarkLineOpts(
data=[
{"yAxis": 50},
{"yAxis": 100},
{"yAxis": 150},
{"yAxis": 200},
{"yAxis": 300},
],
label_opts=opts.LabelOpts(position="end"),
)
)
.render("beijing_aqi.html")
)
| 24.410882
| 63
| 0.426575
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 59,610
| 0.488246
|
e9ff12848b4786dd9b5181f046c3b8596891ad5d
| 1,416
|
py
|
Python
|
test_scripts/test_stack_and_visualize.py
|
jakevdp/spheredb
|
e5e5ff8b8902459b3f38a1a413a712ac1695accc
|
[
"BSD-3-Clause"
] | 1
|
2021-08-29T06:01:28.000Z
|
2021-08-29T06:01:28.000Z
|
test_scripts/test_stack_and_visualize.py
|
jakevdp/spheredb
|
e5e5ff8b8902459b3f38a1a413a712ac1695accc
|
[
"BSD-3-Clause"
] | null | null | null |
test_scripts/test_stack_and_visualize.py
|
jakevdp/spheredb
|
e5e5ff8b8902459b3f38a1a413a712ac1695accc
|
[
"BSD-3-Clause"
] | 2
|
2018-08-03T20:27:35.000Z
|
2021-08-29T06:01:30.000Z
|
"""
Stacking and Visualizing
------------------------
This script does the following:
1. Input LSST images, warp to sparse matrix, store as scidb arrays.
This tests the warping of a single LSST exposure into a sparse matrix
representation of a HEALPix grid.
"""
import os
import sys
import glob
import matplotlib.pyplot as plt
import numpy as np
sys.path.append(os.path.abspath('..'))
from spheredb.scidb_tools import HPXPixels3D, find_index_bounds
filenames = glob.glob("/home/jakevdp/research/LSST_IMGS/*/R*/S*.fits")
print "total number of files:", len(filenames)
HPX_data = HPXPixels3D(input_files=filenames[:20],
name='LSSTdata', force_reload=False)
times = HPX_data.unique_times()
xlim, ylim, tlim = HPX_data.index_bounds()
for time in times[:2]:
tslice = HPX_data.time_slice(time)
tslice_arr = tslice.arr[xlim[0]:xlim[1],
ylim[0]:ylim[1]].toarray()
fig, ax = plt.subplots()
im = ax.imshow(np.log(tslice_arr), cmap=plt.cm.binary)
ax.set_xlim(400, 440)
ax.set_ylim(860, 820)
fig.colorbar(im, ax=ax)
ax.set_title("time = {0}".format(time))
coadd = HPX_data.coadd().arr[xlim[0]:xlim[1],
ylim[0]:ylim[1]].toarray()
fig, ax = plt.subplots()
im = ax.imshow(np.log(coadd), cmap=plt.cm.binary)
ax.set_xlim(400, 440)
ax.set_ylim(860, 820)
fig.colorbar(im, ax=ax)
ax.set_title("coadd")
plt.show()
| 27.230769
| 70
| 0.664548
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 366
| 0.258475
|
18007f3ffa7e153ffa5c57f5301a0d773f024cb8
| 307
|
py
|
Python
|
Problem/PeopleFund/concatenate.py
|
yeojin-dev/coding-test
|
30ce8507838beaa9232c6fc6c62a7dcb62d51464
|
[
"MIT"
] | 2
|
2018-07-11T08:13:06.000Z
|
2018-07-11T08:47:12.000Z
|
Problem/PeopleFund/concatenate.py
|
yeojin-dev/coding-test
|
30ce8507838beaa9232c6fc6c62a7dcb62d51464
|
[
"MIT"
] | null | null | null |
Problem/PeopleFund/concatenate.py
|
yeojin-dev/coding-test
|
30ce8507838beaa9232c6fc6c62a7dcb62d51464
|
[
"MIT"
] | null | null | null |
import numpy as np
sizes = list(map(int, input().split()))
arr1 = list()
arr2 = list()
for _ in range(sizes[0]):
arr1.append(list(map(int, input().split())))
for _ in range(sizes[1]):
arr2.append(list(map(int, input().split())))
print(np.concatenate((np.array(arr1), np.array(arr2)), axis=0))
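# Illustrative run (hypothetical input, not part of the original solution):
# with the input
#   2 2
#   1 2
#   3 4
#   5 6
#   7 8
# arr1 is [[1, 2], [3, 4]] and arr2 is [[5, 6], [7, 8]], so the script prints
# the 4x2 array produced by stacking them along axis 0:
#   [[1 2]
#    [3 4]
#    [5 6]
#    [7 8]]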
| 19.1875
| 63
| 0.635179
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
18012d97d113307f75b71fea1cea0948b4e7a4b1
| 28,941
|
py
|
Python
|
tests/test_splitname.py
|
goerz/bibdeskparser
|
4f60f9960f6f0156c2f3c89033065c4e121800ab
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_splitname.py
|
goerz/bibdeskparser
|
4f60f9960f6f0156c2f3c89033065c4e121800ab
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_splitname.py
|
goerz/bibdeskparser
|
4f60f9960f6f0156c2f3c89033065c4e121800ab
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from bibdeskparser.customization import InvalidName, splitname
class TestSplitnameMethod(unittest.TestCase):
def test_splitname_basic(self):
"""Basic tests of customization.splitname() """
# Empty input.
result = splitname("")
expected = {}
self.assertEqual(result, expected, msg="Invalid output for empty name")
# Non-whitespace names.
result = splitname(" ")
expected = {}
self.assertEqual(
result, expected, msg="Invalid output for space-only name"
)
result = splitname(" \t~~")
expected = {}
self.assertEqual(
result, expected, msg="Invalid output for whitespace name"
)
# Test strict mode.
with self.assertRaises(InvalidName): # Trailing comma (4 cases).
splitname("BB,", strict_mode=True)
with self.assertRaises(InvalidName):
splitname("BB, ", strict_mode=True)
with self.assertRaises(InvalidName):
splitname("BB, ~\t", strict_mode=True)
with self.assertRaises(InvalidName):
splitname(", ~\t", strict_mode=True)
with self.assertRaises(InvalidName): # Too many sections.
splitname("AA, BB, CC, DD", strict_mode=True)
with self.assertRaises(
InvalidName
): # Unterminated opening brace (x3).
splitname("AA {BB CC", strict_mode=True)
with self.assertRaises(InvalidName):
splitname("AA {{{BB CC", strict_mode=True)
with self.assertRaises(InvalidName):
splitname("AA {{{BB} CC}", strict_mode=True)
with self.assertRaises(InvalidName): # Unmatched closing brace (x3).
splitname("AA BB CC}", strict_mode=True)
with self.assertRaises(InvalidName):
splitname("AA BB CC}}}", strict_mode=True)
with self.assertRaises(InvalidName):
splitname("{AA {BB CC}}}", strict_mode=True)
# Test strict mode off for trailing comma.
expected = {'first': [], 'von': [], 'last': ["BB"], 'jr': []}
result = splitname("BB,", strict_mode=False)
self.assertEqual(
result,
expected,
msg="Invalid output for trailing comma with strict mode off",
)
result = splitname("BB, ", strict_mode=False)
self.assertEqual(
result,
expected,
msg="Invalid output for trailing comma with strict mode off",
)
result = splitname("BB, ~\t ", strict_mode=False)
self.assertEqual(
result,
expected,
msg="Invalid output for trailing comma with strict mode off",
)
expected = {}
result = splitname(", ~\t", strict_mode=False)
self.assertEqual(
result,
expected,
msg="Invalid output for trailing comma with strict mode off",
)
# Test strict mode off for too many sections.
expected = {
'first': ["CC", "DD"],
'von': [],
'last': ["AA"],
'jr': ["BB"],
}
result = splitname("AA, BB, CC, DD", strict_mode=False)
self.assertEqual(
result,
expected,
msg="Invalid output for too many sections with strict mode off",
)
# Test strict mode off for an unterminated opening brace.
result = splitname("AA {BB CC", strict_mode=False)
expected = {'first': ["AA"], 'von': [], 'last': ["{BB CC}"], 'jr': []}
self.assertEqual(
result,
expected,
msg="Invalid output for unterminated opening brace with strict mode off",
)
result = splitname("AA {{{BB CC", strict_mode=False)
expected = {
'first': ["AA"],
'von': [],
'last': ["{{{BB CC}}}"],
'jr': [],
}
self.assertEqual(
result,
expected,
msg="Invalid output for unterminated opening brace with strict mode off",
)
result = splitname("AA {{{BB} CC}", strict_mode=False)
expected = {
'first': ["AA"],
'von': [],
'last': ["{{{BB} CC}}"],
'jr': [],
}
self.assertEqual(
result,
expected,
msg="Invalid output for unterminated opening brace with strict mode off",
)
# Test strict mode off for an unmatched closing brace.
result = splitname("AA BB CC}", strict_mode=False)
expected = {
'first': ["AA", "BB"],
'von': [],
'last': ["{CC}"],
'jr': [],
}
self.assertEqual(
result,
expected,
msg="Invalid output for unmatched closing brace with strict mode off",
)
result = splitname("AA BB CC}}}", strict_mode=False)
expected = {
'first': ["AA", "BB"],
'von': [],
'last': ["{{{CC}}}"],
'jr': [],
}
self.assertEqual(
result,
expected,
msg="Invalid output for unmatched closing brace with strict mode off",
)
result = splitname("{AA {BB CC}}}", strict_mode=False)
expected = {
'first': [],
'von': [],
'last': ["{{AA {BB CC}}}"],
'jr': [],
}
self.assertEqual(
result,
expected,
msg="Invalid output for unmatched closing brace with strict mode off",
)
# Test it handles commas at higher brace levels.
result = splitname("CC, dd, {AA, BB}")
expected = {
'first': ["{AA, BB}"],
'von': [],
'last': ["CC"],
'jr': ["dd"],
}
self.assertEqual(
result, expected, msg="Invalid output for braced commas"
)
def test_splitname_cases(self):
"""Test customization.splitname() vs output from BibTeX """
for name, expected in splitname_test_cases:
result = splitname(name)
self.assertEqual(
result, expected, msg="Input name: {0}".format(name)
)
splitname_test_cases = (
(
r'Per Brinch Hansen',
{'first': ['Per', 'Brinch'], 'von': [], 'last': ['Hansen'], 'jr': []},
),
(
r'Brinch Hansen, Per',
{'first': ['Per'], 'von': [], 'last': ['Brinch', 'Hansen'], 'jr': []},
),
(
r'Brinch Hansen,, Per',
{'first': ['Per'], 'von': [], 'last': ['Brinch', 'Hansen'], 'jr': []},
),
(
r"Charles Louis Xavier Joseph de la Vall{\'e}e Poussin",
{
'first': ['Charles', 'Louis', 'Xavier', 'Joseph'],
'von': ['de', 'la'],
'last': [r'Vall{\'e}e', 'Poussin'],
'jr': [],
},
),
(
r'D[onald] E. Knuth',
{'first': ['D[onald]', 'E.'], 'von': [], 'last': ['Knuth'], 'jr': []},
),
(
r'A. {Delgado de Molina}',
{
'first': ['A.'],
'von': [],
'last': ['{Delgado de Molina}'],
'jr': [],
},
),
(
r"M. Vign{\'e}",
{'first': ['M.'], 'von': [], 'last': [r"Vign{\'e}"], 'jr': []},
),
###############################################################################
#
# Test cases from
# http://maverick.inria.fr/~Xavier.Decoret/resources/xdkbibtex/bibtex_summary.html
#
###############################################################################
(r'AA BB', {'first': ['AA'], 'von': [], 'last': ['BB'], 'jr': []}),
(r'AA', {'first': [], 'von': [], 'last': ['AA'], 'jr': []}),
(r'AA bb', {'first': ['AA'], 'von': [], 'last': ['bb'], 'jr': []}),
(r'aa', {'first': [], 'von': [], 'last': ['aa'], 'jr': []}),
(r'AA bb CC', {'first': ['AA'], 'von': ['bb'], 'last': ['CC'], 'jr': []}),
(
r'AA bb CC dd EE',
{'first': ['AA'], 'von': ['bb', 'CC', 'dd'], 'last': ['EE'], 'jr': []},
),
(
r'AA 1B cc dd',
{'first': ['AA', '1B'], 'von': ['cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA 1b cc dd',
{'first': ['AA'], 'von': ['1b', 'cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA {b}B cc dd',
{'first': ['AA', '{b}B'], 'von': ['cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA {b}b cc dd',
{'first': ['AA'], 'von': ['{b}b', 'cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA {B}b cc dd',
{'first': ['AA'], 'von': ['{B}b', 'cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA {B}B cc dd',
{'first': ['AA', '{B}B'], 'von': ['cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA \BB{b} cc dd',
{'first': ['AA', r'\BB{b}'], 'von': ['cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA \bb{b} cc dd',
{'first': ['AA'], 'von': [r'\bb{b}', 'cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA {bb} cc DD',
{'first': ['AA', '{bb}'], 'von': ['cc'], 'last': ['DD'], 'jr': []},
),
(
r'AA bb {cc} DD',
{'first': ['AA'], 'von': ['bb'], 'last': ['{cc}', 'DD'], 'jr': []},
),
(
r'AA {bb} CC',
{'first': ['AA', '{bb}'], 'von': [], 'last': ['CC'], 'jr': []},
),
(r'bb CC, AA', {'first': ['AA'], 'von': ['bb'], 'last': ['CC'], 'jr': []}),
(r'bb CC, aa', {'first': ['aa'], 'von': ['bb'], 'last': ['CC'], 'jr': []}),
(
r'bb CC dd EE, AA',
{'first': ['AA'], 'von': ['bb', 'CC', 'dd'], 'last': ['EE'], 'jr': []},
),
(r'bb, AA', {'first': ['AA'], 'von': [], 'last': ['bb'], 'jr': []}),
(
r'bb CC,XX, AA',
{'first': ['AA'], 'von': ['bb'], 'last': ['CC'], 'jr': ['XX']},
),
(
r'bb CC,xx, AA',
{'first': ['AA'], 'von': ['bb'], 'last': ['CC'], 'jr': ['xx']},
),
(r'BB,, AA', {'first': ['AA'], 'von': [], 'last': ['BB'], 'jr': []}),
(
r"Paul \'Emile Victor",
{
'first': ['Paul', r"\'Emile"],
'von': [],
'last': ['Victor'],
'jr': [],
},
),
(
r"Paul {\'E}mile Victor",
{
'first': ['Paul', r"{\'E}mile"],
'von': [],
'last': ['Victor'],
'jr': [],
},
),
(
r"Paul \'emile Victor",
{'first': ['Paul'], 'von': [r"\'emile"], 'last': ['Victor'], 'jr': []},
),
(
r"Paul {\'e}mile Victor",
{
'first': ['Paul'],
'von': [r"{\'e}mile"],
'last': ['Victor'],
'jr': [],
},
),
(
r"Victor, Paul \'Emile",
{
'first': ['Paul', r"\'Emile"],
'von': [],
'last': ['Victor'],
'jr': [],
},
),
(
r"Victor, Paul {\'E}mile",
{
'first': ['Paul', r"{\'E}mile"],
'von': [],
'last': ['Victor'],
'jr': [],
},
),
(
r"Victor, Paul \'emile",
{
'first': ['Paul', r"\'emile"],
'von': [],
'last': ['Victor'],
'jr': [],
},
),
(
r"Victor, Paul {\'e}mile",
{
'first': ['Paul', r"{\'e}mile"],
'von': [],
'last': ['Victor'],
'jr': [],
},
),
(
r'Dominique Galouzeau de Villepin',
{
'first': ['Dominique', 'Galouzeau'],
'von': ['de'],
'last': ['Villepin'],
'jr': [],
},
),
(
r'Dominique {G}alouzeau de Villepin',
{
'first': ['Dominique'],
'von': ['{G}alouzeau', 'de'],
'last': ['Villepin'],
'jr': [],
},
),
(
r'Galouzeau de Villepin, Dominique',
{
'first': ['Dominique'],
'von': ['Galouzeau', 'de'],
'last': ['Villepin'],
'jr': [],
},
),
###############################################################################
#
# Test cases from pybtex
# See file /pybtex/tests/parse_name_test.py in the pybtex source.
#
###############################################################################
(
r'A. E. Siegman',
{'first': ['A.', 'E.'], 'von': [], 'last': ['Siegman'], 'jr': []},
),
(
r'A. G. W. Cameron',
{
'first': ['A.', 'G.', 'W.'],
'von': [],
'last': ['Cameron'],
'jr': [],
},
),
(r'A. Hoenig', {'first': ['A.'], 'von': [], 'last': ['Hoenig'], 'jr': []}),
(
r'A. J. Van Haagen',
{
'first': ['A.', 'J.', 'Van'],
'von': [],
'last': ['Haagen'],
'jr': [],
},
),
(
r'A. S. Berdnikov',
{'first': ['A.', 'S.'], 'von': [], 'last': ['Berdnikov'], 'jr': []},
),
(
r'A. Trevorrow',
{'first': ['A.'], 'von': [], 'last': ['Trevorrow'], 'jr': []},
),
(
r'Adam H. Lewenberg',
{'first': ['Adam', 'H.'], 'von': [], 'last': ['Lewenberg'], 'jr': []},
),
(
r'Addison-Wesley Publishing Company',
{
'first': ['Addison-Wesley', 'Publishing'],
'von': [],
'last': ['Company'],
'jr': [],
},
),
(
r'Advogato (Raph Levien)',
{
'first': ['Advogato', '(Raph'],
'von': [],
'last': ['Levien)'],
'jr': [],
},
),
(
r'Andrea de Leeuw van Weenen',
{
'first': ['Andrea'],
'von': ['de', 'Leeuw', 'van'],
'last': ['Weenen'],
'jr': [],
},
),
(
r'Andreas Geyer-Schulz',
{'first': ['Andreas'], 'von': [], 'last': ['Geyer-Schulz'], 'jr': []},
),
(
r'Andr{\'e} Heck',
{'first': [r'Andr{\'e}'], 'von': [], 'last': ['Heck'], 'jr': []},
),
(
r'Anne Br{\"u}ggemann-Klein',
{
'first': ['Anne'],
'von': [],
'last': [r'Br{\"u}ggemann-Klein'],
'jr': [],
},
),
(r'Anonymous', {'first': [], 'von': [], 'last': ['Anonymous'], 'jr': []}),
(r'B. Beeton', {'first': ['B.'], 'von': [], 'last': ['Beeton'], 'jr': []}),
(
r'B. Hamilton Kelly',
{'first': ['B.', 'Hamilton'], 'von': [], 'last': ['Kelly'], 'jr': []},
),
(
r'B. V. Venkata Krishna Sastry',
{
'first': ['B.', 'V.', 'Venkata', 'Krishna'],
'von': [],
'last': ['Sastry'],
'jr': [],
},
),
(
r'Benedict L{\o}fstedt',
{'first': ['Benedict'], 'von': [], 'last': [r'L{\o}fstedt'], 'jr': []},
),
(
r'Bogus{\l}aw Jackowski',
{'first': ['Bogus{\l}aw'], 'von': [], 'last': ['Jackowski'], 'jr': []},
),
(
r'Christina A. L.\ Thiele',
{
'first': ['Christina', 'A.', 'L.\\'],
'von': [],
'last': ['Thiele'],
'jr': [],
},
),
(
r"D. Men'shikov",
{'first': ['D.'], 'von': [], 'last': ["Men'shikov"], 'jr': []},
),
(
r'Darko \v{Z}ubrini{\'c}',
{
'first': ['Darko'],
'von': [],
'last': [r'\v{Z}ubrini{\'c}'],
'jr': [],
},
),
(
r'Dunja Mladeni{\'c}',
{'first': ['Dunja'], 'von': [], 'last': [r'Mladeni{\'c}'], 'jr': []},
),
(
r'Edwin V. {Bell, II}',
{
'first': ['Edwin', 'V.'],
'von': [],
'last': ['{Bell, II}'],
'jr': [],
},
),
(
r'Frank G. {Bennett, Jr.}',
{
'first': ['Frank', 'G.'],
'von': [],
'last': ['{Bennett, Jr.}'],
'jr': [],
},
),
(
r'Fr{\'e}d{\'e}ric Boulanger',
{
'first': [r'Fr{\'e}d{\'e}ric'],
'von': [],
'last': ['Boulanger'],
'jr': [],
},
),
(
r'Ford, Jr., Henry',
{'first': ['Henry'], 'von': [], 'last': ['Ford'], 'jr': ['Jr.']},
),
(
r'mr Ford, Jr., Henry',
{'first': ['Henry'], 'von': ['mr'], 'last': ['Ford'], 'jr': ['Jr.']},
),
(r'Fukui Rei', {'first': ['Fukui'], 'von': [], 'last': ['Rei'], 'jr': []}),
(
r'G. Gr{\"a}tzer',
{'first': ['G.'], 'von': [], 'last': [r'Gr{\"a}tzer'], 'jr': []},
),
(
r'George Gr{\"a}tzer',
{'first': ['George'], 'von': [], 'last': [r'Gr{\"a}tzer'], 'jr': []},
),
(
r'Georgia K. M. Tobin',
{
'first': ['Georgia', 'K.', 'M.'],
'von': [],
'last': ['Tobin'],
'jr': [],
},
),
(
r'Gilbert van den Dobbelsteen',
{
'first': ['Gilbert'],
'von': ['van', 'den'],
'last': ['Dobbelsteen'],
'jr': [],
},
),
(
r'Gy{\"o}ngyi Bujdos{\'o}',
{
'first': [r'Gy{\"o}ngyi'],
'von': [],
'last': [r'Bujdos{\'o}'],
'jr': [],
},
),
(
r'Helmut J{\"u}rgensen',
{'first': ['Helmut'], 'von': [], 'last': [r'J{\"u}rgensen'], 'jr': []},
),
(
r'Herbert Vo{\ss}',
{'first': ['Herbert'], 'von': [], 'last': ['Vo{\ss}'], 'jr': []},
),
(
r"H{\'a}n Th{\^e}\llap{\raise 0.5ex\hbox{\'{\relax}}} Th{\'a}nh",
{
'first': [
r'H{\'a}n',
r"Th{\^e}\llap{\raise 0.5ex\hbox{\'{\relax}}}",
],
'von': [],
'last': [r"Th{\'a}nh"],
'jr': [],
},
),
(
r"H{\`a}n Th\^e\llap{\raise0.5ex\hbox{\'{\relax}}} Th{\`a}nh",
{
'first': [r'H{\`a}n', r"Th\^e\llap{\raise0.5ex\hbox{\'{\relax}}}"],
'von': [],
'last': [r"Th{\`a}nh"],
'jr': [],
},
),
(
r'J. Vesel{\'y}',
{'first': ['J.'], 'von': [], 'last': [r'Vesel{\'y}'], 'jr': []},
),
(
r'Javier Rodr\'{\i}guez Laguna',
{
'first': ['Javier', r'Rodr\'{\i}guez'],
'von': [],
'last': ['Laguna'],
'jr': [],
},
),
(
r'Ji\v{r}\'{\i} Vesel{\'y}',
{
'first': [r'Ji\v{r}\'{\i}'],
'von': [],
'last': [r'Vesel{\'y}'],
'jr': [],
},
),
(
r'Ji\v{r}\'{\i} Zlatu{\v{s}}ka',
{
'first': [r'Ji\v{r}\'{\i}'],
'von': [],
'last': [r'Zlatu{\v{s}}ka'],
'jr': [],
},
),
(
r'Ji\v{r}{\'\i} Vesel{\'y}',
{
'first': [r'Ji\v{r}{\'\i}'],
'von': [],
'last': [r'Vesel{\'y}'],
'jr': [],
},
),
(
r'Ji\v{r}{\'{\i}}Zlatu{\v{s}}ka',
{
'first': [],
'von': [],
'last': [r'Ji\v{r}{\'{\i}}Zlatu{\v{s}}ka'],
'jr': [],
},
),
(
r'Jim Hef{}feron',
{'first': ['Jim'], 'von': [], 'last': ['Hef{}feron'], 'jr': []},
),
(
r'J{\"o}rg Knappen',
{'first': [r'J{\"o}rg'], 'von': [], 'last': ['Knappen'], 'jr': []},
),
(
r'J{\"o}rgen L. Pind',
{
'first': [r'J{\"o}rgen', 'L.'],
'von': [],
'last': ['Pind'],
'jr': [],
},
),
(
r'J{\'e}r\^ome Laurens',
{'first': [r'J{\'e}r\^ome'], 'von': [], 'last': ['Laurens'], 'jr': []},
),
(
r'J{{\"o}}rg Knappen',
{'first': [r'J{{\"o}}rg'], 'von': [], 'last': ['Knappen'], 'jr': []},
),
(
r'K. Anil Kumar',
{'first': ['K.', 'Anil'], 'von': [], 'last': ['Kumar'], 'jr': []},
),
(
r'Karel Hor{\'a}k',
{'first': ['Karel'], 'von': [], 'last': [r'Hor{\'a}k'], 'jr': []},
),
(
r'Karel P\'{\i}{\v{s}}ka',
{
'first': ['Karel'],
'von': [],
'last': [r'P\'{\i}{\v{s}}ka'],
'jr': [],
},
),
(
r'Karel P{\'\i}{\v{s}}ka',
{
'first': ['Karel'],
'von': [],
'last': [r'P{\'\i}{\v{s}}ka'],
'jr': [],
},
),
(
r'Karel Skoup\'{y}',
{'first': ['Karel'], 'von': [], 'last': [r'Skoup\'{y}'], 'jr': []},
),
(
r'Karel Skoup{\'y}',
{'first': ['Karel'], 'von': [], 'last': [r'Skoup{\'y}'], 'jr': []},
),
(
r'Kent McPherson',
{'first': ['Kent'], 'von': [], 'last': ['McPherson'], 'jr': []},
),
(
r'Klaus H{\"o}ppner',
{'first': ['Klaus'], 'von': [], 'last': [r'H{\"o}ppner'], 'jr': []},
),
(
r'Lars Hellstr{\"o}m',
{'first': ['Lars'], 'von': [], 'last': [r'Hellstr{\"o}m'], 'jr': []},
),
(
r'Laura Elizabeth Jackson',
{
'first': ['Laura', 'Elizabeth'],
'von': [],
'last': ['Jackson'],
'jr': [],
},
),
(
r'M. D{\'{\i}}az',
{'first': ['M.'], 'von': [], 'last': [r'D{\'{\i}}az'], 'jr': []},
),
(
r'M/iche/al /O Searc/oid',
{
'first': [r'M/iche/al', r'/O'],
'von': [],
'last': [r'Searc/oid'],
'jr': [],
},
),
(
r'Marek Ry{\'c}ko',
{'first': ['Marek'], 'von': [], 'last': [r'Ry{\'c}ko'], 'jr': []},
),
(
r'Marina Yu. Nikulina',
{
'first': ['Marina', 'Yu.'],
'von': [],
'last': ['Nikulina'],
'jr': [],
},
),
(
r'Max D{\'{\i}}az',
{'first': ['Max'], 'von': [], 'last': [r'D{\'{\i}}az'], 'jr': []},
),
(
r'Merry Obrecht Sawdey',
{
'first': ['Merry', 'Obrecht'],
'von': [],
'last': ['Sawdey'],
'jr': [],
},
),
(
r'Miroslava Mis{\'a}kov{\'a}',
{
'first': ['Miroslava'],
'von': [],
'last': [r'Mis{\'a}kov{\'a}'],
'jr': [],
},
),
(
r'N. A. F. M. Poppelier',
{
'first': ['N.', 'A.', 'F.', 'M.'],
'von': [],
'last': ['Poppelier'],
'jr': [],
},
),
(
r'Nico A. F. M. Poppelier',
{
'first': ['Nico', 'A.', 'F.', 'M.'],
'von': [],
'last': ['Poppelier'],
'jr': [],
},
),
(
r'Onofrio de Bari',
{'first': ['Onofrio'], 'von': ['de'], 'last': ['Bari'], 'jr': []},
),
(
r'Pablo Rosell-Gonz{\'a}lez',
{
'first': ['Pablo'],
'von': [],
'last': [r'Rosell-Gonz{\'a}lez'],
'jr': [],
},
),
(
r'Paco La Bruna',
{'first': ['Paco', 'La'], 'von': [], 'last': ['Bruna'], 'jr': []},
),
(
r'Paul Franchi-Zannettacci',
{
'first': ['Paul'],
'von': [],
'last': ['Franchi-Zannettacci'],
'jr': [],
},
),
(
r'Pavel \v{S}eve\v{c}ek',
{
'first': ['Pavel'],
'von': [],
'last': [r'\v{S}eve\v{c}ek'],
'jr': [],
},
),
(
r'Petr Ol{\v{s}}ak',
{'first': ['Petr'], 'von': [], 'last': [r'Ol{\v{s}}ak'], 'jr': []},
),
(
r'Petr Ol{\v{s}}{\'a}k',
{'first': ['Petr'], 'von': [], 'last': [r'Ol{\v{s}}{\'a}k'], 'jr': []},
),
(
r'Primo\v{z} Peterlin',
{'first': [r'Primo\v{z}'], 'von': [], 'last': ['Peterlin'], 'jr': []},
),
(
r'Prof. Alban Grimm',
{'first': ['Prof.', 'Alban'], 'von': [], 'last': ['Grimm'], 'jr': []},
),
(
r'P{\'e}ter Husz{\'a}r',
{
'first': [r'P{\'e}ter'],
'von': [],
'last': [r'Husz{\'a}r'],
'jr': [],
},
),
(
r'P{\'e}ter Szab{\'o}',
{'first': [r'P{\'e}ter'], 'von': [], 'last': [r'Szab{\'o}'], 'jr': []},
),
(
r'Rafa{\l}\.Zbikowski',
{'first': [], 'von': [], 'last': [r'Rafa{\l}\.Zbikowski'], 'jr': []},
),
(
r'Rainer Sch{\"o}pf',
{'first': ['Rainer'], 'von': [], 'last': [r'Sch{\"o}pf'], 'jr': []},
),
(
r'T. L. (Frank) Pappas',
{
'first': ['T.', 'L.', '(Frank)'],
'von': [],
'last': ['Pappas'],
'jr': [],
},
),
(
r'TUG 2004 conference',
{
'first': ['TUG', '2004'],
'von': [],
'last': ['conference'],
'jr': [],
},
),
(
r'TUG {\sltt DVI} Driver Standards Committee',
{
'first': ['TUG', '{\sltt DVI}', 'Driver', 'Standards'],
'von': [],
'last': ['Committee'],
'jr': [],
},
),
(
r'TUG {\sltt xDVIx} Driver Standards Committee',
{
'first': ['TUG'],
'von': ['{\sltt xDVIx}'],
'last': ['Driver', 'Standards', 'Committee'],
'jr': [],
},
),
(
r'University of M{\"u}nster',
{
'first': ['University'],
'von': ['of'],
'last': [r'M{\"u}nster'],
'jr': [],
},
),
(
r'Walter van der Laan',
{
'first': ['Walter'],
'von': ['van', 'der'],
'last': ['Laan'],
'jr': [],
},
),
(
r'Wendy G. McKay',
{'first': ['Wendy', 'G.'], 'von': [], 'last': ['McKay'], 'jr': []},
),
(
r'Wendy McKay',
{'first': ['Wendy'], 'von': [], 'last': ['McKay'], 'jr': []},
),
(
r'W{\l}odek Bzyl',
{'first': [r'W{\l}odek'], 'von': [], 'last': ['Bzyl'], 'jr': []},
),
(
r'\LaTeX Project Team',
{
'first': [r'\LaTeX', 'Project'],
'von': [],
'last': ['Team'],
'jr': [],
},
),
(
r'\rlap{Lutz Birkhahn}',
{'first': [], 'von': [], 'last': [r'\rlap{Lutz Birkhahn}'], 'jr': []},
),
(
r'{Jim Hef{}feron}',
{'first': [], 'von': [], 'last': ['{Jim Hef{}feron}'], 'jr': []},
),
(
r'{Kristoffer H\o{}gsbro Rose}',
{
'first': [],
'von': [],
'last': ['{Kristoffer H\o{}gsbro Rose}'],
'jr': [],
},
),
(
r'{TUG} {Working} {Group} on a {\TeX} {Directory} {Structure}',
{
'first': ['{TUG}', '{Working}', '{Group}'],
'von': ['on', 'a'],
'last': [r'{\TeX}', '{Directory}', '{Structure}'],
'jr': [],
},
),
(
r'{The \TUB{} Team}',
{'first': [], 'von': [], 'last': [r'{The \TUB{} Team}'], 'jr': []},
),
(
r'{\LaTeX} project team',
{
'first': [r'{\LaTeX}'],
'von': ['project'],
'last': ['team'],
'jr': [],
},
),
(
r'{\NTG{} \TeX{} future working group}',
{
'first': [],
'von': [],
'last': [r'{\NTG{} \TeX{} future working group}'],
'jr': [],
},
),
(
r'{{\LaTeX\,3} Project Team}',
{
'first': [],
'von': [],
'last': [r'{{\LaTeX\,3} Project Team}'],
'jr': [],
},
),
(
r'Johansen Kyle, Derik Mamania M.',
{
'first': ['Derik', 'Mamania', 'M.'],
'von': [],
'last': ['Johansen', 'Kyle'],
'jr': [],
},
),
(
r"Johannes Adam Ferdinand Alois Josef Maria Marko d'Aviano Pius von und zu Liechtenstein",
{
'first': [
'Johannes',
'Adam',
'Ferdinand',
'Alois',
'Josef',
'Maria',
'Marko',
],
'von': ["d'Aviano", 'Pius', 'von', 'und', 'zu'],
'last': ['Liechtenstein'],
'jr': [],
},
),
(
r"Brand\~{a}o, F",
{'first': ['F'], 'von': [], 'last': ['Brand\\', '{a}o'], 'jr': []},
),
)
if __name__ == '__main__':
unittest.main()
| 27.174648
| 98
| 0.338171
| 6,221
| 0.214955
| 0
| 0
| 0
| 0
| 0
| 0
| 12,530
| 0.43295
|
1801df02ecd58a8f78ca27f271870b89690c5eb0
| 1,349
|
py
|
Python
|
db_model.py
|
Build-Week-Saltiest-Hacker/machine-learning
|
1822e2ecdca8279bc49095f6da527152e298b95d
|
[
"MIT"
] | null | null | null |
db_model.py
|
Build-Week-Saltiest-Hacker/machine-learning
|
1822e2ecdca8279bc49095f6da527152e298b95d
|
[
"MIT"
] | null | null | null |
db_model.py
|
Build-Week-Saltiest-Hacker/machine-learning
|
1822e2ecdca8279bc49095f6da527152e298b95d
|
[
"MIT"
] | null | null | null |
# schema for SQL database
from data import app, db
class HNuser(db.Model):
""" SQL database class """
username = db.Column(db.String(100), primary_key=True)
post_id = db.Column(db.Integer)
salty_rank = db.Column(db.Float, nullable=False)
salty_comments = db.Column(db.Integer, nullable=False)
# comments_total = db.Column(db.Integer, nullable=False)
    def __repr__(self):
return f"User {self.username} -- Salty Ranking: {self.salty_rank}"
def salty_hackers(self):
"""return user information in Json format """
return {
"username" : self.username,
"date" : self.date,
"salty_rank" : self.salty_rank,
"salty_comments" : self.salty_comments,
}
class Comments(db.Model):
comment_id = db.Column(db.BigInteger, primary_key=True)
username = db.Column(db.String(100), db.ForeignKey('user.username'))
text = db.Column(db.String(3000))
date = db.Column(db.BigInteger)
def __repr__(self):
return f"User {self.username} -- Comment: {self.text}"
def salty_comments(self):
""" returns comments in JSON format """
return {
"comment_id" : self.comment_id,
"username" : self.username,
"text" : self.text,
"date" : self.date
""
}
| 31.372093
| 74
| 0.604151
| 1,292
| 0.957746
| 0
| 0
| 0
| 0
| 0
| 0
| 392
| 0.290586
|
1804da1fa980c8e71b8a65bd6282db015d7cd076
| 2,608
|
py
|
Python
|
acl/utils.py
|
stjordanis/aspect-document-similarity
|
ca17e0a8730caa224b0efe8909b1e5a87bb456ea
|
[
"MIT"
] | 47
|
2020-10-14T09:28:39.000Z
|
2022-03-01T01:54:32.000Z
|
acl/utils.py
|
stjordanis/aspect-document-similarity
|
ca17e0a8730caa224b0efe8909b1e5a87bb456ea
|
[
"MIT"
] | 2
|
2021-11-21T20:07:10.000Z
|
2022-02-10T09:25:40.000Z
|
acl/utils.py
|
stjordanis/aspect-document-similarity
|
ca17e0a8730caa224b0efe8909b1e5a87bb456ea
|
[
"MIT"
] | 8
|
2020-11-07T08:43:01.000Z
|
2022-02-15T05:45:13.000Z
|
import re
import logging
logger = logging.getLogger(__name__)
def get_sorted_pair(a, b):
# ensure citation pair is always in same order
if a > b:
return (a, b)
else:
return (b, a)
def to_label(t, labels):
if t in labels:
return t
else:
return 'other'
def normalize_title(t):
if t:
t = t.replace('.', ' ').replace('-', ' ').strip().lower()
#t = re.sub(r'\W+', '', t)
return t
def normalize_section(title):
if title:
return re.sub(r'[\.0-9]', '',
title.
strip() \
.lower() \
.replace('conclusions', 'conclusion') \
.replace('methodology', 'method') \
.replace('methods', 'method') \
.replace('related works', 'related work') \
.replace('models', 'model') \
.replace('datasets', 'dataset') \
.replace('our ', '') \
.replace('evaluations', 'evaluation') \
.replace('experiments', 'experiment')
).strip()
# .replace('conclusion and future perspectives', 'conclusion')\
# .replace('materials and methods', 'methods')
def get_text_from_doc(doc) -> str:
"""
Build document text from title + abstract
:param doc: S2 paper
:return: Document text
"""
text = ''
if 'title' in doc:
text += doc['title']
if doc['abstract']:
text += '\n' + doc['abstract']
return text
def get_text_from_doc_id(doc_id: str, doc_index) -> str:
"""
Build document text from title + abstract
:param doc_id: S2-id
:param doc_index: S2-id to S2-paper data
:return: Document text
"""
if doc_id in doc_index:
return get_text_from_doc(doc_index[doc_id])
else:
raise ValueError(f'Document not found in index: {doc_id}')
# resolve 'and' titles and filter for out-of-index docs
def resolve_and_sect_titles(items, doc_index=None):
for from_s2_id, to_s2_id, sect_generic, sect_title, sect_marker in items:
if doc_index and (from_s2_id not in doc_index or to_s2_id not in doc_index):
# One of the IDs does not exist in document index
continue
sect_title = normalize_section(sect_title)
if sect_title:
# Resolve combined sections
for t in sect_title.split(' and '):
if t:
yield (from_s2_id, to_s2_id, t, sect_marker)
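# Illustrative sketch of the helpers above (hypothetical IDs and section title,
# not part of the original module): a citation found in a combined section such
# as "3. Related Works and Methods" is normalised and split into two aspects.
#
#   items = [("s2_a", "s2_b", "generic", "3. Related Works and Methods", "[12]")]
#   list(resolve_and_sect_titles(items))
#   -> [("s2_a", "s2_b", "related work", "[12]"),
#       ("s2_a", "s2_b", "method", "[12]")]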
| 26.612245
| 84
| 0.536426
| 0
| 0
| 548
| 0.210123
| 0
| 0
| 0
| 0
| 880
| 0.337423
|
1805a8be23b715a568fd9d510dee5510be26a4d2
| 995
|
py
|
Python
|
build-a-django-content-aggregator/source_code_step_2/podcasts/tests.py
|
syberflea/materials
|
54f44725b40edf00c1b523d7a85b34a85014d7eb
|
[
"MIT"
] | 3,682
|
2018-05-07T19:45:24.000Z
|
2022-03-31T15:19:10.000Z
|
build-a-django-content-aggregator/source_code_step_2/podcasts/tests.py
|
sribarrow/materials
|
c17c4a4d6f8487e59eac1df8c88ca92b73d6d2a5
|
[
"MIT"
] | 148
|
2018-05-15T21:18:49.000Z
|
2022-03-21T11:25:39.000Z
|
build-a-django-content-aggregator/source_code_step_2/podcasts/tests.py
|
sribarrow/materials
|
c17c4a4d6f8487e59eac1df8c88ca92b73d6d2a5
|
[
"MIT"
] | 5,535
|
2018-05-25T23:36:08.000Z
|
2022-03-31T16:55:52.000Z
|
from django.test import TestCase
from django.utils import timezone
from .models import Episode
class PodCastsTests(TestCase):
def setUp(self):
self.episode = Episode.objects.create(
title="My Awesome Podcast Episode",
description="Look mom, I made it!",
pub_date=timezone.now(),
link="https://myawesomeshow.com",
image="https://image.myawesomeshow.com",
podcast_name="My Python Podcast",
guid="de194720-7b4c-49e2-a05f-432436d3fetr",
)
def test_episode_content(self):
self.assertEqual(self.episode.description, "Look mom, I made it!")
self.assertEqual(self.episode.link, "https://myawesomeshow.com")
self.assertEqual(
self.episode.guid, "de194720-7b4c-49e2-a05f-432436d3fetr"
)
def test_episode_str_representation(self):
self.assertEqual(
str(self.episode), "My Python Podcast: My Awesome Podcast Episode"
)
| 34.310345
| 78
| 0.639196
| 897
| 0.901508
| 0
| 0
| 0
| 0
| 0
| 0
| 301
| 0.302513
|
18062b275cb72a752756840a4bbb8ef63a17377e
| 4,114
|
py
|
Python
|
superset/migrations/versions/070c043f2fdb_add_granularity_to_charts_where_missing.py
|
razzius/superset
|
93f59e055e8312fb28687bc9fc22342b4be68d0e
|
[
"Apache-2.0"
] | 18,621
|
2017-06-19T09:57:44.000Z
|
2021-01-05T06:28:21.000Z
|
superset/migrations/versions/070c043f2fdb_add_granularity_to_charts_where_missing.py
|
changeiot/superset
|
299b5dc64448d04abe6b35ee85fbd2b938c781bc
|
[
"Apache-2.0"
] | 9,043
|
2017-07-05T16:10:48.000Z
|
2021-01-05T17:58:01.000Z
|
superset/migrations/versions/070c043f2fdb_add_granularity_to_charts_where_missing.py
|
changeiot/superset
|
299b5dc64448d04abe6b35ee85fbd2b938c781bc
|
[
"Apache-2.0"
] | 5,527
|
2017-07-06T01:39:43.000Z
|
2021-01-05T06:01:11.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""add granularity to charts where missing
Revision ID: 070c043f2fdb
Revises: 41ce8799acc3
Create Date: 2021-02-04 09:34:13.608891
"""
# revision identifiers, used by Alembic.
revision = "070c043f2fdb"
down_revision = "41ce8799acc3"
import json
from alembic import op
from sqlalchemy import and_, Boolean, Column, Integer, String, Text
from sqlalchemy.ext.declarative import declarative_base
from superset import db
Base = declarative_base()
class Slice(Base):
__tablename__ = "slices"
id = Column(Integer, primary_key=True)
params = Column(Text)
datasource_id = Column(Integer)
datasource_type = Column(String(200))
class SqlaTable(Base):
__tablename__ = "tables"
id = Column(Integer, primary_key=True)
main_dttm_col = Column(String(250))
class TableColumn(Base):
__tablename__ = "table_columns"
id = Column(Integer, primary_key=True)
table_id = Column(Integer)
is_dttm = Column(Boolean)
column_name = Column(String(255))
def upgrade():
"""
Adds the granularity param to charts without it populated. This is required for
time range filtering to work properly. Uses the following approach:
- Find all charts without a granularity or granularity_sqla param.
- Get the dataset that backs the chart.
- If the dataset has the main dttm column set, use it.
- Otherwise, find all the dttm columns in the dataset and use the first one (this
matches the behavior of Explore view on the frontend)
- If no dttm columns exist in the dataset, don't change the chart.
"""
bind = op.get_bind()
session = db.Session(bind=bind)
slices_changed = 0
for slc in (
session.query(Slice)
.filter(
and_(
Slice.datasource_type == "table", Slice.params.notlike('%"granularity%')
)
)
.all()
):
try:
params = json.loads(slc.params)
if "granularity" in params or "granularity_sqla" in params:
continue
table = session.query(SqlaTable).get(slc.datasource_id)
if not table:
continue
if table.main_dttm_col:
params["granularity"] = table.main_dttm_col
slc.params = json.dumps(params, sort_keys=True)
print(f"Set granularity for slice {slc.id} to {table.main_dttm_col}")
slices_changed += 1
continue
table_columns = (
session.query(TableColumn)
.filter(TableColumn.table_id == table.id)
.filter(TableColumn.is_dttm == True)
.all()
)
if len(table_columns):
params["granularity"] = table_columns[0].column_name
slc.params = json.dumps(params, sort_keys=True)
print(
f"Set granularity for slice {slc.id} to {table_columns[0].column_name}"
)
slices_changed += 1
except Exception as e:
print(e)
print(f"Parsing params for slice {slc.id} failed.")
pass
print(f"{slices_changed} slices altered")
session.commit()
session.close()
def downgrade():
"""
It's impossible to downgrade this migration.
"""
pass
| 30.474074
| 91
| 0.645357
| 533
| 0.129558
| 0
| 0
| 0
| 0
| 0
| 0
| 1,914
| 0.465241
|
1806f8b39a3aeab210ed874956e25e9bd4d01444
| 325
|
py
|
Python
|
AtCoder/ABC/B/page-13/090B.py
|
Nishi05/Competitive-programming
|
e59a6755b706d9d5c1f359f4511d92c114e6a94e
|
[
"MIT"
] | null | null | null |
AtCoder/ABC/B/page-13/090B.py
|
Nishi05/Competitive-programming
|
e59a6755b706d9d5c1f359f4511d92c114e6a94e
|
[
"MIT"
] | null | null | null |
AtCoder/ABC/B/page-13/090B.py
|
Nishi05/Competitive-programming
|
e59a6755b706d9d5c1f359f4511d92c114e6a94e
|
[
"MIT"
] | null | null | null |
# Count the integers between A and B (inclusive) that are palindromic numbers.
# A palindromic number is a positive integer whose decimal representation,
# written without a leading 0 and read as a string, is the same forwards and backwards.
# Approach: reverse the string and compare the matching halves (worked example below the code).
a, b = map(int, input().split())
cnt = 0
for i in range(a, b+1):
s = str(i)
s_r = s[::-1]
n = int(len(str(s))/2)
if s[: n] == s_r[:n]:
cnt += 1
print(cnt)
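# Worked example (illustrative, not part of the submission): for i = 1221 we get
# s = "1221", s_r = "1221" and n = 2, so s[:2] == s_r[:2] ("12" == "12") and the
# count increases; for i = 1231 the reversed string is "1321", the halves "12"
# and "13" differ, so it is not counted.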
| 23.214286
| 42
| 0.609231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 386
| 0.678383
|
18070effada07af1c287eb2501ebc5c7848149ff
| 2,499
|
py
|
Python
|
__init__.py
|
kotn3l/blender-flver
|
3476d720337a6d7a28bd55f9b112524c0f61581d
|
[
"MIT"
] | 11
|
2020-04-28T03:21:13.000Z
|
2022-03-23T13:18:33.000Z
|
__init__.py
|
kotn3l/blender-flver
|
3476d720337a6d7a28bd55f9b112524c0f61581d
|
[
"MIT"
] | 2
|
2021-06-28T07:44:42.000Z
|
2022-03-18T00:47:42.000Z
|
__init__.py
|
elizagamedev/blender-flver
|
25cc152de19acb4028035d3ed389706df25e094a
|
[
"MIT"
] | 2
|
2021-12-23T13:31:57.000Z
|
2022-03-16T06:30:13.000Z
|
bl_info = {
"name": "Import Fromsoft FLVER models",
"description":
"Import models from various Fromsoft games such as Dark Souls",
"author": "Eliza Velasquez",
"version": (0, 1, 0),
"blender": (2, 80, 0),
"category": "Import-Export",
"location": "File > Import",
"warning": "",
"support": "COMMUNITY",
"wiki_url": "", # TODO: wiki url
"tracker_url": "", # TODO: tracker url
}
_submodules = {
"importer",
"flver",
"reader",
}
# Reload submodules on addon reload
if "bpy" in locals():
import importlib
for submodule in _submodules:
if submodule in locals():
importlib.reload(locals()[submodule])
import bpy
from . import importer
from bpy_extras.io_utils import ImportHelper
from bpy.props import StringProperty, BoolProperty
class FlverImporter(bpy.types.Operator, ImportHelper):
bl_idname = "import_scene.flver"
bl_label = "Fromsoft (.flver)"
filter_glob = StringProperty(default="*.flver", options={"HIDDEN"})
transpose_y_and_z = BoolProperty(
name="Transpose Y and Z axes",
description=("This will correct the orientation of the model. " +
"Rarely necessary to disable."),
default=True)
import_skeleton = BoolProperty(
name="Import skeleton",
description=("Disable to prevent the creation of an Armature " +
"and corresponding vertex groups."),
default=True)
connect_bones = BoolProperty(
name="Connect bones",
description=(
"Disable to import disjointed bones rotated about their " +
"original Euler angles. This may be potentially desireable "
"for authoring derivative FLVER files."),
default=True)
def execute(self, context):
importer.run(context=context,
path=self.filepath,
transpose_y_and_z=self.transpose_y_and_z,
import_skeleton=self.import_skeleton,
connect_bones=self.connect_bones)
return {"FINISHED"}
def menu_import(self, context):
self.layout.operator(FlverImporter.bl_idname)
def register():
bpy.utils.register_class(FlverImporter)
bpy.types.TOPBAR_MT_file_import.append(menu_import)
def unregister():
bpy.types.TOPBAR_MT_file_import.remove(menu_import)
bpy.utils.unregister_class(FlverImporter)
| 30.47561
| 74
| 0.620648
| 1,306
| 0.522609
| 0
| 0
| 0
| 0
| 0
| 0
| 806
| 0.322529
|
1809819c2d6283b15f8fc4c9f611ea65d6e320d3
| 32,193
|
py
|
Python
|
plugin.video.vstream/resources/lib/gui/gui.py
|
akuala/REPO.KUALA
|
ea9a157025530d2ce8fa0d88431c46c5352e89d4
|
[
"Apache-2.0"
] | 2
|
2018-11-02T19:55:30.000Z
|
2020-08-14T02:22:20.000Z
|
plugin.video.vstream/resources/lib/gui/gui.py
|
akuala/REPO.KUALA
|
ea9a157025530d2ce8fa0d88431c46c5352e89d4
|
[
"Apache-2.0"
] | null | null | null |
plugin.video.vstream/resources/lib/gui/gui.py
|
akuala/REPO.KUALA
|
ea9a157025530d2ce8fa0d88431c46c5352e89d4
|
[
"Apache-2.0"
] | 3
|
2019-12-17T20:47:00.000Z
|
2021-02-11T19:03:59.000Z
|
# -*- coding: utf-8 -*-
# https://github.com/Kodi-vStream/venom-xbmc-addons
from resources.lib.gui.contextElement import cContextElement
from resources.lib.gui.guiElement import cGuiElement
from resources.lib.db import cDb
from resources.lib.handler.outputParameterHandler import cOutputParameterHandler
from resources.lib.handler.inputParameterHandler import cInputParameterHandler
from resources.lib.handler.pluginHandler import cPluginHandler
from resources.lib.parser import cParser
from resources.lib.util import cUtil, QuotePlus
from resources.lib.comaddon import listitem, addon, dialog, isKrypton, window, xbmc
import re, xbmcplugin
class cGui():
SITE_NAME = 'cGui'
CONTENT = 'files'
searchResults = []
    # modified 22/06
listing = []
ADDON = addon()
if isKrypton():
CONTENT = 'addons'
def addMovie(self, sId, sFunction, sLabel, sIcon, sThumbnail, sDesc, oOutputParameterHandler = ''):
cGui.CONTENT = 'movies'
oGuiElement = cGuiElement()
oGuiElement.setSiteName(sId)
oGuiElement.setFunction(sFunction)
oGuiElement.setTitle(sLabel)
oGuiElement.setIcon(sIcon)
oGuiElement.setThumbnail(sThumbnail)
oGuiElement.setPoster(sThumbnail)
oGuiElement.setMeta(1)
oGuiElement.setDescription(sDesc)
#oGuiElement.setMovieFanart()
oGuiElement.setCat(1)
if oOutputParameterHandler.getValue('sMovieTitle'):
sTitle = oOutputParameterHandler.getValue('sMovieTitle')
oGuiElement.setFileName(sTitle)
try:
self.addFolder(oGuiElement, oOutputParameterHandler)
except:
pass
    # Box sets and complete film collections
def addMoviePack(self, sId, sFunction, sLabel, sIcon, sThumbnail, sDesc, oOutputParameterHandler = ''):
cGui.CONTENT = 'movies'
oGuiElement = cGuiElement()
oGuiElement.setSiteName(sId)
oGuiElement.setFunction(sFunction)
oGuiElement.setTitle(sLabel)
oGuiElement.setIcon(sIcon)
oGuiElement.setThumbnail(sThumbnail)
oGuiElement.setPoster(sThumbnail)
oGuiElement.setMeta(3)
oGuiElement.setDescription(sDesc)
#oGuiElement.setMovieFanart()
oGuiElement.setCat(1)
if oOutputParameterHandler.getValue('sMovieTitle'):
sTitle = oOutputParameterHandler.getValue('sMovieTitle')
oGuiElement.setFileName(sTitle)
try:
self.addFolder(oGuiElement, oOutputParameterHandler)
except:
pass
def addTV(self, sId, sFunction, sLabel, sIcon, sThumbnail, sDesc, oOutputParameterHandler = ''):
cGui.CONTENT = 'tvshows'
oGuiElement = cGuiElement()
oGuiElement.setSiteName(sId)
oGuiElement.setFunction(sFunction)
oGuiElement.setTitle(sLabel)
oGuiElement.setIcon(sIcon)
oGuiElement.setThumbnail(sThumbnail)
oGuiElement.setPoster(sThumbnail)
oGuiElement.setMeta(2)
oGuiElement.setDescription(sDesc)
#oGuiElement.setTvFanart()
oGuiElement.setCat(2)
if oOutputParameterHandler.getValue('sMovieTitle'):
sTitle = oOutputParameterHandler.getValue('sMovieTitle')
oGuiElement.setFileName(sTitle)
try:
self.addFolder(oGuiElement, oOutputParameterHandler)
except:
pass
def addMisc(self, sId, sFunction, sLabel, sIcon, sThumbnail, sDesc, oOutputParameterHandler = ''):
#cGui.CONTENT = 'movies'
oGuiElement = cGuiElement()
oGuiElement.setSiteName(sId)
oGuiElement.setFunction(sFunction)
oGuiElement.setTitle(sLabel)
oGuiElement.setIcon(sIcon)
oGuiElement.setThumbnail(sThumbnail)
oGuiElement.setDescription(sDesc)
#oGuiElement.setPoster(sThumbnail)
oGuiElement.setMeta(0)
#oGuiElement.setDirFanart(sIcon)
oGuiElement.setCat(5)
if oOutputParameterHandler.getValue('sMovieTitle'):
sTitle = oOutputParameterHandler.getValue('sMovieTitle')
oGuiElement.setFileName(sTitle)
self.createContexMenuWatch(oGuiElement, oOutputParameterHandler)
#self.createContexMenuinfo(oGuiElement, oOutputParameterHandler)
self.createContexMenuFav(oGuiElement, oOutputParameterHandler)
try:
self.addFolder(oGuiElement, oOutputParameterHandler)
except:
pass
    # unused since 18/04
#def addFav(self, sId, sFunction, sLabel, sIcon, sThumbnail, fanart, oOutputParameterHandler = ''):
#cGui.CONTENT = 'files'
#oGuiElement = cGuiElement()
#oGuiElement.setSiteName(sId)
#oGuiElement.setFunction(sFunction)
#oGuiElement.setTitle(sLabel)
#oGuiElement.setIcon(sIcon)
#oGuiElement.setMeta(0)
#oGuiElement.setThumbnail(sThumbnail)
#oGuiElement.setFanart(fanart)
#self.createContexMenuDelFav(oGuiElement, oOutputParameterHandler)
#self.addFolder(oGuiElement, oOutputParameterHandler)
def addLink(self, sId, sFunction, sLabel, sThumbnail, sDesc, oOutputParameterHandler = ''):
cGui.CONTENT = 'files'
oGuiElement = cGuiElement()
oGuiElement.setSiteName(sId)
oGuiElement.setFunction(sFunction)
oGuiElement.setTitle(sLabel)
#oGuiElement.setIcon(sIcon)
oGuiElement.setThumbnail(sThumbnail)
oGuiElement.setPoster(sThumbnail)
oGuiElement.setDescription(sDesc)
oGuiElement.setMeta(0)
#oGuiElement.setDirFanart('')
oInputParameterHandler = cInputParameterHandler()
sCat = oInputParameterHandler.getValue('sCat')
if sCat:
oGuiElement.setCat(sCat)
try:
self.addFolder(oGuiElement, oOutputParameterHandler)
except:
pass
def addDir(self, sId, sFunction, sLabel, sIcon, oOutputParameterHandler = ''):
oGuiElement = cGuiElement()
oGuiElement.setSiteName(sId)
oGuiElement.setFunction(sFunction)
oGuiElement.setTitle(sLabel)
oGuiElement.setIcon(sIcon)
oGuiElement.setThumbnail(oGuiElement.getIcon())
oGuiElement.setMeta(0)
#oGuiElement.setDirFanart(sIcon)
oOutputParameterHandler.addParameter('sFav', sFunction)
        # context menu for settings
if isKrypton():
self.createContexMenuSettings(oGuiElement, oOutputParameterHandler)
try:
self.addFolder(oGuiElement, oOutputParameterHandler)
except:
pass
def addNext(self, sId, sFunction, sLabel, oOutputParameterHandler):
oGuiElement = cGuiElement()
oGuiElement.setSiteName(sId)
oGuiElement.setFunction(sFunction)
oGuiElement.setTitle(sLabel)
oGuiElement.setIcon('next.png')
oGuiElement.setThumbnail(oGuiElement.getIcon())
oGuiElement.setMeta(0)
#oGuiElement.setDirFanart('next.png')
oGuiElement.setCat(5)
self.createContexMenuPageSelect(oGuiElement, oOutputParameterHandler)
self.createContexMenuFav(oGuiElement, oOutputParameterHandler)
self.addFolder(oGuiElement, oOutputParameterHandler)
    # use oGui.addText(SITE_IDENTIFIER) instead
def addNone(self, sId):
return self.addText(sId)
def addText(self, sId, sLabel = '', sIcon = 'none.png'):
        # No text during global searches
if window(10101).getProperty('search') == 'true':
return
oGuiElement = cGuiElement()
oGuiElement.setSiteName(sId)
oGuiElement.setFunction('DoNothing')
if not sLabel:
sLabel = self.ADDON.VSlang(30204)
oGuiElement.setTitle(sLabel)
oGuiElement.setIcon(sIcon)
oGuiElement.setThumbnail(oGuiElement.getIcon())
oGuiElement.setMeta(0)
oOutputParameterHandler = cOutputParameterHandler()
self.addFolder(oGuiElement, oOutputParameterHandler)
    # unused since 22/04
def addMovieDB(self, sId, sFunction, sLabel, sIcon, sThumbnail, sFanart, oOutputParameterHandler = ''):
cGui.CONTENT = 'movies'
oGuiElement = cGuiElement()
oGuiElement.setSiteName(sId)
oGuiElement.setFunction(sFunction)
oGuiElement.setTitle(sLabel)
oGuiElement.setIcon(sIcon)
oGuiElement.setMeta(1)
oGuiElement.setThumbnail(sThumbnail)
oGuiElement.setFanart(sFanart)
oGuiElement.setCat(7)
if oOutputParameterHandler.getValue('sMovieTitle'):
sTitle = oOutputParameterHandler.getValue('sMovieTitle')
oGuiElement.setFileName(sTitle)
self.addFolder(oGuiElement, oOutputParameterHandler)
    # unused since 22/04
def addTVDB(self, sId, sFunction, sLabel, sIcon, sThumbnail, sFanart, oOutputParameterHandler = ''):
cGui.CONTENT = 'tvshows'
oGuiElement = cGuiElement()
oGuiElement.setSiteName(sId)
oGuiElement.setFunction(sFunction)
oGuiElement.setTitle(sLabel)
oGuiElement.setIcon(sIcon)
oGuiElement.setMeta(2)
oGuiElement.setThumbnail(sThumbnail)
oGuiElement.setFanart(sFanart)
oGuiElement.setCat(7)
if oOutputParameterHandler.getValue('sMovieTitle'):
sTitle = oOutputParameterHandler.getValue('sMovieTitle')
oGuiElement.setFileName(sTitle)
self.addFolder(oGuiElement, oOutputParameterHandler)
    # display non-playable links
def addFolder(self, oGuiElement, oOutputParameterHandler = '', _isFolder = True):
        # in search mode, append the results
if window(10101).getProperty('search') == 'true':
import copy
cGui.searchResults.append({'guiElement': oGuiElement, 'params': copy.deepcopy(oOutputParameterHandler)})
return
        # Any extra info to add?
params = {
'siteUrl': oGuiElement.setSiteUrl, # indispensable
'sTmdbId': oGuiElement.setTmdbId,
            'sImbdId': oGuiElement.setImdbId, # unnecessary?
'sYear': oGuiElement.setYear,
}
for sParam, callback in params.iteritems():
value = oOutputParameterHandler.getValue(sParam)
if value:
callback(value)
oListItem = self.createListItem(oGuiElement)
oListItem.setProperty('IsPlayable', 'false')
        # display the HD tag
# if '1080' in oGuiElement.getTitle():
# oListItem.addStreamInfo('video', {'aspect': '1.78', 'width': 1920, 'height': 1080})
# elif '720' in oGuiElement.getTitle():
# oListItem.addStreamInfo('video', {'aspect': '1.50', 'width': 1280, 'height': 720})
# elif '2160'in oGuiElement.getTitle():
# oListItem.addStreamInfo('video', {'aspect': '1.78', 'width': 3840, 'height': 2160})
# oListItem.addStreamInfo('audio', {'language': 'fr'})
# if oGuiElement.getMeta():
# oOutputParameterHandler.addParameter('sMeta', oGuiElement.getMeta())
if oGuiElement.getCat():
oOutputParameterHandler.addParameter('sCat', oGuiElement.getCat())
sItemUrl = self.__createItemUrl(oGuiElement, oOutputParameterHandler)
oOutputParameterHandler.addParameter('sTitleWatched', oGuiElement.getTitleWatched())
        # the new context menu handles the metadata
if oGuiElement.getMeta() > 0:
if cGui.CONTENT == 'movies':
self.createContexMenuWatch(oGuiElement, oOutputParameterHandler)
self.createContexMenuFav(oGuiElement, oOutputParameterHandler)
self.createContexMenuinfo(oGuiElement, oOutputParameterHandler)
self.createContexMenuba(oGuiElement, oOutputParameterHandler)
if self.ADDON.getSetting('bstoken') != '':
self.createContexMenuTrakt(oGuiElement, oOutputParameterHandler)
if self.ADDON.getSetting('tmdb_account') != '':
self.createContexMenuTMDB(oGuiElement, oOutputParameterHandler)
self.createContexMenuSimil(oGuiElement, oOutputParameterHandler)
elif cGui.CONTENT == 'tvshows':
self.createContexMenuWatch(oGuiElement, oOutputParameterHandler)
self.createContexMenuFav(oGuiElement, oOutputParameterHandler)
self.createContexMenuinfo(oGuiElement, oOutputParameterHandler)
self.createContexMenuba(oGuiElement, oOutputParameterHandler)
if self.ADDON.getSetting('bstoken') != '':
self.createContexMenuTrakt(oGuiElement, oOutputParameterHandler)
if self.ADDON.getSetting('tmdb_account') != '':
self.createContexMenuTMDB(oGuiElement, oOutputParameterHandler)
self.createContexMenuSimil(oGuiElement, oOutputParameterHandler)
oListItem = self.__createContextMenu(oGuiElement, oListItem)
#sPluginHandle = cPluginHandler().getPluginHandle()
        # modified 22/06
#xbmcplugin.addDirectoryItem(sPluginHandle, sItemUrl, oListItem, isFolder=_isFolder)
self.listing.append((sItemUrl, oListItem, _isFolder))
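    # Build a Kodi ListItem from the GUI element: info labels, artwork and item properties.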
def createListItem(self, oGuiElement):
oListItem = listitem(oGuiElement.getTitle())
oListItem.setInfo(oGuiElement.getType(), oGuiElement.getItemValues())
#oListItem.setThumbnailImage(oGuiElement.getThumbnail())
#oListItem.setIconImage(oGuiElement.getIcon())
        # Krypton and its behavior
oListItem.setArt({'poster': oGuiElement.getPoster(), 'thumb': oGuiElement.getThumbnail(), 'icon': oGuiElement.getIcon(), 'fanart': oGuiElement.getFanart()})
aProperties = oGuiElement.getItemProperties()
for sPropertyKey in aProperties.keys():
oListItem.setProperty(sPropertyKey, aProperties[sPropertyKey])
return oListItem
    # display the playable links
def addHost(self, oGuiElement, oOutputParameterHandler = ''):
if isKrypton():
cGui.CONTENT = 'movies'
if oOutputParameterHandler.getValue('siteUrl'):
sSiteUrl = oOutputParameterHandler.getValue('siteUrl')
oGuiElement.setSiteUrl(sSiteUrl)
oListItem = self.createListItem(oGuiElement)
oListItem.setProperty('IsPlayable', 'true')
oListItem.setProperty('Video', 'true')
oListItem.addStreamInfo('video', {})
sItemUrl = self.__createItemUrl(oGuiElement, oOutputParameterHandler)
oOutputParameterHandler.addParameter('sTitleWatched', oGuiElement.getTitleWatched())
self.createContexMenuWatch(oGuiElement, oOutputParameterHandler)
oListItem = self.__createContextMenu(oGuiElement, oListItem)
# sPluginHandle = cPluginHandler().getPluginHandle()
        # modified 13/09
#xbmcplugin.addDirectoryItem(sPluginHandle, sItemUrl, oListItem, isFolder=False)
self.listing.append((sItemUrl, oListItem, False))
    # Mark as watched / not watched
def createContexMenuWatch(self, oGuiElement, oOutputParameterHandler= ''):
self.CreateSimpleMenu(oGuiElement, oOutputParameterHandler, 'cGui', oGuiElement.getSiteName(), 'setWatched', self.ADDON.VSlang(30206))
def createContexMenuPageSelect(self, oGuiElement, oOutputParameterHandler):
#sSiteUrl = oGuiElement.getSiteName()
oContext = cContextElement()
oContext.setFile('cGui')
oContext.setSiteName('cGui')
oContext.setFunction('selectpage')
oContext.setTitle(self.ADDON.VSlang(30017))
oOutputParameterHandler.addParameter('OldFunction', oGuiElement.getFunction())
oOutputParameterHandler.addParameter('sId', oGuiElement.getSiteName())
oContext.setOutputParameterHandler(oOutputParameterHandler)
oGuiElement.addContextItem(oContext)
oContext = cContextElement()
oContext.setFile('cGui')
oContext.setSiteName('cGui')
oContext.setFunction('viewback')
oContext.setTitle(self.ADDON.VSlang(30018))
oOutputParameterHandler.addParameter('sId', oGuiElement.getSiteName())
oContext.setOutputParameterHandler(oOutputParameterHandler)
oGuiElement.addContextItem(oContext)
    # bookmark
def createContexMenuFav(self, oGuiElement, oOutputParameterHandler = ''):
oOutputParameterHandler.addParameter('sId', oGuiElement.getSiteName())
oOutputParameterHandler.addParameter('sFav', oGuiElement.getFunction())
oOutputParameterHandler.addParameter('sCat', oGuiElement.getCat())
self.CreateSimpleMenu(oGuiElement, oOutputParameterHandler, 'cFav', 'cFav', 'setBookmark', self.ADDON.VSlang(30210))
def createContexMenuTrakt(self, oGuiElement, oOutputParameterHandler= ''):
oOutputParameterHandler.addParameter('sImdbId', oGuiElement.getImdbId())
oOutputParameterHandler.addParameter('sTmdbId', oGuiElement.getTmdbId())
oOutputParameterHandler.addParameter('sFileName', oGuiElement.getFileName())
sType = cGui.CONTENT.replace('tvshows', 'shows')
oOutputParameterHandler.addParameter('sType', sType)
self.CreateSimpleMenu(oGuiElement, oOutputParameterHandler, 'cTrakt', 'cTrakt', 'getAction', self.ADDON.VSlang(30214))
def createContexMenuTMDB(self, oGuiElement, oOutputParameterHandler = ''):
oOutputParameterHandler.addParameter('sImdbId', oGuiElement.getImdbId())
oOutputParameterHandler.addParameter('sTmdbId', oGuiElement.getTmdbId())
oOutputParameterHandler.addParameter('sFileName', oGuiElement.getFileName())
self.CreateSimpleMenu(oGuiElement, oOutputParameterHandler, 'themoviedb_org', 'themoviedb_org', 'getAction', 'TMDB')
def createContexMenuDownload(self, oGuiElement, oOutputParameterHandler = '', status = '0'):
if status == '0':
self.CreateSimpleMenu(oGuiElement, oOutputParameterHandler, 'cDownload', 'cDownload', 'StartDownloadOneFile', self.ADDON.VSlang(30215))
if status == '0' or status == '2':
self.CreateSimpleMenu(oGuiElement, oOutputParameterHandler, 'cDownload', 'cDownload', 'delDownload', self.ADDON.VSlang(30216))
self.CreateSimpleMenu(oGuiElement, oOutputParameterHandler, 'cDownload', 'cDownload', 'DelFile', self.ADDON.VSlang(30217))
if status == '1':
self.CreateSimpleMenu(oGuiElement, oOutputParameterHandler, 'cDownload', 'cDownload', 'StopDownloadList', self.ADDON.VSlang(30218))
if status == '2':
self.CreateSimpleMenu(oGuiElement, oOutputParameterHandler, 'cDownload', 'cDownload', 'ReadDownload', self.ADDON.VSlang(30219))
self.CreateSimpleMenu(oGuiElement, oOutputParameterHandler, 'cDownload', 'cDownload', 'ResetDownload', self.ADDON.VSlang(30220))
# Information
def createContexMenuinfo(self, oGuiElement, oOutputParameterHandler = ''):
oOutputParameterHandler = cOutputParameterHandler()
oOutputParameterHandler.addParameter('sTitle', oGuiElement.getTitle())
oOutputParameterHandler.addParameter('sFileName', oGuiElement.getFileName())
oOutputParameterHandler.addParameter('sId', oGuiElement.getSiteName())
oOutputParameterHandler.addParameter('sMeta', oGuiElement.getMeta())
self.CreateSimpleMenu(oGuiElement, oOutputParameterHandler, 'cGui', oGuiElement.getSiteName(), 'viewinfo', self.ADDON.VSlang(30208))
    # Trailer
def createContexMenuba(self, oGuiElement, oOutputParameterHandler = ''):
oOutputParameterHandler = cOutputParameterHandler()
oOutputParameterHandler.addParameter('sTitle', oGuiElement.getTitle())
oOutputParameterHandler.addParameter('sFileName', oGuiElement.getFileName())
self.CreateSimpleMenu(oGuiElement, oOutputParameterHandler, 'cGui', oGuiElement.getSiteName(), 'viewBA', self.ADDON.VSlang(30212))
    # Similar search
def createContexMenuSimil(self, oGuiElement, oOutputParameterHandler = ''):
oOutputParameterHandler = cOutputParameterHandler()
oOutputParameterHandler.addParameter('sFileName', oGuiElement.getFileName())
oOutputParameterHandler.addParameter('sTitle', oGuiElement.getTitle())
oOutputParameterHandler.addParameter('sCat', oGuiElement.getCat())
self.CreateSimpleMenu(oGuiElement, oOutputParameterHandler, 'cGui', oGuiElement.getSiteName(), 'viewsimil', self.ADDON.VSlang(30213))
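    # Generic helper: attach one context-menu entry (file / site / function / title) to the element.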
def CreateSimpleMenu(self, oGuiElement, oOutputParameterHandler, sFile, sName, sFunction, sTitle):
oContext = cContextElement()
oContext.setFile(sFile)
oContext.setSiteName(sName)
oContext.setFunction(sFunction)
oContext.setTitle(sTitle)
oContext.setOutputParameterHandler(oOutputParameterHandler)
oGuiElement.addContextItem(oContext)
def createContexMenuDelFav(self, oGuiElement, oOutputParameterHandler = ''):
self.CreateSimpleMenu(oGuiElement, oOutputParameterHandler, 'cFav', 'cFav', 'delBookmarksMenu', self.ADDON.VSlang(30209))
def createContexMenuSettings(self, oGuiElement, oOutputParameterHandler = ''):
self.CreateSimpleMenu(oGuiElement, oOutputParameterHandler, 'globalParametre', 'globalParametre', 'opensetting', self.ADDON.VSlang(30023))
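    # Convert the element's accumulated context items into RunPlugin() entries on the list item.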
def __createContextMenu(self, oGuiElement, oListItem):
sPluginPath = cPluginHandler().getPluginPath()
aContextMenus = []
        # Classic menus already set on the element
if len(oGuiElement.getContextItems()) > 0:
for oContextItem in oGuiElement.getContextItems():
oOutputParameterHandler = oContextItem.getOutputParameterHandler()
sParams = oOutputParameterHandler.getParameterAsUri()
sTest = '%s?site=%s&function=%s&%s' % (sPluginPath, oContextItem.getFile(), oContextItem.getFunction(), sParams)
aContextMenus += [(oContextItem.getTitle(), 'XBMC.RunPlugin(%s)' % (sTest, ), )]
oListItem.addContextMenuItems(aContextMenus, True)
return oListItem
def __ContextMenu(self, oGuiElement, oListItem):
sPluginPath = cPluginHandler().getPluginPath()
aContextMenus = []
if len(oGuiElement.getContextItems()) > 0:
for oContextItem in oGuiElement.getContextItems():
oOutputParameterHandler = oContextItem.getOutputParameterHandler()
sParams = oOutputParameterHandler.getParameterAsUri()
sTest = '%s?site=%s&function=%s&%s' % (sPluginPath, oContextItem.getFile(), oContextItem.getFunction(), sParams)
aContextMenus += [(oContextItem.getTitle(), 'XBMC.RunPlugin(%s)' % (sTest, ), )]
oListItem.addContextMenuItems(aContextMenus)
#oListItem.addContextMenuItems(aContextMenus, True)
return oListItem
def __ContextMenuPlay(self, oGuiElement, oListItem):
sPluginPath = cPluginHandler().getPluginPath()
aContextMenus = []
if len(oGuiElement.getContextItems()) > 0:
for oContextItem in oGuiElement.getContextItems():
oOutputParameterHandler = oContextItem.getOutputParameterHandler()
sParams = oOutputParameterHandler.getParameterAsUri()
sTest = '%s?site=%s&function=%s&%s' % (sPluginPath, oContextItem.getFile(), oContextItem.getFunction(), sParams)
aContextMenus += [(oContextItem.getTitle(), 'XBMC.RunPlugin(%s)' % (sTest, ), )]
oListItem.addContextMenuItems(aContextMenus)
#oListItem.addContextMenuItems(aContextMenus, True)
return oListItem
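    # Flush self.listing to Kodi, set the content type and sort method, then apply the view mode.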
def setEndOfDirectory(self, ForceViewMode = False):
iHandler = cPluginHandler().getPluginHandle()
        # modified 22/06
if not self.listing:
self.addText('cGui')
xbmcplugin.addDirectoryItems(iHandler, self.listing, len(self.listing))
xbmcplugin.setPluginCategory(iHandler, '')
xbmcplugin.setContent(iHandler, cGui.CONTENT)
xbmcplugin.addSortMethod(iHandler, xbmcplugin.SORT_METHOD_NONE)
xbmcplugin.endOfDirectory(iHandler, succeeded = True, cacheToDisc = True)
        # view mode settings
        # 50 = list / 51 big list / 500 icons / 501 gallery / 508 fanart /
if ForceViewMode:
xbmc.executebuiltin('Container.SetViewMode(' + str(ForceViewMode) + ')')
else:
if self.ADDON.getSetting('active-view') == 'true':
if cGui.CONTENT == 'movies':
#xbmc.executebuiltin('Container.SetViewMode(507)')
xbmc.executebuiltin('Container.SetViewMode(%s)' % self.ADDON.getSetting('movie-view'))
elif cGui.CONTENT == 'tvshows':
xbmc.executebuiltin('Container.SetViewMode(%s)' % self.ADDON.getSetting('serie-view'))
elif cGui.CONTENT == 'files':
xbmc.executebuiltin('Container.SetViewMode(%s)' % self.ADDON.getSetting('default-view'))
        # Kodi 18 display bug
del self.listing [:]
def updateDirectory(self):
xbmc.executebuiltin('Container.Refresh')
def viewback(self):
sPluginPath = cPluginHandler().getPluginPath()
oInputParameterHandler = cInputParameterHandler()
# sParams = oInputParameterHandler.getAllParameter()
sId = oInputParameterHandler.getValue('sId')
sTest = '%s?site=%s' % (sPluginPath, sId)
xbmc.executebuiltin('XBMC.Container.Update(%s, replace)' % sTest)
def viewsimil(self):
sPluginPath = cPluginHandler().getPluginPath()
oInputParameterHandler = cInputParameterHandler()
# sFileName = oInputParameterHandler.getValue('sFileName')
sTitle = oInputParameterHandler.getValue('sTitle')
sCat = oInputParameterHandler.getValue('sCat')
oOutputParameterHandler = cOutputParameterHandler()
#oOutputParameterHandler.addParameter('searchtext', sFileName)
oOutputParameterHandler.addParameter('searchtext', cUtil().CleanName(sTitle))
oOutputParameterHandler.addParameter('sCat', sCat)
oOutputParameterHandler.addParameter('readdb', 'False')
sParams = oOutputParameterHandler.getParameterAsUri()
sTest = '%s?site=%s&function=%s&%s' % (sPluginPath, 'globalSearch', 'globalSearch', sParams)
xbmc.executebuiltin('XBMC.Container.Update(%s)' % sTest)
return False
def selectpage(self):
sPluginPath = cPluginHandler().getPluginPath()
oInputParameterHandler = cInputParameterHandler()
#sParams = oInputParameterHandler.getAllParameter()
sId = oInputParameterHandler.getValue('sId')
sFunction = oInputParameterHandler.getValue('OldFunction')
siteUrl = oInputParameterHandler.getValue('siteUrl')
oParser = cParser()
oldNum = oParser.getNumberFromString(siteUrl)
newNum = 0
if oldNum:
newNum = self.showNumBoard()
if newNum:
try:
siteUrl = siteUrl.replace(oldNum, newNum)
oOutputParameterHandler = cOutputParameterHandler()
oOutputParameterHandler.addParameter('siteUrl', siteUrl)
sParams = oOutputParameterHandler.getParameterAsUri()
sTest = '%s?site=%s&function=%s&%s' % (sPluginPath, sId, sFunction, sParams)
xbmc.executebuiltin('XBMC.Container.Update(%s)' % sTest)
except:
return False
return False
def selectpage2(self):
sPluginPath = cPluginHandler().getPluginPath()
oInputParameterHandler = cInputParameterHandler()
sId = oInputParameterHandler.getValue('sId')
sFunction = oInputParameterHandler.getValue('OldFunction')
siteUrl = oInputParameterHandler.getValue('siteUrl')
# sParams = oInputParameterHandler.getAllParameter()
selpage = self.showNumBoard()
oOutputParameterHandler = cOutputParameterHandler()
oOutputParameterHandler.addParameter('siteUrl', siteUrl)
oOutputParameterHandler.addParameter('Selpage', selpage)
sParams = oOutputParameterHandler.getParameterAsUri()
sTest = '%s?site=%s&function=%s&%s' % (sPluginPath, sId, sFunction, sParams)
xbmc.executebuiltin('XBMC.Container.Update(%s, replace)' % sTest)
def setWatched(self):
if True:
#Use VStream database
oInputParameterHandler = cInputParameterHandler()
sSite = oInputParameterHandler.getValue('siteUrl')
sTitle = oInputParameterHandler.getValue('sTitleWatched')
if not sTitle:
return
meta = {}
meta['title'] = sTitle
meta['site'] = sSite
db = cDb()
row = db.get_watched(meta)
if row:
db.del_watched(meta)
db.del_resume(meta)
else:
db.insert_watched(meta)
# To test
#xbmc.executebuiltin('Container.Refresh')
else:
            # Use Kodi built-in feature
xbmc.executebuiltin('Action(ToggleWatched)')
            # Not useful?
#xbmc.executebuiltin('Container.Refresh')
def viewBA(self):
oInputParameterHandler = cInputParameterHandler()
sFileName = oInputParameterHandler.getValue('sFileName')
from resources.lib.ba import cShowBA
cBA = cShowBA()
cBA.SetSearch(sFileName)
cBA.SearchBA()
def viewinfo(self):
from resources.lib.config import WindowsBoxes
# oGuiElement = cGuiElement()
oInputParameterHandler = cInputParameterHandler()
sTitle = oInputParameterHandler.getValue('sTitle')
# sId = oInputParameterHandler.getValue('sId')
sFileName = oInputParameterHandler.getValue('sFileName')
sMeta = oInputParameterHandler.getValue('sMeta')
sYear = oInputParameterHandler.getValue('sYear')
        # sMeta = 1 >> movie, sMeta = 2 >> series
sCleanTitle = cUtil().CleanName(sFileName)
        # strip the season and episode markers
if True: # sMeta == 2:
sCleanTitle = re.sub('(?i).pisode [0-9]+', '', sCleanTitle)
sCleanTitle = re.sub('(?i)saison [0-9]+', '', sCleanTitle)
sCleanTitle = re.sub('(?i)S[0-9]+E[0-9]+', '', sCleanTitle)
sCleanTitle = re.sub('(?i)[S|E][0-9]+', '', sCleanTitle)
ui = WindowsBoxes(sTitle, sCleanTitle, sMeta, sYear)
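    # Build the plugin:// URL for an element: site, optional function, quoted title and serialized parameters.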
def __createItemUrl(self, oGuiElement, oOutputParameterHandler = ''):
if (oOutputParameterHandler == ''):
oOutputParameterHandler = cOutputParameterHandler()
sParams = oOutputParameterHandler.getParameterAsUri()
        # create a unique id
# if oGuiElement.getSiteUrl():
# print str(hash(oGuiElement.getSiteUrl()))
sPluginPath = cPluginHandler().getPluginPath()
if (len(oGuiElement.getFunction()) == 0):
sItemUrl = '%s?site=%s&title=%s&%s' % (sPluginPath, oGuiElement.getSiteName(), QuotePlus(oGuiElement.getCleanTitle()), sParams)
else:
sItemUrl = '%s?site=%s&function=%s&title=%s&%s' % (sPluginPath, oGuiElement.getSiteName(), oGuiElement.getFunction(), QuotePlus(oGuiElement.getCleanTitle()), sParams)
#print sItemUrl
return sItemUrl
def showKeyBoard(self, sDefaultText = '', heading = ''):
keyboard = xbmc.Keyboard(sDefaultText)
keyboard.setHeading(heading)
keyboard.doModal()
if (keyboard.isConfirmed()):
sSearchText = keyboard.getText()
if (len(sSearchText)) > 0:
return sSearchText
return False
def showNumBoard(self, sDefaultNum = ''):
dialogs = dialog()
numboard = dialogs.numeric(0, self.ADDON.VSlang(30019), sDefaultNum)
#numboard.doModal()
if numboard != None:
return numboard
return False
def openSettings(self):
return False
def showNofication(self, sTitle, iSeconds = 0):
return False
def showError(self, sTitle, sDescription, iSeconds = 0):
return False
def showInfo(self, sTitle, sDescription, iSeconds = 0):
return False
| 42.192661
| 178
| 0.671916
| 31,550
| 0.979996
| 0
| 0
| 0
| 0
| 0
| 0
| 5,767
| 0.179133
|
1809e4f7973197265ce5a6a201169c2856659885
| 1,555
|
py
|
Python
|
src/jt/rubicon/java/_typemanager.py
|
karpierz/jtypes.rubicon
|
8f8196e47de93183eb9728fec0d08725fc368ee0
|
[
"BSD-3-Clause"
] | 2
|
2018-11-29T06:19:05.000Z
|
2018-12-09T09:47:55.000Z
|
src/jt/rubicon/java/_typemanager.py
|
karpierz/jtypes.rubicon
|
8f8196e47de93183eb9728fec0d08725fc368ee0
|
[
"BSD-3-Clause"
] | null | null | null |
src/jt/rubicon/java/_typemanager.py
|
karpierz/jtypes.rubicon
|
8f8196e47de93183eb9728fec0d08725fc368ee0
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2016-2019, Adam Karpierz
# Licensed under the BSD license
# http://opensource.org/licenses/BSD-3-Clause
from ...jvm.lib.compat import *
from ...jvm.lib import annotate
from ...jvm.lib import public
from ._typehandler import * # noqa
@public
class TypeManager(object):
__slots__ = ('_state', '_handlers')
def __init__(self, state=None):
super(TypeManager, self).__init__()
self._state = state
self._handlers = {}
def start(self):
self._register_handler(VoidHandler)
self._register_handler(BooleanHandler)
self._register_handler(CharHandler)
self._register_handler(ByteHandler)
self._register_handler(ShortHandler)
self._register_handler(IntHandler)
self._register_handler(LongHandler)
self._register_handler(FloatHandler)
self._register_handler(DoubleHandler)
self._register_handler(StringHandler)
def stop(self):
self._handlers = {}
def _register_handler(self, hcls):
thandler = hcls(self._state)
self._handlers[thandler._jclass] = thandler
return thandler
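    # Handlers for primitive signatures are registered in start(); any other class
    # signature ("L...;") gets an ObjectHandler created on first use and cached here.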
def get_handler(self, jclass):
thandler = self._handlers.get(jclass)
if thandler is None:
if not jclass.startswith("L"):
raise ValueError("Don't know how to convert argument with "
"type signature '{}'".format(jclass))
self._handlers[jclass] = thandler = ObjectHandler(self._state, jclass)
return thandler
| 28.272727
| 82
| 0.652733
| 1,292
| 0.830868
| 0
| 0
| 1,300
| 0.836013
| 0
| 0
| 208
| 0.133762
|
180ba7fe8e58c4e3cae590b1f061d367ca5c9d22
| 63,592
|
py
|
Python
|
rest/models.py
|
istarnes/restit
|
24d2805ab68696cab7718cc1164b7f716582ffb7
|
[
"0BSD"
] | null | null | null |
rest/models.py
|
istarnes/restit
|
24d2805ab68696cab7718cc1164b7f716582ffb7
|
[
"0BSD"
] | null | null | null |
rest/models.py
|
istarnes/restit
|
24d2805ab68696cab7718cc1164b7f716582ffb7
|
[
"0BSD"
] | null | null | null |
import os
from django.conf import settings
from django.core.exceptions import FieldDoesNotExist
from hashids import Hashids
import hashlib
import string
from datetime import datetime, date, timedelta
from decimal import Decimal
TWOPLACES = Decimal(10) ** -2
from django.db import models
from django.apps import apps
get_model = apps.get_model
from django.http import Http404
from django.core.exceptions import ValidationError
import threading
from rest import helpers as rest_helpers
from rest.uberdict import UberDict
from rest import search
from rest.privpub import PrivatePublicEncryption
import importlib
GRAPH_HELPERS = UberDict()
GRAPH_HELPERS.restGet = None
GRAPH_HELPERS.get_request = None
GRAPH_HELPERS.views = None
ENCRYPTER_KEY_FILE = os.path.join(settings.ROOT, "config", "encrypt_key.pem")
ENCRYPTER = None
if os.path.exists(ENCRYPTER_KEY_FILE):
ENCRYPTER = PrivatePublicEncryption(private_key_file=ENCRYPTER_KEY_FILE)
class RestError(Exception):
def __init__(self, reason, code=None):
self.reason = reason
self.code = code
def __repr__(self):
return self.reason
class PermisionDeniedException(RestError):
def __init__(self, reason="permission denied", code=401):
self.reason = reason
self.code = code
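# Shared permission check: require an authenticated user, then accept either a direct
# member permission or a permission on the given (or request) group.
# Returns a (ok, error_message, http_status) tuple.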
def requestHasPerms(request, perms, group=None):
if not request.user.is_authenticated:
return False, "auth required", 401
if not hasattr(request, 'member'):
request.member, request.group = request.user.__class__.getMemberGroup(request, False, False)
if request.member.hasPerm(perms):
return True, None, None
if group is None and hasattr(request, "group"):
group = request.group
if group and request.member.hasGroupPerm(group, perms):
return True, None, None
return False, "permission denied", 402
class MetaDataBase(models.Model):
class Meta:
abstract = True
category = models.CharField(db_index=True, max_length=32, default=None, null=True, blank=True)
key = models.CharField(db_index=True, max_length=80)
value_format = models.CharField(max_length=16)
value = models.TextField()
int_value = models.IntegerField(default=None, null=True, blank=True)
    float_value = models.FloatField(default=None, null=True, blank=True)
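    # The raw value is always stored as text in `value`; value_format ("I", "F", "L", "O", "S")
    # marks the detected type, with typed copies kept in int_value / float_value.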
def setValue(self, value):
self.value = "{}".format(value)
if type(value) is int and value < 2147483647:
self.value_format = "I"
self.int_value = value
elif type(value) is float:
self.value_format = "F"
self.float_value = value
elif isinstance(value, list):
self.value_format = "L"
# self.value = ",".join(value)
elif isinstance(value, dict):
self.value_format = "O"
        elif type(value) is str and len(value) < 9 and value.isdigit():
self.value_format = "I"
self.int_value = value
elif value in ["True", "true", "False", "false"]:
self.value_format = "I"
if value in ["True", "true"]:
self.int_value = 1
else:
self.int_value = 0
elif isinstance(value, bool):
self.value_format = "I"
if value:
self.int_value = 1
else:
self.int_value = 0
else:
self.value_format = "S"
def getStrictType(self, field_type):
if type(self.value) is field_type:
return self.value
        if field_type in [int, str, float]:
return field_type(self.value)
elif field_type is bool:
return self.value in [True, 1, '1', 'y', 'Y', 'true']
elif field_type in [date, datetime]:
return rest_helpers.parseDate(self.value)
return self.value
def getValue(self, field_type=None):
if field_type:
return self.getStrictType(field_type)
elif self.value_format == 'I':
return self.int_value
elif self.value_format == 'F':
return self.float_value
elif self.value_format in ["L", "O"] and self.value:
try:
return eval(self.value)
except:
pass
return self.value
def __unicode__(self):
if self.category:
return "{}.{}={}".format(self.category, self.key, self.value)
return "{}={}".format(self.key, self.value)
def __str__(self):
if self.category:
return "{}.{}={}".format(self.category, self.key, self.value)
return "{}={}".format(self.key, self.value)
class MetaDataModel(object):
def set_metadata(self, request, values=None):
# this may get called before the model is saved
if not self.id:
self.save()
if values is None:
values = request
request = None
if not isinstance(values, dict):
raise Exception("invalid metadata: {}".format(values))
for key, value in list(values.items()):
cat = None
if "." in key:
cat, key = key.split('.')
self.setProperty(key, value, cat, request=request)
def metadata(self):
return self.getProperties()
def removeProperties(self, category=None):
        # remove all properties that match the given category (None matches uncategorized ones)
self.properties.filter(category=category).delete()
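    # Collect all properties into a dict, nesting categorized keys and skipping hidden fields.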
def getProperties(self, category=None):
ret = {}
for p in self.properties.all():
if p.category:
props = self.getFieldProps(p.category)
if props.hidden:
continue
if p.category not in ret or not isinstance(ret.get(p.category, None), dict):
ret[p.category] = {}
props = self.getFieldProps("{}.{}".format(p.category, p.key))
if props.hidden:
continue
ret[p.category][p.key] = p.getValue()
else:
props = self.getFieldProps(p.key)
if props.hidden:
continue
ret[p.key] = p.getValue()
if category is not None:
if category in ret:
return ret[category]
return {}
return ret
def __initFieldProps(self):
if not hasattr(self, "__field_props"):
if hasattr(self.RestMeta, "METADATA_FIELD_PROPERTIES"):
# this provides extra protection for metadata fields
self.__field_props = self.RestMeta.METADATA_FIELD_PROPERTIES
else:
self.__field_props = None
def getFieldProps(self, key):
self.__initFieldProps()
full_key = key
category = None
if "." in key:
category, key = key.split('.')
props = UberDict()
if self.__field_props:
if category and self.__field_props.get(category, None):
cat_props = self.__field_props.get(category, None)
if cat_props:
props.notify = cat_props.get("notify", None)
props.on_change_name = cat_props.get("on_change", None)
props.hidden = cat_props.get("hidden", False)
if props.on_change_name:
props.on_change = getattr(self, props.on_change_name, None)
field_props = self.__field_props.get(full_key, None)
if field_props:
props.notify = field_props.get("notify", props.notify)
props.requires = field_props.get("requires", None)
props.hidden = field_props.get("hidden", False)
on_change_name = field_props.get("on_change", None)
if on_change_name:
on_change = getattr(self, on_change_name, None)
if on_change:
props.on_change = on_change
return props
def checkFieldPerms(self, full_key, props, request=None):
if not props.requires:
return True
if not request or not request.member:
return False
if request.member.hasPermission(props.requires) or request.user.is_superuser:
return True
        # this is an unauthorized attempt to change a protected field; notify and raise
if props.notify and request.member:
subject = "permission denied changing protected '{}' field".format(full_key)
msg = "permission denied changing protected field '{}'\nby user: {}\nfor: {}".format(
full_key,
request.user.username,
self
)
request.member.notifyWithPermission(props.notify, subject, msg, email_only=True)
raise PermisionDeniedException(subject)
def setProperties(self, data, category=None, request=None, using=None):
for k,v in list(data.items()):
self.setProperty(k, v, category, request=request, using=using)
def setProperty(self, key, value, category=None, request=None, using=None):
# rest_helpers.log_print("{}:{} ({})".format(key, value, type(value)))
on_change = None
if not using:
using = getattr(self.RestMeta, "DATABASE", using)
if not request:
request = RestModel.getActiveRequest()
self.__initFieldProps()
if isinstance(value, dict):
return self.setProperties(value, key)
username = "root"
if request and request.member:
username = request.member.username
prop = None
if "." in key:
category, key = key.split('.')
if category:
# delete any keys with this category name
full_key = "{}.{}".format(category, key)
# this deletes anything with the key that matches the category
# this works because the category is stored not in key but category field
# rest_helpers.log_print("deleting key={}".format(category))
self.properties.filter(key=category).delete()
else:
full_key = key
field_props = self.getFieldProps(full_key)
if not self.checkFieldPerms(full_key, field_props, request):
return False
check_value = "{}".format(value)
has_changed = False
prop = self.properties.filter(category=category, key=key).last()
old_value = None
if prop:
            # existing property: delete it if the new value is empty, otherwise update it
old_value = prop.getValue()
if value is None or value == "":
prop.delete()
has_changed = True
else:
has_changed = check_value != prop.value
if not has_changed:
return
prop.setValue(value)
prop.save(using=using)
if field_props.on_change:
field_props.on_change(key, value, old_value, category)
elif value is None or value == "":
# do not create none or empty property
return False
else:
has_changed = True
PropClass = self.get_fk_model("properties")
prop = PropClass(parent=self, key=key, category=category)
prop.setValue(value)
# rest_helpers.log_print(u"saving {}.{}".format(category, key))
# rest_helpers.log_print(u"saving {} : {}".format(full_key, value))
prop.save(using=using)
if hasattr(self, "_recordRestChange"):
self._recordRestChange("metadata.{}".format(full_key), old_value)
if field_props.notify and request and request.member:
notify = field_props.get("notify")
msg = "protected field '{}' changed to '{}'\nby user: {}\nfor: {}".format(
full_key,
value,
username,
self
)
request.member.notifyWithPermission(notify, "protected '{}' field changed".format(full_key), msg, email_only=True)
return has_changed
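    # Example (hypothetical usage): obj.setProperty("billing.plan", "pro") stores a row with
    # category="billing", key="plan"; obj.getProperty("billing.plan") then returns "pro".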
def getProperty(self, key, default=None, category=None, field_type=None):
try:
if "." in key:
category, key = key.split('.')
return self.properties.get(category=category, key=key).getValue(field_type)
except:
pass
return default
class RestValidationError(RestError):
pass
class RestModel(object):
class __RestMeta__:
NO_SAVE_FIELDS = ["uuid", "id", "pk", "created", "modified"]
NO_SHOW_FIELDS = ["password"]
WHITELISTED = ["merchant", "group", "user", "member", "terminal"]
class RestMeta:
NO_SAVE_FIELDS = []
SAVE_FIELDS = []
GRAPHS = {}
@staticmethod
def generateUUID(*args, **kwargs):
upper = kwargs.get("upper", True)
max_length = kwargs.get("max_length", None)
uuid = ""
for key in args:
if isinstance(key, float):
                key = str(key)
if isinstance(key, int):
uuid += Hashids().encrypt(key)
if isinstance(key, str):
uuid += rest_helpers.toString(hashlib.md5(rest_helpers.toBytes(key)).hexdigest())
if len(uuid) > 125:
uuid = uuid[:125]
if max_length != None:
uuid = uuid[:max_length]
if upper:
return uuid.upper()
return uuid
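    # Build a named graph from RestMeta.GRAPHS: start from the model's own visible fields,
    # then merge each referenced foreign model's graph, prefixing its fields with "<field>."
    # and recording the relation in recurse_into for the serializer.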
@classmethod
def buildGraph(cls, name):
# we need to build it
if hasattr(cls.RestMeta, "GRAPHS"):
graphs = cls.RestMeta.GRAPHS
if name in graphs:
graph = graphs[name]
else:
graph = {}
else:
graph = {}
if "no_uscore" not in graph:
graph["no_uscore"] = False
no_show_fields = RestModel.__RestMeta__.NO_SHOW_FIELDS
if hasattr(cls.RestMeta, "NO_SHOW_FIELDS"):
no_show_fields = cls.RestMeta.NO_SHOW_FIELDS
field_names = []
for f in cls._meta.fields:
if not f.name.endswith("_ptr"):
if f.name not in no_show_fields:
field_names.append(f.name)
if "graphs" in graph:
if "recurse_into" not in graph:
graph["recurse_into"] = []
if "fields" in graph:
graph["fields"] = graph["fields"]
elif "fields" not in graph and "self" in graph["graphs"]:
graph["fields"] = []
else:
graph["fields"] = field_names
for field in graph["graphs"]:
gname = graph["graphs"][field]
size = None
ForeignModel = None
sort = None
if field.startswith("generic__"):
if field not in graph["recurse_into"]:
graph["recurse_into"].append((field, gname))
continue
if isinstance(gname, dict):
size = gname.get("size")
sort = gname.get("sort")
fm_name = gname.get("model")
gname = gname.get("graph")
if not gname:
gname = "default"
if fm_name:
a_name, m_name = fm_name.split(".")
ForeignModel = RestModel.getModel(a_name, m_name)
if not field or field == "self":
# this means it is referencing self
foreign_graph = cls.buildGraph(gname)
for part in foreign_graph:
if part not in graph:
graph[part] = foreign_graph[part]
else:
for f in foreign_graph[part]:
if f not in graph[part]:
graph[part].append(f)
# graph[part] += foreign_graph[part]
continue
# print "get FK: {0}".format(field)
if not ForeignModel:
ForeignModel = cls.get_fk_model(field)
if not ForeignModel:
rest_helpers.log_print("no foreignkey: {0}".format(field))
continue
# print ForeignModel
# print graph["recurse_into"]
# print graph["recurse_into"]
if field not in graph["recurse_into"]:
graph["recurse_into"].append(field)
# print ForeignModel
# if not hasattr(ForeignModel, "getGraph"):
# foreign_graph = {}
# foreign_graph["fields"] = []
# for f in ForeignModel._meta.fields:
# if f.name not in RestModel.__RestMeta__.NO_SHOW_FIELDS:
# foreign_graph["fields"].append(f.name)
# print ForeignModel
# print foreign_graph["fields"]
# else:
if not hasattr(ForeignModel, "getGraph"):
# print "NO getGraph"
continue
# print "getting graph: {0} for {1}".format(gname, field)
foreign_graph = ForeignModel.getGraph(gname)
# print foreign_graph
for part in ["fields", "recurse_into", "extra", "exclude"]:
if part not in foreign_graph:
continue
graph_part = foreign_graph[part]
if part not in graph:
graph[part] = []
root_part = graph[part]
for f in graph_part:
if type(f) is tuple:
f1, f2 = f
nfname = ("{0}.{1}".format(field, f1), f2)
elif graph["no_uscore"] and '_' in f:
f1, f2 = f, f.replace('_', '').split('.')[-1]
# print field
# print f2
nfname = ("{0}.{1}".format(field, f1), f2)
else:
nfname = "{0}.{1}".format(field, f)
if nfname not in root_part:
root_part.append(nfname)
del graph["graphs"]
if "fields" not in graph:
if graph["no_uscore"]:
graph["fields"] = []
for f in field_names:
if "_" in f:
f1, f2 = f, f.lower().replace('_', '')
# print "noscore"
# print f1
# print f2
graph["fields"].append((f1, f2))
else:
graph["fields"].append(f)
else:
graph["fields"] = field_names
if "no_uscore" in graph:
del graph["no_uscore"]
return graph
@classmethod
def ro_objects(cls):
using = getattr(cls.RestMeta, "RO_DATABASE", None)
if using is None:
using = getattr(cls.RestMeta, "DATABASE", None)
# if using is None:
# if settings.DATABASES.get("readonly", None) != None:
# using = "readonly"
if using:
return cls.objects.using(using)
return cls.objects
@classmethod
def rw_objects(cls):
using = getattr(cls.RestMeta, "DATABASE", None)
if using:
return cls.objects.using(using)
return cls.objects
def safeSave(self, **kwargs):
using = getattr(self.RestMeta, "DATABASE", None)
if using:
return self.save(using=using, **kwargs)
return self.save(**kwargs)
@classmethod
def getGraph(cls, name):
graph_key = "_graph_{0}__".format(name)
if hasattr(cls, graph_key):
return getattr(cls, graph_key)
if not hasattr(cls, "_lock__"):
cls._lock__ = threading.RLock()
# cls._lock__.acquire()
# try:
graph = cls.buildGraph(name)
# print "-" * 80
# print "SETTING GRAPH {0} FOR {1}".format(name, cls.__name__)
# print graph
setattr(cls, graph_key, graph)
# print "." * 80
# except:
# pass
# cls._lock__.release()
return graph
def toGraph(self, request=None, graph="basic"):
RestModel._setupGraphHelpers()
if not request:
request = GRAPH_HELPERS.get_request()
return GRAPH_HELPERS.restGet(request, self, return_httpresponse=False, **self.getGraph(graph))
@classmethod
def getActiveLogger(cls):
return rest_helpers.getLogger(cls.getActiveRequest())
@classmethod
def getActiveMember(cls):
request = cls.getActiveRequest()
if request:
return request.member
return None
@classmethod
def getActiveRequest(cls):
if not GRAPH_HELPERS.get_request:
mw = importlib.import_module("rest.middleware")
GRAPH_HELPERS.get_request = mw.get_request
return GRAPH_HELPERS.get_request()
@classmethod
def getFromRequest(cls, request):
key = cls.__name__.lower()
key_p = "{0}_id".format(key)
lookup_fields = [key, key_p]
using = getattr(cls.RestMeta, "DATABASE", None)
for field in lookup_fields:
value = request.DATA.get(field)
if value:
if not using:
obj = cls.objects.filter(pk=value).first()
else:
obj = cls.objects.using(using).filter(pk=value).first()
if obj:
return obj
lookup_fields = getattr(cls.RestMeta, "UNIQUE_LOOKUP", [])
for field in lookup_fields:
value = request.DATA.get(field)
if value:
q = {}
q[field] = value
if not using:
obj = cls.objects.filter(**q).first()
else:
obj = cls.objects.using(using).filter(**q).first()
if obj:
return obj
return None
value = request.DATA.get(key_p)
if not value:
value = request.DATA.get(key)
if not value:
return None
if using:
return cls.objects.using(using).filter(pk=value).first()
return cls.objects.filter(pk=value).first()
@classmethod
def getFromPK(cls, pk):
using = getattr(cls.RestMeta, "DATABASE", None)
if using:
return cls.objects.using(using).filter(pk=pk).first()
return cls.objects.filter(pk=pk).first()
@classmethod
def restEncrypt(cls, data):
if ENCRYPTER:
return ENCRYPTER.encrypt(data)
return data
@staticmethod
def restGetModel(app_name, model_name):
return apps.get_model(app_name, model_name)
@staticmethod
def getModel(app_name, model_name):
return apps.get_model(app_name, model_name)
def restGetGenericModel(self, field):
# called by the rest module to magically parse
# a component that is marked genericrelation in a graph
if not hasattr(self, field):
rest_helpers.log_print("model has no field: {0}".format(field))
return None
name = getattr(self, field)
if not name or "." not in name:
return None
a_name, m_name = name.split(".")
model = RestModel.getModel(a_name, m_name)
if not model:
rest_helpers.log_print("GENERIC MODEL DOES NOT EXIST: {0}".format(name))
return model
def restGetGenericRelation(self, field):
# called by the rest module to magically parse
# a component that is marked genericrelation in a graph
GenericModel = self.restGetGenericModel(field)
if not GenericModel:
return None
key = getattr(self, "{0}_id".format(field))
return GenericModel.rw_objects().filter(pk=key).first()
@staticmethod
def restGetModelDB(Model, default=None):
if hasattr(Model, "RestMeta"):
return getattr(Model.RestMeta, "DATABASE", default)
return default
@property
def has_model_changed(self):
if hasattr(self, "_changed__"):
return len(self._changed__) > 0
return False
def saveFields(self, allow_null=True, **kwargs):
"""
Helper method to save a list of fields
"""
self._changed__ = UberDict()
for key, value in list(kwargs.items()):
if value is None and not allow_null:
continue
self.restSaveField(key, value)
if len(self._changed__):
self.save()
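    # Save a single incoming field: honor set_<field> setters, resolve foreign keys passed
    # as dicts or primary keys, route MediaItem uploads, and coerce values to the Django
    # field type (datetime, date, int, float, currency, boolean) before assignment.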
def restSaveField(self, fieldname, value, has_fields=False, has_no_fields=False, using=None):
if not hasattr(self, "_changed__"):
self._changed__ = UberDict()
if fieldname.startswith("_"):
return
if not hasattr(self, "_field_names__"):
self._field_names__ = [f.name for f in self._meta.get_fields()]
# print "saving field: {0} = {1}".format(fieldname, value)
if fieldname in RestModel.__RestMeta__.NO_SAVE_FIELDS:
return
if has_no_fields and fieldname in self.RestMeta.NO_SAVE_FIELDS:
return
if has_fields and fieldname not in self.RestMeta.SAVE_FIELDS:
return
if fieldname.endswith("_id") and not self.get_field_type(fieldname):
            # django exposes ForeignKeys with an _id suffix; strip it so the setter/field lookup works
fieldname = fieldname[:-3]
setter = "set_{0}".format(fieldname)
if hasattr(self, setter):
getattr(self, setter)(value)
return
if fieldname in self._field_names__:
# TODO check if it is a function
if isinstance(value, models.Model):
setattr(self, fieldname, value)
self._changed__[fieldname] = True
return
ForeignModel = self.get_fk_model(fieldname)
if ForeignModel and isinstance(value, dict):
obj = getattr(self, fieldname, None)
if obj is None:
obj = ForeignModel()
if using is None:
using = self.restGetModelDB(self)
obj.saveFromDict(None, value, using=using)
# rest_helpers.log_print("{} vs {}".format(self._state.db, obj._state.db))
# rest_helpers.log_print("saving FK to {} ({}.{}) - {}".format(fieldname, using, obj.pk, type(obj)), value)
setattr(self, fieldname, obj)
self._changed__[fieldname] = True
return
elif ForeignModel and value and (type(value) is int or value.isdigit()):
# print "\tforeign model({2}) field: {0} = {1}".format(fieldname, value, ForeignModel.__class__.__name__)
value = int(value)
using = RestModel.restGetModelDB(ForeignModel)
if using:
value = ForeignModel.objects.using(using).filter(pk=value).first()
else:
value = ForeignModel.objects.filter(pk=value).first()
elif ForeignModel and "MediaItem" in ForeignModel.__name__:
if value:
self.saveMediaFile(value, fieldname, None, True)
return
elif ForeignModel and not value:
value = None
# maybe we could look for to_python here to make sure we have proper conversion
# thinking mainly around datetimes from epoch values
if not ForeignModel:
# field_model, model, direct, mm = self._meta.get_field_by_name(fieldname)
field_model = self._meta.get_field(fieldname)
# hack to handle save datetime fields correctly from floats
try:
if field_model and value != None:
field_model_name = field_model.__class__.__name__
if field_model_name == "DateTimeField":
value = rest_helpers.parseDateTime(value)
# value = datetime.fromtimestamp(float(value))
elif field_model_name == "DateField":
value = rest_helpers.parseDate(value, as_date=True)
elif field_model_name == "IntegerField":
value = int(value)
elif field_model_name == "FloatField":
value = float(value)
elif field_model_name == "CurrencyField":
value = Decimal(value).quantize(TWOPLACES)
elif field_model_name == "BooleanField":
if value in [True, 1, 'True', 'true', '1', 't', 'y', 'yes']:
value = True
else:
value = False
except Exception:
return
if hasattr(self, fieldname) and getattr(self, fieldname) != value:
self._changed__[fieldname] = getattr(self, fieldname)
setattr(self, fieldname, value)
# else:
# print "does not have field: {0}".format(fieldname)
def saveFromRequest(self, request, **kwargs):
if "files" not in kwargs:
kwargs["files"] = request.FILES
return self.saveFromDict(request, request.DATA, **kwargs)
def _recordRestChange(self, fieldname, old_value):
if not hasattr(self, "_changed__"):
self._changed__ = UberDict()
if "." in fieldname:
fields = fieldname.split('.')
root = self._changed__
for f in fields[:-1]:
if f not in root:
root[f] = UberDict()
root = root[f]
root[fields[-1]] = old_value
else:
self._changed__[fieldname] = old_value
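    # Main REST save path: apply request data field by field (dotted "prefix.field" values go
    # through set_<prefix> setters), defer POST_SAVE_FIELDS and metadata until after the first
    # save, handle uploaded files, then fire the on_rest_* hooks.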
def saveFromDict(self, request, data, files=None, **kwargs):
can_save = getattr(self.RestMeta, "CAN_SAVE", True)
if not can_save:
return self.restStatus(request, False, error="saving not allowed via rest for this model.")
# check check for save permissions
if request is None:
request = RestModel.getActiveRequest()
if request is None:
request = UberDict(member=None, FILES=[])
if hasattr(self, "onRestCanSave"):
# this should throw an error
self.onRestCanSave(request)
is_new = self.id is None
has_fields = hasattr(self.RestMeta, "SAVE_FIELDS") and len(self.RestMeta.SAVE_FIELDS)
has_no_fields = hasattr(self.RestMeta, "NO_SAVE_FIELDS") and len(self.RestMeta.NO_SAVE_FIELDS)
self._field_names__ = [f.name for f in self._meta.get_fields()]
# fix for multidatabase support and using readonly db for get
self._state.db = kwargs.get("using", self.restGetModelDB(self, "default"))
auto_save_fields = getattr(self.RestMeta, "AUTO_SAVE", None)
if auto_save_fields:
rest_helpers.log_print(auto_save_fields)
for field in auto_save_fields:
rest_helpers.log_print(field)
if isinstance(field, tuple):
m_field, req_field = field
else:
m_field = field
req_field = field
req_value = getattr(request, req_field, None)
if request and req_value:
data[m_field] = req_value
rest_helpers.log_print(data)
self._changed__ = UberDict()
if hasattr(self.RestMeta, "POST_SAVE_FIELDS"):
post_save_fields = self.RestMeta.POST_SAVE_FIELDS
else:
post_save_fields = []
using = kwargs.get("using", self.restGetModelDB(self))
deferred = {}
group_fields = {}
for fieldname in data:
# we allow override via kwargs
value = data.get(fieldname)
if "." in fieldname:
gname = fieldname[:fieldname.find('.')]
fname = fieldname[fieldname.find('.')+1:]
setter = "set_{0}".format(gname)
if hasattr(self, setter):
if gname not in group_fields:
group_fields[gname] = {}
group_fields[gname][fname] = value
continue
if fieldname in post_save_fields or fieldname.startswith("metadata"):
deferred[fieldname] = value
continue
if fieldname not in kwargs:
self.restSaveField(fieldname, value, has_fields, has_no_fields, using=using)
for key, value in list(kwargs.items()):
if key in post_save_fields:
                deferred[key] = value
continue
self.restSaveField(key, value, has_fields, has_no_fields, using=using)
self.restSaveFiles(request, files)
self.on_rest_pre_save(request)
self.save(using=using)
for key, value in list(deferred.items()):
self.restSaveField(key, value, has_fields, has_no_fields, using=using)
if len(deferred):
self.save(using=using)
# these setters are responsible for saving themselves
for gname in group_fields:
setter = "set_{0}".format(gname)
getattr(self, setter)(request, group_fields[gname])
if hasattr(self, "onSavedFromRequest"):
self.onSavedFromRequest(request, **kwargs)
elif not is_new:
self.on_rest_saved(request)
return self
def restSaveFiles(self, request, files=None):
if files is None:
files = request.FILES
for name in files:
key = "upload__{0}".format(name)
if hasattr(self, key):
getattr(self, key)(files[name], name)
else:
ForeignModel = self.get_fk_model(name)
if ForeignModel and ForeignModel.__name__ == "MediaItem":
rest_helpers.log_print("saving media file: {}".format(name))
self.saveMediaFile(files[name], name)
def changesFromDict(self, data):
deltas = []
field_names = [f.name for f in self._meta.get_fields()]
for key in data:
if key not in field_names:
continue
# we allow override via kwargs
value = data.get(key)
# field_model, model, direct, mm = self._meta.get_field_by_name(key)
field_model = self._meta.get_field(key)
# hack to handle save datetime fields correctly from floats
try:
if field_model and value != None:
field_model_name = field_model.__class__.__name__
if field_model_name == "DateTimeField":
value = datetime.fromtimestamp(float(value))
elif field_model_name == "DateField":
value = rest_helpers.parseDate(value)
elif field_model_name == "IntegerField":
value = int(value)
elif field_model_name == "FloatField":
value = float(value)
elif field_model_name == "CurrencyField":
value = Decimal(value).quantize(TWOPLACES)
if hasattr(self, key) and getattr(self, key) != value:
deltas.append(key)
except:
pass
return deltas
def copyFieldsFrom(self, obj, fields):
for f in fields:
if hasattr(self, f):
setattr(self, f, getattr(obj, f))
def saveMediaFile(self, file, name, file_name=None, is_base64=False, group=None):
"""
Generic method to save a media file
"""
if file_name is None:
file_name = name
MediaItem = RestModel.getModel("medialib", "MediaItem")
# make sure we set the name base64_data
if is_base64:
mi = MediaItem(name=file_name, base64_data=file, group=group)
        elif type(file) is str and (file.startswith("https:") or file.startswith("http:")):
mi = MediaItem(name=name, downloadurl=file, group=group)
else:
mi = MediaItem(name=name, newfile=file, group=group)
mi.save()
setattr(self, name, mi)
self.save()
return mi
def updateLogModel(self, request, model):
if not request:
request = self.getActiveRequest()
if not request or not hasattr(request, "setLogModel"):
rest_helpers.log_print("request does not support setLogModel")
return
if not self.id:
self.save()
request.setLogModel(model, self.id)
def on_rest_pre_get(self, request):
pass
def on_rest_get(self, request):
# check view permissions
perms = getattr(self.RestMeta, "VIEW_PERMS", None)
if perms:
# we need to check if this user has permission
group_field = getattr(self.RestMeta, "GROUP_FIELD", "group")
status, error, code = requestHasPerms(request, perms, getattr(self, group_field, None))
if not status:
return self.restStatus(request, False, error=error, error_code=code)
graph = request.DATA.get("graph", "default")
self.on_rest_pre_get(request)
return self.restGet(request, graph)
def on_rest_post(self, request):
perms = getattr(self.RestMeta, "SAVE_PERMS", None)
if perms:
# we need to check if this user has permission
group_field = getattr(self.RestMeta, "GROUP_FIELD", "group")
status, error, code = requestHasPerms(request, perms, getattr(self, group_field, None))
if not status:
return self.restStatus(request, False, error=error, error_code=code)
self.saveFromRequest(request)
status_only = request.DATA.get("status_only", False, field_type=bool)
if status_only:
return self.restStatus(request, True)
graph = request.DATA.get("graph", "default")
return self.restGet(request, graph)
def on_rest_pre_save(self, request, **kwargs):
pass
def on_rest_created(self, request):
self.on_rest_saved(request)
def on_rest_saved(self, request):
pass
def on_rest_delete(self, request):
can_delete = getattr(self.RestMeta, "CAN_DELETE", False)
if not can_delete:
return self.restStatus(request, False, error="deletion not allowed via rest for this model.")
perms = getattr(self.RestMeta, "SAVE_PERMS", None)
if perms:
# we need to check if this user has permission
group_field = getattr(self.RestMeta, "GROUP_FIELD", "group")
status, error, code = requestHasPerms(request, perms, getattr(self, group_field, None))
if not status:
return self.restStatus(request, False, error=error, error_code=code)
self.delete()
RestModel._setupGraphHelpers()
return self.restStatus(request, True)
@classmethod
def restList(cls, request, qset, graph=None, totals=None, return_httpresponse=True):
RestModel._setupGraphHelpers()
sort = None
if hasattr(cls.RestMeta, "DEFAULT_SORT"):
sort = cls.RestMeta.DEFAULT_SORT
if totals:
fields = totals
totals = {}
for tf in fields:
cls_method = "qset_totals_{}".format(tf)
if hasattr(cls, cls_method):
totals[tf] = getattr(cls, cls_method)(qset, request)
if not graph and request != None:
graph = request.DATA.get("graph", "default")
return GRAPH_HELPERS.restList(request, qset, sort=sort, totals=totals, return_httpresponse=return_httpresponse, **cls.getGraph(graph))
@classmethod
def toList(cls, qset, graph=None, totals=None, request=None):
return cls.restList(request, qset, graph, totals, False)
def restStatus(self, request, status, **kwargs):
RestModel._setupGraphHelpers()
return GRAPH_HELPERS.restStatus(request, status, **kwargs)
def restGet(self, request, graph=None, as_dict=False):
RestModel._setupGraphHelpers()
if not request:
request = self.getActiveRequest()
if not graph and request:
graph = request.DATA.get("graph", "default")
elif not graph:
graph = "default"
return_response = not as_dict
return GRAPH_HELPERS.restGet(request, self, return_httpresponse=return_response, **self.getGraph(graph))
def toDict(self, graph=None):
RestModel._setupGraphHelpers()
return self.restGet(None, graph=graph, as_dict=True)
@classmethod
def on_rest_list_filter(cls, request, qset=None):
# override on do any pre filters
return cls.on_rest_list_perms(request, qset)
@classmethod
def on_rest_list_perms(cls, request, qset=None):
if request.group:
group_perms = getattr(cls.RestMeta, "LIST_PERMS_GROUP", None)
if group_perms is None:
group_perms = getattr(cls.RestMeta, "VIEW_PERMS", None)
if group_perms and request.member:
has_perm = request.member.hasGroupPerm(request.group, group_perms) or request.member.hasPerm(group_perms)
if not has_perm:
return cls.objects.none()
qset = cls.on_rest_filter_children(request, qset)
else:
all_perms = getattr(cls.RestMeta, "VIEW_PERMS", None)
if all_perms:
if not request.member.hasPerm(all_perms):
return cls.objects.none()
return qset
@classmethod
def on_rest_filter_children(cls, request, qset=None):
group_field = getattr(cls.RestMeta, "GROUP_FIELD", "group")
parent_kinds = getattr(cls.RestMeta, "LIST_PARENT_KINDS", ["org"])
if request.DATA.get("child_groups") or request.group.kind in parent_kinds:
ids = request.group.getAllChildrenIds()
ids.append(request.group.id)
# to avoid future filtering issues remove group
request.group = None
request.DATA.remove(group_field)
if group_field != "group":
request.DATA.remove("group")
q = {}
q["{}_id__in".format(group_field)] = ids
return qset.filter(**q)
return qset
@classmethod
def on_rest_list_ready(cls, request, qset=None):
# override on do any post filters
return qset
@classmethod
def on_rest_date_filter(cls, request, qset=None):
date_range_field = getattr(cls.RestMeta, "DATE_RANGE_FIELD", "created")
date_range_default = getattr(cls.RestMeta, "DATE_RANGE_DEFAULT", None)
if date_range_default != None:
date_range_default = datetime.now() - timedelta(days=date_range_default)
qset = rest_helpers.filterByDateRange(qset, request, start=date_range_default, end=datetime.now()+timedelta(days=1), field=date_range_field)
else:
qset = rest_helpers.filterByDateRange(qset, request, field=date_range_field)
return qset
@classmethod
def on_rest_list(cls, request, qset=None):
qset = cls.on_rest_list_query(request, qset)
graph = request.DATA.get("graph", "list")
format = request.DATA.get("format")
if format:
return cls.on_rest_list_format(request, format, qset)
totals = request.DATA.getlist("totals", None)
return cls.restList(request, qset, graph, totals)
@classmethod
def on_rest_list_query(cls, request, qset=None):
cls._boundRest()
request.rest_class = cls
if qset is None:
qset = cls.ro_objects().all()
qset = cls.on_rest_list_filter(request, qset)
qset = cls.filterFromRequest(request, qset)
qset = cls.queryFromRequest(request, qset)
qset = cls.searchFromRequest(request, qset)
qset = cls.on_rest_date_filter(request, qset)
qset = cls.on_rest_list_ready(request, qset)
return qset
@classmethod
def on_rest_list_format(cls, request, format, qset):
if format in ["summary", "summary_only"]:
return cls.on_rest_list_summary(request, qset)
if hasattr(cls.RestMeta, "FORMATS"):
fields = cls.RestMeta.FORMATS.get(format)
else:
no_show_fields = RestModel.__RestMeta__.NO_SHOW_FIELDS
if hasattr(cls.RestMeta, "NO_SHOW_FIELDS"):
no_show_fields = cls.RestMeta.NO_SHOW_FIELDS
fields = []
for f in cls._meta.fields:
if not f.name.endswith("_ptr"):
if f.name not in no_show_fields:
fields.append(f.name)
if fields:
name = request.DATA.get("format_filename", None)
format_size = request.DATA.get("format_size", 10000)
if name is None:
name = "{}.{}".format(cls.__name__.lower(), format)
# print "csv size: {}".format(qset.count())
sort = request.DATA.get("sort", getattr(cls.RestMeta, "DEFAULT_SORT", None))
if sort:
qset = qset.order_by(sort)
cls._boundRest()
return GRAPH_HELPERS.views.restCSV(request, qset, fields, name, format_size)
@classmethod
def on_rest_list_summary(cls, request, qset):
if not hasattr(cls.RestMeta, "SUMMARY_FIELDS"):
return cls.restList(request, qset, None)
cls._boundRest()
summary_info = getattr(cls.RestMeta, "SUMMARY_FIELDS")
output = UberDict()
output.count = qset.count()
for key, value in list(summary_info.items()):
if key == "sum":
res = rest_helpers.getSum(qset, *value)
if isinstance(res, dict):
output.update(res)
else:
output[value[0]] = res
elif key == "avg":
for f in value:
output["avg_{}".format(f)] = rest_helpers.getAverage(qset, f)
elif key == "max":
for f in value:
output["max_{}".format(f)] = rest_helpers.getMax(qset, f)
elif isinstance(value, dict):
if "|" in key:
fields = key.split("|")
if len(fields) > 1:
lbl = fields[0]
action = fields[1]
field = None
if len(fields) > 2:
field = fields[2]
else:
action = "count"
lbl = key
field = None
act_qset = qset.filter(**value)
if action == "count":
output[lbl] = act_qset.count()
elif action == "sum":
output[lbl] = rest_helpers.getSum(act_qset, field)
elif action == "avg":
output[lbl] = rest_helpers.getAverage(act_qset, field)
elif action == "max":
output[lbl] = rest_helpers.getMax(act_qset, field)
return GRAPH_HELPERS.restGet(request, output)
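    # Batch endpoint: a "rest_batch" action operates on a queryset built from batch_ids
    # (matched against batch_id_field) and/or batch_query; supports delete, update and create.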
@classmethod
def on_rest_batch(cls, request, action):
        # this method is called when rest_batch='some action'
cls._boundRest()
batch_ids = request.DATA.getlist("batch_ids", [])
batch_id_field = request.DATA.get("batch_id_field", "pk")
q = {}
if batch_ids:
q["{}__in".format(batch_id_field)] = batch_ids
batch_query = request.DATA.get("batch_query", None)
if batch_query:
# we ignore ids when doing a query
q.update(batch_query)
if action == "delete":
can_delete = getattr(cls.RestMeta, "CAN_DELETE", False)
if not can_delete:
return GRAPH_HELPERS.restStatus(request, False, error="deletion not allowed via rest for this model.")
qset = cls.rw_objects().filter(**q)
count = qset.delete()
return GRAPH_HELPERS.restStatus(request, True, error="delete {} items".format(count))
elif action == "update":
qset = cls.rw_objects().filter(**q)
update_fields = request.DATA.get(["batch_data", "batch_update"])
if not isinstance(update_fields, dict):
return GRAPH_HELPERS.restStatus(request, False, error="batch_update should be key/values")
count = qset.update(**update_fields)
return GRAPH_HELPERS.restStatus(request, True, error="updated {} items".format(count))
elif action == "create":
batch_data = request.DATA.getlist("batch_data", [])
items = []
exist = []
for item in batch_data:
try:
                    obj = cls.ro_objects().filter(**item).last()
                    if not obj:
                        obj = cls()
                        obj.saveFromDict(request, item)
                    items.append(obj)
except:
pass
return GRAPH_HELPERS.restList(request, items)
return GRAPH_HELPERS.restStatus(request, False, error="not implemented")
@classmethod
def on_rest_create(cls, request, pk=None):
can_create = getattr(cls.RestMeta, "CAN_CREATE", True)
if not can_create:
return GRAPH_HELPERS.restStatus(request, False, error="creation not allowed via rest for this model.")
if hasattr(cls.RestMeta, "REQUEST_DEFAULTS"):
kv = {}
for k, v in list(cls.RestMeta.REQUEST_DEFAULTS.items()):
if hasattr(request, k):
value = getattr(request, k)
if value != None:
kv[v] = value
obj = cls.createFromRequest(request, **kv)
else:
obj = cls.createFromRequest(request)
obj.on_rest_created(request)
graph = request.DATA.get("graph", "default")
return obj.restGet(request, graph)
@classmethod
def _boundRest(cls):
RestModel._setupGraphHelpers()
@staticmethod
def _setupGraphHelpers():
if not GRAPH_HELPERS.views:
views = importlib.import_module("rest.views")
GRAPH_HELPERS.views = views
GRAPH_HELPERS.restNotFound = views.restNotFound
GRAPH_HELPERS.restStatus = views.restStatus
GRAPH_HELPERS.restList = views.restList
GRAPH_HELPERS.restGet = views.restGet
if not GRAPH_HELPERS.get_request:
mw = importlib.import_module("rest.middleware")
GRAPH_HELPERS.get_request = mw.get_request
@classmethod
def get_rest_help(cls):
output = UberDict()
if cls.__doc__:
output.doc = cls.__doc__.rstrip()
else:
output.doc = ""
output.model_name = cls.__name__
output.fields = cls.rest_getQueryFields(True)
output.graphs = {}
if hasattr(cls, "RestMeta"):
output.graph_names = list(getattr(cls.RestMeta, "GRAPHS", {}).keys())
for key in output.graph_names:
output.graphs[key] = cls.getGraph(key)
output.no_show_fields = getattr(cls.RestMeta, "NO_SHOW_FIELDS", [])
output.no_save_fields = getattr(cls.RestMeta, "NO_SAVE_FIELDS", [])
output.search_fields = getattr(cls.RestMeta, "SEARCH_FIELDS", [])
return output
@classmethod
def on_rest_request(cls, request, pk=None):
# check if model id is in post
request.rest_class = cls
cls._boundRest()
if not pk:
pk_fields = []
key = cls.__name__.lower()
key_p = "{0}_id".format(key)
pk_fields.append(key_p)
# check if the cls has a field with the class name, (causes conflict)
if not cls.get_field_type(key):
pk_fields.append(key)
pk = request.DATA.get(pk_fields, None, field_type=int)
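            # e.g. for a hypothetical class Invoice this looks for "invoice_id" and, if the
            # model has no field named "invoice", also accepts "invoice" as the pk parameter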
# generic rest request handler
if pk:
using = getattr(cls.RestMeta, "RO_DATABASE", None)
if using is None:
using = getattr(cls.RestMeta, "DATABASE", None)
if using:
obj = cls.objects.using(using).filter(pk=pk).last()
else:
obj = cls.objects.filter(pk=pk).last()
if not obj:
return GRAPH_HELPERS.views.restNotFound(request)
if request.method == "GET":
return obj.on_rest_get(request)
elif request.method == "POST":
return obj.on_rest_post(request)
elif request.method == "DELETE":
return obj.on_rest_delete(request)
return GRAPH_HELPERS.views.restNotFound(request)
if request.method == "GET":
return cls.on_rest_list(request)
elif request.method == "POST":
if request.DATA.get("rest_batch"):
return cls.on_rest_batch(request, request.DATA.get("rest_batch"))
return cls.on_rest_create(request)
return GRAPH_HELPERS.views.restNotFound(request)
@classmethod
def searchFromRequest(cls, request, qset):
        '''applies the "search"/"q" request parameter to the queryset and returns it'''
search_fields = getattr(cls.RestMeta, "SEARCH_FIELDS", None)
search_terms = getattr(cls.RestMeta, "SEARCH_TERMS", None)
q = request.DATA.get(["search", "q"])
if q:
sq = search.get_query(q, search_fields, search_terms)
if sq:
qset = qset.filter(sq)
return qset
@classmethod
def rest_getWHITELISTED(cls):
if hasattr(cls.RestMeta, "WHITELISTED"):
return cls.RestMeta.WHITELISTED
return cls.__RestMeta__.WHITELISTED
@classmethod
def rest_getQueryFields(cls, detailed=False):
field_names = []
all_fields = True
if hasattr(cls.RestMeta, "QUERY_FIELDS"):
field_names = cls.RestMeta.QUERY_FIELDS
all_fields = "all_fields" in field_names
if all_fields:
for f in cls._meta.fields:
if not f.name.endswith("_ptr") or f in cls.rest_getWHITELISTED():
field_names.append(f.name)
if issubclass(cls, MetaDataModel):
if detailed:
field_names.append("metadata")
else:
field_names.append("properties__key")
field_names.append("properties__value")
if detailed:
output = []
for f in field_names:
if f == "metadata":
t = "MetaData"
fm = None
else:
t = cls.get_field_type(f)
fm = cls.get_fk_model(f)
info = {}
info["name"] = f
info["type"] = t
if fm:
info["model"] = "{}.{}".format(fm._meta.app_label, fm.__name__)
try:
fd = cls._meta.get_field(f)
if fd.choices:
info["choices"] = fd.choices
if fd.help_text:
info["help"] = fd.help_text()
except:
pass
output.append(info)
return output
return field_names
@classmethod
def filterFromRequest(cls, request, qset):
        '''applies the "filter" request parameter (allowed fields only) to the queryset and returns it'''
field_names = cls.rest_getQueryFields()
q = {}
        # check for custom filter
filter = request.DATA.get("filter")
# filter must be a dictionary
if filter:
"""
we can do customer filters but the name must be a allowed field
and can only be one level deep ie no double "__" "group__member__owner"
html select:
name: "user_filter"
field: "filter"
options: [
{
label: "Staff Only",
value: "is_staff:1"
},
{
label: "Online",
value: "is_online:1"
},
{
label: "Online",
value: "is_online:1"
},
]
"""
if not isinstance(filter, dict):
filters = filter.split(';')
filter = {}
for f in filters:
if ":" in f:
k, v = f.split(':')
if v in ["true", "True"]:
v = True
elif v in ["false", "False"]:
v = False
filter[k] = v
now = datetime.now()
# rest_helpers.log_print(field_names)
for key in filter:
name = key.split('__')[0]
value = filter[key]
if name in field_names and value != '__':
if isinstance(value, str) and ':' in value and value.startswith('__'):
k, v = value.split(':')
key = key + k.strip()
value = v.strip()
if key.endswith("__in") and ',' in value:
if value.startswith("["):
value = value[1:-1]
value = value.split(',')
elif value in ["true", "True"]:
value = True
elif value in ["false", "False"]:
value = False
if isinstance(value, str) and "(" in value and ")" in value:
# this is a special function call
# rest_helpers.log_print(value)
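                        # e.g. (hypothetical) filter="created__gte:days(-7)" resolves to
                        # q["created__gte"] = now - 7 days via the branches below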
if value.startswith("days("):
spos = value.find("(")+1
epos = value.find(")")
# rest_helpers.log_print(int(value[spos:epos]))
value = now + timedelta(days=int(value[spos:epos]))
# rest_helpers.log_print(now)
# rest_helpers.log_print(value)
elif value.startswith("hours("):
spos = value.find("(")+1
epos = value.find(")")
value = now + timedelta(hours=int(value[spos:epos]))
elif value.startswith("minutes("):
spos = value.find("(")+1
epos = value.find(")")
value = now + timedelta(minutes=int(value[spos:epos]))
elif value.startswith("seconds("):
spos = value.find("(")+1
epos = value.find(")")
value = now + timedelta(seconds=int(value[spos:epos]))
else:
continue
if key.count('__') <= 4:
q[key] = value
else:
rest_helpers.log_print("filterFromRequest: invalid field: {} or {}".format(name, key))
if q:
rest_helpers.log_print(q)
qset = qset.filter(**q)
return qset
@classmethod
def queryFromRequest(cls, request, qset):
        '''builds a queryset filter from known/whitelisted request fields and returns the filtered queryset'''
field_names = cls.rest_getQueryFields()
# group_field = getattr(self.RestMeta, "GROUP_FIELD", "group")
# elif fn in ["group", "merchant"]: should change to group_field
q = {}
for fn in field_names:
v = None
if fn in cls.rest_getWHITELISTED():
if fn in ["user", "member"]:
Member = RestModel.getModel("account", "Member")
v = Member.getFromRequest(request)
elif fn == "terminal":
                    Terminal = cls.get_fk_model(fn)
# Terminal = RestModel.getModel("payauth", "Terminal")
if Terminal:
tid = request.DATA.get("tid")
if tid:
v = Terminal.ro_objects().filter(tid=tid).last()
else:
v = Terminal.getFromRequest(request)
else:
v = request.DATA.get(fn)
elif fn in ["group", "merchant"]:
v = request.DATA.get(fn)
if not v:
if request.group:
v = request.group
elif getattr(request, "terminal", None):
v = request.terminal.merchant
elif fn == "start":
# this is a reserved field
# TODO internally change start to _start_
continue
else:
v = request.DATA.get(fn)
if v is None:
continue
            if isinstance(v, str) and ',' in v:
                v = [a.strip() for a in v.split(',')]
                q["{}__in".format(fn)] = v
            if isinstance(v, list):
                q["{}__in".format(fn)] = v
            elif v is not None:
                q[fn] = v
if q:
# rest_helpers.log_print("queryFromRequest: {}".format(q))
qset = qset.filter(**q)
return qset
@classmethod
def createFromRequest(cls, request, **kwargs):
obj = cls()
return obj.saveFromRequest(request, files=request.FILES, __is_new=True, **kwargs)
@classmethod
def createFromDict(cls, request, data, **kwargs):
obj = cls()
return obj.saveFromDict(request, data, __is_new=True, **kwargs)
@classmethod
def get_field_type(cls, fieldname):
        '''returns the internal type name of the field, or None if there is no such field'''
for field in cls._meta.fields:
if fieldname == field.name:
return field.get_internal_type()
return None
@classmethod
def get_fk_model(cls, fieldname):
        '''returns None if not a foreign key, otherwise the related model'''
try:
field = cls._meta.get_field(fieldname)
return field.related_model
except:
return None
| 40.146465
| 152
| 0.544754
| 62,073
| 0.976113
| 0
| 0
| 33,105
| 0.520584
| 0
| 0
| 9,289
| 0.146072
|
180d1820c70ce1e075a46251cae4f2ab29f2929f
| 803
|
py
|
Python
|
examples/rp_analytics.py
|
eirrgang/radical.pilot
|
ceccd1867dd172935d602ff4c33a5ed4467e0dc8
|
[
"MIT"
] | 47
|
2015-03-16T01:08:11.000Z
|
2022-02-02T10:36:39.000Z
|
examples/rp_analytics.py
|
eirrgang/radical.pilot
|
ceccd1867dd172935d602ff4c33a5ed4467e0dc8
|
[
"MIT"
] | 1,856
|
2015-01-02T09:32:20.000Z
|
2022-03-31T21:45:06.000Z
|
examples/rp_analytics.py
|
eirrgang/radical.pilot
|
ceccd1867dd172935d602ff4c33a5ed4467e0dc8
|
[
"MIT"
] | 28
|
2015-06-10T18:15:14.000Z
|
2021-11-07T04:36:45.000Z
|
#!/usr/bin/env python3
__copyright__ = 'Copyright 2013-2016, http://radical.rutgers.edu'
__license__ = 'MIT'
import sys
import radical.utils as ru
import radical.pilot as rp
rpu = rp.utils
# ------------------------------------------------------------------------------
#
if __name__ == '__main__':
if len(sys.argv) <= 1:
print("\n\tusage: %s <session_id>\n")
sys.exit(1)
sid = sys.argv[1]
profiles = rpu.fetch_profiles(sid=sid, skip_existing=True)
for p in profiles:
print(p)
profs = ru.read_profiles(profiles)
for p in profs:
print(type(p))
prof = ru.combine_profiles(profs)
print(len(prof))
for entry in prof:
print(entry)
# ------------------------------------------------------------------------------
| 18.25
| 80
| 0.495641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 277
| 0.344956
|
180d3a3f60ca987d84a73cb66042ea85d5cffea9
| 758
|
py
|
Python
|
tests/contrib/django/testapp/middleware.py
|
mvas/apm-agent-python
|
f4582e90eb5308b915ca51e2e98620fc22af09ec
|
[
"BSD-3-Clause"
] | null | null | null |
tests/contrib/django/testapp/middleware.py
|
mvas/apm-agent-python
|
f4582e90eb5308b915ca51e2e98620fc22af09ec
|
[
"BSD-3-Clause"
] | null | null | null |
tests/contrib/django/testapp/middleware.py
|
mvas/apm-agent-python
|
f4582e90eb5308b915ca51e2e98620fc22af09ec
|
[
"BSD-3-Clause"
] | null | null | null |
try:
from django.utils.deprecation import MiddlewareMixin
except ImportError:
# no-op class for Django < 1.10
class MiddlewareMixin(object):
pass
class BrokenRequestMiddleware(MiddlewareMixin):
def process_request(self, request):
raise ImportError('request')
class BrokenResponseMiddleware(MiddlewareMixin):
def process_response(self, request, response):
raise ImportError('response')
class BrokenViewMiddleware(MiddlewareMixin):
def process_view(self, request, func, args, kwargs):
raise ImportError('view')
class MetricsNameOverrideMiddleware(MiddlewareMixin):
def process_response(self, request, response):
request._elasticapm_transaction_name = 'foobar'
return response
| 27.071429
| 56
| 0.740106
| 623
| 0.8219
| 0
| 0
| 0
| 0
| 0
| 0
| 64
| 0.084433
|
180dd0f316d9175e1decc0de1732de58c97bdcf4
| 3,874
|
py
|
Python
|
run.py
|
Yvonne-Ouma/Password-Locker
|
b16f8e9ee36d3cb70eefb58bf7be2de1bb1948fc
|
[
"MIT"
] | null | null | null |
run.py
|
Yvonne-Ouma/Password-Locker
|
b16f8e9ee36d3cb70eefb58bf7be2de1bb1948fc
|
[
"MIT"
] | null | null | null |
run.py
|
Yvonne-Ouma/Password-Locker
|
b16f8e9ee36d3cb70eefb58bf7be2de1bb1948fc
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3.6
from user import User
from credential import Credential
def createUser(userName,password):
'''
Function to create a new user
'''
newUser = User(userName,password)
return newUser
def saveUsers(user):
'''
Function to save users
'''
user.saveUser()
def createCredential(firstName,lastName,accountName,password):
newCredential = Credential(firstName,lastName,accountName,password)
return newCredential
def saveCredential(credential):
'''
Function to save a new credential
'''
Credential.saveCredential(credential)
def delCredential(credential):
'''
Function to delete a credential
'''
credential.deleteCredential()
def findCredential(name):
'''
Function that finds a credential by name returns the credential
'''
return Credential.find_by_name(name)
def check_existingCredentials(name):
'''
Function that checks if a credential exists with that name and return a boolean
'''
return Credential.credential_exist(name)
def displayCredentials():
'''
Function that returns all the saved credentials
'''
return Credential.displayCredentials()
def main():
print("Hello Welcome to password locker.\n Login:")
userName = input("What is your name?")
password = input("Enter your password :")
print(f"Hello {userName}. what would you like to do??\n Create an acount First!!" )
print("-"* 15)
while True:
print("Us this short codes : cc - Create a new credential, dc -display credentials, fc -to search a credential, dl -to delete credential, ex -exit the credential list ")
short_code = input()
if short_code == 'cc':
print("New Credential")
print("-"*10)
print ("firstName ....")
firstName = input()
print("lastName ...")
lastName = input()
print("accountName ...")
accountName = input()
print("password ...")
password = input()
saveCredential(createCredential(firstName,lastName,accountName,password)) # create and save new credential.
print('\n')
# print (f'New Credential {firstName} {lastName} {accountName} created')
print('\n')
elif short_code == 'dc':
if displayCredentials():
print("Here is a list of all your credentials")
print('\n')
for credential in displayCredentials():
print(f"{credential.firstName} {credential.lastName} ....{credential.accountName}")
print('\n')
else:
print('\n')
print("You dont seem to have any credentials saved yet")
print('\n')
elif short_code =='dl':
print("Are your sure you want to delete this credential\n Please insert the name of the credential:")
searchName = input()
deleteCredential = Credential.deleteCredential(searchName)
elif short_code == 'fc':
print("Enter the name you want to search for")
searchName = input()
searchCredential = findCredential(searchName)
print(f" {searchCredential.lastName}")
print('-' * 20)
print(f"accountName........{searchCredential.accountName}")
elif short_code == "ex":
print("Bye ......")
break
else:
print("I really didn't get that. Please use the short codes")
if __name__ == '__main__':
main()
| 27.28169
| 177
| 0.558596
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,456
| 0.375839
|
180e054f46ac36903917c85a5ca1fbddc3d6ad0b
| 844
|
py
|
Python
|
soundrts/constants.py
|
ctoth/soundrts
|
1a1271182d53c16d3e29f5dc8f8e987415a9467b
|
[
"BSD-3-Clause"
] | null | null | null |
soundrts/constants.py
|
ctoth/soundrts
|
1a1271182d53c16d3e29f5dc8f8e987415a9467b
|
[
"BSD-3-Clause"
] | null | null | null |
soundrts/constants.py
|
ctoth/soundrts
|
1a1271182d53c16d3e29f5dc8f8e987415a9467b
|
[
"BSD-3-Clause"
] | null | null | null |
# constants used in more than one module
# Some of them might find a better home later.
from lib.nofloat import PRECISION
MAIN_METASERVER_URL = open("cfg/metaserver.txt").read().strip()
# old value used by some features (stats, ...)
METASERVER_URL = "http://jlpo.free.fr/soundrts/metaserver/"
# simulation
VIRTUAL_TIME_INTERVAL = 300 # milliseconds
COLLISION_RADIUS = 175 # millimeters # 350 / 2
USE_RANGE_MARGIN = 175 # millimeters
ORDERS_QUEUE_LIMIT = 10
MAX_NB_OF_RESOURCE_TYPES = 10
DEFAULT_MINIMAL_DAMAGE = int(.17 * PRECISION)
# used for packing the orders
NEWLINE_REPLACEMENT = ";"
SPACE_REPLACEMENT = ","
# minimal interval (in seconds) between 2 sounds
ALERT_LIMIT = .5
FOOTSTEP_LIMIT = .1
# don't play events after this limit (in seconds)
EVENT_LIMIT = 3
# use the profiler (warning: will slow down the game)
PROFILE = False
| 25.575758
| 63
| 0.755924
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 441
| 0.522512
|
180efba78897c0fa073f01ffc1050d72acb958e1
| 9,104
|
py
|
Python
|
Modules/Attention/Steps.py
|
ishine/GST_Tacotron
|
0c3d8e51042dc5d49abc842b59a13ea70f927f9d
|
[
"MIT"
] | 21
|
2020-02-23T03:35:27.000Z
|
2021-11-01T11:08:18.000Z
|
Modules/Attention/Steps.py
|
ishine/GST_Tacotron
|
0c3d8e51042dc5d49abc842b59a13ea70f927f9d
|
[
"MIT"
] | 6
|
2020-03-14T15:43:38.000Z
|
2021-07-06T09:06:57.000Z
|
Modules/Attention/Steps.py
|
ishine/GST_Tacotron
|
0c3d8e51042dc5d49abc842b59a13ea70f927f9d
|
[
"MIT"
] | 7
|
2020-03-07T11:33:09.000Z
|
2021-11-28T16:19:01.000Z
|
import tensorflow as tf
import numpy as np
'''
TF 2.0's basic attention layers(Attention and AdditiveAttention) calculate parallelly.
TO USE MONOTONIC FUNCTION, ATTENTION MUST KNOW 'n-1 ALIGNMENT'.
Thus, this parallel versions do not support the monotonic function.
'''
class BahdanauAttention(tf.keras.layers.Layer):
'''
Refer: https://www.tensorflow.org/tutorials/text/nmt_with_attention
'''
def __init__(self, size):
super(BahdanauAttention, self).__init__()
self.size = size
def build(self, input_shapes):
self.layer_Dict = {
'Query': tf.keras.layers.Dense(self.size),
'Value': tf.keras.layers.Dense(self.size),
'V': tf.keras.layers.Dense(1)
}
self.built = True
def call(self, inputs):
'''
inputs: [queries, values]
queries: [Batch, Query_dim]
values: [Batch, T_v, Value_dim]
'''
queries, values = inputs
queries = self.layer_Dict['Query'](queries) #[Batch, Att_dim]
values = self.layer_Dict['Value'](values) #[Batch, T_v, Att_dim]
queries = tf.expand_dims(queries, 1) #[Batch, 1, Att_dim]
score = self.layer_Dict['V'](tf.nn.tanh(values + queries)) #[Batch, T_v, 1]
attention_weights = tf.nn.softmax(score - tf.reduce_max(score, axis= 1, keepdims= True), axis=1) #[Batch, T_v, 1]
context_vector = tf.reduce_sum(attention_weights * values, axis=1) #[Batch, T_v, Att_dim] -> [Batch, Att_dim]
return context_vector, tf.squeeze(attention_weights, axis= -1)
def initial_alignment_fn(self, batch_size, key_time, dtype):
return tf.zeros((batch_size, key_time), dtype= dtype)
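# Illustrative usage (hypothetical shapes, not part of the original file):
#   attention = BahdanauAttention(size= 128)
#   context, alignment = attention([query, values])   # query: [Batch, Q_dim], values: [Batch, T_v, V_dim]
#   # context: [Batch, 128], alignment: [Batch, T_v]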
class BahdanauMonotonicAttention(tf.keras.layers.Layer):
'''
Refer
https://www.tensorflow.org/tutorials/text/nmt_with_attention
https://github.com/tensorflow/addons/blob/v0.7.1/tensorflow_addons/seq2seq/attention_wrapper.py#L1004-L1175
'''
def __init__(self, size, sigmoid_noise= 0.0, normalize= False, **kwargs):
super(BahdanauMonotonicAttention, self).__init__()
self.size = size
self.sigmoid_noise = sigmoid_noise
self.normalize = normalize
def build(self, input_shapes):
self.layer_Dict = {
'Query': tf.keras.layers.Dense(self.size),
'Value': tf.keras.layers.Dense(self.size),
'Key': tf.keras.layers.Dense(self.size)
}
self.attention_v = self.add_weight(
name='attention_v',
shape=[self.size,],
initializer='glorot_uniform',
dtype=self.dtype,
trainable=True
)
self.attention_score_bias = self.add_weight(
name='attention_score_bias',
shape=[],
initializer=tf.zeros_initializer(),
dtype=self.dtype,
trainable=True
)
if self.normalize:
self.attention_g = self.add_weight(
name='attention_g',
shape=[],
initializer= tf.initializers.constant([np.sqrt(1. / self.size),]),
dtype=self.dtype,
trainable=True
)
self.attention_b = self.add_weight(
name='attention_b',
shape=[self.size,],
initializer= tf.zeros_initializer(),
dtype=self.dtype,
trainable=True
)
        self.built = True
def call(self, inputs):
'''
inputs: [queries, values, previous_alignments] or [queries, values, keys, previous_alignments]
query: [Batch, Query_dim]
value: [Batch, T_v, Value_dim]
key: [Batch, T_v, Key_dim]
previous_alignment: [Batch, T_v]
'''
if len(inputs) == 3:
query, value, previous_alignment = inputs
elif len(inputs) == 4:
query, value, key, previous_alignment = inputs
else:
raise ValueError('Unexpected input length')
query = self.layer_Dict['Query'](query) # [Batch, Att_dim]
value = self.layer_Dict['Value'](value) # [Batch, T_v, Att_dim]
key = self.layer_Dict['Key'](key) if len(inputs) == 4 else value # [Batch, T_v, Att_dim]
query = tf.expand_dims(query, 1) # [Batch, 1, Att_dim]
previous_alignment = tf.expand_dims(previous_alignment, axis= 1) # [Batch, 1, T_v]
score = self._calculate_scores(query= query, key= key)
context, alignment = self._apply_scores(
score= score,
value= value,
previous_alignment= previous_alignment
) # [Batch, Att_dim], [Batch, 1, T_v]
return context, alignment
def _calculate_scores(self, query, key):
'''
Calculates attention scores as a nonlinear sum of query and key.
Args:
query: Query tensor of shape `[batch_size, 1, Att_dim]`.
key: Key tensor of shape `[batch_size, T_k, Att_dim]`.
Returns:
Tensor of shape `[batch_size, T_k]`.
'''
if self.normalize:
norm_v = self.attention_g * self.attention_v * tf.math.rsqrt(tf.reduce_sum(tf.square(self.attention_v)))
return tf.reduce_sum(norm_v * tf.tanh(query + key + self.attention_b), axis= -1) + self.attention_score_bias #[Batch, T_k, Att_dim] -> [Batch, T_k]
else:
return tf.reduce_sum(self.attention_v * tf.tanh(query + key), axis= -1) + self.attention_score_bias #[Batch, T_k, Att_dim] -> [Batch, T_k]
def _apply_scores(self, score, value, previous_alignment):
'''
score shape: [batch_size, T_v]`. (Must T_k == T_v)
value shape: [batch_size, T_v, Att_dim]`.
previous_alignment shape: [batch_size, 1, T_v]`.
Return: [batch_size, Att_dim], [batch_size, T_v]
'''
score = tf.expand_dims(score, axis= 1) #[Batch_size, 1, T_v]
alignment = self._monotonic_probability_fn(score, previous_alignment) #[Batch_size, 1, T_v]
context = tf.matmul(alignment, value) #[Batch_size, 1, Att_dim]
return tf.squeeze(context, axis= 1), tf.squeeze(alignment, axis= 1)
def _monotonic_probability_fn(self, score, previous_alignment):
if self.sigmoid_noise > 0.0:
score += self.sigmoid_noise * tf.random.normal(tf.shape(score), dtype= score.dtype)
p_choose_i = tf.sigmoid(score)
cumprod_1mp_choose_i = self.safe_cumprod(1 - p_choose_i, axis= 2, exclusive= True)
alignment = p_choose_i * cumprod_1mp_choose_i * tf.cumsum(
previous_alignment / tf.clip_by_value(cumprod_1mp_choose_i, 1e-10, 1.),
axis= 2
)
return alignment
# https://github.com/tensorflow/addons/blob/9e9031133c8362fedf40f2d05f00334b6f7a970b/tensorflow_addons/seq2seq/attention_wrapper.py#L810
def safe_cumprod(self, x, *args, **kwargs):
"""Computes cumprod of x in logspace using cumsum to avoid underflow.
The cumprod function and its gradient can result in numerical instabilities
when its argument has very small and/or zero values. As long as the
argument is all positive, we can instead compute the cumulative product as
exp(cumsum(log(x))). This function can be called identically to
tf.cumprod.
Args:
x: Tensor to take the cumulative product of.
*args: Passed on to cumsum; these are identical to those in cumprod.
**kwargs: Passed on to cumsum; these are identical to those in cumprod.
Returns:
Cumulative product of x.
"""
x = tf.convert_to_tensor(x, name='x')
tiny = np.finfo(x.dtype.as_numpy_dtype).tiny
return tf.exp(tf.cumsum(tf.math.log(tf.clip_by_value(x, tiny, 1)), *args, **kwargs))
def initial_alignment_fn(self, batch_size, key_time, dtype):
return tf.one_hot(
indices= tf.zeros((batch_size), dtype= tf.int32),
depth= key_time,
dtype= dtype
)
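# Illustrative per-decoder-step usage (hypothetical shapes, not part of the original file):
#   attention = BahdanauMonotonicAttention(size= 128)
#   alignment = attention.initial_alignment_fn(batch_size, key_time, tf.float32)
#   for query in decoder_queries:                              # query: [Batch, Q_dim]
#       context, alignment = attention([query, values, alignment])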
class StepwiseMonotonicAttention(BahdanauMonotonicAttention):
'''
Refer: https://gist.github.com/dy-octa/38a7638f75c21479582d7391490df37c
'''
def __init__(self, size, sigmoid_noise= 2.0, normalize= False, **kwargs):
super(StepwiseMonotonicAttention, self).__init__(size, sigmoid_noise, normalize, **kwargs)
def _monotonic_probability_fn(self, score, previous_alignment):
'''
score: [Batch_size, 1, T_v]
previous_alignment: [batch_size, 1, T_v]
'''
if self.sigmoid_noise > 0.0:
score += self.sigmoid_noise * tf.random.normal(tf.shape(score), dtype= score.dtype)
p_choose_i = tf.sigmoid(score) # [Batch_size, 1, T_v]
pad = tf.zeros([tf.shape(p_choose_i)[0], 1, 1], dtype=p_choose_i.dtype) # [Batch_size, 1, 1]
alignment = previous_alignment * p_choose_i + tf.concat(
[pad, previous_alignment[:, :, :-1] * (1.0 - p_choose_i[:, :, :-1])], axis= -1)
return alignment
| 39.755459
| 161
| 0.611819
| 8,828
| 0.969684
| 0
| 0
| 0
| 0
| 0
| 0
| 3,107
| 0.341279
|
180f8229eeb538cba11111f51d0cfaabcfe979dc
| 14,002
|
py
|
Python
|
test.py
|
gmberton/deep-visual-geo-localization-benchmark
|
7ac395411b7eeff99da66675dedc5372839e5632
|
[
"MIT"
] | 1
|
2022-03-25T06:48:16.000Z
|
2022-03-25T06:48:16.000Z
|
test.py
|
gmberton/deep-visual-geo-localization-benchmark
|
7ac395411b7eeff99da66675dedc5372839e5632
|
[
"MIT"
] | null | null | null |
test.py
|
gmberton/deep-visual-geo-localization-benchmark
|
7ac395411b7eeff99da66675dedc5372839e5632
|
[
"MIT"
] | null | null | null |
import faiss
import torch
import logging
import numpy as np
from tqdm import tqdm
from torch.utils.data import DataLoader
from torch.utils.data.dataset import Subset
def test_efficient_ram_usage(args, eval_ds, model, test_method="hard_resize"):
"""This function gives the same output as test(), but uses much less RAM.
This can be useful when testing with large descriptors (e.g. NetVLAD) on large datasets (e.g. San Francisco).
Obviously it is slower than test(), and can't be used with PCA.
"""
model = model.eval()
if test_method == 'nearest_crop' or test_method == "maj_voting":
distances = np.empty([eval_ds.queries_num * 5, eval_ds.database_num], dtype=np.float32)
else:
distances = np.empty([eval_ds.queries_num, eval_ds.database_num], dtype=np.float32)
with torch.no_grad():
if test_method == 'nearest_crop' or test_method == 'maj_voting':
queries_features = np.ones((eval_ds.queries_num * 5, args.features_dim), dtype="float32")
else:
queries_features = np.ones((eval_ds.queries_num, args.features_dim), dtype="float32")
logging.debug("Extracting queries features for evaluation/testing")
queries_infer_batch_size = 1 if test_method == "single_query" else args.infer_batch_size
eval_ds.test_method = test_method
queries_subset_ds = Subset(eval_ds, list(range(eval_ds.database_num, eval_ds.database_num+eval_ds.queries_num)))
queries_dataloader = DataLoader(dataset=queries_subset_ds, num_workers=args.num_workers,
batch_size=queries_infer_batch_size, pin_memory=(args.device=="cuda"))
for inputs, indices in tqdm(queries_dataloader, ncols=100):
if test_method == "five_crops" or test_method == "nearest_crop" or test_method == 'maj_voting':
inputs = torch.cat(tuple(inputs)) # shape = 5*bs x 3 x 480 x 480
features = model(inputs.to(args.device))
if test_method == "five_crops": # Compute mean along the 5 crops
features = torch.stack(torch.split(features, 5)).mean(1)
if test_method == "nearest_crop" or test_method == 'maj_voting':
start_idx = (indices[0] - eval_ds.database_num) * 5
end_idx = start_idx + indices.shape[0] * 5
indices = np.arange(start_idx, end_idx)
queries_features[indices, :] = features.cpu().numpy()
else:
queries_features[indices.numpy()-eval_ds.database_num, :] = features.cpu().numpy()
queries_features = torch.tensor(queries_features).type(torch.float32).cuda()
logging.debug("Extracting database features for evaluation/testing")
# For database use "hard_resize", although it usually has no effect because database images have same resolution
eval_ds.test_method = "hard_resize"
database_subset_ds = Subset(eval_ds, list(range(eval_ds.database_num)))
database_dataloader = DataLoader(dataset=database_subset_ds, num_workers=args.num_workers,
batch_size=args.infer_batch_size, pin_memory=(args.device=="cuda"))
for inputs, indices in tqdm(database_dataloader, ncols=100):
inputs = inputs.to(args.device)
features = model(inputs)
for pn, (index, pred_feature) in enumerate(zip(indices, features)):
distances[:, index] = ((queries_features-pred_feature)**2).sum(1).cpu().numpy()
del features, queries_features, pred_feature
predictions = distances.argsort(axis=1)[:, :max(args.recall_values)]
if test_method == 'nearest_crop':
distances = np.array([distances[row, index] for row, index in enumerate(predictions)])
distances = np.reshape(distances, (eval_ds.queries_num, 20 * 5))
predictions = np.reshape(predictions, (eval_ds.queries_num, 20 * 5))
for q in range(eval_ds.queries_num):
# sort predictions by distance
sort_idx = np.argsort(distances[q])
predictions[q] = predictions[q, sort_idx]
# remove duplicated predictions, i.e. keep only the closest ones
_, unique_idx = np.unique(predictions[q], return_index=True)
# unique_idx is sorted based on the unique values, sort it again
predictions[q, :20] = predictions[q, np.sort(unique_idx)][:20]
predictions = predictions[:, :20] # keep only the closer 20 predictions for each
elif test_method == 'maj_voting':
distances = np.array([distances[row, index] for row, index in enumerate(predictions)])
distances = np.reshape(distances, (eval_ds.queries_num, 5, 20))
predictions = np.reshape(predictions, (eval_ds.queries_num, 5, 20))
for q in range(eval_ds.queries_num):
# votings, modify distances in-place
top_n_voting('top1', predictions[q], distances[q], args.majority_weight)
top_n_voting('top5', predictions[q], distances[q], args.majority_weight)
top_n_voting('top10', predictions[q], distances[q], args.majority_weight)
# flatten dist and preds from 5, 20 -> 20*5
# and then proceed as usual to keep only first 20
dists = distances[q].flatten()
preds = predictions[q].flatten()
# sort predictions by distance
sort_idx = np.argsort(dists)
preds = preds[sort_idx]
# remove duplicated predictions, i.e. keep only the closest ones
_, unique_idx = np.unique(preds, return_index=True)
# unique_idx is sorted based on the unique values, sort it again
# here the row corresponding to the first crop is used as a
# 'buffer' for each query, and in the end the dimension
# relative to crops is eliminated
predictions[q, 0, :20] = preds[np.sort(unique_idx)][:20]
predictions = predictions[:, 0, :20] # keep only the closer 20 predictions for each query
del distances
#### For each query, check if the predictions are correct
positives_per_query = eval_ds.get_positives()
# args.recall_values by default is [1, 5, 10, 20]
recalls = np.zeros(len(args.recall_values))
for query_index, pred in enumerate(predictions):
for i, n in enumerate(args.recall_values):
if np.any(np.in1d(pred[:n], positives_per_query[query_index])):
recalls[i:] += 1
break
recalls = recalls / eval_ds.queries_num * 100
recalls_str = ", ".join([f"R@{val}: {rec:.1f}" for val, rec in zip(args.recall_values, recalls)])
return recalls, recalls_str
def test(args, eval_ds, model, test_method="hard_resize", pca=None):
"""Compute features of the given dataset and compute the recalls."""
assert test_method in ["hard_resize", "single_query", "central_crop", "five_crops",
"nearest_crop", "maj_voting"], f"test_method can't be {test_method}"
if args.efficient_ram_testing:
return test_efficient_ram_usage(args, eval_ds, model, test_method)
model = model.eval()
with torch.no_grad():
logging.debug("Extracting database features for evaluation/testing")
# For database use "hard_resize", although it usually has no effect because database images have same resolution
eval_ds.test_method = "hard_resize"
database_subset_ds = Subset(eval_ds, list(range(eval_ds.database_num)))
database_dataloader = DataLoader(dataset=database_subset_ds, num_workers=args.num_workers,
batch_size=args.infer_batch_size, pin_memory=(args.device=="cuda"))
if test_method == "nearest_crop" or test_method == 'maj_voting':
all_features = np.empty((5 * eval_ds.queries_num + eval_ds.database_num, args.features_dim), dtype="float32")
else:
all_features = np.empty((len(eval_ds), args.features_dim), dtype="float32")
for inputs, indices in tqdm(database_dataloader, ncols=100):
features = model(inputs.to(args.device))
features = features.cpu().numpy()
if pca != None:
features = pca.transform(features)
all_features[indices.numpy(), :] = features
logging.debug("Extracting queries features for evaluation/testing")
queries_infer_batch_size = 1 if test_method == "single_query" else args.infer_batch_size
eval_ds.test_method = test_method
queries_subset_ds = Subset(eval_ds, list(range(eval_ds.database_num, eval_ds.database_num+eval_ds.queries_num)))
queries_dataloader = DataLoader(dataset=queries_subset_ds, num_workers=args.num_workers,
batch_size=queries_infer_batch_size, pin_memory=(args.device=="cuda"))
for inputs, indices in tqdm(queries_dataloader, ncols=100):
if test_method == "five_crops" or test_method == "nearest_crop" or test_method == 'maj_voting':
inputs = torch.cat(tuple(inputs)) # shape = 5*bs x 3 x 480 x 480
features = model(inputs.to(args.device))
if test_method == "five_crops": # Compute mean along the 5 crops
features = torch.stack(torch.split(features, 5)).mean(1)
features = features.cpu().numpy()
if pca != None:
features = pca.transform(features)
if test_method == "nearest_crop" or test_method == 'maj_voting': # store the features of all 5 crops
start_idx = eval_ds.database_num + (indices[0] - eval_ds.database_num) * 5
end_idx = start_idx + indices.shape[0] * 5
indices = np.arange(start_idx, end_idx)
all_features[indices, :] = features
else:
all_features[indices.numpy(), :] = features
queries_features = all_features[eval_ds.database_num:]
database_features = all_features[:eval_ds.database_num]
faiss_index = faiss.IndexFlatL2(args.features_dim)
faiss_index.add(database_features)
del database_features, all_features
logging.debug("Calculating recalls")
distances, predictions = faiss_index.search(queries_features, max(args.recall_values))
if test_method == 'nearest_crop':
distances = np.reshape(distances, (eval_ds.queries_num, 20 * 5))
predictions = np.reshape(predictions, (eval_ds.queries_num, 20 * 5))
for q in range(eval_ds.queries_num):
# sort predictions by distance
sort_idx = np.argsort(distances[q])
predictions[q] = predictions[q, sort_idx]
# remove duplicated predictions, i.e. keep only the closest ones
_, unique_idx = np.unique(predictions[q], return_index=True)
# unique_idx is sorted based on the unique values, sort it again
predictions[q, :20] = predictions[q, np.sort(unique_idx)][:20]
predictions = predictions[:, :20] # keep only the closer 20 predictions for each query
elif test_method == 'maj_voting':
distances = np.reshape(distances, (eval_ds.queries_num, 5, 20))
predictions = np.reshape(predictions, (eval_ds.queries_num, 5, 20))
for q in range(eval_ds.queries_num):
# votings, modify distances in-place
top_n_voting('top1', predictions[q], distances[q], args.majority_weight)
top_n_voting('top5', predictions[q], distances[q], args.majority_weight)
top_n_voting('top10', predictions[q], distances[q], args.majority_weight)
# flatten dist and preds from 5, 20 -> 20*5
# and then proceed as usual to keep only first 20
dists = distances[q].flatten()
preds = predictions[q].flatten()
# sort predictions by distance
sort_idx = np.argsort(dists)
preds = preds[sort_idx]
# remove duplicated predictions, i.e. keep only the closest ones
_, unique_idx = np.unique(preds, return_index=True)
# unique_idx is sorted based on the unique values, sort it again
# here the row corresponding to the first crop is used as a
# 'buffer' for each query, and in the end the dimension
# relative to crops is eliminated
predictions[q, 0, :20] = preds[np.sort(unique_idx)][:20]
predictions = predictions[:, 0, :20] # keep only the closer 20 predictions for each query
#### For each query, check if the predictions are correct
positives_per_query = eval_ds.get_positives()
# args.recall_values by default is [1, 5, 10, 20]
recalls = np.zeros(len(args.recall_values))
for query_index, pred in enumerate(predictions):
for i, n in enumerate(args.recall_values):
if np.any(np.in1d(pred[:n], positives_per_query[query_index])):
recalls[i:] += 1
break
# Divide by the number of queries*100, so the recalls are in percentages
recalls = recalls / eval_ds.queries_num * 100
recalls_str = ", ".join([f"R@{val}: {rec:.1f}" for val, rec in zip(args.recall_values, recalls)])
return recalls, recalls_str
def top_n_voting(topn, predictions, distances, maj_weight):
if topn == 'top1':
n = 1
selected = 0
elif topn == 'top5':
n = 5
selected = slice(0, 5)
elif topn == 'top10':
n = 10
selected = slice(0, 10)
# find predictions that repeat in the first, first five,
    # or first ten columns for each crop
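    # e.g. (hypothetical) if prediction id 42 appears in 3 of the 5 crops' top-5 lists,
    # each of its distances is reduced by maj_weight * 3/5, promoting it in the final sort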
vals, counts = np.unique(predictions[:, selected], return_counts=True)
# for each prediction that repeats more than once,
# subtract from its score
for val, count in zip(vals[counts > 1], counts[counts > 1]):
mask = (predictions[:, selected] == val)
distances[:, selected][mask] -= maj_weight * count/n
| 54.909804
| 121
| 0.644337
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,424
| 0.244536
|
18107664baf7404f0465f06470c192a8803624ac
| 355
|
py
|
Python
|
insertionsort.py
|
emcd123/Matroids
|
f1ab7a5164a60b753ba429ef7ba9ce36517d4439
|
[
"MIT"
] | null | null | null |
insertionsort.py
|
emcd123/Matroids
|
f1ab7a5164a60b753ba429ef7ba9ce36517d4439
|
[
"MIT"
] | null | null | null |
insertionsort.py
|
emcd123/Matroids
|
f1ab7a5164a60b753ba429ef7ba9ce36517d4439
|
[
"MIT"
] | 1
|
2021-11-21T18:03:07.000Z
|
2021-11-21T18:03:07.000Z
|
import random
li=[]
for i in range(10):#creating a random list using code from blackboard
li=li+[random.randrange(0,50)]
print(li)
def insertionSort(li):#insertion sort function
i=1
while(i < len(li)):
j = i
while (j>0) and (li[j-1] > li[j]):
li[j], li[j-1] = li[j-1], li[j]
j = j-1
i = i+1
return li
print(insertionSort(li))
#print(li)
| 18.684211
| 69
| 0.630986
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 84
| 0.23662
|
1810948fff7ddb4956a7253f2de040223223f990
| 1,442
|
py
|
Python
|
python-packages/hyperopt-0.0.2/hyperopt/tests/test_fmin.py
|
rajegannathan/grasp-lift-eeg-cat-dog-solution-updated
|
ee45bee6f96cdb6d91184abc16f41bba1546c943
|
[
"BSD-3-Clause"
] | 2
|
2017-08-13T14:09:32.000Z
|
2018-07-16T23:39:00.000Z
|
python-packages/hyperopt-0.0.2/hyperopt/tests/test_fmin.py
|
rajegannathan/grasp-lift-eeg-cat-dog-solution-updated
|
ee45bee6f96cdb6d91184abc16f41bba1546c943
|
[
"BSD-3-Clause"
] | null | null | null |
python-packages/hyperopt-0.0.2/hyperopt/tests/test_fmin.py
|
rajegannathan/grasp-lift-eeg-cat-dog-solution-updated
|
ee45bee6f96cdb6d91184abc16f41bba1546c943
|
[
"BSD-3-Clause"
] | 2
|
2018-04-02T06:45:11.000Z
|
2018-07-16T23:39:02.000Z
|
import nose.tools
from hyperopt import fmin, rand, tpe, hp, Trials, exceptions, space_eval
def test_quadratic1_rand():
trials = Trials()
argmin = fmin(
fn=lambda x: (x - 3) ** 2,
space=hp.uniform('x', -5, 5),
algo=rand.suggest,
max_evals=500,
trials=trials)
assert len(trials) == 500
assert abs(argmin['x'] - 3.0) < .25
def test_quadratic1_tpe():
trials = Trials()
argmin = fmin(
fn=lambda x: (x - 3) ** 2,
space=hp.uniform('x', -5, 5),
algo=tpe.suggest,
max_evals=50,
trials=trials)
assert len(trials) == 50, len(trials)
assert abs(argmin['x'] - 3.0) < .25, argmin
@nose.tools.raises(exceptions.DuplicateLabel)
def test_duplicate_label_is_error():
trials = Trials()
def fn(xy):
x, y = xy
return x ** 2 + y ** 2
fmin(fn=fn,
space=[
hp.uniform('x', -5, 5),
hp.uniform('x', -5, 5),
],
algo=rand.suggest,
max_evals=500,
trials=trials)
def test_space_eval():
space = hp.choice('a',
[
('case 1', 1 + hp.lognormal('c1', 0, 1)),
('case 2', hp.uniform('c2', -10, 10))
])
assert space_eval(space, {'a': 0, 'c1': 1.0}) == ('case 1', 2.0)
assert space_eval(space, {'a': 1, 'c2': 3.5}) == ('case 2', 3.5)
| 23.258065
| 72
| 0.489598
| 0
| 0
| 0
| 0
| 391
| 0.271151
| 0
| 0
| 75
| 0.052011
|
1810ed3f25b77f5724cfa46b09080dd25d3ba89c
| 737
|
py
|
Python
|
aaweb/__init__.py
|
cpelite/astorian-airways
|
55498f308de7a4b8159519e191b492675ec5612a
|
[
"CC0-1.0"
] | null | null | null |
aaweb/__init__.py
|
cpelite/astorian-airways
|
55498f308de7a4b8159519e191b492675ec5612a
|
[
"CC0-1.0"
] | null | null | null |
aaweb/__init__.py
|
cpelite/astorian-airways
|
55498f308de7a4b8159519e191b492675ec5612a
|
[
"CC0-1.0"
] | 3
|
2020-04-14T20:46:50.000Z
|
2021-03-11T19:07:20.000Z
|
# -*- coding: utf-8 -*-
import os
from datetime import timedelta
from flask import Flask, session
default_timezone = 'Europe/Berlin'
app = Flask(__name__, static_folder='../static', static_url_path='/static', template_folder="../templates/")
app.permanent_session_lifetime = timedelta(minutes=60)
app.config.update(
SESSION_COOKIE_NAME = "AAsession",
ERROR_LOG_FILE = "%s/app.log" % os.environ.get('OPENSHIFT_LOG_DIR', 'logs')
)
@app.before_request
def session_activity():
session.modified = True
@app.route('/robots.txt')
def serve_robots():
return 'User-agent: *\nDisallow: /'
# VIEWS
import aaweb.views
import aaweb.forms
# API
import aaweb.api
# additional functionalities
import aaweb.error
import aaweb.log
| 20.472222
| 108
| 0.738128
| 0
| 0
| 0
| 0
| 154
| 0.208955
| 0
| 0
| 202
| 0.274084
|
1812cc808e8b51d1262a39abd3b6e4c2337c6ac5
| 1,528
|
py
|
Python
|
Examples/Segmentation/WatershedSegmentation1.py
|
nalinimsingh/ITK_4D
|
95a2eacaeaffe572889832ef0894239f89e3f303
|
[
"Apache-2.0"
] | 3
|
2018-10-01T20:46:17.000Z
|
2019-12-17T19:39:50.000Z
|
Examples/Segmentation/WatershedSegmentation1.py
|
nalinimsingh/ITK_4D
|
95a2eacaeaffe572889832ef0894239f89e3f303
|
[
"Apache-2.0"
] | null | null | null |
Examples/Segmentation/WatershedSegmentation1.py
|
nalinimsingh/ITK_4D
|
95a2eacaeaffe572889832ef0894239f89e3f303
|
[
"Apache-2.0"
] | 4
|
2018-05-17T16:34:54.000Z
|
2020-09-24T02:12:40.000Z
|
#==========================================================================
#
# Copyright Insight Software Consortium
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==========================================================================*/
import InsightToolkit as itk
import sys
reader = itk.itkImageFileReaderF2_New()
reader.SetFileName( sys.argv[1] )
diffusion = itk.itkGradientAnisotropicDiffusionImageFilterF2F2_New()
diffusion.SetInput(reader.GetOutput())
diffusion.SetTimeStep(0.0625)
diffusion.SetConductanceParameter(9.0)
diffusion.SetNumberOfIterations( 5 );
gradient = itk.itkGradientMagnitudeImageFilterF2F2_New()
gradient.SetInput(diffusion.GetOutput())
watershed = itk.itkWatershedImageFilterF2_New()
watershed.SetInput(gradient.GetOutput())
watershed.SetThreshold(0.01)
watershed.SetLevel(0.2)
writer = itk.itkImageFileWriterUL2_New()
writer.SetFileName( sys.argv[2] )
writer.SetInput( watershed.GetOutput() )
writer.Update()
| 33.217391
| 78
| 0.676702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 771
| 0.504581
|
1815ed2b6c358f6414fe0404d22b0c279e749b59
| 1,520
|
py
|
Python
|
study_roadmaps/python_sample_examples/gluon/3_aux_functions/train.py
|
Shreyashwaghe/monk_v1
|
4ee4d9483e8ffac9b73a41f3c378e5abf5fc799b
|
[
"Apache-2.0"
] | 7
|
2020-07-26T08:37:29.000Z
|
2020-10-30T10:23:11.000Z
|
study_roadmaps/python_sample_examples/gluon/3_aux_functions/train.py
|
mursalfk/monk_v1
|
62f34a52f242772186ffff7e56764e958fbcd920
|
[
"Apache-2.0"
] | 9
|
2020-01-28T21:40:39.000Z
|
2022-02-10T01:24:06.000Z
|
study_roadmaps/python_sample_examples/gluon/3_aux_functions/train.py
|
mursalfk/monk_v1
|
62f34a52f242772186ffff7e56764e958fbcd920
|
[
"Apache-2.0"
] | 1
|
2020-10-07T12:57:44.000Z
|
2020-10-07T12:57:44.000Z
|
import os
import sys
sys.path.append("../../../monk/");
import psutil
from gluon_prototype import prototype
gtf = prototype(verbose=1);
gtf.Prototype("sample-project-1", "sample-experiment-1");
gtf.Default(dataset_path="../../../monk/system_check_tests/datasets/dataset_cats_dogs_train",
model_name="resnet18_v1", freeze_base_network=True, num_epochs=2);
######################################################## Summary #####################################################
gtf.Summary()
###########################################################################################################################
##################################################### EDA - Find Num images per class #####################################
gtf.EDA(show_img=True, save_img=True);
###########################################################################################################################
##################################################### EDA - Find Missing and corrupted images #####################################
gtf.EDA(check_missing=True, check_corrupt=True);
###########################################################################################################################
##################################################### Estimate Training Time #####################################
gtf.Estimate_Train_Time(num_epochs=50);
###########################################################################################################################
| 33.043478
| 131
| 0.309211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,118
| 0.735526
|
18173f17dd015c09e3b1cfc44c736b20bfea7170
| 126
|
py
|
Python
|
ppa-mirror/config.py
|
elprup/ppa-mirror
|
29e8a5027bbb698fcb36a250484b08ea945f65cf
|
[
"MIT"
] | null | null | null |
ppa-mirror/config.py
|
elprup/ppa-mirror
|
29e8a5027bbb698fcb36a250484b08ea945f65cf
|
[
"MIT"
] | null | null | null |
ppa-mirror/config.py
|
elprup/ppa-mirror
|
29e8a5027bbb698fcb36a250484b08ea945f65cf
|
[
"MIT"
] | 1
|
2021-03-04T13:43:34.000Z
|
2021-03-04T13:43:34.000Z
|
cache_root = '/home/ubuntu/ppa-mirror/cache/'
mirror_root = '/home/ubuntu/ppa-mirror/repo'
http_proxy = "188.112.194.222:8080"
| 42
| 45
| 0.746032
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 84
| 0.666667
|
181aa4e686c7e2eb75b68979882bfaab2af06de9
| 3,031
|
py
|
Python
|
downloader.py
|
tuxetuxe/downloader
|
76a1ac01189a6946b15ac6f58661931551dfc0ef
|
[
"Apache-2.0"
] | 3
|
2016-11-09T13:02:46.000Z
|
2020-06-04T10:38:11.000Z
|
downloader.py
|
tuxetuxe/downloader
|
76a1ac01189a6946b15ac6f58661931551dfc0ef
|
[
"Apache-2.0"
] | null | null | null |
downloader.py
|
tuxetuxe/downloader
|
76a1ac01189a6946b15ac6f58661931551dfc0ef
|
[
"Apache-2.0"
] | null | null | null |
import sys, getopt
import sched
import time
import csv
from pprint import pprint
import urllib, urllib2
from random import randint
import threading
proxies_file = ""
targets_file = ""
proxies = []
targets = []
scheduler = sched.scheduler(time.time, time.sleep)
def pick_random_proxy():
proxy_count = len(proxies) - 1;
if proxy_count == 0:
return None
proxy_index = randint(0, proxy_count )
proxy = proxies[ proxy_index ]
return proxy[ "host" ] + ":" + proxy[ "port" ]
def download_file(interval, url):
	threading.Thread(target=lambda: download_file_impl(interval, url)).start()
#randomize the interval
new_interval = interval + randint( -1 * interval, interval )
if new_interval < interval:
new_interval = interval
#repeat itself forever
scheduler.enter(new_interval, 1 , download_file, (new_interval, url) )
print "==> Next download of " + url + " in " + str( new_interval ) + " seconds"
def download_file_impl(interval, url):
selected_proxy = pick_random_proxy();
download_was_ok = True
try:
request = urllib2.Request(url)
if selected_proxy is None:
print "NO PROXY!"
else:
request.set_proxy(selected_proxy, 'http')
response = urllib2.urlopen(request)
print "Response code: " + str( response.code )
download_was_ok = response.code == 200
except urllib2.URLError, e:
download_was_ok = False
pprint( e )
if( download_was_ok ):
print " OK! "
else:
print " NOK! "
def main(argv):
global scheduler
parse_command_line_parameters(argv)
proxiesReader = csv.DictReader(open(proxies_file), dialect='excel', delimiter=',')
for row in proxiesReader:
proxies.append( row )
targetsReader = csv.DictReader(open(targets_file), dialect='excel', delimiter=',')
for row in targetsReader:
targets.append( row )
print "==============================================================================="
print "Proxies file: " + proxies_file
print "Targets file: " + targets_file
print "-------------------------------------------------------------------------------"
print "Proxies (total:" + str( len(proxies) ) + ")"
pprint( proxies )
print "Targets (total:" + str( len(targets) ) + ")"
pprint( targets )
print "==============================================================================="
for target in targets:
interval = int( target[ "interval" ] )
url = target[ "url" ]
scheduler.enter(interval, 1 , download_file, (interval, url ) )
scheduler.run()
def parse_command_line_parameters(argv):
global proxies_file
global targets_file
try:
opts, args = getopt.getopt(argv,"hp:t:",["proxies=","targets="])
except getopt.GetoptError:
print 'downloader.py -p <proxiesfile> -t <targetsfile>'
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print 'downloader.py -p <proxiesfile> -t <targetsfile>'
sys.exit()
elif opt in ("-p", "--proxiesfile"):
proxies_file = arg
elif opt in ("-t", "--targetsfile"):
targets_file = arg
if __name__ == "__main__":
main(sys.argv[1:])
| 25.470588
| 88
| 0.626856
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 679
| 0.224018
|
181b018a34f9e83a9ca0468d516a71155390ba8b
| 1,799
|
py
|
Python
|
backend/api/views/utils.py
|
pm5/Disfactory
|
2cceec2544b1bd5bb624882be626494d54a08119
|
[
"MIT"
] | null | null | null |
backend/api/views/utils.py
|
pm5/Disfactory
|
2cceec2544b1bd5bb624882be626494d54a08119
|
[
"MIT"
] | null | null | null |
backend/api/views/utils.py
|
pm5/Disfactory
|
2cceec2544b1bd5bb624882be626494d54a08119
|
[
"MIT"
] | null | null | null |
import random
from django.conf import settings
from django.db.models import Prefetch
from django.db.models.functions.math import Radians, Cos, ACos, Sin
from ..models import Factory, ReportRecord, Image, Document
def _sample(objs, k):
list_of_objs = list(objs)
random.shuffle(list_of_objs)
return list_of_objs[:k]
def _get_nearby_factories(latitude, longitude, radius):
"""Return nearby factories based on position and search range."""
# ref: https://stackoverflow.com/questions/574691/mysql-great-circle-distance-haversine-formula
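    # spherical law of cosines, distance in km (Earth radius ~6371 km):
    #   d = 6371 * acos(cos(lat1)*cos(lat2)*cos(lng2 - lng1) + sin(lat1)*sin(lat2))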
distance = 6371 * ACos(
Cos(Radians(latitude)) * Cos(Radians("lat")) * Cos(Radians("lng") - Radians(longitude))
+ Sin(Radians(latitude)) * Sin(Radians("lat"))
)
radius_km = radius
ids = Factory.objects.annotate(distance=distance).only("id").filter(distance__lt=radius_km).order_by("id")
if len(ids) > settings.MAX_FACTORY_PER_GET:
ids = _sample(ids, settings.MAX_FACTORY_PER_GET)
return (
Factory.objects.filter(id__in=[obj.id for obj in ids])
.prefetch_related(Prefetch('report_records', queryset=ReportRecord.objects.only("created_at").all()))
.prefetch_related(Prefetch('images', queryset=Image.objects.only("id").all()))
.prefetch_related(Prefetch('documents', queryset=Document.objects.only('created_at', 'display_status').all()))
.all()
)
def _get_client_ip(request):
# ref: https://stackoverflow.com/a/30558984
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
if x_forwarded_for:
return x_forwarded_for.split(",")[-1].strip()
elif request.META.get("HTTP_X_REAL_IP"):
return request.META.get("HTTP_X_REAL_IP")
else:
return request.META.get("REMOTE_ADDR")
| 36.714286
| 125
| 0.692051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 375
| 0.208449
|
181cfdf188f95cef8715790def585eab0fdb4f44
| 886
|
py
|
Python
|
tests/test_pyros_schemas/test_decorators.py
|
pyros-dev/pyros-schemas
|
a460920260ee77a1b5b6d5c0b97df52f1572ff79
|
[
"MIT"
] | 3
|
2018-01-01T17:10:16.000Z
|
2018-11-15T15:41:46.000Z
|
tests/test_pyros_schemas/test_decorators.py
|
pyros-dev/pyros-schemas
|
a460920260ee77a1b5b6d5c0b97df52f1572ff79
|
[
"MIT"
] | 7
|
2018-02-02T10:05:55.000Z
|
2018-02-17T15:15:46.000Z
|
tests/test_pyros_schemas/test_decorators.py
|
pyros-dev/pyros-schemas
|
a460920260ee77a1b5b6d5c0b97df52f1572ff79
|
[
"MIT"
] | 2
|
2017-09-27T09:46:31.000Z
|
2018-02-02T09:37:13.000Z
|
from __future__ import absolute_import
from __future__ import print_function
import pytest
import std_srvs.srv as std_srvs
# public decorators
from pyros_schemas.ros import with_service_schemas
#
# Testing with_service_schemas decorator
#
@with_service_schemas(std_srvs.Trigger)
def service_callback(data, data_dict, error):
# From spec http://docs.ros.org/api/std_srvs/html/srv/Trigger.html
assert len(data_dict) == len(data.__slots__) == 0
return {
'success': True,
'message': 'fortytwo',
}
def test_decorated_service():
resp = service_callback(std_srvs.TriggerRequest())
assert isinstance(resp, std_srvs.TriggerResponse)
assert resp.success == True
assert resp.message == 'fortytwo'
# Just in case we run this directly
if __name__ == '__main__':
pytest.main([
'test_decorators.py::test_decorated_service'
])
| 22.717949
| 70
| 0.72912
| 0
| 0
| 0
| 0
| 286
| 0.322799
| 0
| 0
| 254
| 0.286682
|
181dd4525734f8cc34fa28f835971bb355463f95
| 516
|
py
|
Python
|
src/removeElement.py
|
ianxin/algorithm
|
22214b6c81bee926f5a1c74c9417b2e7edd3ceed
|
[
"MIT"
] | 2
|
2018-03-13T08:59:14.000Z
|
2018-03-13T08:59:25.000Z
|
src/removeElement.py
|
ianxin/Algorithm
|
22214b6c81bee926f5a1c74c9417b2e7edd3ceed
|
[
"MIT"
] | null | null | null |
src/removeElement.py
|
ianxin/Algorithm
|
22214b6c81bee926f5a1c74c9417b2e7edd3ceed
|
[
"MIT"
] | null | null | null |
"""
@param: A: A list of integers
@param: elem: An integer
@return: The new length after remove
"""
# iterate over the list in reverse order
def removeElement(self, A, elem):
# write your code here
for i in range(len(A)-1,-1,-1):
if A[i] == elem:
A.pop(i)
return len(A)
# iterate over a copy of the list while modifying the original
def removeElement(self, A, elem):
# write your code here
for i in A[:]:
if i == elem:
A.remove(i)
return len(A)
| 23.454545
| 40
| 0.484496
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 219
| 0.402574
|
181e8052c8ceced20aed0b9306fa76476c4461fb
| 2,057
|
py
|
Python
|
setup.py
|
codespider/flagon
|
d94a50844025ea88fd67dc7651c4a860c3be6d1a
|
[
"MIT"
] | 3
|
2018-08-29T19:01:10.000Z
|
2018-09-14T16:07:30.000Z
|
setup.py
|
codespider/flagon
|
d94a50844025ea88fd67dc7651c4a860c3be6d1a
|
[
"MIT"
] | 8
|
2018-08-24T08:56:09.000Z
|
2018-09-15T11:13:27.000Z
|
setup.py
|
codespider/flagon
|
d94a50844025ea88fd67dc7651c4a860c3be6d1a
|
[
"MIT"
] | null | null | null |
from setuptools import setup, find_packages
import io
from collections import OrderedDict
with io.open('README.rst', 'rt', encoding='utf8') as f:
readme = f.read()
def get_version():
return '0.1.1'
setup(
name='Flask-Wired',
version=get_version(),
license='MIT',
author='Karthikkannan Maruthamuthu',
author_email='karthikkannan@gmail.com',
maintainer='Karthikkannan Maruthamuthu',
maintainer_email='karthikkannan@gmail.com',
description='Package for Flask wiring.',
long_description=readme,
url='https://github.com/treebohotels/Flask-Wired',
project_urls=OrderedDict((
('Documentation', 'https://github.com/treebohotels/Flask-Wired'),
('Code', 'https://github.com/treebohotels/Flask-Wired'),
('Issue tracker', 'https://github.com/treebohotels/Flask-Wired/issues'),
)),
package_dir={'': '.'},
packages=find_packages(".", exclude=['tests', 'sample_app']),
include_package_data=True,
zip_safe=False,
platforms='any',
python_requires='>=3.6',
install_requires=[
'Flask==1.0.2',
'Flask-Script==2.0.6',
'Flask-Migrate==2.2.1',
'flask-marshmallow==0.9.0',
'Flask-SQLAlchemy==2.3.2',
'marshmallow-sqlalchemy==0.14.1',
'psycopg2==2.7.5',
],
entry_points={
},
test_suite="tests",
classifiers=[
'Development Status :: 1 - Planning',
'Environment :: Web Environment',
'Framework :: Flask',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| 31.646154
| 80
| 0.616918
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,149
| 0.55858
|
181ed57e3eb39153ad141aa8f03aeb15ee7f7127
| 510
|
py
|
Python
|
idManager/view/authentication_view.py
|
lgarciasbr/idm-api
|
3517d29d55eb2a06fb5b4b21359b6cf6d11529a0
|
[
"Apache-2.0"
] | 2
|
2018-01-14T22:43:43.000Z
|
2018-01-14T22:43:48.000Z
|
idManager/view/authentication_view.py
|
lgarciasbr/idm-api
|
3517d29d55eb2a06fb5b4b21359b6cf6d11529a0
|
[
"Apache-2.0"
] | null | null | null |
idManager/view/authentication_view.py
|
lgarciasbr/idm-api
|
3517d29d55eb2a06fb5b4b21359b6cf6d11529a0
|
[
"Apache-2.0"
] | null | null | null |
from flask import jsonify
def auth_login(http_status_code, message, token):
view = jsonify({'status_code': http_status_code, 'message': message, '_token': token})
return view
def auth_is_valid(http_status_code, message, token):
view = jsonify({'status_code': http_status_code, 'message': message, '_token': token})
return view
def auth_logout(http_status_code, message, token):
view = jsonify({'status_code': http_status_code, 'message': message, '_token': token})
return view
| 25.5
| 90
| 0.721569
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 90
| 0.176471
|
181efed1a7997edb4c8e051cadb0058f5afd1105
| 604
|
py
|
Python
|
setup.py
|
TheSriram/deuce
|
9e8a7a342275aa02d0a59953b5a8c96ffb760b51
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
TheSriram/deuce
|
9e8a7a342275aa02d0a59953b5a8c96ffb760b51
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
TheSriram/deuce
|
9e8a7a342275aa02d0a59953b5a8c96ffb760b51
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
REQUIRES = ['six', 'pecan', 'setuptools >= 1.1.6',
'cassandra-driver', 'pymongo']
setup(
name='deuce',
version='0.1',
description='Deuce - Block-level de-duplication as-a-service',
author='Rackspace',
author_email='',
install_requires=REQUIRES,
test_suite='deuce',
zip_safe=False,
include_package_data=True,
packages=find_packages(exclude=['tests'])
)
| 25.166667
| 66
| 0.680464
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 171
| 0.283113
|
1820ae4e6fd68c69f37f4266bffb6793e643a89a
| 6,580
|
py
|
Python
|
script.py
|
rahulkmr1/heroku-python-script
|
053be38dc8c6c6ab9929ca5af772d19c57f5e498
|
[
"MIT"
] | null | null | null |
script.py
|
rahulkmr1/heroku-python-script
|
053be38dc8c6c6ab9929ca5af772d19c57f5e498
|
[
"MIT"
] | null | null | null |
script.py
|
rahulkmr1/heroku-python-script
|
053be38dc8c6c6ab9929ca5af772d19c57f5e498
|
[
"MIT"
] | null | null | null |
import telepot
import time
import requests
from bs4 import BeautifulSoup as bs
import cPickle
import csv
RAHUL_ID = 931906767
# You can leave this bit out if you're using a paid PythonAnywhere account
# proxy_url = "http://proxy.server:3128"
# telepot.api._pools = {
# 'default': urllib3.ProxyManager(proxy_url=proxy_url, num_pools=3, maxsize=10, retries=False, timeout=30),
# }
# telepot.api._onetime_pool_spec = (urllib3.ProxyManager, dict(proxy_url=proxy_url, num_pools=1, maxsize=1, retries=False, timeout=30))
# end of the stuff that's only needed for free accounts
########################
login_url = 'https://www.placement.iitbhu.ac.in/accounts/login/'
client = requests.session()
login = client.get(login_url)
login = bs(login.content, "html.parser")
payload = {
"login": "rahul.kumar.cse15@itbhu.ac.in",
"password": "rahulkmr",
"csrfmiddlewaretoken": login.input['value']
}
result = client.post(
login_url,
data = payload,
headers = dict(referer=login_url)
)
forum = client.get("https://www.placement.iitbhu.ac.in/forum/c/notice-board/2019-20/")
soup = bs(forum.content, "html.parser")
#load last message delivered to users
try:
with open("posts", "rb") as f:
posts = cPickle.load(f);
except Exception as e:
print e
posts = soup.findAll("td", "topic-name")
for i in range(len(posts)):
posts[i] = posts[i].a
posts.pop(0)
posts.pop(0)
updated = soup.findAll('td','topic-last-post')
# updated.pop()
# updated.pop(0)
#########################
bot = telepot.Bot('940251504:AAG19YYQYtkiEOCrW0fZETvmYQSskElARcc')
# chat_ids = {RAHUL_ID}
with open("IDs", "rb") as f:
chat_ids = cPickle.load(f)
print '#################No of IDs loaded: ', len(chat_ids)
####### Commands ########
def start(msg):
# with open("users.csv", "w") as f:
# writer = csv.writer(f)
# writer.writerow(msg['from'].values())
bot.sendMessage(msg['chat']['id'],"Hello " + msg['from']['first_name'])
def add_cmd(chat_id, msg, *argv):
if chat_id not in chat_ids:
chat_ids.add(chat_id)
with open("IDs", "wb") as f:
cPickle.dump(chat_ids, f);
with open("users.txt", "a") as f:
writer = csv.writer(f)
writer.writerow(msg['from'].values())
bot.sendMessage(chat_id, "Added your ID for notifications. Note that it may take upto 5 minutes to get update of a recent post")
bot.sendMessage(RAHUL_ID, "Added:\n" + str(msg['from'].values()))
else:
bot.sendMessage(chat_id, "You are already added")
def remove_cmd(chat_id, *argv):
try:
chat_ids.remove(chat_id)
with open("IDs", "wb") as f:
cPickle.dump(chat_ids, f);
bot.sendMessage(chat_id, "Removed your ID")
except KeyError:
bot.sendMessage(chat_id, "You are not in the list")
def allPosts(chat_id, *argv):
msg = ''
for i in range(len(posts)):
msg += gen_msg(posts[i]) + '\n<b>Last Updated: </b>' + updated[i].string.encode() + '\n\n'
bot.sendMessage(chat_id, text=msg, parse_mode="HTML")
def top(chat_id, param, *argv):
total = 3
if len(param) > 1 and param[1].isdigit():
total = min(15, int(param[1]))
msg = '.'
for i in range(total):
msg += gen_msg(posts[i])
try:
# print msg
bot.sendMessage(chat_id, text=msg, parse_mode="HTML")
except Exception as e:
bot.sendMessage(chat_id, text=str(e), parse_mode="HTML")
#########################
command = {'/add':add_cmd, '/remove':remove_cmd, '/all':allPosts, '/recent':top}
def handle(msg):
# print msg
content_type, chat_type, chat_id = telepot.glance(msg)
print msg['from']['first_name'], chat_id
if content_type == 'text':
if msg['text'][0] == '/':
tokens = msg['text'].split()
try:
if tokens[0] == '/start':
start(msg)
elif tokens[0] == '/add':
add_cmd(chat_id, msg)
else:
command[tokens[0]](chat_id, tokens)
except KeyError:
bot.sendMessage(chat_id, "Unknown command: {}".format(tokens[0]))
else:
bot.sendMessage(chat_id, "You said '{}'".format(msg["text"]))
bot.message_loop(handle)
print ('Listening ...')
# for chat_id in chat_ids:
# bot.sendMessage(chat_id, text='Server started', parse_mode="HTML")
bot.sendMessage(RAHUL_ID, text='Server started', parse_mode="HTML")
def gen_msg(post):
string = str(post)
string = string[:8] + '"https://www.placement.iitbhu.ac.in' + string[9:] + '\n-----------------\n<b>Last Updated: </b>' + updated[posts.index(post)].string.encode() + '\n'
# string += '
post = client.get("https://www.placement.iitbhu.ac.in/" + post['href'])
post = bs(post.content, "html.parser")
post = post.find("td", "post-content")
# print post.contents
for x in post.contents:
if type(x) is type(post.contents[0]):
string += x + '\n'
post = post.find("div", "attachments").a
if post is not None:
tmp = '<a href=' + '"https://www.placement.iitbhu.ac.in' + post['href'] + '">'
tmp += post.contents[1].split()[0]
tmp += '</a>'
string += tmp
string += '\n\n'
return string
def on_new():
global updated
global posts
posts2 = soup.findAll("td", "topic-name")
for i in range(len(posts2)):
posts2[i] = posts2[i].a
#find how many new posts
try:
total = posts2.index(posts[0])
except ValueError:
total = len(posts2)
print total, "new posts, users = ", len(chat_ids)
posts = posts2
updated = soup.findAll('td','topic-last-post')
msg = '<b>Note that you need to be logged in before opening these links, else you will see a 500 error in your browser</b>\n\n'
for i in range(total):
msg += gen_msg(posts[i])
for chat_id in chat_ids:
# print "sending update to ", chat_id
bot.sendMessage(chat_id, text=msg, parse_mode="HTML")
#save last message delivered to users
with open("posts", "wb") as f:
cPickle.dump(posts, f);
# Keep the program running.
def main():
while 1:
# bot.sendMessage(RAHUL_ID, text="Dynamic code update", parse_mode="HTML")
global forum
global soup
try:
forum = requests.get("https://www.placement.iitbhu.ac.in/forum/c/notice-board/2019-20/")
soup = bs(forum.content, "html.parser")
if len(posts) == 0 or soup.td.a['href'] != posts[0]['href']:
on_new()
except Exception as e:
bot.sendMessage(RAHUL_ID, text="<b>Exception:</b>\n" + str(e), parse_mode="HTML")
# else:
# bot.sendMessage(RAHUL_ID, text="Error in polling TPO forum", parse_mode="HTML")
try:
time.sleep(1000 * 60 *1)
finally:
# for chat_id in chat_ids:
# bot.sendMessage(chat_id, text='Server closing for maintenance, you might miss updates', parse_mode="HTML")
bot.sendMessage(RAHUL_ID, text='Server closing for maintenance, you might miss updates', parse_mode="HTML")
if __name__ == '__main__':
main()
| 25.019011
| 173
| 0.655623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,779
| 0.42234
|
1824cd98e77d7661e6eb7f082d5655ec1a45fa19
| 1,607
|
py
|
Python
|
examples/4-tensorflow-mnist/tensorflow_mnist/train.py
|
awcchungster/baklava
|
ad301afd7aa163ccf662efe08d00eeff68cdb667
|
[
"Apache-2.0"
] | 3
|
2021-08-24T03:10:14.000Z
|
2022-01-07T20:53:37.000Z
|
examples/4-tensorflow-mnist/tensorflow_mnist/train.py
|
awcchungster/baklava
|
ad301afd7aa163ccf662efe08d00eeff68cdb667
|
[
"Apache-2.0"
] | 5
|
2021-07-15T20:19:26.000Z
|
2021-08-18T23:26:46.000Z
|
examples/4-tensorflow-mnist/tensorflow_mnist/train.py
|
LaudateCorpus1/baklava
|
0e029097983db6cea00a7d779b887b149975fbc4
|
[
"Apache-2.0"
] | 5
|
2021-07-03T17:46:15.000Z
|
2022-02-24T08:05:39.000Z
|
"""
Train
=====
Defines functions which train models and write model artifacts to disk.
"""
from __future__ import print_function
import os
import tempfile
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
from tensorflow_mnist import model, paths
def train(path):
"""
Train a TensorFlow model on the MNIST handwritten-digit dataset and
report its accuracy on the held-out test images.
Arguments:
path (str): The path indicating where to save the final model artifacts
"""
# Construct the model graph
graph, x, y, step, initializer, accuracy, prediction = model.build()
# Start a training session
with tf.Session(graph=graph) as sess:
# Initialize the graph
sess.run(initializer)
# Train the model for 1000 steps
mnist = input_data.read_data_sets(tempfile.mkdtemp(), one_hot=True)
for _ in range(1000):
batch_xs, batch_ys = mnist.train.next_batch(100)
sess.run(step, feed_dict={x: batch_xs, y: batch_ys})
# Display accuracy measurement
print(sess.run(accuracy, feed_dict={x: mnist.test.images,
y: mnist.test.labels}))
# Save the variable data to disk
os.makedirs(path)
saver = tf.train.Saver()
saver.save(sess, path)
print('Success!')
def main():
"""
Load features and labels, train the neural network, and serialize model
artifact.
Note: This is the training entrypoint used by baklava!
"""
path = paths.model('mnist')
train(path)
| 26.344262
| 79
| 0.653391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 665
| 0.413815
|
18257b1e23725fb3440c7a7dd07da911552a0f1a
| 16,942
|
py
|
Python
|
google/cloud/binaryauthorization/v1/binaryauthorization-v1-py/google/cloud/binaryauthorization_v1/types/resources.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 7
|
2021-02-21T10:39:41.000Z
|
2021-12-07T07:31:28.000Z
|
google/cloud/binaryauthorization/v1/binaryauthorization-v1-py/google/cloud/binaryauthorization_v1/types/resources.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 6
|
2021-02-02T23:46:11.000Z
|
2021-11-15T01:46:02.000Z
|
google/cloud/binaryauthorization/v1/binaryauthorization-v1-py/google/cloud/binaryauthorization_v1/types/resources.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 4
|
2021-01-28T23:25:45.000Z
|
2021-08-30T01:55:16.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.cloud.binaryauthorization.v1',
manifest={
'Policy',
'AdmissionWhitelistPattern',
'AdmissionRule',
'Attestor',
'UserOwnedGrafeasNote',
'PkixPublicKey',
'AttestorPublicKey',
},
)
class Policy(proto.Message):
r"""A [policy][google.cloud.binaryauthorization.v1.Policy] for container
image binary authorization.
Attributes:
name (str):
Output only. The resource name, in the format
``projects/*/policy``. There is at most one policy per
project.
description (str):
Optional. A descriptive comment.
global_policy_evaluation_mode (google.cloud.binaryauthorization_v1.types.Policy.GlobalPolicyEvaluationMode):
Optional. Controls the evaluation of a
Google-maintained global admission policy for
common system-level images. Images not covered
by the global policy will be subject to the
project admission policy. This setting has no
effect when specified inside a global admission
policy.
admission_whitelist_patterns (Sequence[google.cloud.binaryauthorization_v1.types.AdmissionWhitelistPattern]):
Optional. Admission policy allowlisting. A
matching admission request will always be
permitted. This feature is typically used to
exclude Google or third-party infrastructure
images from Binary Authorization policies.
cluster_admission_rules (Sequence[google.cloud.binaryauthorization_v1.types.Policy.ClusterAdmissionRulesEntry]):
Optional. Per-cluster admission rules. Cluster spec format:
``location.clusterId``. There can be at most one admission
rule per cluster spec. A ``location`` is either a compute
zone (e.g. us-central1-a) or a region (e.g. us-central1).
For ``clusterId`` syntax restrictions see
https://cloud.google.com/container-engine/reference/rest/v1/projects.zones.clusters.
kubernetes_namespace_admission_rules (Sequence[google.cloud.binaryauthorization_v1.types.Policy.KubernetesNamespaceAdmissionRulesEntry]):
Optional. Per-kubernetes-namespace admission rules. K8s
namespace spec format: [a-z.-]+, e.g. 'some-namespace'
kubernetes_service_account_admission_rules (Sequence[google.cloud.binaryauthorization_v1.types.Policy.KubernetesServiceAccountAdmissionRulesEntry]):
Optional. Per-kubernetes-service-account admission rules.
Service account spec format: ``namespace:serviceaccount``.
e.g. 'test-ns:default'
istio_service_identity_admission_rules (Sequence[google.cloud.binaryauthorization_v1.types.Policy.IstioServiceIdentityAdmissionRulesEntry]):
Optional. Per-istio-service-identity
admission rules. Istio service identity spec
format:
spiffe://<domain>/ns/<namespace>/sa/<serviceaccount>
or <domain>/ns/<namespace>/sa/<serviceaccount>
e.g. spiffe://example.com/ns/test-ns/sa/default
default_admission_rule (google.cloud.binaryauthorization_v1.types.AdmissionRule):
Required. Default admission rule for a
cluster without a per-cluster, per- kubernetes-
service-account, or per-istio-service-identity
admission rule.
update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. Time when the policy was last
updated.
"""
class GlobalPolicyEvaluationMode(proto.Enum):
r""""""
GLOBAL_POLICY_EVALUATION_MODE_UNSPECIFIED = 0
ENABLE = 1
DISABLE = 2
name = proto.Field(
proto.STRING,
number=1,
)
description = proto.Field(
proto.STRING,
number=6,
)
global_policy_evaluation_mode = proto.Field(
proto.ENUM,
number=7,
enum=GlobalPolicyEvaluationMode,
)
admission_whitelist_patterns = proto.RepeatedField(
proto.MESSAGE,
number=2,
message='AdmissionWhitelistPattern',
)
cluster_admission_rules = proto.MapField(
proto.STRING,
proto.MESSAGE,
number=3,
message='AdmissionRule',
)
kubernetes_namespace_admission_rules = proto.MapField(
proto.STRING,
proto.MESSAGE,
number=10,
message='AdmissionRule',
)
kubernetes_service_account_admission_rules = proto.MapField(
proto.STRING,
proto.MESSAGE,
number=8,
message='AdmissionRule',
)
istio_service_identity_admission_rules = proto.MapField(
proto.STRING,
proto.MESSAGE,
number=9,
message='AdmissionRule',
)
default_admission_rule = proto.Field(
proto.MESSAGE,
number=4,
message='AdmissionRule',
)
update_time = proto.Field(
proto.MESSAGE,
number=5,
message=timestamp_pb2.Timestamp,
)
class AdmissionWhitelistPattern(proto.Message):
r"""An [admission allowlist
pattern][google.cloud.binaryauthorization.v1.AdmissionWhitelistPattern]
exempts images from checks by [admission
rules][google.cloud.binaryauthorization.v1.AdmissionRule].
Attributes:
name_pattern (str):
An image name pattern to allowlist, in the form
``registry/path/to/image``. This supports a trailing ``*``
wildcard, but this is allowed only in text after the
``registry/`` part. This also supports a trailing ``**``
wildcard which matches subdirectories of a given entry.
"""
name_pattern = proto.Field(
proto.STRING,
number=1,
)
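# Illustrative allowlist patterns, restating the docstring above (the image
# names are made-up examples, not part of the generated API surface):
#
#   "gcr.io/example-project/example-image*"  -- trailing "*" wildcard after the registry part
#   "gcr.io/example-project/**"              -- trailing "**" also matches subdirectories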
class AdmissionRule(proto.Message):
r"""An [admission
rule][google.cloud.binaryauthorization.v1.AdmissionRule] specifies
either that all container images used in a pod creation request must
be attested to by one or more
[attestors][google.cloud.binaryauthorization.v1.Attestor], that all
pod creations will be allowed, or that all pod creations will be
denied.
Images matching an [admission allowlist
pattern][google.cloud.binaryauthorization.v1.AdmissionWhitelistPattern]
are exempted from admission rules and will never block a pod
creation.
Attributes:
evaluation_mode (google.cloud.binaryauthorization_v1.types.AdmissionRule.EvaluationMode):
Required. How this admission rule will be
evaluated.
require_attestations_by (Sequence[str]):
Optional. The resource names of the attestors that must
attest to a container image, in the format
``projects/*/attestors/*``. Each attestor must exist before
a policy can reference it. To add an attestor to a policy
the principal issuing the policy change request must be able
to read the attestor resource.
Note: this field must be non-empty when the evaluation_mode
field specifies REQUIRE_ATTESTATION, otherwise it must be
empty.
enforcement_mode (google.cloud.binaryauthorization_v1.types.AdmissionRule.EnforcementMode):
Required. The action when a pod creation is
denied by the admission rule.
"""
class EvaluationMode(proto.Enum):
r""""""
EVALUATION_MODE_UNSPECIFIED = 0
ALWAYS_ALLOW = 1
REQUIRE_ATTESTATION = 2
ALWAYS_DENY = 3
class EnforcementMode(proto.Enum):
r"""Defines the possible actions when a pod creation is denied by
an admission rule.
"""
ENFORCEMENT_MODE_UNSPECIFIED = 0
ENFORCED_BLOCK_AND_AUDIT_LOG = 1
DRYRUN_AUDIT_LOG_ONLY = 2
evaluation_mode = proto.Field(
proto.ENUM,
number=1,
enum=EvaluationMode,
)
require_attestations_by = proto.RepeatedField(
proto.STRING,
number=2,
)
enforcement_mode = proto.Field(
proto.ENUM,
number=3,
enum=EnforcementMode,
)
class Attestor(proto.Message):
r"""An [attestor][google.cloud.binaryauthorization.v1.Attestor] that
attests to container image artifacts. An existing attestor cannot be
modified except where indicated.
Attributes:
name (str):
Required. The resource name, in the format:
``projects/*/attestors/*``. This field may not be updated.
description (str):
Optional. A descriptive comment. This field
may be updated. The field may be displayed in
chooser dialogs.
user_owned_grafeas_note (google.cloud.binaryauthorization_v1.types.UserOwnedGrafeasNote):
This specifies how an attestation will be
read, and how it will be used during policy
enforcement.
update_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. Time when the attestor was last
updated.
"""
name = proto.Field(
proto.STRING,
number=1,
)
description = proto.Field(
proto.STRING,
number=6,
)
user_owned_grafeas_note = proto.Field(
proto.MESSAGE,
number=3,
oneof='attestor_type',
message='UserOwnedGrafeasNote',
)
update_time = proto.Field(
proto.MESSAGE,
number=4,
message=timestamp_pb2.Timestamp,
)
class UserOwnedGrafeasNote(proto.Message):
r"""An [user owned Grafeas
note][google.cloud.binaryauthorization.v1.UserOwnedGrafeasNote]
references a Grafeas Attestation.Authority Note created by the user.
Attributes:
note_reference (str):
Required. The Grafeas resource name of a
Attestation.Authority Note, created by the user, in the
format: ``projects/*/notes/*``. This field may not be
updated.
An attestation by this attestor is stored as a Grafeas
Attestation.Authority Occurrence that names a container
image and that links to this Note. Grafeas is an external
dependency.
public_keys (Sequence[google.cloud.binaryauthorization_v1.types.AttestorPublicKey]):
Optional. Public keys that verify
attestations signed by this attestor. This
field may be updated.
If this field is non-empty, one of the specified
public keys must verify that an attestation was
signed by this attestor for the image specified
in the admission request.
If this field is empty, this attestor always
returns that no valid attestations exist.
delegation_service_account_email (str):
Output only. This field will contain the service account
email address that this Attestor will use as the principal
when querying Container Analysis. Attestor administrators
must grant this service account the IAM role needed to read
attestations from the [note_reference][Note] in Container
Analysis (``containeranalysis.notes.occurrences.viewer``).
This email address is fixed for the lifetime of the
Attestor, but callers should not make any other assumptions
about the service account email; future versions may use an
email based on a different naming pattern.
"""
note_reference = proto.Field(
proto.STRING,
number=1,
)
public_keys = proto.RepeatedField(
proto.MESSAGE,
number=2,
message='AttestorPublicKey',
)
delegation_service_account_email = proto.Field(
proto.STRING,
number=3,
)
class PkixPublicKey(proto.Message):
r"""A public key in the PkixPublicKey format (see
https://tools.ietf.org/html/rfc5280#section-4.1.2.7 for
details). Public keys of this type are typically textually
encoded using the PEM format.
Attributes:
public_key_pem (str):
A PEM-encoded public key, as described in
https://tools.ietf.org/html/rfc7468#section-13
signature_algorithm (google.cloud.binaryauthorization_v1.types.PkixPublicKey.SignatureAlgorithm):
The signature algorithm used to verify a message against a
signature using this key. This signature algorithm must match the structure and
match the structure and any object identifiers encoded in
``public_key_pem`` (i.e. this algorithm must match that of
the public key).
"""
class SignatureAlgorithm(proto.Enum):
r"""Represents a signature algorithm and other information
necessary to verify signatures with a given public key. This is
based primarily on the public key types supported by Tink's
PemKeyType, which is in turn based on KMS's supported signing
algorithms. See https://cloud.google.com/kms/docs/algorithms. In
the future, BinAuthz might support additional public key types
independently of Tink and/or KMS.
"""
_pb_options = {'allow_alias': True}
SIGNATURE_ALGORITHM_UNSPECIFIED = 0
RSA_PSS_2048_SHA256 = 1
RSA_PSS_3072_SHA256 = 2
RSA_PSS_4096_SHA256 = 3
RSA_PSS_4096_SHA512 = 4
RSA_SIGN_PKCS1_2048_SHA256 = 5
RSA_SIGN_PKCS1_3072_SHA256 = 6
RSA_SIGN_PKCS1_4096_SHA256 = 7
RSA_SIGN_PKCS1_4096_SHA512 = 8
ECDSA_P256_SHA256 = 9
EC_SIGN_P256_SHA256 = 9
ECDSA_P384_SHA384 = 10
EC_SIGN_P384_SHA384 = 10
ECDSA_P521_SHA512 = 11
EC_SIGN_P521_SHA512 = 11
public_key_pem = proto.Field(
proto.STRING,
number=1,
)
signature_algorithm = proto.Field(
proto.ENUM,
number=2,
enum=SignatureAlgorithm,
)
class AttestorPublicKey(proto.Message):
r"""An [attestor public
key][google.cloud.binaryauthorization.v1.AttestorPublicKey] that
will be used to verify attestations signed by this attestor.
Attributes:
comment (str):
Optional. A descriptive comment. This field
may be updated.
id (str):
The ID of this public key. Signatures verified by BinAuthz
must include the ID of the public key that can be used to
verify them, and that ID must match the contents of this
field exactly. Additional restrictions on this field can be
imposed based on which public key type is encapsulated. See
the documentation on ``public_key`` cases below for details.
ascii_armored_pgp_public_key (str):
ASCII-armored representation of a PGP public key, as the
entire output by the command
``gpg --export --armor foo@example.com`` (either LF or CRLF
line endings). When using this field, ``id`` should be left
blank. The BinAuthz API handlers will calculate the ID and
fill it in automatically. BinAuthz computes this ID as the
OpenPGP RFC4880 V4 fingerprint, represented as upper-case
hex. If ``id`` is provided by the caller, it will be
overwritten by the API-calculated ID.
pkix_public_key (google.cloud.binaryauthorization_v1.types.PkixPublicKey):
A raw PKIX SubjectPublicKeyInfo format public key.
NOTE: ``id`` may be explicitly provided by the caller when
using this type of public key, but it MUST be a valid
RFC3986 URI. If ``id`` is left blank, a default one will be
computed based on the digest of the DER encoding of the
public key.
"""
comment = proto.Field(
proto.STRING,
number=1,
)
id = proto.Field(
proto.STRING,
number=2,
)
ascii_armored_pgp_public_key = proto.Field(
proto.STRING,
number=3,
oneof='public_key',
)
pkix_public_key = proto.Field(
proto.MESSAGE,
number=5,
oneof='public_key',
message='PkixPublicKey',
)
__all__ = tuple(sorted(__protobuf__.manifest))
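# Hedged usage sketch (not part of the generated module): proto-plus messages
# such as the ones above can typically be constructed with keyword arguments;
# the field values below are invented for illustration only.
#
#   rule = AdmissionRule(
#       evaluation_mode=AdmissionRule.EvaluationMode.ALWAYS_ALLOW,
#       enforcement_mode=AdmissionRule.EnforcementMode.ENFORCED_BLOCK_AND_AUDIT_LOG,
#   )
#   policy = Policy(description='example policy', default_admission_rule=rule)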
| 37.986547
| 156
| 0.656416
| 15,892
| 0.938024
| 0
| 0
| 0
| 0
| 0
| 0
| 12,439
| 0.734211
|
1825d71ce3841cab87835439bc5331f28ba2643a
| 4,841
|
py
|
Python
|
builtinPlugins/plugin_spending.py
|
jscherer26/Icarra
|
5bc8b298ae21dcde7e8e2253b9ed9db95fd0d164
|
[
"BSD-3-Clause"
] | 1
|
2021-11-09T04:36:57.000Z
|
2021-11-09T04:36:57.000Z
|
builtinPlugins/plugin_spending.py
|
jscherer26/Icarra
|
5bc8b298ae21dcde7e8e2253b9ed9db95fd0d164
|
[
"BSD-3-Clause"
] | null | null | null |
builtinPlugins/plugin_spending.py
|
jscherer26/Icarra
|
5bc8b298ae21dcde7e8e2253b9ed9db95fd0d164
|
[
"BSD-3-Clause"
] | 2
|
2020-03-28T02:55:19.000Z
|
2021-11-09T04:37:08.000Z
|
# Copyright (c) 2006-2010, Jesse Liesch
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE IMPLIED
# DISCLAIMED. IN NO EVENT SHALL JESSE LIESCH BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import copy
from editGrid import *
from plugin import *
from portfolio import *
import appGlobal
class SpendingModel(EditGridModel):
def __init__(self, parent = None, *args):
EditGridModel.__init__(self, parent, *args)
self.days = 30
self.categorized = True
def rebuildSpending(self):
portfolio = appGlobal.getApp().portfolio
table = portfolio.getSpendingTable(days = self.days, categorize = self.categorized)
self.setColumns(table[0])
self.setData(table[1])
class SpendingWidget(QWidget):
def __init__(self, parent):
QWidget.__init__(self, parent)
self.model = SpendingModel(self)
portfolio = appGlobal.getApp().portfolio
vbox = QVBoxLayout(self)
vbox.setMargin(0)
hor = QHBoxLayout()
hor.setMargin(0)
vbox.addLayout(hor)
hor.addWidget(QLabel("Period:"))
self.periods = ["One Week", "One Month", "Three Months", "One Year", "Two Years", "Three Years", "Five Years", "Ten Years", "Portfolio Inception"]
value = portfolio.portPrefs.getPositionPeriod()
self.period = QComboBox()
self.period.addItems(self.periods)
if value in self.periods:
self.period.setCurrentIndex(self.periods.index(value))
else:
self.period.setCurrentIndex(self.periods.index("Portfolio Inception"))
hor.addWidget(self.period)
self.connect(self.period, SIGNAL("currentIndexChanged(int)"), self.newPeriod)
showCategorized = QCheckBox("Show Categorized")
if True or portfolio.portPrefs.getPerformanceCurrent():
showCategorized.setChecked(True)
self.model.categorized = True
hor.addWidget(showCategorized)
# Redraw when checkbox is changed
self.connect(showCategorized, SIGNAL("stateChanged(int)"), self.changeCategorized)
hor.addStretch(1000)
self.table = EditGrid(self.model)
self.newPeriod()
vbox.addWidget(self.table)
self.table.setSortingEnabled(True)
self.table.sortByColumn(1, Qt.DescendingOrder)
self.table.resizeColumnsToContents()
def newPeriod(self):
period = self.periods[self.period.currentIndex()]
appGlobal.getApp().portfolio.portPrefs.setPositionPeriod(period)
period = self.periods[self.period.currentIndex()]
if period == "One Week":
self.model.days = 7
elif period == "One Month":
self.model.days = 30
elif period == "Three Months":
self.model.days = 91
elif period == "One Year":
self.model.days = 365
elif period == "Two Years":
self.model.days = 365 * 2
elif period == "Three Years":
self.model.days = 365 * 3
elif period == "Five Years":
self.model.days = 365 * 5 + 1
elif period == "Ten Years":
self.model.days = 365 * 10 + 2
else:
self.model.days = 365 * 100 # 100 years
self.model.rebuildSpending()
self.table.resizeColumnsToContents()
def changeCategorized(self, state):
self.model.categorized = state != 0
#appGlobal.getApp().portfolio.portPrefs.setPerformanceCurrent(self.model.current)
self.model.rebuildSpending()
self.table.resizeColumnsToContents()
class Plugin(PluginBase):
def __init__(self):
PluginBase.__init__(self)
def name(self):
return "Spending"
def icarraVersion(self):
return (0, 0, 0)
def version(self):
return (1, 0, 0)
def forInvestment(self):
return False
def createWidget(self, parent):
return SpendingWidget(parent)
| 33.157534
| 148
| 0.736625
| 3,143
| 0.649246
| 0
| 0
| 0
| 0
| 0
| 0
| 1,949
| 0.402603
|
18299c6187e63ee39b775b8ca8e59d659c576c75
| 5,913
|
py
|
Python
|
pyro_examples/dpgmm_full.py
|
hanyas/pyro_examples
|
7c8784bd9ac498cfaf2983da158a8209db21966e
|
[
"MIT"
] | 1
|
2021-01-05T04:58:10.000Z
|
2021-01-05T04:58:10.000Z
|
pyro_examples/dpgmm_full.py
|
hanyas/pyro_examples
|
7c8784bd9ac498cfaf2983da158a8209db21966e
|
[
"MIT"
] | null | null | null |
pyro_examples/dpgmm_full.py
|
hanyas/pyro_examples
|
7c8784bd9ac498cfaf2983da158a8209db21966e
|
[
"MIT"
] | null | null | null |
import torch
from torch.distributions import Gamma
import torch.nn.functional as F
import matplotlib.pyplot as plt
from tqdm import tqdm
from pyro.distributions import *
import pyro
from pyro.optim import Adam
from pyro.infer import SVI, Trace_ELBO, Predictive
assert pyro.__version__.startswith('1')
pyro.enable_validation(True)
pyro.set_rng_seed(1337)
torch.set_num_threads(1)
# device = torch.device("cuda:0") if torch.cuda.is_available() else torch.device("cpu")
device = torch.device("cpu")
data = torch.cat((MultivariateNormal(-2 * torch.ones(2), 0.1 * torch.eye(2)).sample([25]),
MultivariateNormal(2 * torch.ones(2), 0.1 * torch.eye(2)).sample([25]),
MultivariateNormal(torch.tensor([0., 0.]), 0.1 * torch.eye(2)).sample([25])))
data = data.to(device)
N = data.shape[0]
D = data.shape[1]
def mix_weights(beta):
beta1m_cumprod = (1 - beta).cumprod(-1)
return F.pad(beta, (0, 1), value=1) * F.pad(beta1m_cumprod, (1, 0), value=1)
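# Small sanity check added for illustration (not in the original script): the
# stick-breaking construction above should return T weights forming a valid
# simplex for T - 1 beta values, i.e. non-negative entries summing to one.
_example_beta = torch.rand(4)
_example_weights = mix_weights(_example_beta)
assert _example_weights.shape == (5,)
assert torch.isclose(_example_weights.sum(), torch.tensor(1.0))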
def model(data, **kwargs):
with pyro.plate("beta_plate", T - 1):
beta = pyro.sample("beta", Beta(1, alpha))
zeta = 2. * torch.ones(T * D, device=device)
delta = 2. * torch.ones(T * D, device=device)
with pyro.plate("prec_plate", T * D):
prec = pyro.sample("prec", Gamma(zeta, delta))
for t in pyro.plate("corr_chol_plate", T):
corr_chol[t, ...] = pyro.sample("corr_chol_{}".format(t), LKJCorrCholesky(d=D, eta=torch.ones(1, device=device)))
with pyro.plate("mu_plate", T):
_std = torch.sqrt(1. / prec.view(-1, D))
sigma_chol = torch.bmm(torch.diag_embed(_std), corr_chol)
mu = pyro.sample("mu", MultivariateNormal(torch.zeros(T, D, device=device), scale_tril=sigma_chol))
with pyro.plate("data", N):
z = pyro.sample("z", Categorical(mix_weights(beta)))
pyro.sample("obs", MultivariateNormal(mu[z], scale_tril=sigma_chol[z]), obs=data)
def guide(data, **kwargs):
gamma = pyro.param('gamma', alpha * torch.ones(T - 1, device=device), constraint=constraints.positive)
zeta = pyro.param('zeta', lambda: Uniform(1., 2.).sample([T * D]).to(device), constraint=constraints.positive)
delta = pyro.param('delta', lambda: Uniform(1., 2.).sample([T * D]).to(device), constraint=constraints.positive)
psi = pyro.param('psi', lambda: Uniform(1., 2.).sample([T]).to(device), constraint=constraints.positive)
tau = pyro.param('tau', lambda: MultivariateNormal(torch.zeros(D), 10 * torch.eye(2)).sample([T]).to(device))
pi = pyro.param('pi', torch.ones(N, T, device=device) / T, constraint=constraints.simplex)
with pyro.plate("beta_plate", T - 1):
q_beta = pyro.sample("beta", Beta(torch.ones(T - 1, device=device), gamma))
with pyro.plate("prec_plate", T * D):
q_prec = pyro.sample("prec", Gamma(zeta, delta))
q_corr_chol = torch.zeros(T, D, D, device=device)
for t in pyro.plate("corr_chol_plate", T):
q_corr_chol[t, ...] = pyro.sample("corr_chol_{}".format(t), LKJCorrCholesky(d=D, eta=psi[t]))
with pyro.plate("mu_plate", T):
_q_std = torch.sqrt(1. / q_prec.view(-1, D))
q_sigma_chol = torch.bmm(torch.diag_embed(_q_std), q_corr_chol)
q_mu = pyro.sample("mu", MultivariateNormal(tau, scale_tril=q_sigma_chol))
with pyro.plate("data", N):
z = pyro.sample("z", Categorical(pi))
T = 5
optim = Adam({"lr": 0.01})
svi = SVI(model, guide, optim, loss=Trace_ELBO(num_particles=35))
def train(num_iterations):
losses = []
pyro.clear_param_store()
fig = plt.figure(figsize=(5, 5))
for j in tqdm(range(num_iterations)):
loss = svi.step(data)
losses.append(loss)
if (j % 100) == 0:
centers, covars = marginal(guide, num_samples=250)
animate(fig.gca(), centers, covars)
plt.draw()
plt.axis('equal')
plt.pause(0.001)
plt.clf()
return losses
def truncate(alpha, centers, perc, corrs, weights):
threshold = alpha**-1 / 100.
true_centers = centers[weights > threshold]
prec = perc.view(T, D)
true_prec = prec[weights > threshold]
true_corrs = corrs[weights > threshold, ...]
_stds = torch.sqrt(1. / true_prec.view(-1, D))
_sigmas = torch.bmm(torch.diag_embed(_stds), true_corrs)
true_sigmas = torch.zeros(len(_sigmas), D, D)
for n in range(len(_sigmas)):
true_sigmas[n, ...] = torch.mm(_sigmas[n, ...], _sigmas[n, ...].T)
true_weights = weights[weights > threshold] / torch.sum(weights[weights > threshold])
return true_centers, true_sigmas, true_weights
def marginal(guide, num_samples=25):
posterior_predictive = Predictive(guide, num_samples=num_samples)
posterior_samples = posterior_predictive.forward(data)
mu_mean = posterior_samples['mu'].detach().mean(dim=0)
prec_mean = posterior_samples['prec'].detach().mean(dim=0)
corr_mean = torch.zeros(T, D, D)
for t in range(T):
corr_mean[t, ...] = posterior_samples['corr_chol_{}'.format(t)].detach().mean(dim=0)
beta_mean = posterior_samples['beta'].detach().mean(dim=0)
weights_mean = mix_weights(beta_mean)
centers, sigmas, _ = truncate(alpha, mu_mean, prec_mean, corr_mean, weights_mean)
return centers, sigmas
def animate(axes, centers, covars):
plt.scatter(data[:, 0], data[:, 1], color="blue", marker="+")
from math import pi
t = torch.arange(0, 2 * pi, 0.01)
circle = torch.stack([torch.sin(t), torch.cos(t)], dim=0)
axes.scatter(centers[:, 0], centers[:, 1], color="red")
for n in range(len(covars)):
ellipse = torch.mm(torch.cholesky(covars[n, ...]), circle)
axes.plot(ellipse[0, :] + centers[n, 0], ellipse[1, :] + centers[n, 1],
linestyle='-', linewidth=2, color='g', alpha=1.)
alpha = 0.1 * torch.ones(1, device=device)
elbo = train(5000)
plt.figure()
plt.plot(elbo)
| 33.596591
| 121
| 0.641468
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 375
| 0.06342
|
1829f18c9a4a6999de1f057e3d27520859bfe66b
| 539
|
py
|
Python
|
calplus/tests/unit/v1/test_utils.py
|
nghiadt16/CALplus
|
68c108e6abf7eeac4937b870dc7462dd6ee2fcc3
|
[
"Apache-2.0"
] | null | null | null |
calplus/tests/unit/v1/test_utils.py
|
nghiadt16/CALplus
|
68c108e6abf7eeac4937b870dc7462dd6ee2fcc3
|
[
"Apache-2.0"
] | 4
|
2017-04-05T16:14:07.000Z
|
2018-12-14T14:19:15.000Z
|
calplus/tests/unit/v1/test_utils.py
|
nghiadt16/CALplus
|
68c108e6abf7eeac4937b870dc7462dd6ee2fcc3
|
[
"Apache-2.0"
] | 2
|
2017-04-18T16:53:58.000Z
|
2018-12-04T05:42:51.000Z
|
from calplus.tests import base
from calplus.v1 import utils
class TestUtils(base.TestCase):
"""docstring for TestUtils"""
def setUp(self):
super(TestUtils, self).setUp()
def test_get_all_driver(self):
drivers = utils.get_all_driver()
self.assertEqual([], drivers)
def test_validate_driver(self):
@utils.validate_driver
def test(request, drivers):
pass
import inspect
args = inspect.getargspec(test).args
self.assertEqual(['request'], args)
| 22.458333
| 44
| 0.641929
| 476
| 0.883117
| 0
| 0
| 75
| 0.139147
| 0
| 0
| 38
| 0.070501
|
182a6b769a1cd6d38014902642d94977a040e698
| 4,213
|
py
|
Python
|
luna_pathology/cli/load_slide.py
|
msk-mind-apps/luna-pathology
|
f0e17ccfeb3dc93de150aed5bbef9fcd7443d6d0
|
[
"Apache-2.0"
] | null | null | null |
luna_pathology/cli/load_slide.py
|
msk-mind-apps/luna-pathology
|
f0e17ccfeb3dc93de150aed5bbef9fcd7443d6d0
|
[
"Apache-2.0"
] | 3
|
2021-07-21T20:28:37.000Z
|
2021-08-02T18:52:32.000Z
|
luna_pathology/cli/load_slide.py
|
msk-mind-apps/luna-pathology
|
f0e17ccfeb3dc93de150aed5bbef9fcd7443d6d0
|
[
"Apache-2.0"
] | null | null | null |
# General imports
import os, json, logging
import click
from pathlib import Path
import yaml
# From common
from luna_core.common.custom_logger import init_logger
from luna_core.common.DataStore import DataStore_v2
from luna_core.common.Node import Node
from luna_core.common.config import ConfigSet
from luna_core.common.sparksession import SparkConfig
@click.command()
@click.option('-a', '--app_config', required=True,
help="application configuration yaml file. See config.yaml.template for details.")
@click.option('-s', '--datastore_id', required=True,
help='datastore name. usually a slide id.')
@click.option('-m', '--method_param_path', required=True,
help='json parameter file with path to a WSI delta table.')
def cli(app_config, datastore_id, method_param_path):
"""Load a slide to the datastore from the whole slide image table.
app_config - application configuration yaml file. See config.yaml.template for details.
datastore_id - datastore name. usually a slide id.
method_param_path - json parameter file with path to a WSI delta table.
- job_tag: job tag to use for loading the slide
- table_path: path to the whole slide image table
- datastore_path: path to store data
"""
init_logger()
with open(method_param_path, 'r') as yaml_file:
method_data = yaml.safe_load(yaml_file)
load_slide_with_datastore(app_config, datastore_id, method_data)
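# Hypothetical method parameter file consumed by the command above (field names
# follow the docstring and the code below; every value is made up for illustration):
#
#   job_tag: wsi-load-v1
#   table_path: /data/tables/whole_slide_images
#   datastore_path: /data/datastore
#   patient_id_column_name: patient_id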
def load_slide_with_datastore(app_config, datastore_id, method_data):
"""Load a slide to the datastore from the whole slide image table.
Args:
app_config (string): path to application configuration file.
datastore_id (string): datastore name. usually a slide id.
method_data (dict): method parameters including input, output details.
Returns:
None
"""
logger = logging.getLogger(f"[datastore={datastore_id}]")
# Do some setup
cfg = ConfigSet("APP_CFG", config_file=app_config)
datastore = DataStore_v2(method_data["datastore_path"])
method_id = method_data["job_tag"]
# fetch patient_id column
patient_id_column = method_data.get("patient_id_column_name", None)
if patient_id_column == "": patient_id_column = None
try:
spark = SparkConfig().spark_session("APP_CFG", "query_slide")
slide_id = datastore_id
if patient_id_column:
# assumes if patient_id column, source is parquet from dremio
# right now has nested row-type into dict, todo: account for map type representation of dict in dremio
df = spark.read.parquet(method_data['table_path'])\
.where(f"UPPER(slide_id)='{slide_id}'")\
.select("path", "metadata", patient_id_column)\
.toPandas()
if not len(df) == 1:
print(df)
raise ValueError(f"Resulting query record is not singular, multiple scan's exist given the container address {slide_id}")
record = df.loc[0]
properties = record['metadata']
properties['patient_id'] = str(record[patient_id_column])
else:
df = spark.read.format("delta").load(method_data['table_path'])\
.where(f"UPPER(slide_id)='{slide_id}'")\
.select("path", "metadata")\
.toPandas()
if not len(df) == 1:
print(df)
raise ValueError(f"Resulting query record is not singular, multiple scan's exist given the container address {slide_id}")
record = df.loc[0]
properties = record['metadata']
spark.stop()
except Exception as e:
logger.exception (f"{e}, stopping job execution...")
raise e
# Put results in the data store
data_path = Path(record['path'].split(':')[-1])
print(data_path)
datastore.put(data_path, datastore_id, method_id, "WholeSlideImage", symlink=True)
with open(os.path.join(method_data["datastore_path"], datastore_id, method_id, "WholeSlideImage", "metadata.json"), "w") as fp:
json.dump(properties, fp)
if __name__ == "__main__":
cli()
| 36.318966
| 137
| 0.657963
| 0
| 0
| 0
| 0
| 1,105
| 0.262283
| 0
| 0
| 1,867
| 0.443152
|
182ab8edcc4ae73b49deea3cf51426229fb8e5ad
| 442
|
py
|
Python
|
classifiers/CornerDetector.py
|
Vivek2018/OSM_Building-Detection-Custom-Repo
|
278b1f5a46e49cb547162d495979056c36945e43
|
[
"MIT"
] | null | null | null |
classifiers/CornerDetector.py
|
Vivek2018/OSM_Building-Detection-Custom-Repo
|
278b1f5a46e49cb547162d495979056c36945e43
|
[
"MIT"
] | null | null | null |
classifiers/CornerDetector.py
|
Vivek2018/OSM_Building-Detection-Custom-Repo
|
278b1f5a46e49cb547162d495979056c36945e43
|
[
"MIT"
] | null | null | null |
import numpy as np
import cv2
from matplotlib import pyplot as plt
image = cv2.imread('champaigneditedcompressed.png')
kernel = np.ones((20, 20), np.float32) / 25
img = cv2.filter2D(image, -1, kernel)
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
corners = cv2.goodFeaturesToTrack(gray,10,0.01,10)
corners = np.int0(corners)
print(corners)
for i in corners:
x,y = i.ravel()
cv2.circle(img,(x,y),3,255,-1)
plt.imshow(img),plt.show()
| 23.263158
| 51
| 0.714932
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 31
| 0.070136
|
182bb85b10503c8fb7bd8a2c09551b2160fe497c
| 25,581
|
py
|
Python
|
ECUSimulation/io_processing/surveillance_handler.py
|
arturmrowca/IVNS
|
8915142d16debe4af780a9eb6859e44dea2ca7e6
|
[
"MIT"
] | null | null | null |
ECUSimulation/io_processing/surveillance_handler.py
|
arturmrowca/IVNS
|
8915142d16debe4af780a9eb6859e44dea2ca7e6
|
[
"MIT"
] | null | null | null |
ECUSimulation/io_processing/surveillance_handler.py
|
arturmrowca/IVNS
|
8915142d16debe4af780a9eb6859e44dea2ca7e6
|
[
"MIT"
] | 2
|
2018-08-04T07:43:51.000Z
|
2018-12-14T14:59:46.000Z
|
'''
Created on 12 Jun, 2015
@author: artur.mrowca
'''
from enum import Enum
from PyQt5.Qt import QObject
from PyQt5 import QtCore
from tools.ecu_logging import ECULogger
import copy
class AbstractInputHandler(QObject):
publish_infos_sig = QtCore.pyqtSignal(list)
def __init__(self):
QObject.__init__(self)
self.next = None
self._recs = []
def set_next(self, input_handler):
self.next = input_handler
def subscribe(self, obj, func_name):
'''
all objects that subscribe to this function
publish their information here
'''
self._recs.append(obj)
exec('self.publish_infos_sig.connect(obj.%s)' % func_name)
def publish(self, cur_time, monitor_inputs):
# emit the signal to all connected receivers then call next publish
try:
res = [[monitor_input.time_called, str(monitor_input.mon_id), str(monitor_input.asc_id), str(monitor_input.tag), monitor_input.msg_id, str(monitor_input.message), \
monitor_input.msg_size, monitor_input.stream_id, str(monitor_input.unique_id), str(monitor_input.data)] \
for monitor_input in monitor_inputs.get() if monitor_input.tag in self._get_tags()]
# if there is a result only:
if res:
self.publish_infos_sig.emit(copy.deepcopy(res))
except:
ECULogger().log_traceback()
if self.next != None:
self.next.publish(cur_time, monitor_inputs)
def _get_tags(self):
return []
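# Minimal usage sketch (illustrative only; "receiver" is assumed to be a QObject
# with a slot called "on_infos" that accepts a list):
#
#   handler = BufferHandler()
#   handler.subscribe(receiver, "on_infos")    # connects publish_infos_sig to receiver.on_infos
#   handler.publish(cur_time, monitor_inputs)  # emits matching rows, then delegates to self.next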
class BufferHandler(AbstractInputHandler):
def __init__(self):
AbstractInputHandler.__init__(self)
def _get_tags(self):
return [MonitorTags.BT_ECU_RECEIVE_BUFFER, MonitorTags.BT_ECU_TRANSMIT_BUFFER]
class CanBusHandler(AbstractInputHandler):
def __init__(self):
AbstractInputHandler.__init__(self)
def publish(self, cur_time, monitor_inputs):
# emit the signal to all connected receivers then call next publish
try:
res = [[monitor_input.time_called, str(monitor_input.mon_id), str(monitor_input.asc_id), str(monitor_input.tag), monitor_input.msg_id, str(monitor_input.message), \
monitor_input.msg_size, monitor_input.stream_id, str(monitor_input.unique_id), str(monitor_input.data)] \
for monitor_input in monitor_inputs.get() if monitor_input.tag in self._get_tags()]
if res:
self.publish_infos_sig.emit(copy.deepcopy([cur_time, res]))
except:
ECULogger().log_traceback()
if self.next != None:
self.next.publish(cur_time, monitor_inputs)
def _get_tags(self):
return [MonitorTags.CB_DONE_PROCESSING_MESSAGE, MonitorTags.CB_PROCESSING_MESSAGE]
class ConstellationHandler(AbstractInputHandler):
def __init__(self):
AbstractInputHandler.__init__(self)
self.first = True
def publish(self, values, monitor_inputs):
''' pushes the initial constellation exactly once
'''
try:
if self.first:
self.publish_infos_sig.emit(values)
self.first = False
except:
pass
if self.next != None:
self.next.publish(values, monitor_inputs)
def _get_tags(self):
return [MonitorTags.CONSELLATION_INFORMATION]
class EventlineHandler(AbstractInputHandler):
def __init__(self):
AbstractInputHandler.__init__(self)
self._tags_list =[MonitorTags.CP_SEC_INIT_AUTHENTICATION, \
MonitorTags.CP_SEC_RECEIVE_REG_MESSAGE, \
MonitorTags.CP_SEC_DECRYPTED_INNER_REG_MESSAGE, \
MonitorTags.CP_SEC_DECRYPTED_OUTER_REG_MESSAGE, \
MonitorTags.CP_SEC_VALIDATED_ECU_CERTIFICATE, \
MonitorTags.CP_SEC_CREATED_CMP_HASH_REG_MSG, \
MonitorTags.CP_SEC_COMPARED_HASH_REG_MSG, \
MonitorTags.CP_SEC_RECEIVE_REQ_MESSAGE, \
MonitorTags.CP_SEC_DECRYPTED_REQ_MESSAGE, \
MonitorTags.CP_SEC_ECNRYPTED_CONFIRMATION_MESSAGE, \
MonitorTags.CP_SEC_GENERATED_SESSION_KEY, \
MonitorTags.CP_SEC_ENCRYPTED_DENY_MESSAGE, \
MonitorTags.CP_SEC_ENCRYPTED_GRANT_MESSAGE, \
MonitorTags.CP_ECU_RECEIVE_SIMPLE_MESSAGE, \
MonitorTags.CP_ECU_DECRYPTED_SIMPLE_MESSAGE, \
MonitorTags.CP_ECU_INTENT_SEND_SIMPLE_MESSAGE, \
MonitorTags.CP_ECU_ENCRYPTED_SEND_SIMPLE_MESSAGE, \
MonitorTags.CP_ECU_RECEIVE_SEC_MOD_ADVERTISEMENT, \
MonitorTags.CP_ECU_VALIDATED_SEC_MOD_CERTIFICATE, \
MonitorTags.CP_ECU_START_CREATION_REG_MESSAGE, \
MonitorTags.CP_ECU_CREATED_ECU_KEY_REG_MESSAGE, \
MonitorTags.CP_ECU_ENCRYPTED_INNER_REG_MESSAGE, \
MonitorTags.CP_ECU_HASHED_INNER_REG_MESSAGE, \
MonitorTags.CP_ECU_ENCRYPTED_OUTER_REG_MESSAGE, \
MonitorTags.CP_ECU_SEND_REG_MESSAGE, \
MonitorTags.CP_ECU_RECEIVE_CONF_MESSAGE, \
MonitorTags.CP_ECU_DECRYPTED_CONF_MESSAGE, \
MonitorTags.CP_ECU_START_CREATE_REQ_MESSAGE, \
MonitorTags.CP_ECU_ENCRYPTED_REQ_MESSAGE, \
MonitorTags.CP_ECU_RECEIVE_DENY_MESSAGE, \
MonitorTags.CP_ECU_DECRYPTED_DENY_MESSAGE, \
MonitorTags.CP_ECU_RECEIVE_GRANT_MESSAGE, \
MonitorTags.CP_ECU_DECRYPTED_GRANT_MESSAGE, \
MonitorTags.CP_SESSION_AVAILABLE_SEND_MESSAGE, \
MonitorTags.CP_SEND_CLIENT_HELLO, \
MonitorTags.CP_RECEIVE_CLIENT_HELLO, \
MonitorTags.CP_SEND_ALERT_NO_CIPHERSUITE, \
MonitorTags.CP_SEND_SERVER_HELLO, \
MonitorTags.CP_SEND_SERVER_CERTIFICATE, \
MonitorTags.CP_SEND_SERVER_KEYEXCHANGE,
MonitorTags.CP_SEND_CERTIFICATE_REQUEST , \
MonitorTags.CP_SEND_SERVER_HELLO_DONE , \
MonitorTags.CP_RECEIVE_SERVER_HELLO , \
MonitorTags.CP_RECEIVE_SERVER_CERTIFICATE , \
MonitorTags.CP_RECEIVE_SERVER_KEYEXCHANGE , \
MonitorTags.CP_RECEIVE_CERTIFICATE_REQUEST , \
MonitorTags.CP_RECEIVE_SERVER_HELLO_DONE , \
MonitorTags.CP_SERVER_HELLO_DONE_VALIDATED_CERT , \
MonitorTags.CP_SEND_CLIENT_CERTIFICATE , \
MonitorTags.CP_INIT_SEND_CLIENT_KEYEXCHANGE , \
MonitorTags.CP_ENCRYPTED_CLIENT_KEYEXCHANGE , \
MonitorTags.CP_GENERATED_MASTERSEC_CLIENT_KEYEXCHANGE , \
MonitorTags.CP_INIT_SEND_CERTIFICATE_VERIFY , \
MonitorTags.CP_ENCRYPTED_CERTIFICATE_VERIFY , \
MonitorTags.CP_SEND_CIPHER_SPEC , \
MonitorTags.CP_INIT_CLIENT_FINISHED , \
MonitorTags.CP_HASHED_CLIENT_FINISHED , \
MonitorTags.CP_GENERATED_HASH_FROM_PRF_CLIENT_FINISHED , \
MonitorTags.CP_RECEIVE_CLIENT_CERTIFICATE , \
MonitorTags.CP_CLIENT_CERTIFICATE_VALIDATED , \
MonitorTags.CP_RECEIVE_CLIENT_KEYEXCHANGE , \
MonitorTags.CP_DECRYPTED_CLIENT_KEYEXCHANGE , \
MonitorTags.CP_RECEIVE_CERTIFICATE_VERIFY , \
MonitorTags.CP_DECRYPTED_CERTIFICATE_VERIFY , \
MonitorTags.CP_GENERATED_MASTER_SECRET_CERT_VERIFY , \
MonitorTags.CP_RECEIVED_CHANGE_CIPHER_SPEC , \
MonitorTags.CP_RECEIVE_CLIENT_FINISHED , \
MonitorTags.CP_CLIENT_FINISHED_HASHED_COMPARISON_HASH , \
MonitorTags.CP_CLIENT_FINISHED_GENERATED_HASH_PRF , \
MonitorTags.CP_RECEIVE_SERVER_FINISHED , \
MonitorTags.CP_SERVER_FINISHED_HASHED_COMPARISON_HASH , \
MonitorTags.CP_SERVER_FINISHED_GENERATED_HASH_PRF , \
MonitorTags.CP_INIT_SERVER_FINISHED , \
MonitorTags.CP_HASHED_SERVER_FINISHED , \
MonitorTags.CP_GENERATED_HASH_FROM_PRF_SERVER_FINISHED , \
MonitorTags.CP_SERVER_AUTHENTICATED , \
MonitorTags.CP_CLIENT_AUTHENTICATED, \
MonitorTags.CP_RECEIVE_SIMPLE_MESSAGE, \
MonitorTags.CP_INIT_EXCHANGE_FIRST_KEY_KN, \
MonitorTags.CP_ENCRYPTED_EXCHANGE_FIRST_KEY_KN, \
MonitorTags.CP_SETUP_INIT_CREATE_KEYS, \
MonitorTags.CP_SETUP_FINISHED_CREATE_KEYS, \
MonitorTags.CP_INIT_TRANSMIT_MESSAGE, \
MonitorTags.CP_MACED_TRANSMIT_MESSAGE, \
MonitorTags.CP_RECEIVED_SIMPLE_MESSAGE, \
MonitorTags.CP_BUFFERED_SIMPLE_MESSAGE, \
MonitorTags.CP_RETURNED_AUTHENTICATED_SIMPLE_MESSAGE, \
MonitorTags.CP_RECEIVED_EXCHANGE_FIRST_KEY_KN, \
MonitorTags.CP_DECRYPTED_EXCHANGE_FIRST_KEY_KN, \
MonitorTags.CP_CHECKED_KEY_LEGID, \
MonitorTags.CP_INIT_CHECK_KEY_LEGID, \
MonitorTags.CP_INIT_VERIFYING_BUFFER_MESSAGE, \
MonitorTags.CP_FINISHED_VERIFYING_BUFFER_MESSAGE, \
MonitorTags.CP_SEND_SYNC_MESSAGE, \
MonitorTags.CP_SEND_SYNC_RESPONSE_MESSAGE, \
MonitorTags.CP_RECEIVE_SYNC_RESPONSE_MESSAGE]
def publish(self, values, monitor_inputs):
''' pushes the list of ECU ids when an ECU_ID_LIST arrives,
otherwise falls back to the default publish behaviour
'''
try:
if values.tag == MonitorTags.ECU_ID_LIST:
self.publish_infos_sig.emit([ecu.ecu_id for ecu in values.data])
else:
AbstractInputHandler.publish(self, values, monitor_inputs)
except:
try:
AbstractInputHandler.publish(self, values, monitor_inputs)
except:
pass
if self.next != None:
self.next.publish(values, monitor_inputs)
def _get_tags(self):
return self._tags_list
def register_tag(self, tag):
self._tags_list += [tag]
def register_tags(self, tags_list):
self._tags_list += tags_list
self._tags_list = list(set(self._tags_list))
class CheckpointHandler(AbstractInputHandler):
''' reads and publishes all Checkpoint Monitor values'''
def __init__(self):
AbstractInputHandler.__init__(self)
self._tags_list = [MonitorTags.CP_SEC_INIT_AUTHENTICATION, \
MonitorTags.CP_SEC_RECEIVE_REG_MESSAGE, \
MonitorTags.CP_SEC_DECRYPTED_INNER_REG_MESSAGE, \
MonitorTags.CP_SEC_DECRYPTED_OUTER_REG_MESSAGE, \
MonitorTags.CP_SEC_VALIDATED_ECU_CERTIFICATE, \
MonitorTags.CP_SEC_CREATED_CMP_HASH_REG_MSG, \
MonitorTags.CP_SEC_COMPARED_HASH_REG_MSG, \
MonitorTags.CP_SEC_RECEIVE_REQ_MESSAGE, \
MonitorTags.CP_SEC_DECRYPTED_REQ_MESSAGE, \
MonitorTags.CP_SEC_ECNRYPTED_CONFIRMATION_MESSAGE, \
MonitorTags.CP_SEC_GENERATED_SESSION_KEY, \
MonitorTags.CP_SEC_ENCRYPTED_DENY_MESSAGE, \
MonitorTags.CP_SEC_ENCRYPTED_GRANT_MESSAGE, \
MonitorTags.CP_ECU_RECEIVE_SIMPLE_MESSAGE, \
MonitorTags.CP_ECU_DECRYPTED_SIMPLE_MESSAGE, \
MonitorTags.CP_ECU_INTENT_SEND_SIMPLE_MESSAGE, \
MonitorTags.CP_ECU_ENCRYPTED_SEND_SIMPLE_MESSAGE, \
MonitorTags.CP_ECU_RECEIVE_SEC_MOD_ADVERTISEMENT, \
MonitorTags.CP_ECU_VALIDATED_SEC_MOD_CERTIFICATE, \
MonitorTags.CP_ECU_START_CREATION_REG_MESSAGE, \
MonitorTags.CP_ECU_CREATED_ECU_KEY_REG_MESSAGE, \
MonitorTags.CP_ECU_ENCRYPTED_INNER_REG_MESSAGE, \
MonitorTags.CP_ECU_HASHED_INNER_REG_MESSAGE, \
MonitorTags.CP_ECU_ENCRYPTED_OUTER_REG_MESSAGE, \
MonitorTags.CP_ECU_SEND_REG_MESSAGE, \
MonitorTags.CP_ECU_RECEIVE_CONF_MESSAGE, \
MonitorTags.CP_ECU_DECRYPTED_CONF_MESSAGE, \
MonitorTags.CP_ECU_START_CREATE_REQ_MESSAGE, \
MonitorTags.CP_ECU_ENCRYPTED_REQ_MESSAGE, \
MonitorTags.CP_ECU_RECEIVE_DENY_MESSAGE, \
MonitorTags.CP_ECU_DECRYPTED_DENY_MESSAGE, \
MonitorTags.CP_ECU_RECEIVE_GRANT_MESSAGE, \
MonitorTags.CP_ECU_DECRYPTED_GRANT_MESSAGE, \
MonitorTags.CP_SESSION_AVAILABLE_SEND_MESSAGE, \
MonitorTags.CP_SEND_CLIENT_HELLO, \
MonitorTags.CP_RECEIVE_CLIENT_HELLO, \
MonitorTags.CP_SEND_ALERT_NO_CIPHERSUITE, \
MonitorTags.CP_SEND_SERVER_HELLO, \
MonitorTags.CP_SEND_SERVER_CERTIFICATE, \
MonitorTags.CP_SEND_SERVER_KEYEXCHANGE,
MonitorTags.CP_SEND_CERTIFICATE_REQUEST , \
MonitorTags.CP_SEND_SERVER_HELLO_DONE , \
MonitorTags.CP_RECEIVE_SERVER_HELLO , \
MonitorTags.CP_RECEIVE_SERVER_CERTIFICATE , \
MonitorTags.CP_RECEIVE_SERVER_KEYEXCHANGE , \
MonitorTags.CP_RECEIVE_CERTIFICATE_REQUEST , \
MonitorTags.CP_RECEIVE_SERVER_HELLO_DONE , \
MonitorTags.CP_SERVER_HELLO_DONE_VALIDATED_CERT , \
MonitorTags.CP_SEND_CLIENT_CERTIFICATE , \
MonitorTags.CP_INIT_SEND_CLIENT_KEYEXCHANGE , \
MonitorTags.CP_ENCRYPTED_CLIENT_KEYEXCHANGE , \
MonitorTags.CP_GENERATED_MASTERSEC_CLIENT_KEYEXCHANGE , \
MonitorTags.CP_INIT_SEND_CERTIFICATE_VERIFY , \
MonitorTags.CP_ENCRYPTED_CERTIFICATE_VERIFY , \
MonitorTags.CP_SEND_CIPHER_SPEC , \
MonitorTags.CP_INIT_CLIENT_FINISHED , \
MonitorTags.CP_HASHED_CLIENT_FINISHED , \
MonitorTags.CP_GENERATED_HASH_FROM_PRF_CLIENT_FINISHED , \
MonitorTags.CP_RECEIVE_CLIENT_CERTIFICATE , \
MonitorTags.CP_CLIENT_CERTIFICATE_VALIDATED , \
MonitorTags.CP_RECEIVE_CLIENT_KEYEXCHANGE , \
MonitorTags.CP_DECRYPTED_CLIENT_KEYEXCHANGE , \
MonitorTags.CP_RECEIVE_CERTIFICATE_VERIFY , \
MonitorTags.CP_DECRYPTED_CERTIFICATE_VERIFY , \
MonitorTags.CP_GENERATED_MASTER_SECRET_CERT_VERIFY , \
MonitorTags.CP_RECEIVED_CHANGE_CIPHER_SPEC , \
MonitorTags.CP_RECEIVE_CLIENT_FINISHED , \
MonitorTags.CP_CLIENT_FINISHED_HASHED_COMPARISON_HASH , \
MonitorTags.CP_CLIENT_FINISHED_GENERATED_HASH_PRF , \
MonitorTags.CP_RECEIVE_SERVER_FINISHED , \
MonitorTags.CP_SERVER_FINISHED_HASHED_COMPARISON_HASH , \
MonitorTags.CP_SERVER_FINISHED_GENERATED_HASH_PRF , \
MonitorTags.CP_INIT_SERVER_FINISHED , \
MonitorTags.CP_HASHED_SERVER_FINISHED , \
MonitorTags.CP_GENERATED_HASH_FROM_PRF_SERVER_FINISHED , \
MonitorTags.CP_SERVER_AUTHENTICATED , \
MonitorTags.CP_CLIENT_AUTHENTICATED, \
MonitorTags.CP_RECEIVE_SIMPLE_MESSAGE, \
MonitorTags.CP_INIT_EXCHANGE_FIRST_KEY_KN, \
MonitorTags.CP_ENCRYPTED_EXCHANGE_FIRST_KEY_KN, \
MonitorTags.CP_SETUP_INIT_CREATE_KEYS, \
MonitorTags.CP_SETUP_FINISHED_CREATE_KEYS, \
MonitorTags.CP_INIT_TRANSMIT_MESSAGE, \
MonitorTags.CP_MACED_TRANSMIT_MESSAGE, \
MonitorTags.CP_RECEIVED_SIMPLE_MESSAGE, \
MonitorTags.CP_BUFFERED_SIMPLE_MESSAGE, \
MonitorTags.CP_RETURNED_AUTHENTICATED_SIMPLE_MESSAGE, \
MonitorTags.CP_RECEIVED_EXCHANGE_FIRST_KEY_KN, \
MonitorTags.CP_DECRYPTED_EXCHANGE_FIRST_KEY_KN, \
MonitorTags.CP_INIT_CHECK_KEY_LEGID, \
MonitorTags.CP_CHECKED_KEY_LEGID, \
MonitorTags.CP_INIT_VERIFYING_BUFFER_MESSAGE, \
MonitorTags.CP_FINISHED_VERIFYING_BUFFER_MESSAGE, \
MonitorTags.CP_SEND_SYNC_MESSAGE, \
MonitorTags.CP_SEND_SYNC_RESPONSE_MESSAGE, \
MonitorTags.CP_RECEIVE_SYNC_RESPONSE_MESSAGE]
# override
def publish(self, cur_time, monitor_inputs):
# emit the signal to all connected receivers then call next publish
try:
res = [[monitor_input.time_called, str(monitor_input.mon_id), str(monitor_input.asc_id), str(monitor_input.tag), monitor_input.msg_id, str(monitor_input.message), \
monitor_input.msg_size, monitor_input.stream_id, str(monitor_input.unique_id), str(monitor_input.data)] \
for monitor_input in monitor_inputs.get() if monitor_input.tag in self._get_tags()]
self.publish_infos_sig.emit([None, None])
except:
ECULogger().log_traceback()
if self.next != None:
self.next.publish(cur_time, monitor_inputs)
def _get_tags(self):
return self._tags_list
def register_tag(self, tag):
self._tags_list += [tag]
def register_tags(self, tags_list):
self._tags_list += tags_list
class InputHandlerChain(object):
def add_handler(self, handler):
try:
self._next_handler.set_next(handler)
except:
self._handler = handler
self._next_handler = handler
return handler
def handler(self):
return self._handler
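# Hedged example of chaining handlers (illustrative only; the concrete handlers
# and the monitor input queue are assumptions, not taken from a specific caller):
#
#   chain = InputHandlerChain()
#   chain.add_handler(BufferHandler())
#   chain.add_handler(CanBusHandler())
#   chain.handler().publish(cur_time, monitor_inputs)  # each handler forwards to its .next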
class MonitorInput(object):
'''
Keeps the input data of a monitor. Tag defines the type of data arriving
Possible Tags are MonitorTags
'''
def __init__(self, data, monitor_tag, mon_id=False, time_called=False, asc_id=None, \
msg_id=-1, message=None, msg_size=-1, stream_id=-1, unique_id=None):
self.data = data
self.tag = monitor_tag
self.mon_id = mon_id
self.time_called = time_called
self.calling_object = None
self.asc_id = asc_id
self.msg_id = msg_id
self.message = message
self.msg_size = msg_size
self.stream_id = stream_id
self.unique_id = unique_id
class MonitorTags(Enum):
# Buffer Tags
BT_ECU_TRANSMIT_BUFFER = 1
BT_ECU_RECEIVE_BUFFER = 2
# Receiving/sending times and message contents; to be merged with the checkpoints later
# Checkpoints - SEC MOD SIDE
CP_SEC_INIT_AUTHENTICATION = 7 # Sec Mod. initialized authentication
CP_SEC_RECEIVE_REG_MESSAGE = 8 # Sec Mod receive the registration message
CP_SEC_DECRYPTED_INNER_REG_MESSAGE = 10 # Sec Mod decrypted inner reg Msg
CP_SEC_DECRYPTED_OUTER_REG_MESSAGE = 11 # Sec Mod decrypted outer reg Msg
CP_SEC_VALIDATED_ECU_CERTIFICATE = 12 # Sec Mod. validated the ECU Certificate
    CP_SEC_CREATED_CMP_HASH_REG_MSG = 13  # Create the comparison hash for validation of the reg msg hash
CP_SEC_COMPARED_HASH_REG_MSG = 14 # Finished comparing the reg msg hash to the created hash and sends the message
CP_SEC_RECEIVE_REQ_MESSAGE = 15 # Sec Mod. received the request message
CP_SEC_DECRYPTED_REQ_MESSAGE = 16 # Sec Mod decrypted the request message
CP_SEC_ECNRYPTED_CONFIRMATION_MESSAGE = 9 # Sec Mod created confirmation message and sends it
CP_SEC_GENERATED_SESSION_KEY = 17 # Sec Mod. generated the session key
CP_SEC_ENCRYPTED_DENY_MESSAGE = 18 # Sec. Mod encrypted the deny message
CP_SEC_ENCRYPTED_GRANT_MESSAGE = 19 # Sec. Mod encrypted the grant message
# Checkpoints - ECU SIDE
CP_ECU_RECEIVE_SIMPLE_MESSAGE = 20 # ECU receives a encrypted simple message
CP_ECU_DECRYPTED_SIMPLE_MESSAGE = 21 # ECU decrypted the received simple message
CP_ECU_INTENT_SEND_SIMPLE_MESSAGE = 22 # ECU decides on comm. module that it wants to send a simple message
CP_ECU_ENCRYPTED_SEND_SIMPLE_MESSAGE = 23 # ECU encrypted message and sends it
CP_ECU_RECEIVE_SEC_MOD_ADVERTISEMENT = 24 # ECU receives the SEC Module advertisement
CP_ECU_VALIDATED_SEC_MOD_CERTIFICATE = 25 # ECU validated the sec. mod. certificate
CP_ECU_START_CREATION_REG_MESSAGE = 26 # ECU starts to create the registration message
CP_ECU_CREATED_ECU_KEY_REG_MESSAGE = 27 # ECU created the sym. ECU key
CP_ECU_ENCRYPTED_INNER_REG_MESSAGE = 28 # ECU encrypted inner reg. msg
CP_ECU_HASHED_INNER_REG_MESSAGE = 29 # ECU hashed inner reg. msg
CP_ECU_ENCRYPTED_OUTER_REG_MESSAGE = 30 # ECU encrypted the outer reg. msg
CP_ECU_SEND_REG_MESSAGE = 31 # ECU sends the reg. message
CP_ECU_RECEIVE_CONF_MESSAGE = 32 # ECU receives a confirmation message
CP_ECU_DECRYPTED_CONF_MESSAGE = 33 # ECU decrypted the confirmation message
CP_ECU_START_CREATE_REQ_MESSAGE = 34 # ECU Starts creation of request message
CP_ECU_ENCRYPTED_REQ_MESSAGE = 35 # ECU encrypted the request message and sends it
CP_ECU_RECEIVE_DENY_MESSAGE = 36 # ECU receives a deny message
CP_ECU_DECRYPTED_DENY_MESSAGE = 37 # ECU decrypted the deny message
CP_ECU_RECEIVE_GRANT_MESSAGE = 38 # ECU receives a grant message
CP_ECU_DECRYPTED_GRANT_MESSAGE = 39 # ECU decrypted the grant message
    CP_ECU_ALREADY_AUTHENTICATED = 40  # The ECU is already authenticated and the message is discarded
# Checkpoints - TLS
CP_SESSION_AVAILABLE_SEND_MESSAGE = 43 # There is a session available for this stream and the message is transmitted
CP_SEND_CLIENT_HELLO = 44 # No session is available for that stream. Send the client hello message
CP_RECEIVE_CLIENT_HELLO = 45 # Receive the client hello and answer
CP_SEND_ALERT_NO_CIPHERSUITE = 46 # alert message if the wrong ciphersuite was chosen
CP_SEND_SERVER_HELLO = 47 # send the server Hello message
CP_SEND_SERVER_CERTIFICATE = 48 # send the server Certificate message
CP_SEND_SERVER_KEYEXCHANGE = 49 # send the server Keyexchange message
CP_SEND_CERTIFICATE_REQUEST = 50 # send the certificate request message
CP_SEND_SERVER_HELLO_DONE = 51 # send the server Hello done message
CP_RECEIVE_SERVER_HELLO = 52
CP_RECEIVE_SERVER_CERTIFICATE = 53
CP_RECEIVE_SERVER_KEYEXCHANGE = 54
CP_RECEIVE_CERTIFICATE_REQUEST = 55
CP_RECEIVE_SERVER_HELLO_DONE = 56
CP_SERVER_HELLO_DONE_VALIDATED_CERT = 57
CP_SEND_CLIENT_CERTIFICATE = 58
CP_INIT_SEND_CLIENT_KEYEXCHANGE = 59
CP_ENCRYPTED_CLIENT_KEYEXCHANGE = 60
CP_GENERATED_MASTERSEC_CLIENT_KEYEXCHANGE = 61
CP_INIT_SEND_CERTIFICATE_VERIFY = 62
CP_ENCRYPTED_CERTIFICATE_VERIFY = 63
CP_SEND_CIPHER_SPEC = 64 # Send the cipher spec message
CP_INIT_CLIENT_FINISHED = 65 # start to create client finished message
CP_HASHED_CLIENT_FINISHED = 66 # finished to hash the client finished message data
CP_GENERATED_HASH_FROM_PRF_CLIENT_FINISHED = 67 # Hash generated and sending message started
CP_RECEIVE_CLIENT_CERTIFICATE = 68 # receive the client certificate
CP_CLIENT_CERTIFICATE_VALIDATED = 69 # Finished validation of client certificate
CP_RECEIVE_CLIENT_KEYEXCHANGE = 70
CP_DECRYPTED_CLIENT_KEYEXCHANGE = 71
CP_RECEIVE_CERTIFICATE_VERIFY = 72
CP_DECRYPTED_CERTIFICATE_VERIFY = 73
CP_GENERATED_MASTER_SECRET_CERT_VERIFY = 74
CP_RECEIVED_CHANGE_CIPHER_SPEC = 75
# CP_RECEIVED_CLIENT_FINISHED = 76
CP_RECEIVE_CLIENT_FINISHED = 83
CP_CLIENT_FINISHED_HASHED_COMPARISON_HASH = 84
CP_CLIENT_FINISHED_GENERATED_HASH_PRF = 85
CP_RECEIVE_SERVER_FINISHED = 80
CP_SERVER_FINISHED_HASHED_COMPARISON_HASH = 81
CP_SERVER_FINISHED_GENERATED_HASH_PRF = 82
CP_INIT_SERVER_FINISHED = 77 # start to create SERVER finished message
CP_HASHED_SERVER_FINISHED = 78 # finished to hash the SERVER finished message data
CP_GENERATED_HASH_FROM_PRF_SERVER_FINISHED = 79 # Hash generated and sending message started
CP_SERVER_AUTHENTICATED = 86
CP_CLIENT_AUTHENTICATED = 87
CP_RECEIVE_SIMPLE_MESSAGE = 88
# Checkpoints - TESLA
CP_INIT_EXCHANGE_FIRST_KEY_KN = 89 # Intention to send the Key K N to receiver xy
CP_ENCRYPTED_EXCHANGE_FIRST_KEY_KN = 90 # Encryption finished for Key K_N to receiver xy
CP_SETUP_INIT_CREATE_KEYS = 91 # Start the creation of keys
CP_SETUP_FINISHED_CREATE_KEYS = 92 # Finished creating keys
CP_INIT_TRANSMIT_MESSAGE = 93 # Intention to send a simple message
CP_MACED_TRANSMIT_MESSAGE = 94 # Finished MAC Creation of message now send it
CP_RECEIVED_SIMPLE_MESSAGE = 95 # Received a simple message
CP_BUFFERED_SIMPLE_MESSAGE = 96 # Added simple message to buffer
CP_RETURNED_AUTHENTICATED_SIMPLE_MESSAGE = 97 # Authenticated messages are returned
CP_RECEIVED_EXCHANGE_FIRST_KEY_KN = 98 # received first key message
CP_DECRYPTED_EXCHANGE_FIRST_KEY_KN = 99 # decrypted first key message
    CP_INIT_CHECK_KEY_LEGID = 99.5  # start to check whether the key is legitimate
    CP_CHECKED_KEY_LEGID = 100  # checked the key's legitimacy
CP_INIT_VERIFYING_BUFFER_MESSAGE = 101 # Start validation of message in buffer
CP_FINISHED_VERIFYING_BUFFER_MESSAGE = 102 # Done validation of message in buffer
CP_SEND_SYNC_MESSAGE = 103 # send the time sync message from the ECU
CP_SEND_SYNC_RESPONSE_MESSAGE = 104
CP_RECEIVE_SYNC_RESPONSE_MESSAGE = 105 # End message
CP_RECEIVE_SYNC_MESSAGE = 106 # sync message was received
# CAN BUS TAGS
CB_DONE_PROCESSING_MESSAGE = 41
CB_PROCESSING_MESSAGE = 42
# Constellation Handler
CONSELLATION_INFORMATION = 107
ECU_ID_LIST = 108
| 46.008993
| 176
| 0.692741
| 25,302
| 0.989093
| 0
| 0
| 0
| 0
| 0
| 0
| 3,831
| 0.14976
|
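The publisher and InputHandlerChain classes above implement a chain of responsibility over tagged MonitorInput objects: each handler keeps a tag list, filters incoming inputs by tag, and forwards the whole batch to the next handler. A minimal, self-contained sketch of the same pattern follows; the Tag enum and PrintHandler class are illustrative stand-ins, not part of the original code.

# Minimal sketch of the handler-chain pattern used above (illustrative only).
from enum import Enum


class Tag(Enum):
    SEND = 1
    RECEIVE = 2


class PrintHandler:
    """Toy handler: prints inputs whose tag it registered, then forwards them."""

    def __init__(self):
        self.next = None
        self._tags_list = []

    def register_tags(self, tags_list):
        self._tags_list += tags_list

    def set_next(self, handler):
        self.next = handler

    def publish(self, cur_time, monitor_inputs):
        for tag, data in monitor_inputs:
            if tag in self._tags_list:
                print(cur_time, tag, data)
        if self.next is not None:
            self.next.publish(cur_time, monitor_inputs)


class HandlerChain:
    """Same chaining logic as the InputHandlerChain class above."""

    def add_handler(self, handler):
        if hasattr(self, "_next_handler"):
            self._next_handler.set_next(handler)
        else:
            self._handler = handler
        self._next_handler = handler
        return handler

    def handler(self):
        return self._handler


chain = HandlerChain()
chain.add_handler(PrintHandler()).register_tags([Tag.SEND])
chain.add_handler(PrintHandler()).register_tags([Tag.RECEIVE])
chain.handler().publish(0.5, [(Tag.SEND, "hello"), (Tag.RECEIVE, "world")])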
182bd0de90019e26f6a862933d6591b76c148320
| 1,994
|
py
|
Python
|
breadp/checks/pid.py
|
tgweber/breadp
|
12b97b9d2d997b1345a8e026690d57b3286a04d0
|
[
"Apache-2.0"
] | null | null | null |
breadp/checks/pid.py
|
tgweber/breadp
|
12b97b9d2d997b1345a8e026690d57b3286a04d0
|
[
"Apache-2.0"
] | null | null | null |
breadp/checks/pid.py
|
tgweber/breadp
|
12b97b9d2d997b1345a8e026690d57b3286a04d0
|
[
"Apache-2.0"
] | null | null | null |
################################################################################
# Copyright: Tobias Weber 2019
#
# Apache 2.0 License
#
# This file contains all code related to pid check objects
#
################################################################################
import re
import requests
from breadp.checks import Check
from breadp.checks.result import BooleanResult
class IsValidDoiCheck(Check):
""" Checks whether an RDP has a valid DOI as PID
Methods
-------
_do_check(self, rdp)
returns a BooleanResult
"""
def __init__(self):
Check.__init__(self)
self.id = 0
self.version = "0.0.1"
def _do_check(self, rdp):
if not rdp.pid:
msg = "RDP has no PID"
return BooleanResult(False, msg, False)
if re.match(r"^10\.\d{4}\d*/.*", rdp.pid):
return BooleanResult(True, "", True)
msg = "{} is not a valid DOI".format(rdp.pid)
return BooleanResult(False, msg, True)
class DoiResolvesCheck(Check):
""" Checks whether the DOI of an RDP resolves
Methods
-------
_do_check(self, rdp)
returns a BooleanResult
"""
def __init__(self):
Check.__init__(self)
self.id = 1
self.version = "0.0.1"
def _do_check(self, rdp):
if not rdp.pid:
msg = "RDP has no PID"
return BooleanResult(False, msg, False)
try:
response = requests.head('https://doi.org/' + rdp.pid)
except Exception as e:
msg = "{}: {}".format(type(e).__name__, e)
return BooleanResult(False, msg, False)
if response.status_code != 302:
msg = "Could not resolve {}, status code: {}".format(
rdp.pid, response.status_code)
return BooleanResult(False, msg, True)
msg = "Location of resolved doi: {}".format(
response.headers.get('Location'))
return BooleanResult(True, msg, True)
| 28.898551
| 80
| 0.533099
| 1,604
| 0.804413
| 0
| 0
| 0
| 0
| 0
| 0
| 739
| 0.370612
|
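Both checks above only read the pid attribute of the RDP object they receive, so a quick smoke test needs nothing more than a stand-in object. The sketch below is illustrative: the fake RDP class and the DOI string are made up, and real usage would presumably go through whatever public interface breadp's Check base class exposes rather than calling _do_check directly.

# Illustrative usage sketch; assumes the breadp package is installed.
from breadp.checks.pid import IsValidDoiCheck, DoiResolvesCheck


class FakeRdp:
    pid = "10.1234/example-doi"  # hypothetical, syntactically valid DOI


print(IsValidDoiCheck()._do_check(FakeRdp()))   # regex check only, no network
print(DoiResolvesCheck()._do_check(FakeRdp()))  # issues an HTTP HEAD to doi.org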
182cba7e9952331f563ef145511a6c92d1f0f8eb
| 495
|
py
|
Python
|
tests/infrastructure/persistence/test_holiday_dynamo_repository.py
|
gabrielleandro0801/holidays-importer
|
4a698ded80048ee37161b1f1ff4b4af64f085ab7
|
[
"MIT"
] | null | null | null |
tests/infrastructure/persistence/test_holiday_dynamo_repository.py
|
gabrielleandro0801/holidays-importer
|
4a698ded80048ee37161b1f1ff4b4af64f085ab7
|
[
"MIT"
] | null | null | null |
tests/infrastructure/persistence/test_holiday_dynamo_repository.py
|
gabrielleandro0801/holidays-importer
|
4a698ded80048ee37161b1f1ff4b4af64f085ab7
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from src.domain.holiday import Holiday
import src.infrastructure.persistence.holiday_dynamo_repository as repository
HOLIDAY = Holiday(
date='2021-12-25',
name='Natal',
category='NATIONAL'
)
class TestHolidayDynamoRepository(TestCase):
def test_holiday_must_be_dict_when_given_as_object(self):
response = repository.translate_holiday_to_dynamo(HOLIDAY)
self.assertIsInstance(response, dict, 'Holiday was not returned as a dict')
| 29.117647
| 83
| 0.779798
| 258
| 0.521212
| 0
| 0
| 0
| 0
| 0
| 0
| 65
| 0.131313
|
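The test above pins down only the return type of translate_holiday_to_dynamo. One plausible implementation that would satisfy it, offered purely as an assumption rather than the repository's actual code, is a straight field-to-key mapping:

# Hypothetical sketch only; the real implementation is not shown in this file.
def translate_holiday_to_dynamo(holiday):
    return {
        "date": holiday.date,
        "name": holiday.name,
        "category": holiday.category,
    }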
182e6f7b7c70dcc5da411a03395acac1d83ee9e9
| 3,136
|
py
|
Python
|
src/models/Models.py
|
nbrutti/uol-export
|
c79a1a6b5c68e61a85952a60b935943aec27cdda
|
[
"MIT"
] | null | null | null |
src/models/Models.py
|
nbrutti/uol-export
|
c79a1a6b5c68e61a85952a60b935943aec27cdda
|
[
"MIT"
] | null | null | null |
src/models/Models.py
|
nbrutti/uol-export
|
c79a1a6b5c68e61a85952a60b935943aec27cdda
|
[
"MIT"
] | null | null | null |
from config.defs import *
import peewee
db = peewee.SqliteDatabase(DATABASE_NAME)
class BaseModel(peewee.Model):
class Meta:
database = db
class Partida(BaseModel):
id_time_casa = peewee.CharField()
id_time_visitante = peewee.CharField()
time_casa = peewee.CharField()
time_visitante = peewee.CharField()
data = peewee.DateField()
time_da_casa_venceu = peewee.IntegerField()
HG = peewee.FloatField()
AG = peewee.FloatField()
PH = peewee.FloatField()
PD = peewee.FloatField()
PA = peewee.FloatField()
MAX_H = peewee.FloatField()
MAX_D = peewee.FloatField()
MAX_A = peewee.FloatField()
AVG_H = peewee.FloatField()
AVG_D = peewee.FloatField()
AVG_A = peewee.FloatField()
class Meta:
db_table = 'partidas'
class Substituicao(BaseModel):
    # Can be 'INTERVALO' (half-time)
tempo = peewee.CharField()
tipo_tatico = peewee.CharField(null=True)
efetividade = peewee.IntegerField()
class Meta:
db_table = 'substituicoes'
class Penalti(BaseModel):
tempo = peewee.CharField()
class Meta:
db_table = 'penaltis'
class CartaoAmarelo(BaseModel):
tempo = peewee.CharField()
id_jogador = peewee.CharField()
class Meta:
db_table = 'cartoes_amarelos'
class CartaoVermelho(BaseModel):
tempo = peewee.CharField()
id_jogador = peewee.CharField()
class Meta:
db_table = 'cartoes_vermelhos'
class GolContra(BaseModel):
tempo = peewee.CharField()
id_jogador = peewee.CharField()
class Meta:
db_table = 'gols_contra'
class Gol(BaseModel):
tempo = peewee.CharField()
id_jogador = peewee.CharField()
class Meta:
db_table = 'gols'
class Time(BaseModel):
api_id = peewee.IntegerField()
nome = peewee.CharField()
class Meta:
db_table = "times"
### Relationships ###
class PartidasSubstituicoes(BaseModel):
partida = peewee.ForeignKeyField(Partida)
substituicao = peewee.ForeignKeyField(Substituicao)
class Meta:
db_table = 'partidas_substituicoes'
class PartidasPenaltis(BaseModel):
partida = peewee.ForeignKeyField(Partida)
penalti = peewee.ForeignKeyField(Penalti)
class Meta:
db_table = 'partidas_penaltis'
class PartidasCartoesAmarelos(BaseModel):
partida = peewee.ForeignKeyField(Partida)
cartoes_amarelos = peewee.ForeignKeyField(CartaoAmarelo)
class Meta:
db_table = 'partidas_cartoes_amarelos'
class PartidasCartoesVermelhos(BaseModel):
partida = peewee.ForeignKeyField(Partida)
cartoes_vermelhos = peewee.ForeignKeyField(CartaoVermelho)
class Meta:
db_table = 'partidas_cartoes_vermelhos'
class PartidasGolsContra(BaseModel):
partida = peewee.ForeignKeyField(Partida)
gols_contra = peewee.ForeignKeyField(GolContra)
class Meta:
db_table = 'partidas_gols_contra'
class PartidasGols(BaseModel):
partida = peewee.ForeignKeyField(Partida)
gols = peewee.ForeignKeyField(Gol)
class Meta:
db_table = 'partidas_gols'
db.create_tables([Partida, Substituicao, Penalti, CartaoAmarelo, CartaoVermelho, GolContra, Gol, Time])
db.create_tables([PartidasSubstituicoes, PartidasPenaltis, PartidasCartoesAmarelos, PartidasCartoesVermelhos, PartidasGolsContra, PartidasGols])
| 24.888889
| 144
| 0.748724
| 2,749
| 0.876594
| 0
| 0
| 0
| 0
| 0
| 0
| 276
| 0.08801
|
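Because the module above calls db.create_tables(...) at import time, the models can be used immediately with the standard peewee API. A short sketch follows; the import path and the record values are assumptions for illustration.

# Assumes the module is importable (e.g. as `Models`) and that DATABASE_NAME
# from config.defs points at a writable SQLite file; the values are made up.
from Models import Time

time_row = Time.create(api_id=101, nome="Example FC")
for t in Time.select().where(Time.api_id == 101):
    print(t.id, t.nome)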
182e9e4609ddebb22d97860bec2e861331fa8d6e
| 1,386
|
py
|
Python
|
stubs.min/System/Windows/Media/Animation_parts/AnimationException.py
|
ricardyn/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | 1
|
2021-02-02T13:39:16.000Z
|
2021-02-02T13:39:16.000Z
|
stubs.min/System/Windows/Media/Animation_parts/AnimationException.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
stubs.min/System/Windows/Media/Animation_parts/AnimationException.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
class AnimationException(SystemException,ISerializable,_Exception):
""" The exception that is thrown when an error occurs while animating a property. """
def add_SerializeObjectState(self,*args):
""" add_SerializeObjectState(self: Exception,value: EventHandler[SafeSerializationEventArgs]) """
pass
def remove_SerializeObjectState(self,*args):
""" remove_SerializeObjectState(self: Exception,value: EventHandler[SafeSerializationEventArgs]) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __reduce_ex__(self,*args):
pass
def __str__(self,*args):
pass
Clock=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the clock that generates the animated values.
Get: Clock(self: AnimationException) -> AnimationClock
"""
Property=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the animated dependency property.
Get: Property(self: AnimationException) -> DependencyProperty
"""
Target=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the animated object.
Get: Target(self: AnimationException) -> IAnimatable
"""
| 36.473684
| 215
| 0.730159
| 1,380
| 0.995671
| 0
| 0
| 0
| 0
| 0
| 0
| 828
| 0.597403
|
182eadd7acbf4364e0c9b88cd120533f1ae8e1e3
| 1,165
|
py
|
Python
|
quantnn/__init__.py
|
simonpf/qrnn
|
1de11ce8cede6b4b3de0734bcc8c198c10226188
|
[
"MIT"
] | null | null | null |
quantnn/__init__.py
|
simonpf/qrnn
|
1de11ce8cede6b4b3de0734bcc8c198c10226188
|
[
"MIT"
] | 3
|
2022-01-11T08:41:03.000Z
|
2022-02-11T14:25:09.000Z
|
quantnn/__init__.py
|
simonpf/qrnn
|
1de11ce8cede6b4b3de0734bcc8c198c10226188
|
[
"MIT"
] | 5
|
2020-12-11T03:18:32.000Z
|
2022-02-14T10:32:09.000Z
|
r"""
=======
quantnn
=======
The quantnn package provides functionality for probabilistic modeling and prediction
using deep neural networks.
The two main features of the quantnn package are implemented by the
:py:class:`~quantnn.qrnn.QRNN` and :py:class:`~quantnn.qrnn.DRNN` classes, which implement
quantile regression neural networks (QRNNs) and density regression neural networks (DRNNs),
respectively.
The modules :py:mod:`quantnn.quantiles` and :py:mod:`quantnn.density` provide generic
(backend agnostic) functions to manipulate probabilistic predictions.
"""
import logging as _logging
import os
from rich.logging import RichHandler
from quantnn.neural_network_model import set_default_backend, get_default_backend
from quantnn.qrnn import QRNN
from quantnn.drnn import DRNN
from quantnn.quantiles import (
cdf,
pdf,
posterior_mean,
probability_less_than,
probability_larger_than,
sample_posterior,
sample_posterior_gaussian,
quantile_loss,
)
_LOG_LEVEL = os.environ.get("QUANTNN_LOG_LEVEL", "WARNING").upper()
_logging.basicConfig(
level=_LOG_LEVEL, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()]
)
| 29.871795
| 91
| 0.775107
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 616
| 0.528755
|
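Because _LOG_LEVEL is read from the environment at import time (defaulting to WARNING), the log level has to be set before the package is first imported:

import os

os.environ["QUANTNN_LOG_LEVEL"] = "DEBUG"  # must be set before importing quantnn

import quantnn  # logging is now configured at DEBUG level with a RichHandler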
182f0fecd4c6abc4561282446bbffe0f48f4cc60
| 805
|
py
|
Python
|
habitat_baselines/motion_planning/robot_target.py
|
srama2512/habitat-api
|
bc85d0961cef3b4a08bc9263869606109fb6ff0a
|
[
"MIT"
] | 355
|
2020-08-18T03:48:26.000Z
|
2022-03-30T00:22:50.000Z
|
habitat_baselines/motion_planning/robot_target.py
|
srama2512/habitat-api
|
bc85d0961cef3b4a08bc9263869606109fb6ff0a
|
[
"MIT"
] | 328
|
2020-08-12T21:25:09.000Z
|
2022-03-31T10:39:21.000Z
|
habitat_baselines/motion_planning/robot_target.py
|
srama2512/habitat-api
|
bc85d0961cef3b4a08bc9263869606109fb6ff0a
|
[
"MIT"
] | 159
|
2020-08-12T22:23:36.000Z
|
2022-03-30T22:56:52.000Z
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import attr
import magnum as mn
import numpy as np
@attr.s(auto_attribs=True, slots=True)
class RobotTarget:
"""
Data class to define the target needed as input for the motion planner.
"""
# End-effector in world coordinate frame.
ee_target_pos: np.ndarray = None
obj_id_target: int = None
joints_target: np.ndarray = None
is_guess: bool = False
@attr.s(auto_attribs=True, slots=True)
class ObjectGraspTarget:
"""
Data class to define the target needed as input for the grasp planner.
"""
# Bounding Box
bb: mn.Range3D
translation: mn.Matrix4
| 23.676471
| 75
| 0.70559
| 470
| 0.583851
| 0
| 0
| 548
| 0.680745
| 0
| 0
| 422
| 0.524224
|
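attr.s(auto_attribs=True) generates keyword constructors for both data classes above, so a planner target can be built directly. The numeric values below are placeholders, not meaningful poses.

# Illustrative only; assumes habitat_baselines is importable.
import numpy as np

from habitat_baselines.motion_planning.robot_target import RobotTarget

target = RobotTarget(
    ee_target_pos=np.array([0.5, 0.0, 0.4]),  # end-effector goal in the world frame
    joints_target=np.zeros(7),                # optional joint-space goal
    is_guess=True,                            # treat joints_target as an initial guess
)
print(target)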
18323906f8da6c858e162af77f828aa7dc3d5141
| 1,314
|
py
|
Python
|
leetcode/445.Add_Two_Numbers_II/python/add_two_numbers_v1.py
|
realXuJiang/research_algorithms
|
8f2876288cb607b9eddb2aa75f51a1d574b51ec4
|
[
"Apache-2.0"
] | 1
|
2019-08-12T09:32:30.000Z
|
2019-08-12T09:32:30.000Z
|
leetcode/445.Add_Two_Numbers_II/python/add_two_numbers_v1.py
|
realXuJiang/research_algorithms
|
8f2876288cb607b9eddb2aa75f51a1d574b51ec4
|
[
"Apache-2.0"
] | null | null | null |
leetcode/445.Add_Two_Numbers_II/python/add_two_numbers_v1.py
|
realXuJiang/research_algorithms
|
8f2876288cb607b9eddb2aa75f51a1d574b51ec4
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#-*- coding: utf-8 -*-
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
class TwoNumbers(object):
@staticmethod
def builderListNode(nums):
if nums is not None:
head = ListNode(str(nums)[0])
a = head
for i in str(nums)[1:]:
b = ListNode(i)
a.next = b
a = a.next
return head
@staticmethod
def addTwoNumbers(n1, n2):
s1 = ""
s2 = ""
while n1 is not None:
s1 += str(n1.val)
n1 = n1.next
while n2 is not None:
s2 += str(n2.val)
n2 = n2.next
summation = str(int(s1) + int(s2))
head = ListNode(summation[0])
temp = head
for val in summation[1:]:
temp.next = ListNode(val)
temp = temp.next
return head
@staticmethod
def printLS(node):
if not node:
return None
res = ''
while node:
res += str(node.val) + ' -> '
node = node.next
        print(res)
if __name__ == "__main__":
tn = TwoNumbers()
l1 = tn.builderListNode(1234)
l2 = tn.builderListNode(34)
tn.printLS(tn.addTwoNumbers(l1, l2))
| 22.655172
| 42
| 0.47793
| 1,108
| 0.843227
| 0
| 0
| 969
| 0.737443
| 0
| 0
| 65
| 0.049467
|
1833d1d97b94601d7c7672bd7240b57d03e2cddf
| 3,961
|
py
|
Python
|
recsys/util/feature_helper.py
|
manazhao/tf_recsys
|
6053712d11165c068e5d618989f716b2a0f60186
|
[
"Apache-2.0"
] | 1
|
2019-04-20T15:05:37.000Z
|
2019-04-20T15:05:37.000Z
|
recsys/util/feature_helper.py
|
manazhao/tf_recsys
|
6053712d11165c068e5d618989f716b2a0f60186
|
[
"Apache-2.0"
] | null | null | null |
recsys/util/feature_helper.py
|
manazhao/tf_recsys
|
6053712d11165c068e5d618989f716b2a0f60186
|
[
"Apache-2.0"
] | null | null | null |
import logging
import tensorflow as tf
import recsys.util.proto.config_pb2 as config
def int64_feature(val):
return tf.train.Feature(int64_list = tf.train.Int64List(value=[val]))
def int64_list_feature(val_list):
return tf.train.Feature(int64_list = tf.train.Int64List(value=val_list))
def bytes_feature(val):
return tf.train.Feature(bytes_list = tf.train.BytesList(value=[val]))
def bytes_list_feature(val_list):
return tf.train.Feature(bytes_list = tf.train.BytesList(value=val_list))
def float_feature(val):
return tf.train.Feature(float_list = tf.train.FloatList(value=[val]))
def float_list_feature(val_list):
return tf.train.Feature(float_list = tf.train.FloatList(value=val_list))
def string_feature(str):
return tf.train.Feature(bytes_list = tf.train.BytesList(value=[str.encode('utf-8')]))
def string_list_feature(str_list):
str_bytes_list = [k.encode('utf-8') for k in str_list]
return bytes_list_feature(str_bytes_list)
# Constructs a tf.Example with feature dictionary where key is feature name and
# value is tf.train.Feature
def example_from_feature_dict(feature_dict):
return tf.train.Example(features = tf.train.Features(feature = feature_dict))
def get_int64_feature(example, feature_name):
return example.features.feature[feature_name].int64_list.value[0]
def get_int64_list_feature(example, feature_name):
return list(example.features.feature[feature_name].int64_list.value)
def get_float_feature(example, feature_name):
return example.features.feature[feature_name].float_list.value[0]
def get_float_list_feature(example, feature_name):
return list(example.features.feature[feature_name].float_list.value)
def get_bytes_feature(example, feature_name):
return example.features.feature[feature_name].bytes_list.value[0]
def get_bytes_list_feature(example, feature_name):
return example.features.feature[feature_name].bytes_list.value
def get_string_feature(example, feature_name):
return example.features.feature[feature_name].bytes_list.value[0].decode('utf-8')
def get_string_list_feature(example, feature_name):
return [s.decode('utf-8') for s in example.features.feature[feature_name].bytes_list.value]
# Reads batched features and labels from given files, and consumes them through
# callback function "consum_batch_fn".
# feature_spec: dictionary specifying the type of each feature.
# input_config: settings for generating batched features and labels.
# consume_batch_fn: callback function that defines the consumption of the
# batched features and labels.
def fetch_and_process_features(filenames, feature_spec, input_config, consume_batch_fn):
    # Reads examples from the filenames and parses them into features.
def _read_and_decode(filename_queue, feature_spec, batch_size = 2, capacity = 30, num_threads = 2, min_after_dequeue = 10):
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
features = tf.parse_single_example(
serialized_example, features = feature_spec)
batched_features = tf.train.shuffle_batch(features,
batch_size = batch_size,
capacity = capacity,
num_threads = num_threads,
min_after_dequeue = min_after_dequeue)
return batched_features
filename_queue = tf.train.string_input_producer(
filenames, num_epochs = input_config.num_epochs)
features = _read_and_decode(
filename_queue,
feature_spec,
batch_size = input_config.batch_size,
capacity = input_config.capacity,
num_threads = input_config.num_threads,
min_after_dequeue = input_config.min_after_dequeue
)
init_op = tf.group(tf.global_variables_initializer(),
tf.local_variables_initializer())
with tf.Session() as sess:
sess.run(init_op)
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord)
for i in range(input_config.num_batches):
logging.info('current batch:{}'.format(i))
consume_batch_fn(sess, features)
coord.request_stop()
coord.join(threads)
| 39.61
| 124
| 0.789447
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 569
| 0.143651
|
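The small wrappers above cover both writing and reading tf.train.Example protos; a round trip that uses only functions defined in the module (TensorFlow 1.x API) could look like the sketch below. The module alias and the feature names are arbitrary examples.

# Sketch only; assumes the module is importable and TensorFlow 1.x is installed.
import recsys.util.feature_helper as fh

example = fh.example_from_feature_dict({
    "user_id": fh.int64_feature(42),
    "item_ids": fh.int64_list_feature([1, 2, 3]),
    "query": fh.string_feature("running shoes"),
})
serialized = example.SerializeToString()  # ready to be written to a TFRecord

print(fh.get_int64_feature(example, "user_id"))        # 42
print(fh.get_int64_list_feature(example, "item_ids"))  # [1, 2, 3]
print(fh.get_string_feature(example, "query"))         # 'running shoes'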
18343ff0759e4173734193d8fad780c280807cc1
| 1,894
|
py
|
Python
|
components/handlers/star_modules.py
|
nus-mtp/another-cs-study-planner
|
02b52871a34f580b779ede08750f2d4e887bcf65
|
[
"MIT"
] | 1
|
2017-04-30T17:59:08.000Z
|
2017-04-30T17:59:08.000Z
|
components/handlers/star_modules.py
|
nus-mtp/another-cs-study-planner
|
02b52871a34f580b779ede08750f2d4e887bcf65
|
[
"MIT"
] | 87
|
2017-02-13T09:06:13.000Z
|
2017-04-14T09:23:08.000Z
|
components/handlers/star_modules.py
|
nus-mtp/another-cs-study-planner
|
02b52871a34f580b779ede08750f2d4e887bcf65
|
[
"MIT"
] | 1
|
2017-04-11T05:26:00.000Z
|
2017-04-11T05:26:00.000Z
|
'''
This module handles starring of modules.
'''
import web
from app import RENDER
from components import model, session
class StarModule(object):
'''
Class handles starring and unstarring of modules.
'''
def GET(self):
'''
This function is called when /starModule is accessed.
'''
web.header('X-Frame-Options', 'SAMEORIGIN')
web.header('X-Content-Type-Options', 'nosniff')
web.header('X-XSS-Protection', '1')
if not session.validate_session():
raise web.seeother('/login')
else:
module_code = web.input().code
action = web.input().action
return_path = web.input().return_path
# modify return path if individual module info to include aySem
if return_path == '/individualModuleInfo':
target_ay = web.input().aysem
return_path = return_path + '?code=' + module_code + '&aysem=' + target_ay
if action == "star":
model.star_module(module_code, web.cookies().get('user'))
else:
model.unstar_module(module_code, web.cookies().get('user'))
raise web.seeother(return_path)
class StarredModulesList(object):
'''
Class handles showing of starredModules
'''
URL_THIS_PAGE = '/starredModules'
def GET(self):
'''
This function is called when /starredModules is accessed.
'''
web.header('X-Frame-Options', 'SAMEORIGIN')
web.header('X-Content-Type-Options', 'nosniff')
web.header('X-XSS-Protection', '1')
if not session.validate_session():
raise web.seeother('/login')
else:
starred_module_infos = model.get_starred_modules(web.cookies().get('user'))
return RENDER.starredModulesListing(starred_module_infos)
| 33.22807
| 90
| 0.594509
| 1,762
| 0.930306
| 0
| 0
| 0
| 0
| 0
| 0
| 672
| 0.354805
|
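For reference, the StarModule handler above reads code, action, and return_path from the query string, so a star request takes roughly this shape (module code and return path are made-up examples):

/starModule?code=CS1010&action=star&return_path=/moduleListing

Any action other than "star" unstars the module, and requests whose return_path is /individualModuleInfo must also carry an aysem parameter.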
183882e7bff2e8589b66d5bada377b9d753cd440
| 27,362
|
py
|
Python
|
src/features/smarterdb.py
|
cnr-ibba/SMARTER-database
|
837f7d514c33e458ad0e39e26784c761df29e004
|
[
"MIT"
] | null | null | null |
src/features/smarterdb.py
|
cnr-ibba/SMARTER-database
|
837f7d514c33e458ad0e39e26784c761df29e004
|
[
"MIT"
] | 44
|
2021-05-25T16:00:34.000Z
|
2022-03-12T01:12:45.000Z
|
src/features/smarterdb.py
|
cnr-ibba/SMARTER-database
|
837f7d514c33e458ad0e39e26784c761df29e004
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 23 16:21:35 2021
@author: Paolo Cozzi <paolo.cozzi@ibba.cnr.it>
"""
import os
import logging
import pathlib
import pycountry
import mongoengine
from enum import Enum
from typing import Union
from pymongo import database, ReturnDocument
from dotenv import find_dotenv, load_dotenv
from .utils import get_project_dir
SPECIES2CODE = {
"Sheep": "OA",
"Goat": "CH"
}
SMARTERDB = "smarter"
DB_ALIAS = "smarterdb"
# Get an instance of a logger
logger = logging.getLogger(__name__)
class SmarterDBException(Exception):
pass
def global_connection(database_name: str = SMARTERDB):
# find .env automagically by walking up directories until it's found, then
# load up the .env entries as environment variables
load_dotenv(find_dotenv())
# TODO: track connection somewhere
return mongoengine.connect(
database_name,
username=os.getenv("MONGODB_SMARTER_USER"),
password=os.getenv("MONGODB_SMARTER_PASS"),
authentication_source='admin',
alias=DB_ALIAS)
def complement(genotype: str):
bases = {
"A": "T",
"T": "A",
"C": "G",
"G": "C",
"/": "/"
}
result = ""
for base in genotype:
result += bases[base]
return result
class SmarterInfo(mongoengine.Document):
    """A class to track database status information"""
id = mongoengine.StringField(primary_key=True)
version = mongoengine.StringField(required=True)
working_assemblies = mongoengine.DictField()
plink_specie_opt = mongoengine.DictField()
last_updated = mongoengine.DateTimeField()
meta = {
'db_alias': DB_ALIAS,
'collection': 'smarterInfo'
}
def __str__(self):
return f"{self.id}: {self.version}"
class Counter(mongoengine.Document):
"""A class to deal with counter collection (created when initializing
smarter database)
"""
id = mongoengine.StringField(primary_key=True)
sequence_value = mongoengine.IntField(required=True, default=0)
meta = {
'db_alias': DB_ALIAS,
'collection': 'counters'
}
def __str__(self):
return f"{self.id}: {self.sequence_value}"
class SupportedChip(mongoengine.Document):
name = mongoengine.StringField(required=True, unique=True)
species = mongoengine.StringField(required=True)
manifacturer = mongoengine.StringField()
n_of_snps = mongoengine.IntField(default=0)
meta = {
'db_alias': DB_ALIAS,
'collection': 'supportedChips'
}
def __str__(self):
return f"'{self.name}' ({self.species})"
class BreedAlias(mongoengine.EmbeddedDocument):
fid = mongoengine.StringField(required=True)
dataset = mongoengine.ReferenceField(
'Dataset',
db_field="dataset_id")
country = mongoengine.StringField()
def __str__(self):
return f"{self.fid}: {self.dataset}"
class Breed(mongoengine.Document):
species = mongoengine.StringField(required=True)
name = mongoengine.StringField(required=True)
code = mongoengine.StringField(required=True)
aliases = mongoengine.ListField(
mongoengine.EmbeddedDocumentField(BreedAlias))
n_individuals = mongoengine.IntField()
meta = {
'db_alias': DB_ALIAS,
'collection': 'breeds',
'indexes': [
{
'fields': [
"species",
"code"
],
'unique': True,
'collation': {'locale': 'en', 'strength': 1}
},
{
'fields': [
"species",
"name"
],
'unique': True,
'collation': {'locale': 'en', 'strength': 1}
}
]
}
def __str__(self):
return f"{self.name} ({self.code}) {self.species}"
def get_or_create_breed(
species: str, name: str, code: str, aliases: list = []):
logger.debug(f"Checking: '{species}':'{name}':'{code}'")
# get a breed object relying on parameters
qs = Breed.objects(species=species, name=name, code=code)
modified = False
if qs.count() == 1:
breed = qs.get()
logger.debug(f"Got {breed}")
for alias in aliases:
if alias not in breed.aliases:
# track for update
modified = True
logger.info(f"Adding '{alias}' to '{breed}' aliases")
breed.aliases.append(alias)
elif qs.count() == 0:
logger.debug("Create a new breed object")
modified = True
breed = Breed(
species=species,
name=name,
code=code,
aliases=aliases,
n_individuals=0
)
else:
# should never see this relying on collection unique keys
raise SmarterDBException(
f"Got {qs.count()} results for '{species}':'{name}':'{code}'")
if modified:
logger.debug(f"Save '{breed}' to database")
breed.save()
return breed, modified
class Dataset(mongoengine.Document):
    """Describe a dataset instance with fields owned by data types"""
file = mongoengine.StringField(required=True, unique=True)
uploader = mongoengine.StringField()
size_ = mongoengine.StringField(db_field="size")
partner = mongoengine.StringField()
# HINT: should country, species and breeds be a list of items?
country = mongoengine.StringField()
species = mongoengine.StringField()
breed = mongoengine.StringField()
n_of_individuals = mongoengine.IntField()
n_of_records = mongoengine.IntField()
trait = mongoengine.StringField()
gene_array = mongoengine.StringField()
# add type tag
type_ = mongoengine.ListField(mongoengine.StringField(), db_field="type")
# file contents
contents = mongoengine.ListField(mongoengine.StringField())
# track the original chip_name with dataset
chip_name = mongoengine.StringField()
meta = {
'db_alias': DB_ALIAS,
'collection': 'dataset'
}
def __str__(self):
return f"file={self.file}, uploader={self.uploader}"
@property
    def working_dir(self) -> pathlib.PosixPath:
        """returns the location of the dataset working directory. Could exist
or not
Returns:
pathlib.PosixPath: a subdirectory in /data/interim/
"""
if not self.id:
raise SmarterDBException(
"Can't define working dir. Object need to be stored in "
"database")
return get_project_dir() / f"data/interim/{self.id}"
@property
    def result_dir(self) -> pathlib.PosixPath:
        """returns the location of the dataset processed directory. Could exist
or not
Returns:
pathlib.PosixPath: a subdirectory in /data/processed/
"""
if not self.id:
raise SmarterDBException(
"Can't define result dir. Object need to be stored in "
"database")
return get_project_dir() / f"data/processed/{self.id}"
def getNextSequenceValue(
sequence_name: str, mongodb: database.Database):
    # this method is similar to findAndModify:
    # update a document and then get the UPDATED document
# https://docs.mongodb.com/manual/reference/method/db.collection.findAndModify/index.html#db.collection.findAndModify
sequenceDocument = mongodb.counters.find_one_and_update(
{"_id": sequence_name},
{"$inc": {"sequence_value": 1}},
return_document=ReturnDocument.AFTER
)
return sequenceDocument['sequence_value']
def getSmarterId(
species: str, country: str, breed: str, mongodb: database.Database):
    # species, country and breed should be defined in order to call this function
if not species or not country or not breed:
raise SmarterDBException(
"species, country and breed should be defined when calling "
"getSmarterId"
)
# get species code
if species not in SPECIES2CODE:
raise SmarterDBException(
"Species %s not managed by smarter" % (species))
species_code = SPECIES2CODE[species]
# get country code (two letters)
country = pycountry.countries.get(name=country)
country_code = country.alpha_2
# get breed code from database
breed_code = mongodb.breeds.find_one(
{"species": species, "name": breed})["code"]
# derive sequence_name from species
sequence_name = f"sample{species}"
# get the sequence number and define smarter id
sequence_id = getNextSequenceValue(sequence_name, mongodb)
# padding numbers
sequence_id = str(sequence_id).zfill(9)
smarter_id = f"{country_code}{species_code}-{breed_code}-{sequence_id}"
return smarter_id
class SEX(bytes, Enum):
UNKNOWN = (0, "Unknown")
MALE = (1, "Male")
FEMALE = (2, "Female")
def __new__(cls, value, label):
obj = bytes.__new__(cls, [value])
obj._value_ = value
obj.label = label
return obj
def __str__(self):
return self.label
@classmethod
def from_string(cls, value: str):
"""Get proper type relying on input string
Args:
value (str): required sex as string
Returns:
SEX: A sex instance (MALE, FEMALE, UNKNOWN)
"""
if type(value) != str:
raise SmarterDBException("Provided value should be a 'str' type")
value = value.upper()
if value in ['M', 'MALE', "1"]:
return cls.MALE
elif value in ['F', 'FEMALE', "2"]:
return cls.FEMALE
else:
logger.debug(
f"Unmanaged sex '{value}': return '{cls.UNKNOWN}'")
return cls.UNKNOWN
class Phenotype(mongoengine.DynamicEmbeddedDocument):
"""A class to deal with Phenotype. A dynamic document and not a generic
    DictField since there can be attributes which could be enforced to
have certain values. All other attributes could be set without any
assumptions
"""
purpose = mongoengine.StringField()
chest_girth = mongoengine.FloatField()
height = mongoengine.FloatField()
length = mongoengine.FloatField()
def __str__(self):
return f"{self.to_json()}"
class SAMPLETYPE(Enum):
FOREGROUND = 'foreground'
BACKGROUND = 'background'
class SampleSpecies(mongoengine.Document):
original_id = mongoengine.StringField(required=True)
smarter_id = mongoengine.StringField(required=True, unique=True)
country = mongoengine.StringField(required=True)
species = mongoengine.StringField(required=True)
breed = mongoengine.StringField(required=True)
breed_code = mongoengine.StringField(min_length=3)
    # this will be an original_id alias (a different sample name in the original
# data file)
alias = mongoengine.StringField()
# required to search a sample relying only on original ID
dataset = mongoengine.ReferenceField(
Dataset,
db_field="dataset_id",
reverse_delete_rule=mongoengine.DENY
)
# add type tag
type_ = mongoengine.EnumField(SAMPLETYPE, db_field="type", required=True)
# track the original chip_name with sample
chip_name = mongoengine.StringField()
# define enum types for sex
sex = mongoengine.EnumField(SEX)
# GPS location
# NOTE: X, Y where X is longitude, Y latitude
locations = mongoengine.ListField(mongoengine.PointField(), default=None)
# additional (not modelled) metadata
metadata = mongoengine.DictField(default=None)
# for phenotypes
phenotype = mongoengine.EmbeddedDocumentField(Phenotype, default=None)
meta = {
'abstract': True,
}
def save(self, *args, **kwargs):
"""Custom save method. Deal with smarter_id before save"""
if not self.smarter_id:
logger.debug(f"Determining smarter id for {self.original_id}")
# get the pymongo connection object
conn = mongoengine.connection.get_db(alias=DB_ALIAS)
            # even if species, country and breed are required fields for
# SampleSpecies document, their value will not be evaluated until
# super().save() is called. I can't call it before determining
# a smarter_id
self.smarter_id = getSmarterId(
self.species,
self.country,
self.breed,
conn)
# default save method
super(SampleSpecies, self).save(*args, **kwargs)
def __str__(self):
return f"{self.smarter_id} ({self.breed})"
class SampleSheep(SampleSpecies):
# try to model relationship between samples
father_id = mongoengine.LazyReferenceField(
'SampleSheep',
passthrough=True,
reverse_delete_rule=mongoengine.NULLIFY
)
mother_id = mongoengine.LazyReferenceField(
'SampleSheep',
passthrough=True,
reverse_delete_rule=mongoengine.NULLIFY
)
meta = {
'db_alias': DB_ALIAS,
'collection': 'sampleSheep'
}
class SampleGoat(SampleSpecies):
# try to model relationship between samples
father_id = mongoengine.LazyReferenceField(
'SampleGoat',
passthrough=True,
reverse_delete_rule=mongoengine.NULLIFY
)
mother_id = mongoengine.LazyReferenceField(
'SampleGoat',
passthrough=True,
reverse_delete_rule=mongoengine.NULLIFY
)
meta = {
'db_alias': DB_ALIAS,
'collection': 'sampleGoat'
}
def get_or_create_sample(
SampleSpecies: Union[SampleGoat, SampleSheep],
original_id: str,
dataset: Dataset,
type_: str,
breed: Breed,
country: str,
chip_name: str = None,
sex: SEX = None,
alias: str = None) -> Union[SampleGoat, SampleSheep]:
"""Get or create a sample providing attributes (search for original_id in
    provided dataset)
Args:
SampleSpecies: (Union[SampleGoat, SampleSheep]): the class required
for insert/update
original_id (str): The original_id in the dataset
dataset (Dataset): the dataset instance used to register sample
type_ (str): "background" or "foreground"
breed (Breed): A breed instance
country (str): Country as a string
chip_name (str): the chip name
sex (SEX): A SEX instance
alias (str): an original_id alias
Returns:
Union[SampleGoat, SampleSheep]: a SampleSpecies instance
"""
created = False
# search for sample in database
qs = SampleSpecies.objects(
original_id=original_id, dataset=dataset)
if qs.count() == 1:
logger.debug(f"Sample '{original_id}' found in database")
sample = qs.get()
elif qs.count() == 0:
# insert sample into database
logger.info(f"Registering sample '{original_id}' in database")
sample = SampleSpecies(
original_id=original_id,
country=country,
species=dataset.species,
breed=breed.name,
breed_code=breed.code,
dataset=dataset,
type_=type_,
chip_name=chip_name,
sex=sex,
alias=alias
)
sample.save()
# incrementing breed n_individuals counter
breed.n_individuals += 1
breed.save()
created = True
else:
raise SmarterDBException(
f"Got {qs.count()} results for '{original_id}'")
return sample, created
def get_sample_type(dataset: Dataset):
"""
test if foreground or background dataset
Args:
dataset (Dataset): the dataset instance used to register sample
Returns:
str: sample type ("background" or "foreground")
"""
type_ = None
for sampletype in SAMPLETYPE:
if sampletype.value in dataset.type_:
logger.debug(
f"Found {sampletype.value} in {dataset.type_}")
type_ = sampletype.value
break
return type_
class Consequence(mongoengine.EmbeddedDocument):
pass
class Location(mongoengine.EmbeddedDocument):
ss_id = mongoengine.StringField()
version = mongoengine.StringField(required=True)
chrom = mongoengine.StringField(required=True)
position = mongoengine.IntField(required=True)
alleles = mongoengine.StringField()
illumina = mongoengine.StringField(required=True)
illumina_forward = mongoengine.StringField()
illumina_strand = mongoengine.StringField()
affymetrix_ab = mongoengine.StringField()
strand = mongoengine.StringField()
imported_from = mongoengine.StringField(required=True)
    # this could be the manufacture date or the last update
date = mongoengine.DateTimeField()
consequences = mongoengine.ListField(
mongoengine.EmbeddedDocumentField(Consequence), default=None)
def __init__(self, *args, **kwargs):
illumina_top = None
# remove illumina top from arguments
if 'illumina_top' in kwargs:
illumina_top = kwargs.pop('illumina_top')
# initialize base object
super(Location, self).__init__(*args, **kwargs)
# fix illumina top if necessary
if illumina_top:
self.illumina_top = illumina_top
@property
def illumina_top(self):
"""Return genotype in illumina top format"""
if self.illumina_strand in ['BOT', 'bottom']:
return complement(self.illumina)
elif (not self.illumina_strand or
self.illumina_strand in ['TOP', 'top']):
return self.illumina
else:
raise SmarterDBException(
f"{self.illumina_strand} not managed")
@illumina_top.setter
def illumina_top(self, genotype: str):
if (not self.illumina_strand or
self.illumina_strand in ['TOP', 'top']):
self.illumina = genotype
elif self.illumina_strand in ['BOT', 'bottom']:
self.illumina = complement(genotype)
else:
raise SmarterDBException(
f"{self.illumina_strand} not managed")
def __str__(self):
return (
f"({self.imported_from}:{self.version}) "
f"{self.chrom}:{self.position} [{self.illumina_top}]"
)
def __eq__(self, other):
if super().__eq__(other):
return True
else:
# check by positions
for attribute in ["chrom", "position"]:
if getattr(self, attribute) != getattr(other, attribute):
return False
# check genotype equality
if self.illumina_top != other.illumina_top:
return False
return True
def __check_coding(self, genotype: list, coding: str, missing: str):
"""Internal method to check genotype coding"""
# get illumina data as an array
data = getattr(self, coding).split("/")
for allele in genotype:
            # mind the missing values: a missing allele can never match the coding's alleles
if allele in missing:
continue
if allele not in data:
return False
return True
def is_top(self, genotype: list, missing: list = ["0", "-"]) -> bool:
"""Return True if genotype is compatible with illumina TOP coding
Args:
genotype (list): a list of two alleles (ex ['A','C'])
missing (str): missing allele string (def "0")
Returns:
bool: True if in top coordinates
"""
return self.__check_coding(genotype, "illumina_top", missing)
def is_forward(self, genotype: list, missing: list = ["0", "-"]) -> bool:
"""Return True if genotype is compatible with illumina FORWARD coding
Args:
genotype (list): a list of two alleles (ex ['A','C'])
missing (str): missing allele string (def "0")
Returns:
            bool: True if the genotype is in illumina forward coding
"""
return self.__check_coding(genotype, "illumina_forward", missing)
def is_ab(self, genotype: list, missing: list = ["0", "-"]) -> bool:
"""Return True if genotype is compatible with illumina AB coding
Args:
genotype (list): a list of two alleles (ex ['A','B'])
missing (str): missing allele string (def "-")
Returns:
            bool: True if the genotype is in illumina AB coding
"""
for allele in genotype:
            # mind the missing values
if allele not in ["A", "B"] + missing:
return False
return True
def is_affymetrix(
self, genotype: list, missing: list = ["0", "-"]) -> bool:
"""Return True if genotype is compatible with affymetrix coding
Args:
genotype (list): a list of two alleles (ex ['A','C'])
missing (str): missing allele string (def "0")
Returns:
            bool: True if the genotype is in affymetrix coding
"""
return self.__check_coding(genotype, "affymetrix_ab", missing)
def forward2top(self, genotype: list, missing: list = ["0", "-"]) -> list:
"""Convert an illumina forward SNP in a illumina top snp
Args:
genotype (list): a list of two alleles (ex ['A','C'])
missing (str): missing allele string (def "0")
Returns:
list: The genotype in top format
"""
# get illumina data as an array
forward = self.illumina_forward.split("/")
top = self.illumina_top.split("/")
result = []
for allele in genotype:
# mind to missing values
if allele in missing:
result.append("0")
elif allele not in forward:
raise SmarterDBException(
f"{genotype} is not in forward coding")
else:
result.append(top[forward.index(allele)])
return result
def ab2top(self, genotype: list, missing: list = ["0", "-"]) -> list:
"""Convert an illumina ab SNP in a illumina top snp
Args:
genotype (list): a list of two alleles (ex ['A','B'])
missing (str): missing allele string (def "-")
Returns:
list: The genotype in top format
"""
# get illumina data as a dict
top = self.illumina_top.split("/")
top = {"A": top[0], "B": top[1]}
result = []
for allele in genotype:
# mind to missing values
if allele in missing:
result.append("0")
elif allele not in ["A", "B"]:
raise SmarterDBException(
f"{genotype} is not in ab coding")
else:
result.append(top[allele])
return result
def affy2top(self, genotype: list, missing: list = ["0", "-"]) -> list:
"""Convert an affymetrix SNP in a illumina top snp
Args:
genotype (list): a list of two alleles (ex ['A','C'])
missing (str): missing allele string (def "0")
Returns:
list: The genotype in top format
"""
# get illumina data as an array
affymetrix = self.affymetrix_ab.split("/")
top = self.illumina_top.split("/")
result = []
for allele in genotype:
# mind to missing values
if allele in missing:
result.append("0")
elif allele not in affymetrix:
raise SmarterDBException(
f"{genotype} is not in affymetrix coding")
else:
result.append(top[affymetrix.index(allele)])
return result
class VariantSpecies(mongoengine.Document):
rs_id = mongoengine.StringField()
chip_name = mongoengine.ListField(mongoengine.StringField())
name = mongoengine.StringField(unique=True)
# sequence should model both illumina or affymetrix sequences
sequence = mongoengine.DictField()
locations = mongoengine.ListField(
mongoengine.EmbeddedDocumentField(Location))
# HINT: should sender be a Location attribute?
sender = mongoengine.StringField()
    # Affymetrix specific fields
    # more than one probe could be assigned to the same SNP
probeset_id = mongoengine.ListField(mongoengine.StringField())
affy_snp_id = mongoengine.StringField()
cust_id = mongoengine.StringField()
# abstract class with custom indexes
# TODO: need a index for position (chrom, position, version)
meta = {
'abstract': True,
'indexes': [
{
'fields': [
"locations.chrom",
"locations.position"
],
},
'probeset_id',
'rs_id'
]
}
def __str__(self):
return (f"name='{self.name}', rs_id='{self.rs_id}'")
def save(self, *args, **kwargs):
"""Custom save method. Deal with variant name before save"""
if not self.name and self.affy_snp_id:
logger.debug(f"Set variant name to {self.affy_snp_id}")
self.name = self.affy_snp_id
# default save method
super(VariantSpecies, self).save(*args, **kwargs)
def get_location_index(self, version: str, imported_from='SNPchiMp v.3'):
"""Returns location index for assembly version and imported source
Args:
version (str): assembly version (ex: 'Oar_v3.1')
imported_from (str): coordinates source (ex: 'SNPchiMp v.3')
Returns:
int: the index of the location requested
"""
for index, location in enumerate(self.locations):
if (location.version == version and
location.imported_from == imported_from):
return index
raise SmarterDBException(
f"Location '{version}' '{imported_from}' is not in locations"
)
def get_location(self, version: str, imported_from='SNPchiMp v.3'):
"""Returns location for assembly version and imported source
Args:
version (str): assembly version (ex: 'Oar_v3.1')
imported_from (str): coordinates source (ex: 'SNPchiMp v.3')
Returns:
Location: the genomic coordinates
"""
def custom_filter(location: Location):
if (location.version == version and
location.imported_from == imported_from):
return True
return False
locations = list(filter(custom_filter, self.locations))
if len(locations) != 1:
raise SmarterDBException(
"Couldn't determine a unique location for "
f"'{self.name}' '{version}' '{imported_from}'")
return locations[0]
class VariantSheep(VariantSpecies):
meta = {
'db_alias': DB_ALIAS,
'collection': 'variantSheep'
}
class VariantGoat(VariantSpecies):
meta = {
'db_alias': DB_ALIAS,
'collection': 'variantGoat'
}
| 28.472425
| 121
| 0.604853
| 20,554
| 0.751188
| 0
| 0
| 2,455
| 0.089723
| 0
| 0
| 10,042
| 0.367005
|
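The Location helpers above can be exercised without a MongoDB connection, since mongoengine embedded documents can be instantiated locally. The sketch below uses invented field values and assumes the module is importable under its repository path.

# Illustrative only; coordinates and alleles are made up.
from src.features.smarterdb import Location, SEX

loc = Location(
    version="Oar_v3.1",
    chrom="1",
    position=100,
    illumina="A/C",
    illumina_strand="BOT",
    imported_from="SNPchiMp v.3",
)
print(loc.illumina_top)        # 'T/G' -- complemented because the strand is BOT
print(loc.ab2top(["A", "B"]))  # ['T', 'G']
print(loc.ab2top(["-", "-"]))  # ['0', '0'] -- missing alleles become '0'
print(SEX.from_string("F"))    # Female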
1838c0e9c32271122443074ccc035f2557452781
| 6,143
|
py
|
Python
|
test/utils/multi_objective/test_box_decomposition.py
|
SamuelMarks/botorch
|
7801e2f56dc447322b2b6c92cab683d8900e4c7f
|
[
"MIT"
] | 2
|
2021-01-11T18:16:27.000Z
|
2021-11-30T09:34:44.000Z
|
test/utils/multi_objective/test_box_decomposition.py
|
SamuelMarks/botorch
|
7801e2f56dc447322b2b6c92cab683d8900e4c7f
|
[
"MIT"
] | 17
|
2020-12-11T20:07:22.000Z
|
2022-03-27T16:46:42.000Z
|
test/utils/multi_objective/test_box_decomposition.py
|
SamuelMarks/botorch
|
7801e2f56dc447322b2b6c92cab683d8900e4c7f
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import annotations
import torch
from botorch.exceptions.errors import BotorchError, BotorchTensorDimensionError
from botorch.utils.multi_objective.box_decomposition import NondominatedPartitioning
from botorch.utils.testing import BotorchTestCase
class TestNonDominatedPartitioning(BotorchTestCase):
def test_non_dominated_partitioning(self):
tkwargs = {"device": self.device}
for dtype in (torch.float, torch.double):
tkwargs["dtype"] = dtype
partitioning = NondominatedPartitioning(num_outcomes=2)
# assert error is raised if pareto_Y has not been computed
with self.assertRaises(BotorchError):
partitioning.pareto_Y
# test eps
# no pareto_Y
self.assertEqual(partitioning.eps, 1e-6)
partitioning = NondominatedPartitioning(num_outcomes=2, eps=1.0)
# eps set
self.assertEqual(partitioning.eps, 1.0)
# set pareto_Y
partitioning = NondominatedPartitioning(num_outcomes=2)
Y = torch.zeros(1, 2, **tkwargs)
partitioning.update(Y=Y)
self.assertEqual(partitioning.eps, 1e-6 if dtype == torch.float else 1e-8)
# test _update_pareto_Y
partitioning.Y = -Y
self.assertFalse(partitioning._update_pareto_Y())
# test m=2
arange = torch.arange(3, 9, **tkwargs)
pareto_Y = torch.stack([arange, 11 - arange], dim=-1)
Y = torch.cat(
[
pareto_Y,
torch.tensor(
[[8.0, 2.0], [7.0, 1.0]], **tkwargs
), # add some non-pareto elements
],
dim=0,
)
partitioning = NondominatedPartitioning(num_outcomes=2, Y=Y)
sorting = torch.argsort(pareto_Y[:, 0], descending=True)
self.assertTrue(torch.equal(pareto_Y[sorting], partitioning.pareto_Y))
ref_point = torch.zeros(2, **tkwargs)
inf = float("inf")
expected_cell_bounds = torch.tensor(
[
[
[8.0, 0.0],
[7.0, 3.0],
[6.0, 4.0],
[5.0, 5.0],
[4.0, 6.0],
[3.0, 7.0],
[0.0, 8.0],
],
[
[inf, inf],
[8.0, inf],
[7.0, inf],
[6.0, inf],
[5.0, inf],
[4.0, inf],
[3.0, inf],
],
],
**tkwargs
)
cell_bounds = partitioning.get_hypercell_bounds(ref_point)
self.assertTrue(torch.equal(cell_bounds, expected_cell_bounds))
# test compute hypervolume
hv = partitioning.compute_hypervolume(ref_point)
self.assertEqual(hv, 49.0)
# test error when reference is not worse than all pareto_Y
with self.assertRaises(ValueError):
partitioning.compute_hypervolume(pareto_Y.max(dim=0).values)
# test error with partition_non_dominated_space_2d for m=3
partitioning = NondominatedPartitioning(
num_outcomes=3, Y=torch.zeros(1, 3, **tkwargs)
)
with self.assertRaises(BotorchTensorDimensionError):
partitioning.partition_non_dominated_space_2d()
# test m=3
pareto_Y = torch.tensor(
[[1.0, 6.0, 8.0], [2.0, 4.0, 10.0], [3.0, 5.0, 7.0]], **tkwargs
)
partitioning = NondominatedPartitioning(num_outcomes=3, Y=pareto_Y)
sorting = torch.argsort(pareto_Y[:, 0], descending=True)
self.assertTrue(torch.equal(pareto_Y[sorting], partitioning.pareto_Y))
ref_point = torch.tensor([-1.0, -2.0, -3.0], **tkwargs)
expected_cell_bounds = torch.tensor(
[
[
[1.0, 4.0, 7.0],
[-1.0, -2.0, 10.0],
[-1.0, 4.0, 8.0],
[1.0, -2.0, 10.0],
[1.0, 4.0, 8.0],
[-1.0, 6.0, -3.0],
[1.0, 5.0, -3.0],
[-1.0, 5.0, 8.0],
[2.0, -2.0, 7.0],
[2.0, 4.0, 7.0],
[3.0, -2.0, -3.0],
[2.0, -2.0, 8.0],
[2.0, 5.0, -3.0],
],
[
[2.0, 5.0, 8.0],
[1.0, 4.0, inf],
[1.0, 5.0, inf],
[2.0, 4.0, inf],
[2.0, 5.0, inf],
[1.0, inf, 8.0],
[2.0, inf, 8.0],
[2.0, inf, inf],
[3.0, 4.0, 8.0],
[3.0, 5.0, 8.0],
[inf, 5.0, 8.0],
[inf, 5.0, inf],
[inf, inf, inf],
],
],
**tkwargs
)
cell_bounds = partitioning.get_hypercell_bounds(ref_point)
# cell bounds can have different order
num_matches = (
(cell_bounds.unsqueeze(0) == expected_cell_bounds.unsqueeze(1))
.all(dim=-1)
.any(dim=0)
.sum()
)
self.assertTrue(num_matches, 9)
# test compute hypervolume
hv = partitioning.compute_hypervolume(ref_point)
self.assertEqual(hv, 358.0)
# TODO: test approximate decomposition
| 40.414474
| 86
| 0.453524
| 5,674
| 0.923653
| 0
| 0
| 0
| 0
| 0
| 0
| 637
| 0.103695
|
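Outside the test harness, the (older botorch) API exercised above comes down to a handful of calls; a minimal two-objective sketch mirroring the test, with made-up outcome values:

# Assumes a botorch version that still exposes NondominatedPartitioning at this path.
import torch
from botorch.utils.multi_objective.box_decomposition import NondominatedPartitioning

Y = torch.tensor([[3.0, 8.0], [5.0, 6.0], [7.0, 4.0], [4.0, 2.0]])  # example outcomes
partitioning = NondominatedPartitioning(num_outcomes=2, Y=Y)
ref_point = torch.zeros(2)

print(partitioning.pareto_Y)                        # the dominated row [4., 2.] is dropped
print(partitioning.compute_hypervolume(ref_point))  # hypervolume dominated w.r.t. ref_point
cell_bounds = partitioning.get_hypercell_bounds(ref_point)  # shape: 2 x n_cells x 2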
183903f43cbf11f71276277d26afb62e4bb54ab6
| 34,139
|
py
|
Python
|
tests/pyupgrade_test.py
|
sloria/pyupgrade
|
18c625150c7118d05e6f15facf77a0423b764230
|
[
"MIT"
] | null | null | null |
tests/pyupgrade_test.py
|
sloria/pyupgrade
|
18c625150c7118d05e6f15facf77a0423b764230
|
[
"MIT"
] | null | null | null |
tests/pyupgrade_test.py
|
sloria/pyupgrade
|
18c625150c7118d05e6f15facf77a0423b764230
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import ast
import sys
import pytest
from pyupgrade import _fix_dict_set
from pyupgrade import _fix_escape_sequences
from pyupgrade import _fix_format_literals
from pyupgrade import _fix_fstrings
from pyupgrade import _fix_new_style_classes
from pyupgrade import _fix_percent_format
from pyupgrade import _fix_six
from pyupgrade import _fix_super
from pyupgrade import _fix_tokens
from pyupgrade import _fix_unicode_literals
from pyupgrade import _imports_unicode_literals
from pyupgrade import _is_bytestring
from pyupgrade import _percent_to_format
from pyupgrade import _simplify_conversion_flag
from pyupgrade import main
from pyupgrade import parse_format
from pyupgrade import parse_percent_format
from pyupgrade import unparse_parsed_string
@pytest.mark.parametrize(
's',
(
'', 'foo', '{}', '{0}', '{named}', '{!r}', '{:>5}', '{{', '}}',
'{0!s:15}'
),
)
def test_roundtrip_text(s):
assert unparse_parsed_string(parse_format(s)) == s
@pytest.mark.parametrize(
('s', 'expected'),
(
('{:}', '{}'),
('{0:}', '{0}'),
('{0!r:}', '{0!r}'),
),
)
def test_intentionally_not_round_trip(s, expected):
# Our unparse simplifies empty parts, whereas stdlib allows them
ret = unparse_parsed_string(parse_format(s))
assert ret == expected
@pytest.mark.parametrize(
('s', 'expected'),
(
# Don't touch empty set literals
('set()', 'set()'),
# Don't touch set(empty literal) with newlines in them (may create
# syntax errors)
('set((\n))', 'set((\n))'),
# Don't touch weird looking function calls -- use autopep8 or such
# first
('set (())', 'set (())'),
('set ((1, 2))', 'set ((1, 2))'),
# Take a set literal with an empty tuple / list and remove the arg
('set(())', 'set()'),
('set([])', 'set()'),
# Remove spaces in empty set literals
('set(( ))', 'set()'),
# Some "normal" test cases
('set((1, 2))', '{1, 2}'),
('set([1, 2])', '{1, 2}'),
('set(x for x in y)', '{x for x in y}'),
('set([x for x in y])', '{x for x in y}'),
# These are strange cases -- the ast doesn't tell us about the parens
# here so we have to parse ourselves
('set((x for x in y))', '{x for x in y}'),
('set(((1, 2)))', '{1, 2}'),
# The ast also doesn't tell us about the start of the tuple in this
# generator expression
('set((a, b) for a, b in y)', '{(a, b) for a, b in y}'),
# The ast also doesn't tell us about the start of the tuple for
# tuple of tuples
('set(((1, 2), (3, 4)))', '{(1, 2), (3, 4)}'),
# Lists where the first element is a tuple also gives the ast trouble
# The first element lies about the offset of the element
('set([(1, 2), (3, 4)])', '{(1, 2), (3, 4)}'),
(
'set(\n'
' [(1, 2)]\n'
')',
'{\n'
' (1, 2)\n'
'}',
),
('set([((1, 2)), (3, 4)])', '{((1, 2)), (3, 4)}'),
# And it gets worse
('set((((1, 2),),))', '{((1, 2),)}'),
# Some multiline cases
('set(\n(1, 2))', '{\n1, 2}'),
('set((\n1,\n2,\n))\n', '{\n1,\n2,\n}\n'),
# Nested sets
(
'set((frozenset(set((1, 2))), frozenset(set((3, 4)))))',
'{frozenset({1, 2}), frozenset({3, 4})}',
),
# Remove trailing commas on inline things
('set((1,))', '{1}'),
('set((1, ))', '{1}'),
# Remove trailing commas after things
('set([1, 2, 3,],)', '{1, 2, 3}'),
('set((x for x in y),)', '{x for x in y}'),
(
'set(\n'
' (x for x in y),\n'
')',
'{\n'
' x for x in y\n'
'}',
),
),
)
def test_sets(s, expected):
ret = _fix_dict_set(s)
assert ret == expected
@pytest.mark.xfail(sys.version_info >= (3, 7), reason='genexp trailing comma')
@pytest.mark.parametrize(
('s', 'expected'),
(
('set(x for x in y,)', '{x for x in y}'),
(
'set(\n'
' x for x in y,\n'
')',
'{\n'
' x for x in y\n'
'}',
),
),
)
def test_sets_generators_trailing_comas(s, expected):
ret = _fix_dict_set(s)
assert ret == expected
@pytest.mark.parametrize(
('s', 'expected'),
(
# Don't touch irrelevant code
('x = 5', 'x = 5'),
('dict()', 'dict()'),
# Don't touch syntax errors
('(', '('),
# Don't touch strange looking calls
('dict ((a, b) for a, b in y)', 'dict ((a, b) for a, b in y)'),
# dict of generator expression
('dict((a, b) for a, b in y)', '{a: b for a, b in y}'),
('dict((a, b,) for a, b in y)', '{a: b for a, b in y}'),
('dict((a, b, ) for a, b in y)', '{a: b for a, b in y}'),
('dict([a, b] for a, b in y)', '{a: b for a, b in y}'),
# Parenthesized target
('dict(((a, b)) for a, b in y)', '{a: b for a, b in y}'),
# dict of list comprehension
('dict([(a, b) for a, b in y])', '{a: b for a, b in y}'),
# ast doesn't tell us about the tuple in the list
('dict([(a, b), c] for a, b, c in y)', '{(a, b): c for a, b, c in y}'),
# ast doesn't tell us about parenthesized keys
('dict(((a), b) for a, b in y)', '{(a): b for a, b in y}'),
# Nested dictcomps
(
'dict((k, dict((k2, v2) for k2, v2 in y2)) for k, y2 in y)',
'{k: {k2: v2 for k2, v2 in y2} for k, y2 in y}',
),
# This doesn't get fixed by autopep8 and can cause a syntax error
('dict((a, b)for a, b in y)', '{a: b for a, b in y}'),
# Need to remove trailing commas on the element
(
'dict(\n'
' (\n'
' a,\n'
' b,\n'
' )\n'
' for a, b in y\n'
')',
# Ideally, this'll go through some other formatting tool before
# being committed. Shrugs!
'{\n'
' a:\n'
' b\n'
' for a, b in y\n'
'}',
),
        # Don't rewrite dict() calls that pass keyword arguments
(
'dict(((a, b) for a, b in y), x=1)',
'dict(((a, b) for a, b in y), x=1)',
),
(
'dict(((a, b) for a, b in y), **kwargs)',
'dict(((a, b) for a, b in y), **kwargs)',
),
# Don't gobble the last paren in a dictcomp
(
'x(\n'
' dict(\n'
' (a, b) for a, b in y\n'
' )\n'
')',
'x(\n'
' {\n'
' a: b for a, b in y\n'
' }\n'
')',
)
),
)
def test_dictcomps(s, expected):
ret = _fix_dict_set(s)
assert ret == expected
@pytest.mark.parametrize(
('s', 'expected'),
(
# Don't touch syntax errors
('"{0}"format(1)', '"{0}"format(1)'),
# Don't touch py27 format strings
("'{}'.format(1)", "'{}'.format(1)"),
# Don't touch invalid format strings
("'{'.format(1)", "'{'.format(1)"),
("'}'.format(1)", "'}'.format(1)"),
# Don't touch non-format strings
("x = ('{0} {1}',)\n", "x = ('{0} {1}',)\n"),
# Don't touch non-incrementing integers
("'{0} {0}'.format(1)", "'{0} {0}'.format(1)"),
# Simplest case
("'{0}'.format(1)", "'{}'.format(1)"),
("'{0:x}'.format(30)", "'{:x}'.format(30)"),
("x = '{0}'.format(1)", "x = '{}'.format(1)"),
# Multiline strings
("'''{0}\n{1}\n'''.format(1, 2)", "'''{}\n{}\n'''.format(1, 2)"),
# Multiple implicitly-joined strings
("'{0}' '{1}'.format(1, 2)", "'{}' '{}'.format(1, 2)"),
# Multiple implicitly-joined strings over lines
(
'print(\n'
" 'foo{0}'\n"
" 'bar{1}'.format(1, 2)\n"
')',
'print(\n'
" 'foo{}'\n"
" 'bar{}'.format(1, 2)\n"
')',
),
        # Multiple implicitly-joined strings over lines with comments
(
'print(\n'
" 'foo{0}' # ohai\n"
" 'bar{1}'.format(1, 2)\n"
')',
'print(\n'
" 'foo{}' # ohai\n"
" 'bar{}'.format(1, 2)\n"
')',
),
# Formats can be embedded in formats, leave these alone?
("'{0:<{1}}'.format(1, 4)", "'{0:<{1}}'.format(1, 4)"),
# joined by backslash
(
'x = "foo {0}" \\\n'
' "bar {1}".format(1, 2)',
'x = "foo {}" \\\n'
' "bar {}".format(1, 2)',
)
),
)
def test_format_literals(s, expected):
ret = _fix_format_literals(s)
assert ret == expected
@pytest.mark.parametrize(
('s', 'expected'),
(
('import x', False),
('from foo import bar', False),
('x = 5', False),
('from __future__ import unicode_literals', True),
(
'"""docstring"""\n'
'from __future__ import unicode_literals',
True,
),
(
'from __future__ import absolute_import\n'
'from __future__ import unicode_literals\n',
True,
),
),
)
def test_imports_unicode_literals(s, expected):
assert _imports_unicode_literals(s) is expected
@pytest.mark.parametrize(
('s', 'py3_plus', 'expected'),
(
# Syntax errors are unchanged
('(', False, '('),
# Without py3-plus, no replacements
("u''", False, "u''"),
# With py3-plus, it removes u prefix
("u''", True, "''"),
        # Importing unicode_literals also causes the prefix to be removed
(
'from __future__ import unicode_literals\n'
'u""\n',
False,
'from __future__ import unicode_literals\n'
'""\n',
),
# Regression: string containing newline
('"""with newline\n"""', True, '"""with newline\n"""'),
),
)
def test_unicode_literals(s, py3_plus, expected):
ret = _fix_unicode_literals(s, py3_plus=py3_plus)
assert ret == expected
@pytest.mark.parametrize(
's',
(
'""',
r'r"\d"', r"r'\d'", r'r"""\d"""', r"r'''\d'''",
# python2 has a bug where `rb'foo'` is tokenized as NAME + STRING
r'rb"\d"',
# make sure we don't replace an already valid string
r'"\\d"',
# in python2 `'\u2603'` is literally \\u2603, but transforming based
# on that would be incorrect in python3.
# intentionally timid here to avoid breaking working python3 code
'"\\u2603"',
# don't touch already valid escapes
r'"\r\n"',
# don't touch escaped newlines
'"""\\\n"""', '"""\\\r\n"""', '"""\\\r"""',
),
)
def test_fix_escape_sequences_noop(s):
assert _fix_escape_sequences(s) == s
@pytest.mark.parametrize(
('s', 'expected'),
(
# no valid escape sequences, make a raw literal
(r'"\d"', r'r"\d"'),
# when there are valid escape sequences, need to use backslashes
(r'"\n\d"', r'"\n\\d"'),
# `ur` is not a valid string prefix in python3.x
(r'u"\d"', r'u"\\d"'),
# `rb` is not a valid string prefix in python2.x
(r'b"\d"', r'br"\d"'),
# 8 and 9 aren't valid octal digits
(r'"\8"', r'r"\8"'), (r'"\9"', r'r"\9"'),
# explicit byte strings should not honor string-specific escapes
('b"\\u2603"', 'br"\\u2603"'),
),
)
def test_fix_escape_sequences(s, expected):
assert _fix_escape_sequences(s) == expected
@pytest.mark.xfail(sys.version_info >= (3,), reason='python2 "feature"')
@pytest.mark.parametrize(
('s', 'expected'),
(
('5L', '5'),
('5l', '5'),
('123456789123456789123456789L', '123456789123456789123456789'),
),
)
def test_long_literals(s, expected):
assert _fix_tokens(s) == expected
@pytest.mark.parametrize(
's',
(
# Any number of zeros is considered a legal token
'0', '00',
# Don't modify non octal literals
'1', '12345', '1.2345',
),
)
def test_noop_octal_literals(s):
assert _fix_tokens(s) == s
@pytest.mark.xfail(sys.version_info >= (3,), reason='python2 "feature"')
@pytest.mark.parametrize(
('s', 'expected'),
(
('0755', '0o755'),
('05', '5'),
),
)
def test_fix_octal_literal(s, expected):
assert _fix_tokens(s) == expected
@pytest.mark.parametrize('s', ("b''", 'b""', 'B""', "B''", "rb''", "rb''"))
def test_is_bytestring_true(s):
assert _is_bytestring(s) is True
@pytest.mark.parametrize('s', ('', '""', "''", 'u""', '"b"'))
def test_is_bytestring_false(s):
assert _is_bytestring(s) is False
@pytest.mark.parametrize(
('s', 'expected'),
(
(
'""', (
('""', None),
),
),
(
'"%%"', (
('"', (None, None, None, None, '%')),
('"', None),
),
),
(
'"%s"', (
('"', (None, None, None, None, 's')),
('"', None),
),
),
(
'"%s two! %s"', (
('"', (None, None, None, None, 's')),
(' two! ', (None, None, None, None, 's')),
('"', None),
),
),
(
'"%(hi)s"', (
('"', ('hi', None, None, None, 's')),
('"', None),
),
),
(
'"%()s"', (
('"', ('', None, None, None, 's')),
('"', None),
),
),
(
'"%#o"', (
('"', (None, '#', None, None, 'o')),
('"', None),
),
),
(
'"% #0-+d"', (
('"', (None, ' #0-+', None, None, 'd')),
('"', None),
),
),
(
'"%5d"', (
('"', (None, None, '5', None, 'd')),
('"', None),
),
),
(
'"%*d"', (
('"', (None, None, '*', None, 'd')),
('"', None),
)
),
(
'"%.f"', (
('"', (None, None, None, '.', 'f')),
('"', None),
),
),
(
'"%.5f"', (
('"', (None, None, None, '.5', 'f')),
('"', None),
),
),
(
'"%.*f"', (
('"', (None, None, None, '.*', 'f')),
('"', None),
),
),
(
'"%ld"', (
('"', (None, None, None, None, 'd')),
('"', None),
),
),
(
'"%(complete)#4.4f"', (
('"', ('complete', '#', '4', '.4', 'f')),
('"', None),
),
),
),
)
def test_parse_percent_format(s, expected):
assert parse_percent_format(s) == expected
@pytest.mark.parametrize(
('s', 'expected'),
(
('%s', '{}'),
('%%%s', '%{}'),
('%(foo)s', '{foo}'),
('%2f', '{:2f}'),
('%r', '{!r}'),
('%a', '{!a}'),
),
)
def test_percent_to_format(s, expected):
assert _percent_to_format(s) == expected
@pytest.mark.parametrize(
('s', 'expected'),
(
('', ''),
(' ', ' '),
(' ', ' '),
('#0- +', '#<+'),
('-', '<'),
),
)
def test_simplify_conversion_flag(s, expected):
assert _simplify_conversion_flag(s) == expected
@pytest.mark.parametrize(
's',
(
# cannot determine whether `unknown_type` is tuple or not
'"%s" % unknown_type',
# format of bytestring cannot be changed to `.format(...)`
'b"%s" % (b"bytestring",)',
# out-of-order parameter consumption
'"%*s" % (5, "hi")', '"%.*s" % (5, "hi")',
# potential conversion to int required
'"%d" % (flt,)', '"%i" % (flt,)', '"%u" % (flt,)',
# potential conversion to character required
'"%c" % (some_string,)',
# different output vs .format() in python 2
'"%#o" % (123,)',
# no format equivalent
'"%()s" % {"": "empty"}',
# different output in python2 / python 3
'"%4%" % ()',
# no equivalent in format specifier
'"%.2r" % (1.25)', '"%.2a" % (1.25)',
# non-string mod
'i % 3',
# dict format but not keyed arguments
'"%s" % {"k": "v"}',
# dict format must have valid identifiers
'"%()s" % {"": "bar"}',
'"%(1)s" % {"1": "bar"}',
# don't trigger `SyntaxError: keyword argument repeated`
'"%(a)s" % {"a": 1, "a": 2}',
# don't rewrite string-joins in dict literal
'"%(ab)s" % {"a" "b": 1}',
# don't rewrite strangely styled things
'"%(a)s" % {"a" : 1}',
# don't rewrite non-str keys
'"%(1)s" % {1: 2, "1": 2}',
# don't rewrite keyword keys
'"%(and)s" % {"and": 2}',
),
)
def test_percent_format_noop(s):
assert _fix_percent_format(s) == s
def _has_16806_bug():
# See https://bugs.python.org/issue16806
return ast.parse('"""\n"""').body[0].value.col_offset == -1
@pytest.mark.xfail(not _has_16806_bug(), reason='multiline string parse bug')
def test_percent_format_noop_if_bug_16806():
s = '"""%s\n""" % ("issue16806",)'
assert _fix_percent_format(s) == s
@pytest.mark.parametrize(
('s', 'expected'),
(
# tuple
('"trivial" % ()', '"trivial".format()'),
('"%s" % ("simple",)', '"{}".format("simple")'),
('"%s" % ("%s" % ("nested",),)', '"{}".format("{}".format("nested"))'),
('"%s%% percent" % (15,)', '"{}% percent".format(15)'),
('"%3f" % (15,)', '"{:3f}".format(15)'),
('"%-5s" % ("hi",)', '"{:<5}".format("hi")'),
('"%9s" % (5,)', '"{:>9}".format(5)'),
('"brace {} %s" % (1,)', '"brace {{}} {}".format(1)'),
(
'"%s" % (\n'
' "trailing comma",\n'
')\n',
'"{}".format(\n'
' "trailing comma",\n'
')\n',
),
# dict
('"%(k)s" % {"k": "v"}', '"{k}".format(k="v")'),
('"%(to_list)s" % {"to_list": []}', '"{to_list}".format(to_list=[])'),
),
)
def test_percent_format(s, expected):
assert _fix_percent_format(s) == expected
@pytest.mark.xfail
@pytest.mark.parametrize(
('s', 'expected'),
(
# currently the approach does not attempt to consider joined strings
(
'paren_continue = (\n'
' "foo %s "\n'
' "bar %s" % (x, y)\n'
')\n',
'paren_continue = (\n'
' "foo {} "\n'
' "bar {}".format(x, y)\n'
')\n',
),
(
'paren_string = (\n'
' "foo %s "\n'
' "bar %s"\n'
') % (x, y)\n',
'paren_string = (\n'
' "foo {} "\n'
' "bar {}"\n'
').format(x, y)\n',
),
(
'paren_continue = (\n'
' "foo %(foo)s "\n'
' "bar %(bar)s" % {"foo": x, "bar": y}\n'
')\n',
'paren_continue = (\n'
' "foo {foo} "\n'
' "bar {bar}".format(foo=x, bar=y)\n'
')\n',
),
(
'paren_string = (\n'
' "foo %(foo)s "\n'
' "bar %(bar)s"\n'
') % {"foo": x, "bar": y}\n',
'paren_string = (\n'
' "foo {foo} "\n'
' "bar {bar}"\n'
').format(foo=x, bar=y)\n',
),
),
)
def test_percent_format_todo(s, expected):
assert _fix_percent_format(s) == expected
@pytest.mark.parametrize(
's',
(
# syntax error
'x(',
'class C(Base):\n'
' def f(self):\n'
' super().f()\n',
# super class doesn't match class name
'class C(Base):\n'
' def f(self):\n'
' super(Base, self).f()\n',
# super outside of a class (technically legal!)
'def f(self):\n'
' super(C, self).f()\n',
# super used in a comprehension
'class C(Base):\n'
' def f(self):\n'
' return [super(C, self).f() for _ in ()]\n',
'class C(Base):\n'
' def f(self):\n'
' return {super(C, self).f() for _ in ()}\n',
'class C(Base):\n'
' def f(self):\n'
' return (super(C, self).f() for _ in ())\n',
'class C(Base):\n'
' def f(self):\n'
' return {True: super(C, self).f() for _ in ()}\n',
# nested comprehension
'class C(Base):\n'
' def f(self):\n'
' return [\n'
' (\n'
' [_ for _ in ()],\n'
' super(C, self).f(),\n'
' )\n'
' for _ in ()'
' ]\n',
# super in a closure
'class C(Base):\n'
' def f(self):\n'
' def g():\n'
' super(C, self).f()\n'
' g()\n',
'class C(Base):\n'
' def f(self):\n'
' g = lambda: super(C, self).f()\n'
' g()\n',
),
)
def test_fix_super_noop(s):
assert _fix_super(s) == s
@pytest.mark.parametrize(
('s', 'expected'),
(
(
'class C(Base):\n'
' def f(self):\n'
' super(C, self).f()\n',
'class C(Base):\n'
' def f(self):\n'
' super().f()\n',
),
(
'class C(Base):\n'
' def f(self):\n'
' super (C, self).f()\n',
'class C(Base):\n'
' def f(self):\n'
' super ().f()\n',
),
(
'class Outer(object):\n'
' class C(Base):\n'
' def f(self):\n'
' super (C, self).f()\n',
'class Outer(object):\n'
' class C(Base):\n'
' def f(self):\n'
' super ().f()\n',
),
(
'class C(Base):\n'
' f = lambda self: super(C, self).f()\n',
'class C(Base):\n'
' f = lambda self: super().f()\n'
),
(
'class C(Base):\n'
' @classmethod\n'
' def f(cls):\n'
' super(C, cls).f()\n',
'class C(Base):\n'
' @classmethod\n'
' def f(cls):\n'
' super().f()\n',
),
),
)
def test_fix_super(s, expected):
assert _fix_super(s) == expected
@pytest.mark.parametrize(
's',
(
# syntax error
'x = (',
# does not inherit from `object`
'class C(B): pass',
),
)
def test_fix_new_style_classes_noop(s):
assert _fix_new_style_classes(s) == s
@pytest.mark.parametrize(
('s', 'expected'),
(
(
'class C(object): pass',
'class C: pass',
),
(
'class C(\n'
' object,\n'
'): pass',
'class C: pass',
),
(
'class C(B, object): pass',
'class C(B): pass',
),
(
'class C(B, (object)): pass',
'class C(B): pass',
),
(
'class C(B, ( object )): pass',
'class C(B): pass',
),
(
'class C((object)): pass',
'class C: pass',
),
(
'class C(\n'
' B,\n'
' object,\n'
'): pass\n',
'class C(\n'
' B,\n'
'): pass\n',
),
(
'class C(\n'
' B,\n'
' object\n'
'): pass\n',
'class C(\n'
' B\n'
'): pass\n',
),
# only legal in python2
(
'class C(object, B): pass',
'class C(B): pass',
),
(
'class C((object), B): pass',
'class C(B): pass',
),
(
'class C(( object ), B): pass',
'class C(B): pass',
),
(
'class C(\n'
' object,\n'
' B,\n'
'): pass',
'class C(\n'
' B,\n'
'): pass',
),
),
)
def test_fix_new_style_classes(s, expected):
assert _fix_new_style_classes(s) == expected
@pytest.mark.parametrize(
's',
(
# syntax error
'x = (',
# weird attributes
'isinstance(s, six . string_types)',
# weird space at beginning of decorator
'@ six.python_2_unicode_compatible\n'
'class C: pass',
# unrelated
'from os import path',
'from six import moves',
# unrelated decorator
'@mydec\n'
'class C: pass',
# renaming things for weird reasons
'from six import StringIO as text_type\n'
'isinstance(s, text_type)\n',
        # weird spaces at beginning of calls
'six.u ("bar")',
'from six import u\nu ("bar")',
'six.raise_from (exc, exc_from)',
'from six import raise_from\nraise_from (exc, exc_from)',
# don't rewrite things that would become `raise` in non-statements
'print(six.raise_from(exc, exc_from))',
)
)
def test_fix_six_noop(s):
assert _fix_six(s) == s
@pytest.mark.parametrize(
('s', 'expected'),
(
(
'isinstance(s, six.text_type)',
'isinstance(s, str)',
),
(
'isinstance(s, six.string_types)',
'isinstance(s, str)',
),
(
'issubclass(tp, six.string_types)',
'issubclass(tp, str)',
),
(
'STRING_TYPES = six.string_types',
'STRING_TYPES = (str,)',
),
(
'from six import string_types\n'
'isinstance(s, string_types)\n',
'from six import string_types\n'
'isinstance(s, str)\n',
),
(
'from six import string_types\n'
'STRING_TYPES = string_types\n',
'from six import string_types\n'
'STRING_TYPES = (str,)\n',
),
(
'@six.python_2_unicode_compatible\n'
'class C: pass',
'class C: pass',
),
(
'@six.python_2_unicode_compatible\n'
'@other_decorator\n'
'class C: pass',
'@other_decorator\n'
'class C: pass',
),
(
'from six import python_2_unicode_compatible\n'
'@python_2_unicode_compatible\n'
'class C: pass',
'from six import python_2_unicode_compatible\n'
'class C: pass',
),
(
'six.get_unbound_method(meth)\n',
'meth\n',
),
(
'from six import get_unbound_method\n'
'get_unbound_method(meth)\n',
'from six import get_unbound_method\n'
'meth\n',
),
(
'six.indexbytes(bs, i)\n',
'bs[i]\n',
),
(
'six.assertCountEqual(\n'
' self,\n'
' arg1,\n'
' arg2,\n'
')',
'self.assertCountEqual(\n'
' arg1,\n'
' arg2,\n'
')',
),
(
'six.assertCountEqual(\n'
' self,\\\n'
' arg1,\n'
' arg2,\n'
')',
'self.assertCountEqual(\\\n'
' arg1,\n'
' arg2,\n'
')',
),
(
'six.assertCountEqual(\n'
' self, # hello\n'
' arg1,\n'
' arg2,\n'
')',
'self.assertCountEqual(\n'
' arg1,\n'
' arg2,\n'
')',
),
(
'six.assertCountEqual(\n'
' self,\n'
' arg1,\n'
' (1, 2, 3),\n'
')',
'self.assertCountEqual(\n'
' arg1,\n'
' (1, 2, 3),\n'
')',
),
(
'six.raise_from(exc, exc_from)\n',
'raise exc from exc_from\n',
),
(
'six.reraise(tp, exc, tb)\n',
'raise exc.with_traceback(tb)\n',
),
(
'from six import raise_from\n'
'raise_from(exc, exc_from)\n',
'from six import raise_from\n'
'raise exc from exc_from\n',
),
(
'six.reraise(\n'
' tp,\n'
' exc,\n'
' tb,\n'
')\n',
'raise exc.with_traceback(tb)\n',
),
),
)
def test_fix_six(s, expected):
assert _fix_six(s) == expected
@pytest.mark.xfail(sys.version_info < (3,), reason='py3+ metaclass')
@pytest.mark.parametrize(
('s', 'expected'),
(
(
'class C(object, metaclass=ABCMeta): pass',
'class C(metaclass=ABCMeta): pass',
),
),
)
def test_fix_new_style_classes_py3only(s, expected):
assert _fix_new_style_classes(s) == expected
@pytest.mark.parametrize(
's',
(
# syntax error
'(',
# weird syntax
'"{}" . format(x)',
# spans multiple lines
'"{}".format(\n a,\n)',
# starargs
'"{} {}".format(*a)', '"{foo} {bar}".format(**b)"',
# likely makes the format longer
'"{0} {0}".format(arg)', '"{x} {x}".format(arg)',
'"{x.y} {x.z}".format(arg)',
# bytestrings don't participate in `.format()` or `f''`
# but are legal in python 2
'b"{} {}".format(a, b)',
# for now, too difficult to rewrite correctly
'"{:{}}".format(x, y)',
),
)
def test_fix_fstrings_noop(s):
assert _fix_fstrings(s) == s
@pytest.mark.parametrize(
('s', 'expected'),
(
('"{} {}".format(a, b)', 'f"{a} {b}"'),
('"{1} {0}".format(a, b)', 'f"{b} {a}"'),
('"{x.y}".format(x=z)', 'f"{z.y}"'),
('"{.x} {.y}".format(a, b)', 'f"{a.x} {b.y}"'),
('"{} {}".format(a.b, c.d)', 'f"{a.b} {c.d}"'),
('"hello {}!".format(name)', 'f"hello {name}!"'),
('"{}{{}}{}".format(escaped, y)', 'f"{escaped}{{}}{y}"'),
# TODO: poor man's f-strings?
# '"{foo}".format(**locals())'
),
)
def test_fix_fstrings(s, expected):
assert _fix_fstrings(s) == expected
def test_main_trivial():
assert main(()) == 0
def test_main_noop(tmpdir):
f = tmpdir.join('f.py')
f.write('x = 5\n')
assert main((f.strpath,)) == 0
assert f.read() == 'x = 5\n'
def test_main_changes_a_file(tmpdir, capsys):
f = tmpdir.join('f.py')
f.write('x = set((1, 2, 3))\n')
assert main((f.strpath,)) == 1
out, _ = capsys.readouterr()
assert out == 'Rewriting {}\n'.format(f.strpath)
assert f.read() == 'x = {1, 2, 3}\n'
def test_main_keeps_line_endings(tmpdir, capsys):
f = tmpdir.join('f.py')
f.write_binary(b'x = set((1, 2, 3))\r\n')
assert main((f.strpath,)) == 1
assert f.read_binary() == b'x = {1, 2, 3}\r\n'
def test_main_syntax_error(tmpdir):
f = tmpdir.join('f.py')
f.write('from __future__ import print_function\nprint 1\n')
assert main((f.strpath,)) == 0
def test_main_non_utf8_bytes(tmpdir, capsys):
f = tmpdir.join('f.py')
f.write_binary('# -*- coding: cp1252 -*-\nx = €\n'.encode('cp1252'))
assert main((f.strpath,)) == 1
out, _ = capsys.readouterr()
assert out == '{} is non-utf-8 (not supported)\n'.format(f.strpath)
def test_keep_percent_format(tmpdir):
f = tmpdir.join('f.py')
f.write('"%s" % (1,)')
assert main((f.strpath, '--keep-percent-format')) == 0
assert f.read() == '"%s" % (1,)'
assert main((f.strpath,)) == 1
assert f.read() == '"{}".format(1)'
def test_py3_plus_argument_unicode_literals(tmpdir):
f = tmpdir.join('f.py')
f.write('u""')
assert main((f.strpath,)) == 0
assert f.read() == 'u""'
assert main((f.strpath, '--py3-plus')) == 1
assert f.read() == '""'
def test_py3_plus_super(tmpdir):
f = tmpdir.join('f.py')
f.write(
'class C(Base):\n'
' def f(self):\n'
' super(C, self).f()\n',
)
assert main((f.strpath,)) == 0
assert f.read() == (
'class C(Base):\n'
' def f(self):\n'
' super(C, self).f()\n'
)
assert main((f.strpath, '--py3-plus')) == 1
assert f.read() == (
'class C(Base):\n'
' def f(self):\n'
' super().f()\n'
)
def test_py3_plus_new_style_classes(tmpdir):
f = tmpdir.join('f.py')
f.write('class C(object): pass\n')
assert main((f.strpath,)) == 0
assert f.read() == 'class C(object): pass\n'
assert main((f.strpath, '--py3-plus')) == 1
assert f.read() == 'class C: pass\n'
def test_py36_plus_fstrings(tmpdir):
f = tmpdir.join('f.py')
f.write('"{} {}".format(hello, world)')
assert main((f.strpath,)) == 0
assert f.read() == '"{} {}".format(hello, world)'
assert main((f.strpath, '--py36-plus')) == 1
assert f.read() == 'f"{hello} {world}"'
| 27.982787
| 79
| 0.42365
| 0
| 0
| 0
| 0
| 30,325
| 0.888228
| 0
| 0
| 16,550
| 0.484754
|
1839ffd1101b5584269c5f29639d17cc7d6a6e7c
| 194
|
py
|
Python
|
Preprocessing/preprocessing.py
|
nadineazhalia/CSH4H3-TEXT-MINING
|
77b2ffb862314d664f575757a40038cc69f86c60
|
[
"Apache-2.0"
] | null | null | null |
Preprocessing/preprocessing.py
|
nadineazhalia/CSH4H3-TEXT-MINING
|
77b2ffb862314d664f575757a40038cc69f86c60
|
[
"Apache-2.0"
] | null | null | null |
Preprocessing/preprocessing.py
|
nadineazhalia/CSH4H3-TEXT-MINING
|
77b2ffb862314d664f575757a40038cc69f86c60
|
[
"Apache-2.0"
] | null | null | null |
file_berita = open("berita.txt", "r")
berita = file_berita.read()
berita = berita.split()
berita = [x.lower() for x in berita]
berita = list(set(berita))
berita = sorted(berita)
print (berita)
| 21.555556
| 37
| 0.695876
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 15
| 0.07732
|
183a36737605defc576589d45932fdf08d365a08
| 2,139
|
py
|
Python
|
demo_scripts/charts/bar_chart_index_translator_demo.py
|
webclinic017/qf-lib
|
96463876719bba8a76c8269cef76addf3a2d836d
|
[
"Apache-2.0"
] | 198
|
2019-08-16T15:09:23.000Z
|
2022-03-30T12:44:00.000Z
|
demo_scripts/charts/bar_chart_index_translator_demo.py
|
webclinic017/qf-lib
|
96463876719bba8a76c8269cef76addf3a2d836d
|
[
"Apache-2.0"
] | 13
|
2021-01-07T10:15:19.000Z
|
2022-03-29T13:01:47.000Z
|
demo_scripts/charts/bar_chart_index_translator_demo.py
|
webclinic017/qf-lib
|
96463876719bba8a76c8269cef76addf3a2d836d
|
[
"Apache-2.0"
] | 29
|
2019-08-16T15:21:28.000Z
|
2022-02-23T09:53:49.000Z
|
# Copyright 2016-present CERN – European Organization for Nuclear Research
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import matplotlib.pyplot as plt
import pandas as pd
from qf_lib.common.enums.orientation import Orientation
from qf_lib.plotting.charts.bar_chart import BarChart
from qf_lib.plotting.decorators.data_element_decorator import DataElementDecorator
from qf_lib.plotting.helpers.index_translator import IndexTranslator
index = ['constant', 'b', 'c', 'd']
# index = [0, 4, 5, 6]
labels_to_locations_dict = {
'constant': 0,
'b': 4,
'c': 5,
'd': 6
}
colors = ['orange'] + ['forestgreen'] * 3
def main():
# using automatic mapping between labels and locations
bar_chart2 = BarChart(orientation=Orientation.Horizontal, index_translator=IndexTranslator(),
thickness=1.0, color=colors, align='center')
bar_chart2.add_decorator(DataElementDecorator(pd.Series(data=[1, 2, 3, 4], index=index)))
bar_chart2.add_decorator(DataElementDecorator(pd.Series(data=[3, 1, 2, 4], index=index)))
bar_chart2.plot()
# using custom mapping between labels and locations
bar_chart = BarChart(orientation=Orientation.Horizontal, index_translator=IndexTranslator(labels_to_locations_dict),
thickness=1.0, color=colors, align='center')
bar_chart.add_decorator(DataElementDecorator(pd.Series(data=[1, 2, 3, 4], index=index)))
bar_chart.add_decorator(DataElementDecorator(pd.Series(data=[3, 1, 2, 4], index=index)))
bar_chart.plot()
plt.show(block=True)
if __name__ == '__main__':
main()
| 38.196429
| 120
| 0.71482
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 862
| 0.402616
|
183c49112552415248f084e0c358b6ea11192708
| 2,771
|
py
|
Python
|
tests/request/test_parameter_invalid.py
|
Colin-b/pyxelrest
|
5c8db40d1537d0f9c29acd928ec9519b6bb557ec
|
[
"MIT"
] | 7
|
2018-12-07T10:08:53.000Z
|
2021-03-24T07:52:36.000Z
|
tests/request/test_parameter_invalid.py
|
Colin-b/pyxelrest
|
5c8db40d1537d0f9c29acd928ec9519b6bb557ec
|
[
"MIT"
] | 76
|
2018-12-07T10:29:48.000Z
|
2021-11-17T00:54:24.000Z
|
tests/request/test_parameter_invalid.py
|
Colin-b/pyxelrest
|
5c8db40d1537d0f9c29acd928ec9519b6bb557ec
|
[
"MIT"
] | null | null | null |
import pytest
from responses import RequestsMock
from tests import loader
def test_parameter_cannot_be_parsed(responses: RequestsMock, tmpdir):
responses.add(
responses.GET,
url="http://test/",
json={
"swagger": "2.0",
"paths": {
"/test": {
"get": {
"parameters": [
{
"in": "query",
"name": "param",
"schema": {},
}
],
"responses": {
"200": {
"description": "return value",
"schema": {"type": "string"},
}
},
}
}
},
},
match_querystring=True,
)
with pytest.raises(Exception) as exception_info:
loader.load(
tmpdir,
{
"invalid": {
"open_api": {"definition": "http://test/"},
"formulas": {"dynamic_array": {"lock_excel": True}},
}
},
)
assert (
str(exception_info.value)
== "Unable to extract parameters from {'in': 'query', 'name': 'param', 'schema': {}, 'server_param_name': 'param'}"
)
def test_parameter_with_more_than_one_field_type(responses: RequestsMock, tmpdir):
responses.add(
responses.GET,
url="http://test/",
json={
"swagger": "2.0",
"paths": {
"/test": {
"get": {
"parameters": [
{
"in": "query",
"name": "param",
"type": ["string", "integer"],
}
],
"responses": {
"200": {
"description": "return value",
}
},
}
}
},
},
match_querystring=True,
)
with pytest.raises(Exception) as exception_info:
loader.load(
tmpdir,
{
"invalid": {
"open_api": {"definition": "http://test/"},
"formulas": {"dynamic_array": {"lock_excel": True}},
}
},
)
assert (
str(exception_info.value)
== "Unable to guess field type amongst ['string', 'integer']"
)
| 30.119565
| 123
| 0.339589
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 639
| 0.230603
|
183cd22d8adcd570cdd6c5eceb4ba00ee9152282
| 61
|
py
|
Python
|
src/yookassa_payout/domain/response/__init__.py
|
yoomoney/yookassa-payout-sdk-python
|
f6953e97573bb4a4ee6f830f726a6fcfdf504e2a
|
[
"MIT"
] | 5
|
2021-03-11T14:38:25.000Z
|
2021-08-13T10:41:50.000Z
|
src/yookassa_payout/domain/common/__init__.py
|
yoomoney/yookassa-payout-sdk-python
|
f6953e97573bb4a4ee6f830f726a6fcfdf504e2a
|
[
"MIT"
] | 2
|
2021-02-15T18:18:34.000Z
|
2021-08-13T13:49:46.000Z
|
src/yookassa_payout/domain/request/__init__.py
|
yoomoney/yookassa-payout-sdk-python
|
f6953e97573bb4a4ee6f830f726a6fcfdf504e2a
|
[
"MIT"
] | 1
|
2022-01-29T08:47:02.000Z
|
2022-01-29T08:47:02.000Z
|
"""Package for YooKassa Payout API Python Client Library."""
| 30.5
| 60
| 0.754098
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 60
| 0.983607
|
183d4dac8cfc4c8ac345fb08043e4248c6a0257b
| 467
|
py
|
Python
|
tests/integration/test_entry_point.py
|
jacksmith15/delfino
|
38972e0e0e610c2694462306250a51537a04b1e9
|
[
"MIT"
] | null | null | null |
tests/integration/test_entry_point.py
|
jacksmith15/delfino
|
38972e0e0e610c2694462306250a51537a04b1e9
|
[
"MIT"
] | null | null | null |
tests/integration/test_entry_point.py
|
jacksmith15/delfino
|
38972e0e0e610c2694462306250a51537a04b1e9
|
[
"MIT"
] | null | null | null |
import toml
from delfino.constants import ENTRY_POINT, PYPROJECT_TOML_FILENAME
from delfino.models.pyproject_toml import PyprojectToml
from tests.constants import PROJECT_ROOT
class TestEntrypointConstant:
@staticmethod
def should_match_entrypoint_in_pyproject_toml():
file_path = PROJECT_ROOT / PYPROJECT_TOML_FILENAME
pyproject_toml = PyprojectToml(**toml.load(file_path))
assert ENTRY_POINT in pyproject_toml.tool.poetry.scripts
| 33.357143
| 66
| 0.807281
| 287
| 0.614561
| 0
| 0
| 253
| 0.541756
| 0
| 0
| 0
| 0
|
183ecccecd1a87d9ecdaf239b0b8acab5f9e8ed2
| 6,888
|
py
|
Python
|
gamble/gamble.py
|
lookma/simple-coin-gamble
|
8f1684e62b62f28a176458606ed193c812d97bc7
|
[
"MIT"
] | null | null | null |
gamble/gamble.py
|
lookma/simple-coin-gamble
|
8f1684e62b62f28a176458606ed193c812d97bc7
|
[
"MIT"
] | null | null | null |
gamble/gamble.py
|
lookma/simple-coin-gamble
|
8f1684e62b62f28a176458606ed193c812d97bc7
|
[
"MIT"
] | null | null | null |
from random import randint
from typing import Callable, List, Optional
class Coin:
"""Simulates a coin."""
def __init__(self) -> None:
self.__head = False
self.__toss_count = 0
self.__head_count = 0
def toss(self) -> None:
"""Toss a coin."""
r = randint(1, 2)
self.__head = True if r == 1 else False
self.__toss_count += 1
if self.__head:
self.__head_count += 1
def get_head_percentage(self) -> float:
"""Returns the percentages of heads relative to the total umber of coin tosses."""
return self.__head_count * 100 / self.__toss_count
def is_head(self) -> bool:
"""Check if the coins shows heads."""
return self.__head
def get_head_count(self) -> int:
"""Return the number of tossed heads."""
return self.__head_count
def get_toss_count(self) -> int:
"""Return the number of tosses."""
return self.__toss_count
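    # A minimal usage sketch (not part of the original module) showing how the
    # Coin API above could be exercised; the loop count of 1000 is arbitrary:
    #
    #   coin = Coin()
    #   for _ in range(1000):
    #       coin.toss()
    #   print(coin.get_head_percentage())  # close to 50.0 for this fair coin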
class Player:
def __init__(self, name: str, bet_amount: float) -> None:
self.__name = name
self.__amounts = [bet_amount]
@property
def name(self) -> str:
"""Name of the player."""
return self.__name
@property
def is_winner(self) -> bool:
"""
Check if the player is a winner.
        If the current amount of a player is greater than or equal to the initial amount, the player is a winner.
"""
return self.__amounts[-1] >= self.__amounts[0]
@property
def is_total_loss(self) -> bool:
"""
Check if player lost everything.
        It is assumed that a total loss occurs if the amount drops below 1% of the initial bet.
"""
return self.amount < self.amounts[0] / 100
@property
def amount(self) -> float:
"""
Current amount of the player.
"""
return self.__amounts[-1]
@property
def amounts(self) -> List[float]:
"""
The amounts for all rounds of the player.
The initial amount (bet) is stored at index 0.
"""
return self.__amounts
def add_new_amount(self, amount: float) -> None:
self.__amounts.append(amount)
class RoundResults:
def __init__(self, players: List[Player]) -> None:
self.__total_amounts: List[float] = []
self.__number_of_winners: List[int] = []
self.__number_of_losers: List[int] = []
self.__number_of_total_losses: List[int] = []
self.__winner_percentages: List[float] = []
self.__min_amounts: List[float] = []
self.__max_amounts: List[float] = []
self.__avg_amounts: List[float] = []
self.add_round(players)
def add_round(self, players: List[Player]) -> None:
total_amount = 0
number_of_winners = 0
number_of_total_losses = 0
min_amount = players[0].amount
max_amount = 0
for player in players:
total_amount += player.amount
min_amount = min(player.amount, min_amount)
max_amount = max(player.amount, max_amount)
if player.is_winner:
number_of_winners += 1
if player.is_total_loss:
number_of_total_losses += 1
winner_percentage = number_of_winners * 100 / len(players)
self.__total_amounts.append(total_amount)
self.__number_of_winners.append(number_of_winners)
self.__number_of_losers.append(len(players) - number_of_winners)
self.__number_of_total_losses.append(number_of_total_losses)
self.__winner_percentages.append(winner_percentage)
self.__min_amounts.append(min_amount)
self.__max_amounts.append(max_amount)
self.__avg_amounts.append(total_amount / len(players))
@property
def number_of_rounds(self) -> int:
return len(self.__total_amounts) - 1
@property
def total_amounts(self) -> List[float]:
return self.__total_amounts
@property
def avg_amounts(self) -> List[float]:
return self.__avg_amounts
@property
def number_of_winners(self) -> List[int]:
return self.__number_of_winners
@property
def number_of_losers(self) -> List[int]:
return self.__number_of_losers
@property
def number_of_total_losses(self) -> List[int]:
return self.__number_of_total_losses
@property
def winner_percentages(self) -> List[float]:
return self.__winner_percentages
@property
def min_amounts(self) -> List[float]:
return self.__min_amounts
@property
def max_amounts(self) -> List[float]:
return self.__max_amounts
class Gamble:
def __init__(
self,
name: str,
number_of_players: int,
number_of_rounds: int,
bet_amount: float,
gain_percentage: int,
loss_percentage: int,
) -> None:
assert number_of_players > 0
assert number_of_rounds > 0
assert bet_amount > 0
assert gain_percentage >= 0
assert loss_percentage >= 0 and loss_percentage <= 100
self.__coin = Coin()
self.__name: str = name
self.__gain_factor: float = 1.0 + gain_percentage / 100.0
self.__loss_factor: float = 1.0 - loss_percentage / 100.0
self.__number_of_rounds: int = number_of_rounds
self.__progress_callback: Optional[Callable[[str, int, int], None]] = None
self.__players = []
for i in range(1, number_of_players + 1):
self.__players.append(Player(name="p" + str(i), bet_amount=bet_amount))
self.__round_results = RoundResults(self.__players)
def set_progress_callback(self, callback: Callable[[str, int, int], None]) -> None:
self.__progress_callback = callback
@property
def name(self) -> str:
return self.__name
def _apply_rule(self, amount: float) -> float:
self.__coin.toss()
amount = (
amount * self.__gain_factor
if self.__coin.is_head()
else amount * self.__loss_factor
)
return round(amount, 2)
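    # Worked example (hypothetical parameters, not taken from the original
    # code): with gain_percentage=50 and loss_percentage=40 the factors above
    # become 1.5 and 0.6, so one head followed by one tail leaves
    # amount * 1.5 * 0.6 = 0.9 * amount -- the bet shrinks even at 50/50 odds.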
def _play_round(self, round_index: int) -> None:
for player in self.__players:
player.add_new_amount(self._apply_rule(player.amount))
return
def play(self) -> None:
for index in range(1, self.__number_of_rounds + 1):
self._play_round(index)
self.__round_results.add_round(self.__players)
if self.__progress_callback:
self.__progress_callback(self.name, index, self.__number_of_rounds)
return
@property
def results(self) -> RoundResults:
return self.__round_results
@property
def players(self) -> List[Player]:
return self.__players
@property
def max_amount(self) -> float:
return max(self.results.max_amounts)
| 30.477876
| 99
| 0.620499
| 6,806
| 0.988095
| 0
| 0
| 2,100
| 0.304878
| 0
| 0
| 741
| 0.107578
|
18412368254bcf43c33a2c706aa24bebe16b5a08
| 16
|
py
|
Python
|
roomai/games/__init__.py
|
tonyxxq/RoomAI
|
5f28e31e659dd7808127c3c3cc386e6892a93982
|
[
"MIT"
] | 1
|
2018-11-29T01:57:18.000Z
|
2018-11-29T01:57:18.000Z
|
roomai/models/texasholdem/__init__.py
|
tonyxxq/RoomAI
|
5f28e31e659dd7808127c3c3cc386e6892a93982
|
[
"MIT"
] | null | null | null |
roomai/models/texasholdem/__init__.py
|
tonyxxq/RoomAI
|
5f28e31e659dd7808127c3c3cc386e6892a93982
|
[
"MIT"
] | null | null | null |
#!/bin/python
| 4
| 13
| 0.5625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 13
| 0.8125
|
1842a50616fbef1cfe0cb3f52da633c9ff6caecd
| 1,285
|
py
|
Python
|
config.py
|
SevenMoGod/movenet.pytorch
|
95ec8535245228aa4335243e68722810e50bcaf8
|
[
"MIT"
] | 87
|
2021-11-13T11:05:55.000Z
|
2022-03-30T11:00:45.000Z
|
config.py
|
Dyian-snow/movenet.pytorch
|
95ec8535245228aa4335243e68722810e50bcaf8
|
[
"MIT"
] | 18
|
2021-11-16T01:13:19.000Z
|
2022-03-31T16:04:31.000Z
|
config.py
|
Dyian-snow/movenet.pytorch
|
95ec8535245228aa4335243e68722810e50bcaf8
|
[
"MIT"
] | 28
|
2021-11-13T11:22:05.000Z
|
2022-03-29T10:02:09.000Z
|
"""
@Fire
https://github.com/fire717
"""
cfg = {
##### Global Setting
'GPU_ID': '0',
"num_workers":8,
"random_seed":42,
"cfg_verbose":True,
"save_dir": "output/",
"num_classes": 17,
"width_mult":1.0,
"img_size": 192,
##### Train Setting
'img_path':"./data/croped/imgs",
'train_label_path':'./data/croped/train2017.json',
'val_label_path':'./data/croped/val2017.json',
'balance_data':False,
'log_interval':10,
'save_best_only': True,
'pin_memory': True,
##### Train Hyperparameters
'learning_rate':0.001,#1.25e-4
'batch_size':64,
'epochs':120,
'optimizer':'Adam', #Adam SGD
'scheduler':'MultiStepLR-70,100-0.1', #default SGDR-5-2 CVPR step-4-0.8 MultiStepLR
'weight_decay' : 5.e-4,#0.0001,
'class_weight': None,#[1., 1., 1., 1., 1., 1., 1., ]
'clip_gradient': 5,#1,
##### Test
'test_img_path':"./data/croped/imgs",
#"../data/eval/imgs",
#"../data/eval/imgs",
#"../data/all/imgs"
#"../data/true/mypc/crop_upper1"
#../data/coco/small_dataset/imgs
#"../data/testimg"
'exam_label_path':'../data/all/data_all_new.json',
'eval_img_path':'../data/eval/imgs',
'eval_label_path':'../data/eval/mypc.json',
}
| 21.416667
| 91
| 0.568872
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 941
| 0.732296
|
18433079856714742d377305353f6075edaf8a57
| 11,038
|
py
|
Python
|
uart.py
|
WRansohoff/nmigen_uart_test
|
d520d3b72698a901f63e3485aadca620f1444350
|
[
"MIT"
] | null | null | null |
uart.py
|
WRansohoff/nmigen_uart_test
|
d520d3b72698a901f63e3485aadca620f1444350
|
[
"MIT"
] | null | null | null |
uart.py
|
WRansohoff/nmigen_uart_test
|
d520d3b72698a901f63e3485aadca620f1444350
|
[
"MIT"
] | null | null | null |
from nmigen import *
from nmigen.back.pysim import *
# Function to calculate a clock divider which creates the
# desired output frequency from a given input frequency.
# Verifies that the divider is a positive integer, and that
# the resulting signal doesn't deviate more than expected.
def clock_divider( inf, outf, max_err ):
# Calculate the divisor.
div = inf // outf
# Check that the rounded clock divider is a positive integer.
if div < 1:
print( "Error: Invalid input / output frequencies to " +
"clock divider (%d / %d)"
%( inf, outf ) )
raise ValueError
# Check that the error value does not exceed the given threshold.
err = ( ( inf / div ) - outf ) / outf
if err > max_err:
print( "Error: Clock divider error rate is too high " +
"(%1.2f > %1.2f)"
%( err, max_err ) )
raise ValueError
# Return the divisor.
return div
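# Example, using the values from the __main__ block below: a 24 MHz input
# clock and a 1 MBaud target give clock_divider( 24000000, 1000000, 0.05 ) == 24
# with zero frequency error.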
# Basic work-in-progress UART modules.
# - TX / RX only, no flow control or USART.
# - Samples during the middle of the clock period, no oversampling.
# - 8-bit words, 1 stop bit, no parity bit.
# - Receives bits LSB-first only.
# - Configurable baud rate.
# UART receiver.
class UART_RX( Elaboratable ):
def __init__( self, clk_freq, baud_rate ):
# Calculate the clock divider for the given frequency / baud rate.
self.clk_div = clock_divider( clk_freq, baud_rate, 0.05 )
# Signals with external connections.
self.rx = Signal()
# Internal signals.
self.rx_count = Signal( range( self.clk_div ) )
self.rx_buf = Signal( 8 )
self.rx_bit = Signal( 3 )
self.rx_ack = Signal()
self.rx_fix = Signal()
self.rx_strobe = Signal()
def elaborate( self, platform ):
m = Module()
# Increment the RX counter each clock edge.
with m.If( self.rx_count == self.clk_div - 1 ):
m.d.sync += self.rx_count.eq( 0 )
with m.Else():
m.d.sync += self.rx_count.eq( self.rx_count + 1 )
# Toggle the "rx_strobe" value whenever "rx_count" ticks over.
m.d.comb += self.rx_strobe.eq( self.rx_count == 0 )
# RX state machine.
with m.FSM() as fsm:
# RX state machine: "Idle" state.
# - Move to the "Start" state when the "RX" line goes low.
# - Set the "rx_count" value to half of the clock divider, so
# that bits are sampled in the middle of each clock period.
# - Reset the other peripheral signals.
with m.State( "RX_IDLE" ):
with m.If( ~self.rx ):
m.next = "RX_START"
m.d.sync += [
self.rx_ack.eq( 0 ),
self.rx_fix.eq( 0 ),
self.rx_bit.eq( 0 ),
self.rx_buf.eq( 0x00 ),
self.rx_count.eq( self.clk_div // 2 )
]
# RX state machine: "Start" state.
# - Wait half a cycle until the clock divider ticks over, then
# move to the "Data" state.
with m.State( "RX_START" ):
with m.If( self.rx_strobe ):
m.next = "RX_DATA"
# RX state machine: "Data" state.
# - Wait one cycle, then prepend the RX bit and move to the
# "Stop" state iff 8 bits have been received.
with m.State( "RX_DATA" ):
with m.If( self.rx_strobe ):
m.d.sync += [
self.rx_buf.eq( Cat( self.rx_buf[ 1 : 8 ], self.rx ) ),
self.rx_bit.eq( self.rx_bit + 1 )
]
with m.If( self.rx_bit == 7 ):
m.next = "RX_STOP"
# RX state machine: "Stop" state.
# - Wait one cycle, then move to the "Full" state if the
# "RX" line is asserted and to the "Error" state if not.
with m.State( "RX_STOP" ):
with m.If( self.rx_strobe ):
with m.If( self.rx ):
m.next = "RX_FULL"
with m.Else():
m.next = "RX_ERROR"
# RX state machine: "Full" state.
# - Wait for an external "rx_ack" signal to indicate that the
# value has been read, then move to the "Idle" state.
# - Move to the "Error" state if a start bit is detected before
# an "rx_ack" signal is received.
with m.State( "RX_FULL" ):
with m.If( self.rx_ack ):
m.next = "RX_IDLE"
with m.Elif( ~self.rx ):
m.next = "RX_ERROR"
# RX state machine: "Error" state.
# - Wait for an external "rx_fix" signal, then return to "Idle".
with m.State( "RX_ERROR" ):
with m.If( self.rx_fix ):
m.next = "RX_IDLE"
return m
# UART transmitter
class UART_TX( Elaboratable ):
def __init__( self, clk_freq, baud_rate ):
# Calculate the clock divider for the given frequency / baud rate.
self.clk_div = clock_divider( clk_freq, baud_rate, 0.05 )
# Signals with external connections.
self.tx = Signal()
# TX signals.
self.tx_count = Signal( range( self.clk_div ) )
self.tx_buf = Signal( 8 )
self.tx_bit = Signal( 3 )
self.tx_new = Signal()
self.tx_fix = Signal()
self.tx_strobe = Signal()
def elaborate( self, platform ):
m = Module()
# Increment the TX counter each clock edge.
with m.If( self.tx_count == self.clk_div - 1 ):
m.d.sync += self.tx_count.eq( 0 )
with m.Else():
m.d.sync += self.tx_count.eq( self.tx_count + 1 )
# Toggle the "tx_strobe" value whenever "tx_count" ticks over.
m.d.comb += self.tx_strobe.eq( self.tx_count == 0 )
# TX state machine.
with m.FSM() as fsm:
# TX state machine: "Idle" state.
# - Move to "Start" state when the "tx_new" signal goes high.
# - Set "tx_count" divider to 1, and reset the other signals.
# - Send a start bit by pulling "tx" low.
with m.State( "TX_IDLE" ):
with m.If( self.tx_new ):
m.next = "TX_START"
m.d.sync += [
self.tx_new.eq( 0 ),
self.tx_fix.eq( 0 ),
self.tx_bit.eq( 0 ),
self.tx_count.eq( 1 ),
self.tx.eq( 0 )
]
# TX state machine: "Start" state.
# - Wait a cycle until the clock divider ticks over, then
# move to the "Data" state.
# - Move to the "Error" state if an unexpected "tx_new" occurs.
with m.State( "TX_START" ):
with m.If( self.tx_strobe ):
m.next = "TX_DATA"
with m.Elif( self.tx_new ):
m.next = "TX_ERROR"
# TX state machine: "Data" state.
# - Set the "tx" value based on the current bit.
# - Wait a cycle, then increment the bit number.
# - Move to the "Stop" state iff 8 bits have been sent.
# - Move to the "Error" state if an unexpected "tx_new" occurs.
with m.State( "TX_DATA" ):
m.d.sync += self.tx.eq( self.tx_buf.bit_select( self.tx_bit, 1 ) )
with m.If( self.tx_strobe ):
m.d.sync += self.tx_bit.eq( self.tx_bit + 1 )
with m.If( self.tx_bit == 7 ):
m.next = "TX_STOP"
with m.Elif( self.tx_new ):
m.next = "TX_ERROR"
# TX state machine: "Stop" state.
# - Pull the "tx" line high, clear the "tx_buf" buffer,
# and then wait one cycle before moving.
# to the "Idle" state.
with m.State( "TX_STOP" ):
m.d.sync += self.tx.eq( 1 )
m.d.sync += self.tx_buf.eq( 0x00 )
with m.If( self.tx_strobe ):
m.next = "TX_IDLE"
# TX state machine: "Error" state.
with m.State( "TX_ERROR" ):
with m.If( self.tx_fix ):
m.next = "TX_IDLE"
m.d.sync += self.tx.eq( 1 )
return m
# Combined UART interface with both TX and RX modules.
class UART( Elaboratable ):
def __init__( self, uart_rx, uart_tx ):
self.uart_rx = uart_rx
self.uart_tx = uart_tx
def elaborate( self, platform ):
m = Module()
m.submodules.rx = self.uart_rx
m.submodules.tx = self.uart_tx
return m
#
# Simple UART testbench.
#
# Helper UART test method to simulate receiving a byte.
def uart_rx_byte( uart, val ):
# Simulate a "start bit".
yield uart.rx.eq( 0 )
# Wait one cycle.
for i in range( uart.clk_div ):
yield Tick()
# Simulate the byte with one cycle between each bit.
for i in range( 8 ):
if val & ( 1 << i ):
yield uart.rx.eq( 1 )
else:
yield uart.rx.eq( 0 )
for j in range( uart.clk_div ):
yield Tick()
# Simulate the "stop bit", and wait one cycle.
yield uart.rx.eq( 1 )
for i in range( uart.clk_div ):
yield Tick()
# Helper UART test method to simulate transmitting a buffered byte.
def uart_tx_byte( uart ):
# Send a "start bit".
yield uart.tx.eq( 0 )
# Wait one cycle.
for i in range( uart.clk_div ):
yield Tick()
# Send the byte with one cycle between each bit.
for i in range( 8 ):
yield uart.tx.eq( uart.tx_buf.bit_select( i, 1 ) )
for j in range( uart.clk_div ):
yield Tick()
# Send the "stop bit", and wait one cycle.
yield uart.tx.eq( 1 )
for i in range( uart.clk_div ):
yield Tick()
# UART 'receive' testbench.
def uart_rx_test( uart_rx ):
# Simulate receiving "0xAF".
yield from uart_rx_byte( uart_rx, 0xAF )
# Wait a couple of cycles, then send "rx_ack".
for i in range( uart_rx.clk_div * 2 ):
yield Tick()
yield uart_rx.rx_ack.eq( 1 )
# Simulate receiving "0x42".
yield from uart_rx_byte( uart_rx, 0x42 )
# Simulate receiving "0x24" (should cause an error)
yield from uart_rx_byte( uart_rx, 0x24 )
# Wait a cycle, then send the "fix" signal and re-send 0x24.
for i in range( uart_rx.clk_div ):
yield Tick()
yield uart_rx.rx_fix.eq( 1 )
yield from uart_rx_byte( uart_rx, 0x24 )
# Send "rx_ack" and wait a cycle to see the end state.
yield uart_rx.rx_ack.eq( 1 )
for i in range( uart_rx.clk_div ):
yield Tick()
# UART 'transmit' testbench.
def uart_tx_test( uart_tx ):
# Set the "tx_buf" value to 0xAF, and set the "tx_new" signal.
yield uart_tx.tx_buf.eq( 0xAF )
yield uart_tx.tx_new.eq( 1 )
# Simulate transmitting the byte.
yield from uart_tx_byte( uart_tx )
# Wait a couple of cycles.
for i in range( uart_tx.clk_div * 2 ):
yield Tick()
# Setup the TX module to send 0x42.
yield uart_tx.tx_buf.eq( 0x42 )
yield uart_tx.tx_new.eq( 1 )
# Simulate sending the byte.
yield from uart_tx_byte( uart_tx )
# Wait a cycle to see the end state.
for i in range( uart_tx.clk_div ):
yield Tick()
# Create a UART module and run tests on it.
# (The baud rate is set to a high value to speed up the simulation.)
if __name__ == "__main__":
#uart_rx = UART_RX( 24000000, 9600 )
#uart_tx = UART_TX( 24000000, 9600 )
uart_rx = UART_RX( 24000000, 1000000 )
uart_tx = UART_TX( 24000000, 1000000 )
uart = UART( uart_rx, uart_tx )
# Run the UART tests.
with Simulator( uart, vcd_file = open( 'test.vcd', 'w' ) ) as sim:
def proc_rx():
yield from uart_rx_test( uart.uart_rx )
def proc_tx():
yield from uart_tx_test( uart.uart_tx )
# Run the UART test with a 24MHz clock.
sim.add_clock( 24e-6 )
sim.add_sync_process( proc_rx )
sim.add_sync_process( proc_tx )
sim.run()
| 34.820189
| 74
| 0.603642
| 6,562
| 0.594492
| 2,395
| 0.216978
| 0
| 0
| 0
| 0
| 4,661
| 0.422269
|
184359b6c6261d67915a09440ec8b6d1a0cc0927
| 5,853
|
py
|
Python
|
edk2basetools/FMMT/core/GuidTools.py
|
YuweiChen1110/edk2-basetools
|
cfd05c928492b7ffd1329634cfcb089db995eeca
|
[
"BSD-2-Clause-Patent"
] | null | null | null |
edk2basetools/FMMT/core/GuidTools.py
|
YuweiChen1110/edk2-basetools
|
cfd05c928492b7ffd1329634cfcb089db995eeca
|
[
"BSD-2-Clause-Patent"
] | null | null | null |
edk2basetools/FMMT/core/GuidTools.py
|
YuweiChen1110/edk2-basetools
|
cfd05c928492b7ffd1329634cfcb089db995eeca
|
[
"BSD-2-Clause-Patent"
] | null | null | null |
## @file
# This file is used to define the FMMT dependent external tool management class.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import glob
import logging
import os
import shutil
import sys
import tempfile
import uuid
from edk2basetools.FMMT.PI.Common import *
from edk2basetools.FMMT.utils.FmmtLogger import FmmtLogger as logger
import subprocess
def ExecuteCommand(cmd: list) -> None:
subprocess.run(cmd,stdout=subprocess.DEVNULL)
class GUIDTool:
def __init__(self, guid: str, short_name: str, command: str) -> None:
self.guid: str = guid
self.short_name: str = short_name
self.command: str = command
def pack(self, buffer: bytes) -> bytes:
"""
compress file.
"""
tool = self.command
if tool:
tmp = tempfile.mkdtemp(dir=os.environ.get('tmp'))
ToolInputFile = os.path.join(tmp, "pack_uncompress_sec_file")
ToolOuputFile = os.path.join(tmp, "pack_sec_file")
try:
file = open(ToolInputFile, "wb")
file.write(buffer)
file.close()
command = [tool, '-e', '-o', ToolOuputFile,
ToolInputFile]
ExecuteCommand(command)
buf = open(ToolOuputFile, "rb")
res_buffer = buf.read()
except Exception as msg:
logger.error(msg)
return ""
else:
buf.close()
if os.path.exists(tmp):
shutil.rmtree(tmp)
return res_buffer
else:
logger.error(
"Error parsing section: EFI_SECTION_GUID_DEFINED cannot be parsed at this time.")
logger.info("Its GUID is: %s" % self.guid)
return ""
def unpack(self, buffer: bytes) -> bytes:
"""
buffer: remove common header
uncompress file
"""
tool = self.command
if tool:
tmp = tempfile.mkdtemp(dir=os.environ.get('tmp'))
ToolInputFile = os.path.join(tmp, "unpack_sec_file")
ToolOuputFile = os.path.join(tmp, "unpack_uncompress_sec_file")
try:
file = open(ToolInputFile, "wb")
file.write(buffer)
file.close()
command = [tool, '-d', '-o', ToolOuputFile, ToolInputFile]
ExecuteCommand(command)
buf = open(ToolOuputFile, "rb")
res_buffer = buf.read()
except Exception as msg:
logger.error(msg)
return ""
else:
buf.close()
if os.path.exists(tmp):
shutil.rmtree(tmp)
return res_buffer
else:
logger.error("Error parsing section: EFI_SECTION_GUID_DEFINED cannot be parsed at this time.")
logger.info("Its GUID is: %s" % self.guid)
return ""
class GUIDTools:
'''
    GUIDTools is responsible for reading FMMTConfig.ini, verifying the tools, and providing interfaces to access those tools.
'''
default_tools = {
struct2stream(ModifyGuidFormat("a31280ad-481e-41b6-95e8-127f4c984779")): GUIDTool("a31280ad-481e-41b6-95e8-127f4c984779", "TIANO", "TianoCompress"),
struct2stream(ModifyGuidFormat("ee4e5898-3914-4259-9d6e-dc7bd79403cf")): GUIDTool("ee4e5898-3914-4259-9d6e-dc7bd79403cf", "LZMA", "LzmaCompress"),
struct2stream(ModifyGuidFormat("fc1bcdb0-7d31-49aa-936a-a4600d9dd083")): GUIDTool("fc1bcdb0-7d31-49aa-936a-a4600d9dd083", "CRC32", "GenCrc32"),
struct2stream(ModifyGuidFormat("d42ae6bd-1352-4bfb-909a-ca72a6eae889")): GUIDTool("d42ae6bd-1352-4bfb-909a-ca72a6eae889", "LZMAF86", "LzmaF86Compress"),
struct2stream(ModifyGuidFormat("3d532050-5cda-4fd0-879e-0f7f630d5afb")): GUIDTool("3d532050-5cda-4fd0-879e-0f7f630d5afb", "BROTLI", "BrotliCompress"),
}
def __init__(self, tooldef_file: str=None) -> None:
self.dir = os.path.dirname(__file__)
self.tooldef_file = tooldef_file if tooldef_file else os.path.join(
self.dir, "FMMTConfig.ini")
self.tooldef = dict()
self.load()
def VerifyTools(self) -> None:
"""
Verify Tools and Update Tools path.
"""
path_env = os.environ.get("PATH")
path_env_list = path_env.split(os.pathsep)
path_env_list.append(os.path.dirname(__file__))
path_env_list = list(set(path_env_list))
for tool in self.tooldef.values():
cmd = tool.command
if os.path.isabs(cmd):
if not os.path.exists(cmd):
print("Tool Not found %s" % cmd)
else:
for syspath in path_env_list:
if glob.glob(os.path.join(syspath, cmd+"*")):
break
else:
print("Tool Not found %s" % cmd)
def load(self) -> None:
if os.path.exists(self.tooldef_file):
with open(self.tooldef_file, "r") as fd:
config_data = fd.readlines()
for line in config_data:
try:
guid, short_name, command = line.split()
new_format_guid = struct2stream(ModifyGuidFormat(guid.strip()))
self.tooldef[new_format_guid] = GUIDTool(
guid.strip(), short_name.strip(), command.strip())
except:
print("GuidTool load error!")
continue
else:
self.tooldef.update(self.default_tools)
self.VerifyTools()
def __getitem__(self, guid) -> None:
return self.tooldef.get(guid)
guidtools = GUIDTools()
| 38.254902
| 160
| 0.571843
| 5,304
| 0.906202
| 0
| 0
| 0
| 0
| 0
| 0
| 1,414
| 0.241586
|
18444ea5a0cd3e04e2706a71502de539bb9fa0dc
| 1,709
|
py
|
Python
|
python/tests/test_tree_intersection.py
|
Yonatan1P/data-structures-and-algorithms
|
ddd647d52a3182ca01032bfdb72f94ea22a0e76b
|
[
"MIT"
] | 1
|
2020-12-16T22:38:12.000Z
|
2020-12-16T22:38:12.000Z
|
python/tests/test_tree_intersection.py
|
Yonatan1P/data-structures-and-algorithms
|
ddd647d52a3182ca01032bfdb72f94ea22a0e76b
|
[
"MIT"
] | 1
|
2020-11-14T05:37:48.000Z
|
2020-11-14T05:37:48.000Z
|
python/tests/test_tree_intersection.py
|
Yonatan1P/data-structures-and-algorithms
|
ddd647d52a3182ca01032bfdb72f94ea22a0e76b
|
[
"MIT"
] | null | null | null |
from challenges.tree_intersection.tree_intersection import find_intersection
from challenges.tree.tree import BinarySearchTree
def test_find_intersection():
tree1 = BinarySearchTree()
tree1.add(1)
tree1.add(2)
tree1.add(3)
tree1.add(4)
tree1.add(5)
tree1.add(6)
tree1.add(7)
tree1.add(8)
tree2 = BinarySearchTree()
tree2.add(12)
tree2.add(12)
tree2.add(13)
tree2.add(14)
tree2.add(15)
tree2.add(16)
tree2.add(7)
tree2.add(8)
actual = find_intersection(tree1, tree2)
expected = [7,8]
assert actual == expected
def test_empty_binary_tree():
tree1 = BinarySearchTree()
tree1.add(1)
tree1.add(2)
tree1.add(3)
tree1.add(4)
tree1.add(5)
tree1.add(6)
tree1.add(7)
tree1.add(8)
tree2 = BinarySearchTree()
actual = find_intersection(tree1, tree2)
expected = []
assert actual == expected
def test_first_empty_binary_tree():
tree2 = BinarySearchTree()
tree2.add(1)
tree2.add(2)
tree2.add(3)
tree2.add(4)
tree2.add(5)
tree2.add(6)
tree2.add(7)
tree2.add(8)
tree1 = BinarySearchTree()
actual = find_intersection(tree1, tree2)
expected = []
assert actual == expected
def test_same_tree():
tree1 = BinarySearchTree()
tree1.add(1)
tree1.add(2)
tree1.add(3)
tree1.add(4)
tree1.add(5)
tree1.add(6)
tree1.add(7)
tree1.add(8)
tree2 = BinarySearchTree()
tree2.add(1)
tree2.add(2)
tree2.add(3)
tree2.add(4)
tree2.add(5)
tree2.add(6)
tree2.add(7)
tree2.add(8)
actual = find_intersection(tree1, tree2)
expected = [1,2,3,4,5,6,7,8]
assert actual == expected
| 21.632911
| 76
| 0.627853
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1847f0a48843e1e83cb2f45be72c476d66e2ca39
| 562
|
py
|
Python
|
setup.py
|
rif/imgdup
|
fe59c6b4b8c06699d48f887bc7a90acea48aa8f2
|
[
"MIT"
] | 14
|
2016-02-10T04:53:42.000Z
|
2021-08-08T17:39:55.000Z
|
setup.py
|
rif/imgdup
|
fe59c6b4b8c06699d48f887bc7a90acea48aa8f2
|
[
"MIT"
] | null | null | null |
setup.py
|
rif/imgdup
|
fe59c6b4b8c06699d48f887bc7a90acea48aa8f2
|
[
"MIT"
] | 2
|
2017-11-01T14:02:46.000Z
|
2019-02-20T10:55:52.000Z
|
from setuptools import setup, find_packages
setup(
name = "imgdup",
version = "1.3",
packages = find_packages(),
scripts = ['imgdup.py'],
install_requires = ['pillow>=2.8.1'],
# metadata for upload to PyPI
author = "Radu Ioan Fericean",
author_email = "radu@fericean.ro",
description = "Visual similarity image finder and cleaner (image deduplication tool)",
license = "MIT",
keywords = "deduplication duplicate images image visual finder",
url = "https://github.com/rif/imgdup", # project home page, if any
)
| 31.222222
| 90
| 0.663701
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 292
| 0.519573
|
184a025720245d69fec4505befed933cb56ea1a7
| 178
|
py
|
Python
|
exercicio13.py
|
LuizHps18/infosatc-lp-avaliativo-01
|
0b891d74a98705182175a53e023b6cbbe8cc880a
|
[
"MIT"
] | null | null | null |
exercicio13.py
|
LuizHps18/infosatc-lp-avaliativo-01
|
0b891d74a98705182175a53e023b6cbbe8cc880a
|
[
"MIT"
] | null | null | null |
exercicio13.py
|
LuizHps18/infosatc-lp-avaliativo-01
|
0b891d74a98705182175a53e023b6cbbe8cc880a
|
[
"MIT"
] | null | null | null |
k = float(input("Digite uma distância em quilometros: "))
m = k / 1.61
print("A distância digitada é de {} quilometros, essa distância convertida é {:.2f} milhas" .format(k,m))
| 35.6
| 105
| 0.696629
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 129
| 0.704918
|
184a7377a4969ebcc47ccb33cd2b9fb82e77a11d
| 660
|
py
|
Python
|
rcs/wiki/urls.py
|
ShuffleBox/django-rcsfield
|
dd8b5b22635bcdae9825e00b65887bb51171e76f
|
[
"BSD-3-Clause"
] | null | null | null |
rcs/wiki/urls.py
|
ShuffleBox/django-rcsfield
|
dd8b5b22635bcdae9825e00b65887bb51171e76f
|
[
"BSD-3-Clause"
] | null | null | null |
rcs/wiki/urls.py
|
ShuffleBox/django-rcsfield
|
dd8b5b22635bcdae9825e00b65887bb51171e76f
|
[
"BSD-3-Clause"
] | null | null | null |
from django.conf.urls.defaults import *
urlpatterns = patterns('rcs.wiki.views',
url(r'^((?:[A-Z]+[a-z]+){2,})/$', 'page', {}, name="wiki_page"),
url(r'^((?:[A-Z]+[a-z]+){2,})/edit/$', 'edit', {}, name="wiki_edit"),
url(r'^((?:[A-Z]+[a-z]+){2,})/attachments/$', 'attachments', {}, name="wiki_attachments"),
url(r'^((?:[A-Z]+[a-z]+){2,})/rev/([a-f0-9]+)/$', 'revision', {}, name="wiki_revision"),
url(r'^((?:[A-Z]+[a-z]+){2,})/diff/([\w]+)/([\w]+)/$', 'diff', {}, name="wiki_diff"),
url(r'^list/$', 'list', {}, name="wiki_list"),
url(r'^recent/$', 'recent', {}, name="wiki_recent"),
url(r'^$', 'index', {}, name="wiki_index"),
)
| 55
| 94
| 0.487879
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 401
| 0.607576
|
184a8a8a53eaf08a2a13054389bb04e1b3d15e28
| 3,359
|
py
|
Python
|
sample 1/main.py
|
RezaFirouzii/multi-choice_correction_opencv
|
31c777d6714216e0811947a1ceadc893c2c1d7c0
|
[
"MIT"
] | 1
|
2022-03-04T15:55:20.000Z
|
2022-03-04T15:55:20.000Z
|
sample 1/main.py
|
RezaFirouzii/multi-choice_correction_opencv
|
31c777d6714216e0811947a1ceadc893c2c1d7c0
|
[
"MIT"
] | null | null | null |
sample 1/main.py
|
RezaFirouzii/multi-choice_correction_opencv
|
31c777d6714216e0811947a1ceadc893c2c1d7c0
|
[
"MIT"
] | null | null | null |
import cv2 as cv
import numpy as np
import pandas as pd
import heapq
def sort_contours_horizontally(contours):
    # order contours left to right by their smallest x coordinate
    dic = {}
    for contour in contours:
        x_points = contour[:, :, 0]
        dic[min(x_points)[0]] = contour
    dic = dict(sorted(dic.items()))
    return dic.values()
def sort_contours(contours):
    # group contours into columns keyed by minimum x (keys merged within +/- error pixels),
    # sort each column top to bottom by minimum y, then flatten column by column
    dic = {}
    error = 5
    for contour in contours:
        x_points = contour[:, :, 0]
        y_points = contour[:, :, 1]
        key = min(x_points)[0]
        min_key = list(filter(lambda x: -error < x - key < error, dic.keys()))
        if min_key:
            key = min_key[0]
            dic[key].append((min(y_points)[0], contour))
        else:
            dic[key] = [(min(y_points)[0], contour)]
    dic = dict(sorted(dic.items()))
    for key in dic.keys():
        dic[key] = list(sorted(dic[key]))
    return [tup[1] for sublist in dic.values() for tup in sublist]
def is_valid_test(tests):
    # multiple answers (or none) => WRONG; exactly one choice with fill ratio > 0.7 is valid
    answers = list(filter(lambda x: x > 0.7, tests))
    if len(answers) != 1:
        return False
    return True
if __name__ == "__main__":
img = cv.imread('sample1_2.jpg')
cop = img.copy()
img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
img = cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 10)
kernel = cv.getStructuringElement(cv.MORPH_RECT, (4, 1))
img = cv.morphologyEx(img, cv.MORPH_CLOSE, kernel)
contours, hierarchy = cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)
contours = list(filter(lambda x: 300 < cv.contourArea(x) < 450, contours))
contours = sort_contours(contours)
answers = []
for i, contour in enumerate(contours):
x, y, w, h = cv.boundingRect(contour)
roi = cv.cvtColor(cop[y: y+h, x: x + w], cv.COLOR_BGR2GRAY)
roi_cop = roi.copy()
roi = cv.adaptiveThreshold(roi, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 10)
kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (1, 3))
roi = cv.morphologyEx(roi, cv.MORPH_CLOSE, kernel)
cnts, hierarchy = cv.findContours(roi, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_NONE)
cnts = list(filter(cv.contourArea, cnts))
cnts = sort_contours_horizontally(cnts)
tests = list(map(cv.boundingRect, cnts))
coord = [(x, y)]
for j, test in enumerate(tests): # each test is a contour
coord.append(test)
x, y, w, h = test
area = w * h
filled_area = np.count_nonzero(roi[y: y+h, x: x+w])
tests[j] = filled_area / area
if is_valid_test(tests):
choice = tests.index(max(tests)) + 1
answers.append(choice)
X, Y = coord[0]
x, y, w, h = coord[choice]
pt1 = (X + x, Y + y)
pt2 = (X + x + w, Y + y + h)
cv.rectangle(cop, pt1, pt2, (0, 255, 0), 2)
else:
answers.append(-1)
for i in range(len(answers)):
print(i + 1, ":", answers[i])
data = {
"Q": [i for i in range(1, len(answers) + 1)],
"A": answers
}
data = pd.DataFrame(data)
data.to_excel('./sample1.xlsx', 'Answer Sheet 1')
cv.imwrite('output.jpg', cop)
cv.imshow('Detected Choices', cop)
cv.waitKey()
| 28.709402
| 101
| 0.567133
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 144
| 0.04287
|
184b18ea17717fde23e6a6b62fed9b2b61f17cb3
| 704
|
py
|
Python
|
a-practical-introduction-to-python-programming-brian-heinold/chapter-08/exercise-07.py
|
elarabyelaidy19/awesome-reading
|
5c01a4272ba58e4f7ea665aab14b4c0aa252ea89
|
[
"MIT"
] | 31
|
2021-11-02T19:51:13.000Z
|
2022-02-17T10:55:26.000Z
|
a-practical-introduction-to-python-programming-brian-heinold/chapter-08/exercise-07.py
|
MosTafaHoSamm/awesome-reading
|
469408fefc049d78ed53a2b2331b5d5cecdc6c06
|
[
"MIT"
] | 1
|
2022-01-18T12:27:54.000Z
|
2022-01-18T12:27:54.000Z
|
a-practical-introduction-to-python-programming-brian-heinold/chapter-08/exercise-07.py
|
MosTafaHoSamm/awesome-reading
|
469408fefc049d78ed53a2b2331b5d5cecdc6c06
|
[
"MIT"
] | 3
|
2022-01-11T05:01:34.000Z
|
2022-02-05T14:36:29.000Z
|
# 7. Write a program that estimates the average number of drawings it takes before the user’s
# numbers are picked in a lottery that consists of correctly picking six different numbers that
# are between 1 and 10. To do this, run a loop 1000 times that randomly generates a set of
# user numbers and simulates drawings until the user’s numbers are drawn. Find the average
# number of drawings needed over the 1000 times the loop runs.
import random
num_trials = 1000
total_drawings = 0
for i in range(num_trials):
    # the user picks six different numbers between 1 and 10
    user_numbers = set(random.sample(range(1, 11), 6))
    # simulate drawings until the user's numbers come up
    drawings = 0
    while True:
        drawings += 1
        draw = set(random.sample(range(1, 11), 6))
        if draw == user_numbers:
            break
    total_drawings += drawings
print('Average number of drawings:', round(total_drawings / num_trials, 4))
| 37.052632
| 95
| 0.728693
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 463
| 0.653955
|
184bf76e800fcea4dae223c4ac96db64613fb1ae
| 709
|
py
|
Python
|
humfrey/update/utils.py
|
ox-it/humfrey
|
c92e46a24a9bf28aa9638a612f166d209315e76b
|
[
"BSD-3-Clause"
] | 6
|
2015-01-09T15:53:07.000Z
|
2020-02-13T14:00:53.000Z
|
humfrey/update/utils.py
|
ox-it/humfrey
|
c92e46a24a9bf28aa9638a612f166d209315e76b
|
[
"BSD-3-Clause"
] | null | null | null |
humfrey/update/utils.py
|
ox-it/humfrey
|
c92e46a24a9bf28aa9638a612f166d209315e76b
|
[
"BSD-3-Clause"
] | 1
|
2017-05-12T20:46:15.000Z
|
2017-05-12T20:46:15.000Z
|
from django.conf import settings
from django.utils.importlib import import_module
from humfrey.update.transform.base import Transform
def get_transforms():
try:
return get_transforms._cache
except AttributeError:
pass
transforms = {'__builtins__': {}}
for class_path in settings.UPDATE_TRANSFORMS:
module_path, class_name = class_path.rsplit('.', 1)
transform = getattr(import_module(module_path), class_name)
assert issubclass(transform, Transform)
transforms[transform.__name__] = transform
get_transforms._cache = transforms
return transforms
def evaluate_pipeline(pipeline):
return eval('(%s)' % pipeline, get_transforms())
| 29.541667
| 67
| 0.723554
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 23
| 0.03244
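A short aside on the module above: get_transforms builds (and caches) a restricted eval namespace from the UPDATE_TRANSFORMS setting, and evaluate_pipeline evaluates a stored pipeline expression inside it. The self-contained sketch below mirrors that mechanism with a stand-in class; UpperCase and the pipeline string are invented for illustration and are not part of humfrey.
class UpperCase:
    # stand-in for a Transform subclass; illustration only
    def __init__(self, suffix=""):
        self.suffix = suffix
    def __call__(self, text):
        return text.upper() + self.suffix
namespace = {"__builtins__": {}, "UpperCase": UpperCase}   # same shape as get_transforms() returns
pipeline = "UpperCase('!')"                                # pipeline strings are ordinary Python expressions
transform = eval("(%s)" % pipeline, namespace)             # what evaluate_pipeline does with its namespace
print(transform("hello"))                                  # -> HELLO!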
|
184dce967a4de0cb71723aecd6ec63f6783befa6
| 2,448
|
py
|
Python
|
flask/model/device_model.py
|
Dev-Jahn/cms
|
84ea115bdb865daff83d069502f6f0dd105fc4f0
|
[
"RSA-MD"
] | null | null | null |
flask/model/device_model.py
|
Dev-Jahn/cms
|
84ea115bdb865daff83d069502f6f0dd105fc4f0
|
[
"RSA-MD"
] | 9
|
2021-01-05T07:48:28.000Z
|
2021-05-14T06:38:27.000Z
|
flask/model/device_model.py
|
Dev-Jahn/cms
|
84ea115bdb865daff83d069502f6f0dd105fc4f0
|
[
"RSA-MD"
] | 4
|
2021-01-05T06:46:09.000Z
|
2021-05-06T01:44:28.000Z
|
from .db_base import db, env
class Device(db.Model):
__tablename__ = 'device'
id = db.Column(db.Integer, primary_key=True)
if env == 'development':
model = db.Column(db.String(16, 'utf8mb4_unicode_ci'))
serial = db.Column(db.String(20, 'utf8mb4_unicode_ci'))
company_id = db.Column(db.ForeignKey('company.id', onupdate='CASCADE'), index=True)
owner_id = db.Column(db.ForeignKey('user.id', onupdate='CASCADE'), index=True)
ip = db.Column(db.String(15, 'utf8mb4_unicode_ci'), server_default=db.FetchedValue())
else:
model = db.Column(db.String(16, 'utf8mb4_unicode_ci'), nullable=False)
serial = db.Column(db.String(20, 'utf8mb4_unicode_ci'), nullable=False, unique=True)
company_id = db.Column(db.ForeignKey('company.id', onupdate='CASCADE'), nullable=False, index=True)
owner_id = db.Column(db.ForeignKey('user.id', onupdate='CASCADE'), nullable=False, index=True)
ip = db.Column(db.String(15, 'utf8mb4_unicode_ci'), nullable=False, unique=True,
server_default=db.FetchedValue())
created = db.Column(db.DateTime)
created_by_id = db.Column(db.ForeignKey('user.id', onupdate='CASCADE'), index=True)
last_edited = db.Column(db.DateTime)
edited_by_id = db.Column(db.ForeignKey('user.id', onupdate='CASCADE'), index=True)
is_deleted = db.Column(db.Integer)
company = db.relationship('Company', primaryjoin='Device.company_id == Company.id',
backref='devices')
created_by = db.relationship('User', primaryjoin='Device.created_by_id == User.id',
backref='created_devices')
edited_by = db.relationship('User', primaryjoin='Device.edited_by_id == User.id', backref='edited_devices')
owner = db.relationship('User', primaryjoin='Device.owner_id == User.id', backref='owned_devices')
def __repr__(self):
return f'<Device {self.model} | {self.serial}>'
def to_dict(self):
return dict(
id=self.id,
model=self.model,
serial=self.serial,
company=self.company,
owner=self.owner,
ip=self.ip,
created=self.created,
created_by=self.created_by,
last_edited=self.last_edited,
edited_by=self.edited_by,
is_deleted=self.is_deleted
)
| 48
| 112
| 0.624183
| 2,412
| 0.985294
| 0
| 0
| 0
| 0
| 0
| 0
| 505
| 0.206291
|
184e8888d3aeff144a6fa7390d4e574c4fcd9c17
| 18,542
|
py
|
Python
|
pytests/tuqquery/tuq_tokens.py
|
ramalingam-cb/testrunner
|
81cea7a5a493cf0c67fca7f97c667cd3c6ad2142
|
[
"Apache-2.0"
] | null | null | null |
pytests/tuqquery/tuq_tokens.py
|
ramalingam-cb/testrunner
|
81cea7a5a493cf0c67fca7f97c667cd3c6ad2142
|
[
"Apache-2.0"
] | null | null | null |
pytests/tuqquery/tuq_tokens.py
|
ramalingam-cb/testrunner
|
81cea7a5a493cf0c67fca7f97c667cd3c6ad2142
|
[
"Apache-2.0"
] | null | null | null |
from lib.remote.remote_util import RemoteMachineShellConnection
from pytests.tuqquery.tuq import QueryTests
class TokenTests(QueryTests):
def setUp(self):
if not self._testMethodName == 'suite_setUp':
self.skip_buckets_handle = True
super(TokenTests, self).setUp()
self.n1ql_port = self.input.param("n1ql_port", 8093)
self.scan_consistency = self.input.param("scan_consistency", 'REQUEST_PLUS')
def tearDown(self):
server = self.master
shell = RemoteMachineShellConnection(server)
# shell.execute_command("""curl -X DELETE -u Administrator:password http://{0}:8091/pools/default/buckets/beer-sample""".format(server.ip))
self.sleep(20)
super(TokenTests, self).tearDown()
def test_tokens_secondary_indexes(self):
self.rest.load_sample("beer-sample")
self.sleep(20)
created_indexes = []
self.query = 'create primary index on `beer-sample`'
self.run_cbq_query()
self.query = 'create index idx1 on `beer-sample`(description,name )'
self.run_cbq_query()
self.query = 'create index idx2 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx3 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"lower","names":true,"specials":false}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx4 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"upper","names":false,"specials":true}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx5 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"upper","names":false}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx6 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"upper"}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx7 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx8 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"":""}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx9 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"random"}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx10 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"names":"random"}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx11 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"specials":"random"}) END ,description,name )'
self.run_cbq_query()
self.query = 'create index idx12 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description) END )'
self.run_cbq_query()
self.query = 'create index idx13 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"lower"}) END )'
self.run_cbq_query()
self.query = 'create index idx14 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"upper"}) END )'
self.run_cbq_query()
self.query = 'create index idx15 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"lower","names":true,"specials":false}) END )'
self.run_cbq_query()
self.query = 'create index idx16 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"upper","names":false,"specials":true}) END )'
self.run_cbq_query()
self.query = 'create index idx17 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"upper","names":false}) END )'
self.run_cbq_query()
self.query = 'create index idx18 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{}) END )'
self.run_cbq_query()
self.query = 'create index idx19 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"":""}) END )'
self.run_cbq_query()
self.query = 'create index idx20 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"case":"random"}) END )'
self.run_cbq_query()
self.query = 'create index idx21 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"names":"random"}) END )'
self.run_cbq_query()
self.query = 'create index idx22 on `beer-sample`( DISTINCT ARRAY v FOR v in tokens(description,{"specials":"random"}) END )'
self.run_cbq_query()
for i in xrange(1,22):
index = 'idx{0}'.format(i)
created_indexes.append(index)
self.query = 'explain select name from `beer-sample` where any v in tokens(description) satisfies v = "golden" END limit 10'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue(actual_result['results'])
self.assertTrue("covers" in str(plan))
self.assertTrue(plan['~children'][0]['~children'][0]['scan']['index'] == "idx2")
self.assertTrue(str(plan['~children'][0]['~children'][0]['scan']['covers'][0]) == ('cover ((distinct (array `v` for `v` in tokens((`beer-sample`.`description`)) end)))'))
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(reverse(description)) satisfies v = "nedlog" END order by meta().id limit 10'
expected_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` where any v in tokens(reverse(description)) satisfies v = "nedlog" END order by meta().id limit 10'
actual_result = self.run_cbq_query()
#self.assertTrue(str(actual_result['results'])=="[{u'name': u'21A IPA'}, {u'name': u'Amendment Pale Ale'}, {u'name': u'Double Trouble IPA'}, {u'name': u'South Park Blonde'}, {u'name': u'Restoration Pale Ale'}, {u'name': u'S.O.S'}, {u'name': u'Satsuma Harvest Wit'}, {u'name': u'Adnams Explorer'}, {u'name': u'Shock Top'}, {u'name': u'Anniversary Maibock'}]" )
self.assertTrue((actual_result['results'])== (expected_result['results']))
self.query = 'explain select name from `beer-sample` where any v in tokens(description,{"case":"lower","names":true,"specials":false}) satisfies v = "brewery" END limit 10'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(str(plan['~children'][0]['~children'][0]['scan']['covers'][0]) == ('cover ((distinct (array `v` for `v` in tokens((`beer-sample`.`description`), {"case": "lower", "names": true, "specials": false}) end)))'))
self.assertTrue(plan['~children'][0]['~children'][0]['scan']['index'] == "idx3")
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(description,{"case":"lower","names":true,"specials":false}) satisfies v = "brewery" END order by meta().id limit 10'
expected_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` use index(`idx15`) where any v in tokens(description,{"case":"lower","names":true,"specials":false}) satisfies v = "brewery" END order by meta().id limit 10'
actual_result = self.run_cbq_query()
self.assertTrue((actual_result['results'])== (expected_result['results']) )
self.query = 'explain select name from `beer-sample` use index(`idx14`) where any v in tokens(description,{"case":"upper","names":false,"specials":true}) satisfies v = "BREWERY" END order by meta().id limit 10'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(str(plan['~children'][0]['~children'][0]['scan']['covers'][0]) == ('cover ((distinct (array `v` for `v` in tokens((`beer-sample`.`description`), {"case": "upper", "names": false, "specials": true}) end)))'))
self.assertTrue(str(plan['~children'][0]['~children'][0]['scan']['index']) == "idx4")
self.query = 'select name from `beer-sample` use index(`idx16`) where any v in tokens(description,{"case":"upper","names":false,"specials":true}) satisfies v = "BREWERY" END order by meta().id limit 10'
actual_result = self.run_cbq_query()
self.assertTrue((actual_result['results'])== (expected_result['results']))
self.query = 'explain select name from `beer-sample` where any v in tokens(description,{"case":"upper","names":false}) satisfies v = "GOLDEN" END limit 10'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(plan['~children'][0]['~children'][0]['scan']['index'] == "idx5")
self.query = 'select name from `beer-sample` use index(`idx17`) where any v in tokens(description,{"case":"upper","names":false}) satisfies v = "GOLDEN" END limit 10'
actual_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(description,{"case":"upper","names":false}) satisfies v = "GOLDEN" END limit 10'
expected_result = self.run_cbq_query()
self.assertTrue(actual_result['results']==expected_result['results'])
self.query = 'explain select name from `beer-sample` where any v in tokens(description,{}) satisfies v = "golden" END limit 10'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(plan['~children'][0]['~children'][0]['scan']['index'] == "idx7")
self.query = 'select name from `beer-sample` use index(`idx18`) where any v in tokens(description,{}) satisfies v = "golden" END limit 10'
actual_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(description,{}) satisfies v = "golden" END limit 10'
expected_result = self.run_cbq_query()
self.assertTrue(actual_result['results']==expected_result['results'])
self.query = 'explain select name from `beer-sample` where any v in tokens(description,{"":""}) satisfies v = "golden" END limit 10'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(plan['~children'][0]['~children'][0]['scan']['index'] == "idx8")
self.query = 'select name from `beer-sample` use index(`idx19`) where any v in tokens(description,{"":""}) satisfies v = "golden" END order by name '
actual_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(description,{"":""}) satisfies v = "golden" END order by name '
expected_result = self.run_cbq_query()
self.assertTrue(actual_result['results']==expected_result['results'])
self.query = 'explain select name from `beer-sample` where any v in tokens(description,{"case":"random"}) satisfies v = "golden" END '
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(plan['~children'][0]['scan']['index'] == "idx9")
self.query = 'select name from `beer-sample` use index(`idx20`) where any v in tokens(description,{"case":"random"}) satisfies v = "golden" END order by name '
actual_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(description,{"case":"random"}) satisfies v = "golden" END order by name '
expected_result = self.run_cbq_query()
self.assertTrue(actual_result['results']==expected_result['results'])
self.query = 'explain select name from `beer-sample` where any v in tokens(description,{"specials":"random"}) satisfies v = "brewery" END order by name'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(plan['~children'][0]['~children'][0]['scan']['index'] == "idx11")
self.query = 'select name from `beer-sample` use index(`idx22`) where any v in tokens(description,{"specials":"random"}) satisfies v = "golden" END order by name'
actual_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(description,{"specials":"random"}) satisfies v = "golden" END order by name'
expected_result = self.run_cbq_query()
self.assertTrue(actual_result['results']==expected_result['results'])
self.query = 'explain select name from `beer-sample` where any v in tokens(description,{"names":"random"}) satisfies v = "brewery" END limit 10'
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("covers" in str(plan))
self.assertTrue(plan['~children'][0]['~children'][0]['scan']['index'] == "idx10")
self.query = 'select name from `beer-sample` use index(`idx21`) where any v in tokens(description,{"names":"random"}) satisfies v = "golden" END limit 10'
actual_result = self.run_cbq_query()
self.query = 'select name from `beer-sample` use index(`#primary`) where any v in tokens(description,{"names":"random"}) satisfies v = "golden" END limit 10'
expected_result = self.run_cbq_query()
self.assertTrue(actual_result['results']==expected_result['results'])
for idx in created_indexes:
self.query = "DROP INDEX %s.%s USING %s" % ("`beer-sample`", idx, self.index_type)
actual_result = self.run_cbq_query()
'''This test is specific to beer-sample bucket'''
def test_tokens_simple_syntax(self):
self.rest.load_sample("beer-sample")
bucket_doc_map = {"beer-sample": 7303}
bucket_status_map = {"beer-sample": "healthy"}
self.wait_for_buckets_status(bucket_status_map, 5, 120)
self.wait_for_bucket_docs(bucket_doc_map, 5, 120)
self._wait_for_index_online("beer-sample", "beer_primary")
self.sleep(10)
created_indexes = []
try:
idx1 = "idx_suffixes"
idx2 = "idx_tokens"
idx3 = "idx_pairs"
idx4 = "idx_addresses"
self.query = 'CREATE INDEX {0} ON `beer-sample`( DISTINCT SUFFIXES( name ) )'.format(idx1)
self.run_cbq_query()
self._wait_for_index_online("beer-sample", "beer_primary")
created_indexes.append(idx1)
self.query = "explain select * from `beer-sample` where name like '%Cafe%'"
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertEqual(plan['~children'][0]['scan']['index'], idx1)
self.query = 'CREATE INDEX {0} ON `beer-sample`( DISTINCT TOKENS( description ) )'.format(idx2)
self.run_cbq_query()
self._wait_for_index_online("beer-sample", "beer_primary")
created_indexes.append(idx2)
self.query = "explain select * from `beer-sample` where contains_token(description,'Great')"
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertEqual(plan['~children'][0]['scan']['index'], idx2)
self.query = "CREATE INDEX {0} ON `beer-sample`( DISTINCT PAIRS( SELF ) )".format(idx3)
self.run_cbq_query()
self._wait_for_index_online("beer-sample", "beer_primary")
created_indexes.append(idx3)
self.query = "explain select * from `beer-sample` where name like 'A%' and abv > 6"
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue("idx_suffixes" in str(plan))
self.query = "CREATE INDEX {0} ON `beer-sample`( ALL address )".format(idx4)
self.run_cbq_query()
self._wait_for_index_online("beer-sample", "beer_primary")
created_indexes.append(idx4)
self.query = "explain select min(addr) from `beer-sample` unnest address as addr"
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertEqual(plan['~children'][0]['index'], idx4)
self.query = "explain select count(a) from `beer-sample` unnest address as a"
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertEqual(plan['~children'][0]['index'], idx4)
self.query = "explain select * from `beer-sample` where any place in address satisfies " \
"place LIKE '100 %' end"
actual_result = self.run_cbq_query()
plan = self.ExplainPlanHelper(actual_result)
self.assertTrue(idx4 in str(plan))
self.assertTrue(idx3 in str(plan))
finally:
for idx in created_indexes:
self.query = "DROP INDEX `beer-sample`.%s" % (idx)
self.run_cbq_query()
self.rest.delete_bucket("beer-sample")
def test_dynamicindex_limit(self):
self.rest.load_sample("beer-sample")
self.sleep(20)
created_indexes = []
try:
idx1 = "idx_abv"
idx2 = "dynamic"
self.query = "CREATE INDEX idx_abv ON `beer-sample`( abv )"
self.run_cbq_query()
created_indexes.append(idx1)
self.query = "CREATE INDEX dynamic ON `beer-sample`( DISTINCT PAIRS( SELF ) )"
self.run_cbq_query()
created_indexes.append(idx2)
self.query = "Explain select * from `beer-sample` where abv > 5 LIMIT 10"
res = self.run_cbq_query()
plan = self.ExplainPlanHelper(res)
self.assertTrue(plan['~children'][0]['~children'][0]['limit']=='10')
finally:
for idx in created_indexes:
self.query = "DROP INDEX `beer-sample`.%s" % ( idx)
self.run_cbq_query()
| 65.059649
| 367
| 0.642595
| 18,432
| 0.994068
| 0
| 0
| 0
| 0
| 0
| 0
| 9,495
| 0.512081
|
184fa55d99eb6ba4a36992ee508941f13328275f
| 1,074
|
py
|
Python
|
src/python/autotransform/input/empty.py
|
nathro/AutoTransform
|
04ef5458bc8401121e33370ceda6ef638e535e9a
|
[
"MIT"
] | 11
|
2022-01-02T00:50:24.000Z
|
2022-02-22T00:30:09.000Z
|
src/python/autotransform/input/empty.py
|
nathro/AutoTransform
|
04ef5458bc8401121e33370ceda6ef638e535e9a
|
[
"MIT"
] | 6
|
2022-01-06T01:45:34.000Z
|
2022-02-03T21:49:52.000Z
|
src/python/autotransform/input/empty.py
|
nathro/AutoTransform
|
04ef5458bc8401121e33370ceda6ef638e535e9a
|
[
"MIT"
] | null | null | null |
# AutoTransform
# Large scale, component based code modification library
#
# Licensed under the MIT License <http://opensource.org/licenses/MIT>
# SPDX-License-Identifier: MIT
# Copyright (c) 2022-present Nathan Rockenbach <http://github.com/nathro>
# @black_format
"""The implementation for the DirectoryInput."""
from __future__ import annotations
from typing import ClassVar, Sequence
from autotransform.input.base import Input, InputName
from autotransform.item.base import Item
class EmptyInput(Input):
"""An Input that simply returns an empty list. Used when a Transformer operates
on the whole codebase, rather than on an individual Item/set of Items.
Attributes:
name (ClassVar[InputName]): The name of the component.
"""
name: ClassVar[InputName] = InputName.EMPTY
def get_items(self) -> Sequence[Item]:
"""Returns an empty list of Items, useful for Transformers that operate
on the whole codebase at once.
Returns:
Sequence[Item]: An empty list of Items.
"""
return []
| 28.263158
| 83
| 0.712291
| 583
| 0.542831
| 0
| 0
| 0
| 0
| 0
| 0
| 741
| 0.689944
|
1851692534eb7b89ed5ce5f0fcea30358bb3c381
| 27,790
|
py
|
Python
|
snowplow_tracker/tracker.py
|
jackwilliamson/snowplow-python-tracker
|
b4ee5192bde044f406182bef848b51bd21646f12
|
[
"Apache-2.0"
] | null | null | null |
snowplow_tracker/tracker.py
|
jackwilliamson/snowplow-python-tracker
|
b4ee5192bde044f406182bef848b51bd21646f12
|
[
"Apache-2.0"
] | 1
|
2019-01-08T17:09:11.000Z
|
2019-01-08T17:09:11.000Z
|
snowplow_tracker/tracker.py
|
jackwilliamson/snowplow-python-tracker
|
b4ee5192bde044f406182bef848b51bd21646f12
|
[
"Apache-2.0"
] | 1
|
2017-05-30T20:49:24.000Z
|
2017-05-30T20:49:24.000Z
|
"""
tracker.py
Copyright (c) 2013-2014 Snowplow Analytics Ltd. All rights reserved.
This program is licensed to you under the Apache License Version 2.0,
and you may not use this file except in compliance with the Apache License
Version 2.0. You may obtain a copy of the Apache License Version 2.0 at
http://www.apache.org/licenses/LICENSE-2.0.
Unless required by applicable law or agreed to in writing,
software distributed under the Apache License Version 2.0 is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the Apache License Version 2.0 for the specific
language governing permissions and limitations there under.
Authors: Anuj More, Alex Dean, Fred Blundun
Copyright: Copyright (c) 2013-2014 Snowplow Analytics Ltd
License: Apache License Version 2.0
"""
import time
import uuid
import six
from contracts import contract, new_contract
from snowplow_tracker import payload, _version, SelfDescribingJson
from snowplow_tracker import subject as _subject
from snowplow_tracker.timestamp import Timestamp, TrueTimestamp, DeviceTimestamp
"""
Constants & config
"""
VERSION = "py-%s" % _version.__version__
DEFAULT_ENCODE_BASE64 = True
BASE_SCHEMA_PATH = "iglu:com.snowplowanalytics.snowplow"
SCHEMA_TAG = "jsonschema"
CONTEXT_SCHEMA = "%s/contexts/%s/1-0-1" % (BASE_SCHEMA_PATH, SCHEMA_TAG)
UNSTRUCT_EVENT_SCHEMA = "%s/unstruct_event/%s/1-0-0" % (BASE_SCHEMA_PATH, SCHEMA_TAG)
FORM_NODE_NAMES = ("INPUT", "TEXTAREA", "SELECT")
FORM_TYPES = (
"button", "checkbox", "color", "date", "datetime",
"datetime-local", "email", "file", "hidden", "image", "month",
"number", "password", "radio", "range", "reset", "search",
"submit", "tel", "text", "time", "url", "week"
)
"""
Tracker class
"""
class Tracker:
new_contract("not_none", lambda s: s is not None)
new_contract("non_empty_string", lambda s: isinstance(s, six.string_types)
and len(s) > 0)
new_contract("string_or_none", lambda s: (isinstance(s, six.string_types)
and len(s) > 0) or s is None)
new_contract("payload", lambda s: isinstance(s, payload.Payload))
new_contract("tracker", lambda s: isinstance(s, Tracker))
new_contract("emitter", lambda s: hasattr(s, "input"))
new_contract("self_describing_json", lambda s: isinstance(s, SelfDescribingJson))
new_contract("context_array", "list(self_describing_json)")
new_contract("form_node_name", lambda s: s in FORM_NODE_NAMES)
new_contract("form_type", lambda s: s.lower() in FORM_TYPES)
new_contract("timestamp", lambda x: (isinstance(x, Timestamp)))
new_contract("form_element", lambda x: Tracker.check_form_element(x))
@contract
def __init__(self, emitters, subject=None,
namespace=None, app_id=None, encode_base64=DEFAULT_ENCODE_BASE64):
"""
:param emitters: Emitters to which events will be sent
:type emitters: list[>0](emitter) | emitter
:param subject: Subject to be tracked
:type subject: subject | None
:param namespace: Identifier for the Tracker instance
:type namespace: string_or_none
:param app_id: Application ID
:type app_id: string_or_none
:param encode_base64: Whether JSONs in the payload should be base-64 encoded
:type encode_base64: bool
"""
if subject is None:
subject = _subject.Subject()
if type(emitters) is list:
self.emitters = emitters
else:
self.emitters = [emitters]
self.subject = subject
self.encode_base64 = encode_base64
self.standard_nv_pairs = {
"tv": VERSION,
"tna": namespace,
"aid": app_id
}
self.timer = None
@staticmethod
@contract
def get_uuid():
"""
Set transaction ID for the payload once during the lifetime of the
event.
:rtype: string
"""
return str(uuid.uuid4())
@staticmethod
@contract
def get_timestamp(tstamp=None):
"""
:param tstamp: User-input timestamp or None
:type tstamp: int | float | None
:rtype: int
"""
if tstamp is None:
return int(time.time() * 1000)
elif isinstance(tstamp, (int, float, )):
return int(tstamp)
"""
Tracking methods
"""
@contract
def track(self, pb):
"""
Send the payload to a emitter
:param pb: Payload builder
:type pb: payload
:rtype: tracker
"""
for emitter in self.emitters:
emitter.input(pb.nv_pairs)
return self
@contract
def complete_payload(self, pb, context, tstamp):
"""
Called by all tracking events to add the standard name-value pairs
to the Payload object irrespective of the tracked event.
:param pb: Payload builder
:type pb: payload
:param context: Custom context for the event
:type context: context_array | None
:param tstamp: Optional user-provided timestamp for the event
:type tstamp: timestamp | int | float | None
:rtype: tracker
"""
pb.add("eid", Tracker.get_uuid())
if isinstance(tstamp, TrueTimestamp):
pb.add("ttm", tstamp.value)
if isinstance(tstamp, DeviceTimestamp):
pb.add("dtm", Tracker.get_timestamp(tstamp.value))
elif isinstance(tstamp, (int, float, type(None))):
pb.add("dtm", Tracker.get_timestamp(tstamp))
if context is not None:
context_jsons = list(map(lambda c: c.to_json(), context))
context_envelope = SelfDescribingJson(CONTEXT_SCHEMA, context_jsons).to_json()
pb.add_json(context_envelope, self.encode_base64, "cx", "co")
pb.add_dict(self.standard_nv_pairs)
pb.add_dict(self.subject.standard_nv_pairs)
return self.track(pb)
@contract
def track_page_view(self, page_url, page_title=None, referrer=None, context=None, tstamp=None):
"""
:param page_url: URL of the viewed page
:type page_url: non_empty_string
:param page_title: Title of the viewed page
:type page_title: string_or_none
:param referrer: Referrer of the page
:type referrer: string_or_none
:param context: Custom context for the event
:type context: context_array | None
:param tstamp: Optional user-provided timestamp for the event
:type tstamp: timestamp | int | float | None
:rtype: tracker
"""
pb = payload.Payload()
pb.add("e", "pv") # pv: page view
pb.add("url", page_url)
pb.add("page", page_title)
pb.add("refr", referrer)
return self.complete_payload(pb, context, tstamp)
@contract
def track_page_ping(self, page_url, page_title=None, referrer=None, min_x=None, max_x=None, min_y=None, max_y=None, context=None, tstamp=None):
"""
:param page_url: URL of the viewed page
:type page_url: non_empty_string
:param page_title: Title of the viewed page
:type page_title: string_or_none
:param referrer: Referrer of the page
:type referrer: string_or_none
:param min_x: Minimum page x offset seen in the last ping period
:type min_x: int | None
:param max_x: Maximum page x offset seen in the last ping period
:type max_x: int | None
:param min_y: Minimum page y offset seen in the last ping period
:type min_y: int | None
:param max_y: Maximum page y offset seen in the last ping period
:type max_y: int | None
:param context: Custom context for the event
:type context: context_array | None
:param tstamp: Optional user-provided timestamp for the event
:type tstamp: timestamp | int | float | None
:rtype: tracker
"""
pb = payload.Payload()
pb.add("e", "pp") # pp: page ping
pb.add("url", page_url)
pb.add("page", page_title)
pb.add("refr", referrer)
pb.add("pp_mix", min_x)
pb.add("pp_max", max_x)
pb.add("pp_miy", min_y)
pb.add("pp_may", max_y)
return self.complete_payload(pb, context, tstamp)
@contract
def track_link_click(self, target_url, element_id=None,
element_classes=None, element_target=None,
element_content=None, context=None, tstamp=None):
"""
:param target_url: Target URL of the link
:type target_url: non_empty_string
:param element_id: ID attribute of the HTML element
:type element_id: string_or_none
:param element_classes: Classes of the HTML element
:type element_classes: list(str) | tuple(str,*) | None
:param element_content: The content of the HTML element
:type element_content: string_or_none
:param context: Custom context for the event
:type context: context_array | None
:param tstamp: Optional user-provided timestamp for the event
:type tstamp: timestamp | int | float | None
:rtype: tracker
"""
properties = {}
properties["targetUrl"] = target_url
if element_id is not None:
properties["elementId"] = element_id
if element_classes is not None:
properties["elementClasses"] = element_classes
if element_target is not None:
properties["elementTarget"] = element_target
if element_content is not None:
properties["elementContent"] = element_content
event_json = SelfDescribingJson("%s/link_click/%s/1-0-1" % (BASE_SCHEMA_PATH, SCHEMA_TAG), properties)
return self.track_unstruct_event(event_json, context, tstamp)
@contract
def track_add_to_cart(self, sku, quantity, name=None, category=None,
unit_price=None, currency=None, context=None,
tstamp=None):
"""
:param sku: Item SKU or ID
:type sku: non_empty_string
:param quantity: Number added to cart
:type quantity: int
:param name: Item's name
:type name: string_or_none
:param category: Item's category
:type category: string_or_none
:param unit_price: Item's price
:type unit_price: int | float | None
:param currency: Type of currency the price is in
:type currency: string_or_none
:param context: Custom context for the event
:type context: context_array | None
:param tstamp: Optional user-provided timestamp for the event
:type tstamp: timestamp | int | float | None
:rtype: tracker
"""
properties = {}
properties["sku"] = sku
properties["quantity"] = quantity
if name is not None:
properties["name"] = name
if category is not None:
properties["category"] = category
if unit_price is not None:
properties["unitPrice"] = unit_price
if currency is not None:
properties["currency"] = currency
event_json = SelfDescribingJson("%s/add_to_cart/%s/1-0-0" % (BASE_SCHEMA_PATH, SCHEMA_TAG), properties)
return self.track_unstruct_event(event_json, context, tstamp)
@contract
def track_remove_from_cart(self, sku, quantity, name=None, category=None,
unit_price=None, currency=None, context=None,
tstamp=None):
"""
:param sku: Item SKU or ID
:type sku: non_empty_string
:param quantity: Number added to cart
:type quantity: int
:param name: Item's name
:type name: string_or_none
:param category: Item's category
:type category: string_or_none
:param unit_price: Item's price
:type unit_price: int | float | None
:param currency: Type of currency the price is in
:type currency: string_or_none
:param context: Custom context for the event
:type context: context_array | None
:param tstamp: Optional user-provided timestamp for the event
:type tstamp: timestamp | int | float | None
:rtype: tracker
"""
properties = {}
properties["sku"] = sku
properties["quantity"] = quantity
if name is not None:
properties["name"] = name
if category is not None:
properties["category"] = category
if unit_price is not None:
properties["unitPrice"] = unit_price
if currency is not None:
properties["currency"] = currency
event_json = SelfDescribingJson("%s/remove_from_cart/%s/1-0-0" % (BASE_SCHEMA_PATH, SCHEMA_TAG), properties)
return self.track_unstruct_event(event_json, context, tstamp)
@contract
def track_form_change(self, form_id, element_id, node_name, value, type_=None,
element_classes=None, context=None, tstamp=None):
"""
:param form_id: ID attribute of the HTML form
:type form_id: non_empty_string
:param element_id: ID attribute of the HTML element
:type element_id: string_or_none
:param node_name: Type of input element
:type node_name: form_node_name
:param value: Value of the input element
:type value: string_or_none
:param type_: Type of data the element represents
:type type_: non_empty_string, form_type
:param element_classes: Classes of the HTML element
:type element_classes: list(str) | tuple(str,*) | None
:param context: Custom context for the event
:type context: context_array | None
:param tstamp: Optional user-provided timestamp for the event
:type tstamp: timestamp | int | float | None
:rtype: tracker
"""
properties = dict()
properties["formId"] = form_id
properties["elementId"] = element_id
properties["nodeName"] = node_name
properties["value"] = value
if type_ is not None:
properties["type"] = type_
if element_classes is not None:
properties["elementClasses"] = element_classes
event_json = SelfDescribingJson("%s/change_form/%s/1-0-0" % (BASE_SCHEMA_PATH, SCHEMA_TAG), properties)
return self.track_unstruct_event(event_json, context, tstamp)
@contract
def track_form_submit(self, form_id, form_classes=None, elements=None,
context=None, tstamp=None):
"""
:param form_id: ID attribute of the HTML form
:type form_id: non_empty_string
:param form_classes: Classes of the HTML form
:type form_classes: list(str) | tuple(str,*) | None
        :param elements: Elements of the HTML form
:type elements: list(form_element) | None
:param context: Custom context for the event
:type context: context_array | None
:param tstamp: Optional user-provided timestamp for the event
:type tstamp: timestamp | int | float | None
:rtype: tracker
"""
properties = dict()
properties['formId'] = form_id
if form_classes is not None:
properties['formClasses'] = form_classes
if elements is not None and len(elements) > 0:
properties['elements'] = elements
event_json = SelfDescribingJson("%s/submit_form/%s/1-0-0" % (BASE_SCHEMA_PATH, SCHEMA_TAG), properties)
return self.track_unstruct_event(event_json, context, tstamp)
@contract
def track_site_search(self, terms, filters=None, total_results=None,
page_results=None, context=None, tstamp=None):
"""
:param terms: Search terms
:type terms: seq[>=1](str)
:param filters: Filters applied to the search
:type filters: dict(str:str|bool) | None
:param total_results: Total number of results returned
:type total_results: int | None
:param page_results: Total number of pages of results
:type page_results: int | None
:param context: Custom context for the event
:type context: context_array | None
:param tstamp: Optional user-provided timestamp for the event
:type tstamp: timestamp | int | float | None
:rtype: tracker
"""
properties = {}
properties["terms"] = terms
if filters is not None:
properties["filters"] = filters
if total_results is not None:
properties["totalResults"] = total_results
if page_results is not None:
properties["pageResults"] = page_results
event_json = SelfDescribingJson("%s/site_search/%s/1-0-0" % (BASE_SCHEMA_PATH, SCHEMA_TAG), properties)
return self.track_unstruct_event(event_json, context, tstamp)
@contract
def track_ecommerce_transaction_item(self, order_id, sku, price, quantity,
name=None, category=None, currency=None,
context=None,
tstamp=None):
"""
This is an internal method called by track_ecommerce_transaction.
It is not for public use.
:param order_id: Order ID
:type order_id: non_empty_string
:param sku: Item SKU
:type sku: non_empty_string
:param price: Item price
:type price: int | float
:param quantity: Item quantity
:type quantity: int
:param name: Item name
:type name: string_or_none
:param category: Item category
:type category: string_or_none
:param currency: The currency the price is expressed in
:type currency: string_or_none
:param context: Custom context for the event
:type context: context_array | None
:rtype: tracker
"""
pb = payload.Payload()
pb.add("e", "ti")
pb.add("ti_id", order_id)
pb.add("ti_sk", sku)
pb.add("ti_nm", name)
pb.add("ti_ca", category)
pb.add("ti_pr", price)
pb.add("ti_qu", quantity)
pb.add("ti_cu", currency)
return self.complete_payload(pb, context, tstamp)
@contract
def track_ecommerce_transaction(self, order_id, total_value,
affiliation=None, tax_value=None, shipping=None,
city=None, state=None, country=None, currency=None,
items=None,
context=None, tstamp=None):
"""
:param order_id: ID of the eCommerce transaction
:type order_id: non_empty_string
:param total_value: Total transaction value
:type total_value: int | float
:param affiliation: Transaction affiliation
:type affiliation: string_or_none
:param tax_value: Transaction tax value
:type tax_value: int | float | None
:param shipping: Delivery cost charged
:type shipping: int | float | None
:param city: Delivery address city
:type city: string_or_none
:param state: Delivery address state
:type state: string_or_none
:param country: Delivery address country
:type country: string_or_none
:param currency: The currency the price is expressed in
:type currency: string_or_none
:param items: The items in the transaction
:type items: list(dict(str:*))
:param context: Custom context for the event
:type context: context_array | None
:rtype: tracker
"""
pb = payload.Payload()
pb.add("e", "tr")
pb.add("tr_id", order_id)
pb.add("tr_tt", total_value)
pb.add("tr_af", affiliation)
pb.add("tr_tx", tax_value)
pb.add("tr_sh", shipping)
pb.add("tr_ci", city)
pb.add("tr_st", state)
pb.add("tr_co", country)
pb.add("tr_cu", currency)
tstamp = Tracker.get_timestamp(tstamp)
self.complete_payload(pb, context, tstamp)
for item in items:
item["tstamp"] = tstamp
item["order_id"] = order_id
item["currency"] = currency
self.track_ecommerce_transaction_item(**item)
return self
@contract
def track_screen_view(self, name=None, id_=None, context=None, tstamp=None):
"""
:param name: The name of the screen view event
:type name: string_or_none
:param id_: Screen view ID
:type id_: string_or_none
:param context: Custom context for the event
:type context: context_array | None
:rtype: tracker
"""
screen_view_properties = {}
if name is not None:
screen_view_properties["name"] = name
if id_ is not None:
screen_view_properties["id"] = id_
event_json = SelfDescribingJson("%s/screen_view/%s/1-0-0" % (BASE_SCHEMA_PATH, SCHEMA_TAG), screen_view_properties)
return self.track_unstruct_event(event_json, context, tstamp)
@contract
def track_struct_event(self, category, action, label=None, property_=None, value=None,
context=None,
tstamp=None):
"""
:param category: Category of the event
:type category: non_empty_string
:param action: The event itself
:type action: non_empty_string
:param label: Refer to the object the action is
performed on
:type label: string_or_none
:param property_: Property associated with either the action
or the object
:type property_: string_or_none
:param value: A value associated with the user action
:type value: int | float | None
:param context: Custom context for the event
:type context: context_array | None
:rtype: tracker
"""
pb = payload.Payload()
pb.add("e", "se")
pb.add("se_ca", category)
pb.add("se_ac", action)
pb.add("se_la", label)
pb.add("se_pr", property_)
pb.add("se_va", value)
return self.complete_payload(pb, context, tstamp)
@contract
def track_unstruct_event(self, event_json, context=None, tstamp=None):
"""
:param event_json: The properties of the event. Has two field:
A "data" field containing the event properties and
A "schema" field identifying the schema against which the data is validated
:type event_json: self_describing_json
:param context: Custom context for the event
:type context: context_array | None
:param tstamp: User-set timestamp
:type tstamp: timestamp | int | None
:rtype: tracker
"""
envelope = SelfDescribingJson(UNSTRUCT_EVENT_SCHEMA, event_json.to_json()).to_json()
pb = payload.Payload()
pb.add("e", "ue")
pb.add_json(envelope, self.encode_base64, "ue_px", "ue_pr")
return self.complete_payload(pb, context, tstamp)
# Alias
track_self_describing_event = track_unstruct_event
@contract
def flush(self, is_async=False):
"""
Flush the emitter
:param is_async: Whether the flush is done asynchronously. Default is False
:type is_async: bool
:rtype: tracker
"""
for emitter in self.emitters:
if is_async:
emitter.flush()
else:
emitter.sync_flush()
return self
@contract
def set_subject(self, subject):
"""
Set the subject of the events fired by the tracker
:param subject: Subject to be tracked
:type subject: subject | None
:rtype: tracker
"""
self.subject = subject
return self
@contract
def add_emitter(self, emitter):
"""
Add a new emitter to which events should be passed
:param emitter: New emitter
:type emitter: emitter
:rtype: tracker
"""
self.emitters.append(emitter)
return self
@staticmethod
def check_form_element(element):
"""
        PyContracts helper method to check that a dictionary conforms to the element
        in the submit_form and change_form schemas
"""
all_present = isinstance(element, dict) and 'name' in element and 'value' in element and 'nodeName' in element
try:
if element['type'] in FORM_TYPES:
type_valid = True
else:
type_valid = False
except KeyError:
type_valid = True
return all_present and element['nodeName'] in FORM_NODE_NAMES and type_valid
| 40.688141
| 147
| 0.556747
| 25,954
| 0.933933
| 0
| 0
| 24,785
| 0.891868
| 0
| 0
| 16,098
| 0.579273
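A brief usage sketch for the tracker module above, assuming the package's own Emitter and Subject classes (any object with an input() method satisfies the emitter contract); the collector host below is a placeholder.
from snowplow_tracker import Emitter, Subject, Tracker
emitter = Emitter("collector.example.com")            # placeholder collector endpoint
subject = Subject().set_user_id("user-42")
tracker = Tracker(emitter, subject, namespace="web", app_id="demo-app")
# each track_* call builds a Payload, adds eid/dtm plus the standard name-value
# pairs in complete_payload, and hands it to every registered emitter
tracker.track_page_view("http://www.example.com", page_title="Home")
tracker.track_struct_event("shop", "add-to-basket", value=2)
tracker.flush()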
|
185308de027ac2681bc3f8d490477023a29fcb44
| 6,597
|
py
|
Python
|
src/oic/oauth2/util.py
|
alanbuxey/pyoidc
|
5f2d9ac468aaad599260f70481062c9d31273da2
|
[
"Apache-2.0"
] | 290
|
2015-01-02T20:14:53.000Z
|
2022-01-24T11:39:10.000Z
|
src/oic/oauth2/util.py
|
peppelinux/pyoidc
|
2e751ed84039259a2b138148eae204c877518950
|
[
"Apache-2.0"
] | 103
|
2015-02-03T13:20:59.000Z
|
2017-09-19T20:01:08.000Z
|
src/oic/oauth2/util.py
|
peppelinux/pyoidc
|
2e751ed84039259a2b138148eae204c877518950
|
[
"Apache-2.0"
] | 128
|
2015-01-02T20:14:19.000Z
|
2021-11-07T14:28:03.000Z
|
import logging
from http import cookiejar as http_cookiejar
from http.cookiejar import http2time # type: ignore
from typing import Any # noqa
from typing import Dict # noqa
from urllib.parse import parse_qs
from urllib.parse import urlsplit
from urllib.parse import urlunsplit
from oic.exception import UnSupported
from oic.oauth2.exception import TimeFormatError
from oic.utils.sanitize import sanitize
logger = logging.getLogger(__name__)
__author__ = "roland"
URL_ENCODED = "application/x-www-form-urlencoded"
JSON_ENCODED = "application/json"
DEFAULT_POST_CONTENT_TYPE = URL_ENCODED
PAIRS = {
"port": "port_specified",
"domain": "domain_specified",
"path": "path_specified",
}
ATTRS = {
"version": None,
"name": "",
"value": None,
"port": None,
"port_specified": False,
"domain": "",
"domain_specified": False,
"domain_initial_dot": False,
"path": "",
"path_specified": False,
"secure": False,
"expires": None,
"discard": True,
"comment": None,
"comment_url": None,
"rest": "",
"rfc2109": True,
} # type: Dict[str, Any]
def get_or_post(
uri, method, req, content_type=DEFAULT_POST_CONTENT_TYPE, accept=None, **kwargs
):
"""
Construct HTTP request.
:param uri:
:param method:
:param req:
:param content_type:
:param accept:
:param kwargs:
:return:
"""
if method in ["GET", "DELETE"]:
if req.keys():
_req = req.copy()
comp = urlsplit(str(uri))
if comp.query:
_req.update(parse_qs(comp.query))
_query = str(_req.to_urlencoded())
path = urlunsplit(
(comp.scheme, comp.netloc, comp.path, _query, comp.fragment)
)
else:
path = uri
body = None
elif method in ["POST", "PUT"]:
path = uri
if content_type == URL_ENCODED:
body = req.to_urlencoded()
elif content_type == JSON_ENCODED:
body = req.to_json()
else:
raise UnSupported("Unsupported content type: '%s'" % content_type)
header_ext = {"Content-Type": content_type}
if accept:
header_ext = {"Accept": accept}
if "headers" in kwargs.keys():
kwargs["headers"].update(header_ext)
else:
kwargs["headers"] = header_ext
else:
raise UnSupported("Unsupported HTTP method: '%s'" % method)
return path, body, kwargs
def set_cookie(cookiejar, kaka):
"""
Place a cookie (a http_cookielib.Cookie based on a set-cookie header line) in the cookie jar.
Always chose the shortest expires time.
:param cookiejar:
:param kaka: Cookie
"""
# default rfc2109=False
# max-age, httponly
for cookie_name, morsel in kaka.items():
std_attr = ATTRS.copy()
std_attr["name"] = cookie_name
_tmp = morsel.coded_value
if _tmp.startswith('"') and _tmp.endswith('"'):
std_attr["value"] = _tmp[1:-1]
else:
std_attr["value"] = _tmp
std_attr["version"] = 0
attr = ""
# copy attributes that have values
try:
for attr in morsel.keys():
if attr in ATTRS:
if morsel[attr]:
if attr == "expires":
std_attr[attr] = http2time(morsel[attr])
else:
std_attr[attr] = morsel[attr]
elif attr == "max-age":
if morsel[attr]:
std_attr["expires"] = http2time(morsel[attr])
except TimeFormatError:
# Ignore cookie
logger.info(
"Time format error on %s parameter in received cookie"
% (sanitize(attr),)
)
continue
for att, spec in PAIRS.items():
if std_attr[att]:
std_attr[spec] = True
if std_attr["domain"] and std_attr["domain"].startswith("."):
std_attr["domain_initial_dot"] = True
if morsel["max-age"] == 0:
try:
cookiejar.clear(
domain=std_attr["domain"],
path=std_attr["path"],
name=std_attr["name"],
)
except ValueError:
pass
else:
# Fix for Microsoft cookie error
if "version" in std_attr:
try:
std_attr["version"] = std_attr["version"].split(",")[0]
except (TypeError, AttributeError):
pass
new_cookie = http_cookiejar.Cookie(**std_attr) # type: ignore
cookiejar.set_cookie(new_cookie)
def match_to_(val, vlist):
if isinstance(vlist, str):
if vlist.startswith(val):
return True
else:
for v in vlist:
if v.startswith(val):
return True
return False
def verify_header(reqresp, body_type):
logger.debug("resp.headers: %s" % (sanitize(reqresp.headers),))
logger.debug("resp.txt: %s" % (sanitize(reqresp.text),))
if body_type == "":
_ctype = reqresp.headers["content-type"]
if match_to_("application/json", _ctype):
body_type = "json"
elif match_to_("application/jwt", _ctype):
body_type = "jwt"
elif match_to_(URL_ENCODED, _ctype):
body_type = "urlencoded"
else:
body_type = "txt" # reasonable default ??
elif body_type == "json":
if not match_to_("application/json", reqresp.headers["content-type"]):
if match_to_("application/jwt", reqresp.headers["content-type"]):
body_type = "jwt"
else:
raise ValueError(
"content-type: %s" % (reqresp.headers["content-type"],)
)
elif body_type == "jwt":
if not match_to_("application/jwt", reqresp.headers["content-type"]):
raise ValueError(
"Wrong content-type in header, got: {} expected "
"'application/jwt'".format(reqresp.headers["content-type"])
)
elif body_type == "urlencoded":
if not match_to_(DEFAULT_POST_CONTENT_TYPE, reqresp.headers["content-type"]):
if not match_to_("text/plain", reqresp.headers["content-type"]):
raise ValueError("Wrong content-type")
else:
raise ValueError("Unknown return format: %s" % body_type)
return body_type
| 30.123288
| 97
| 0.555404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,676
| 0.254055
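A minimal sketch of how verify_header above resolves a body type from the Content-Type header, using a stand-in response object; FakeResponse is for illustration only and is not part of pyoidc.
from oic.oauth2.util import verify_header
class FakeResponse:
    # stand-in for the requests-style response object the library normally receives
    def __init__(self, content_type, text="{}"):
        self.headers = {"content-type": content_type}
        self.text = text
assert verify_header(FakeResponse("application/json"), "") == "json"   # inferred from the header
assert verify_header(FakeResponse("application/jwt"), "") == "jwt"
# a mismatch between the requested body_type and the header raises ValueError
try:
    verify_header(FakeResponse("text/html"), "json")
except ValueError:
    pass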
|
1853550d01976a79c3f2f5631cb3c4c7ae9f5fcf
| 5,890
|
py
|
Python
|
main.py
|
aditya02acharya/TypingAgent
|
34c5230be72c3878942457a6e44b7078fbd08ea0
|
[
"MIT"
] | 5
|
2020-09-07T16:40:34.000Z
|
2022-01-18T15:50:57.000Z
|
main.py
|
aditya02acharya/TypingAgent
|
34c5230be72c3878942457a6e44b7078fbd08ea0
|
[
"MIT"
] | 1
|
2020-10-06T13:14:46.000Z
|
2020-10-06T13:14:46.000Z
|
main.py
|
aditya02acharya/TypingAgent
|
34c5230be72c3878942457a6e44b7078fbd08ea0
|
[
"MIT"
] | null | null | null |
import sys
import yaml
import numpy
import random
import logging
import argparse
from os import path, makedirs
from datetime import datetime
from src.finger_proxy.proxy_agent import ProxyAgent
from src.utilities.logging_config_manager import setup_logging
from src.display.touchscreendevice import TouchScreenDevice
from src.vision.vision_agent import VisionAgent
from src.finger.finger_agent import FingerAgent
from src.proofread.proofread_agent import ProofreadAgent
from src.supervisor.supervisor_agent import SupervisorAgent
parser = argparse.ArgumentParser()
# General parameters
parser.add_argument("--all", action="store_true", default=False,
help="train/test all the agents [vision, finger, proofread, supervisor]")
parser.add_argument("--vision", action="store_true", default=False, help="train/test only the vision agent")
parser.add_argument("--finger", action="store_true", default=False, help="train/test only the finger agent")
parser.add_argument("--proofread", action="store_true", default=False, help="train/test only the proofread agent")
parser.add_argument("--supervisor", action="store_true", default=False, help="train/test only the supervisor agent")
parser.add_argument("--train", action="store_true", default=False, help="run model in train mode")
parser.add_argument("--config", required=True, help="name of the configuration file (REQUIRED)")
parser.add_argument("--seed", type=int, default=datetime.now().microsecond, help="random seed default: current time")
parser.add_argument("--type", default=">", help="sentence to type for the agent.")
parser.add_argument("--batch", action="store_true", default=False, help="evaluate a batch of sentences.")
parser.add_argument("--users", type=int, default=1, help="number of users to simulate")
parser.add_argument("--twofinger", action="store_true", default=False, help="enable typing with two finger.")
parser.add_argument("--verbose", action="store_true", default=False, help="print tqdm step in new line.")
# get user command line arguments.
args = parser.parse_args()
# Initialise random seed.
numpy.random.seed(args.seed)
random.seed(args.seed)
# Setup Logger.
if not path.isdir("logs"):
# if logs folder doesn't exist create one.
makedirs("logs")
setup_logging(default_path=path.join("configs", "logging.yml"))
logger = logging.getLogger(__name__)
logger.info("logger is set.")
# load app config.
if path.exists(path.join("configs", args.config)):
with open(path.join("configs", args.config), 'r') as file:
config_file = yaml.load(file, Loader=yaml.FullLoader)
logger.info("App Configurations loaded.")
else:
logger.error("File doesn't exist: Failed to load %s file under configs folder." % str(args.config))
sys.exit(0)
if args.train:
if path.exists(path.join("configs", config_file['training_config'])):
with open(path.join("configs", config_file['training_config']), 'r') as file:
train_config = yaml.load(file, Loader=yaml.FullLoader)
logger.info("Training Configurations loaded.")
else:
logger.error("File doesn't exist: Failed to load %s file under configs folder." %
config_file['training_config'])
sys.exit(0)
if args.vision or args.all:
logger.info("Initiating Vision Agent Training.")
vision_agent = VisionAgent(config_file['device_config'], train_config['vision'], args.verbose)
vision_agent.train(vision_agent.episodes)
if args.finger or args.all:
logger.info("Initiating Finger Agent Training.")
finger_agent = FingerAgent(config_file['device_config'], train_config['finger'], 0, True, args.verbose)
finger_agent.train(finger_agent.episodes)
if args.proofread or args.all:
logger.info("Initiating Proofread Agent Training.")
proofread_agent = ProofreadAgent(config_file['device_config'], train_config['proofread'], args.verbose)
proofread_agent.train(proofread_agent.episodes)
if args.supervisor or args.all:
logger.info("Initiating Supervisor Agent Training.")
if args.twofinger:
supervisor_agent = SupervisorAgent(config_file['device_config'], train_config, True, True, args.verbose)
else:
supervisor_agent = SupervisorAgent(config_file['device_config'], train_config, True, False, args.verbose)
print(type(supervisor_agent.episodes))
supervisor_agent.train(supervisor_agent.episodes)
else:
if path.exists(path.join("configs", config_file['testing_config'])):
with open(path.join("configs", config_file['testing_config']), 'r') as file:
test_config = yaml.load(file, Loader=yaml.FullLoader)
logger.info("Training Configurations loaded.")
else:
logger.error("File doesn't exist: Failed to load %s file under configs folder." %
config_file['testing_config'])
sys.exit(0)
if args.vision or args.all:
logger.info("Initiating Vision Agent Evaluation.")
vision_agent = VisionAgent(config_file['device_config'], test_config['vision'])
vision_agent.evaluate(args.type)
if args.finger or args.all:
logger.info("Initiating Finger Agent Evaluation.")
finger_agent = FingerAgent(config_file['device_config'], test_config['finger'], 0, False)
finger_agent.evaluate(args.type, sat_desired=test_config['finger']['typing_accuracy'])
if args.supervisor or args.all:
logger.info("Initiating Supervisor Agent Evaluation.")
if args.twofinger:
supervisor_agent = SupervisorAgent(config_file['device_config'], test_config, False, True, args.verbose)
else:
supervisor_agent = SupervisorAgent(config_file['device_config'], test_config, False, False, args.verbose)
supervisor_agent.evaluate(args.type, args.batch, args.users)
| 47.5
| 117
| 0.720204
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,834
| 0.311375
|
185491bbcdadc1f460e3cbb3e31ce90f8c3eb65e
| 1,854
|
py
|
Python
|
examples/chain.py
|
yeeliu01/pyrfa
|
536c94f1bcff232415495cbe04b8897ad91e0c76
|
[
"MIT"
] | 33
|
2016-11-29T08:18:28.000Z
|
2021-11-11T15:40:19.000Z
|
examples/chain.py
|
yeeliu01/pyrfa
|
536c94f1bcff232415495cbe04b8897ad91e0c76
|
[
"MIT"
] | 41
|
2016-09-20T10:15:11.000Z
|
2021-10-20T01:14:22.000Z
|
examples/chain.py
|
devcartel/thomsonreuters
|
536c94f1bcff232415495cbe04b8897ad91e0c76
|
[
"MIT"
] | 9
|
2016-10-19T00:09:22.000Z
|
2020-08-03T03:02:15.000Z
|
#!/usr/bin/python
#
# Decoding a legacy chain ric
#
import pyrfa
p = pyrfa.Pyrfa()
p.createConfigDb("./pyrfa.cfg")
p.acquireSession("Session1")
p.createOMMConsumer()
p.login()
p.directoryRequest()
p.dictionaryRequest()
p.setInteractionType("snapshot")
def snapshotRequest(chainRIC):
p.marketPriceRequest(chainRIC)
snapshots = p.dispatchEventQueue(1000)
if snapshots:
for snapshot in snapshots:
if snapshot['MTYPE'] == 'IMAGE':
return snapshot
return ()
fids = ['LINK_1', 'LINK_2', 'LINK_3', 'LINK_4', 'LINK_5', 'LINK_6', 'LINK_7', 'LINK_8',
'LINK_9', 'LINK_10', 'LINK_11', 'LINK_12', 'LINK_13', 'LINK_14',
'LONGLINK1', 'LONGLINK2', 'LONGLINK3', 'LONGLINK4', 'LONGLINK5', 'LONGLINK6', 'LONGLINK7',
'LONGLINK8', 'LONGLINK9', 'LONGLINK10', 'LONGLINK11', 'LONGLINK12', 'LONGLINK13', 'LONGLINK14',
'BR_LINK1', 'BR_LINK2', 'BR_LINK3', 'BR_LINK4', 'BR_LINK5', 'BR_LINK6', 'BR_LINK7', 'BR_LINK8',
'BR_LINK9', 'BR_LINK10', 'BR_LINK11', 'BR_LINK12', 'BR_LINK13', 'BR_LINK14']
def expandChainRIC(chainRIC):
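    # Walk the chain record by record: collect constituent RICs from the LINK_*/LONGLINK*/BR_LINK*
    # fields, then follow NEXT_LR/LONGNEXTLR/BR_NEXTLR to the next chain record until no link remains.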
expanded = []
done = False
snapshot = snapshotRequest(chainRIC)
while not done:
if not snapshot:
break
for fid in fids:
            if fid in snapshot and snapshot[fid]:
expanded.append(snapshot[fid])
        if 'NEXT_LR' in snapshot and snapshot['NEXT_LR']:
snapshot = snapshotRequest(snapshot['NEXT_LR'])
        elif 'LONGNEXTLR' in snapshot and snapshot['LONGNEXTLR']:
snapshot = snapshotRequest(snapshot['LONGNEXTLR'])
        elif 'BR_NEXTLR' in snapshot and snapshot['BR_NEXTLR']:
snapshot = snapshotRequest(snapshot['BR_NEXTLR'])
else:
done = True
return expanded
rics = expandChainRIC("0#.FTSE")
print(rics)
| 34.981132
| 103
| 0.635922
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 621
| 0.334951
|
185637d8cc3eb01cc46a55e5e9f5b84f8e7f9e79
| 1,746
|
py
|
Python
|
hard-gists/749857/snippet.py
|
jjhenkel/dockerizeme
|
eaa4fe5366f6b9adf74399eab01c712cacaeb279
|
[
"Apache-2.0"
] | 21
|
2019-07-08T08:26:45.000Z
|
2022-01-24T23:53:25.000Z
|
hard-gists/749857/snippet.py
|
jjhenkel/dockerizeme
|
eaa4fe5366f6b9adf74399eab01c712cacaeb279
|
[
"Apache-2.0"
] | 5
|
2019-06-15T14:47:47.000Z
|
2022-02-26T05:02:56.000Z
|
hard-gists/749857/snippet.py
|
jjhenkel/dockerizeme
|
eaa4fe5366f6b9adf74399eab01c712cacaeb279
|
[
"Apache-2.0"
] | 17
|
2019-05-16T03:50:34.000Z
|
2021-01-14T14:35:12.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# launchctl unload /System/Library/LaunchDaemons/com.apple.syslogd.plist
# launchctl load /System/Library/LaunchDaemons/com.apple.syslogd.plist
from twisted.internet import reactor, stdio, defer
from twisted.internet.protocol import Protocol, Factory
from twisted.protocols.basic import LineReceiver
import time, re, math, json
#<22>Nov 1 00:12:04 gleicon-vm1 postfix/smtpd[4880]: connect from localhost[127.0.0.1]
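# The <N> prefix encodes priority = facility * 8 + severity; e.g. <22> = facility 2 ('mail') * 8 + severity 6 ('info').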
severity = ['emerg', 'alert', 'crit', 'err', 'warn', 'notice', 'info', 'debug', ]
facility = ['kern', 'user', 'mail', 'daemon', 'auth', 'syslog', 'lpr', 'news',
'uucp', 'cron', 'authpriv', 'ftp', 'ntp', 'audit', 'alert', 'at', 'local0',
'local1', 'local2', 'local3', 'local4', 'local5', 'local6', 'local7',]
fs_match = re.compile("<(.+)>(.*)", re.I)
class SyslogdProtocol(LineReceiver):
delimiter = '\n'
def connectionMade(self):
        print('Connection from %r' % self.transport)
def lineReceived(self, line):
k = {}
k['line'] = line.strip()
(fac, sev) = self._calc_lvl(k['line'])
k['host'] = self.transport.getHost().host
k['tstamp'] = time.time()
k['facility'] = fac
k['severity'] = sev
        print(json.dumps(k))
def _calc_lvl(self, line):
lvl = fs_match.split(line)
if lvl and len(lvl) > 1:
i = int(lvl[1])
fac = int(math.floor(i / 8))
sev = i - (fac * 8)
return (facility[fac], severity[sev])
return (None, None)
class SyslogdFactory(Factory):
protocol = SyslogdProtocol
def main():
factory = SyslogdFactory()
reactor.listenTCP(25000, factory, 10)
reactor.run()
if __name__ == '__main__':
main()
| 31.178571
| 87
| 0.605956
| 778
| 0.44559
| 0
| 0
| 0
| 0
| 0
| 0
| 578
| 0.331042
|
185646f6d47cb9be2bd7e09abafec85a18497f07
| 11,371
|
py
|
Python
|
research/Issue2/utils.py
|
johnklee/ff_crawler
|
53b056bd94ccf55388d12c7f70460d280964f45f
|
[
"MIT"
] | null | null | null |
research/Issue2/utils.py
|
johnklee/ff_crawler
|
53b056bd94ccf55388d12c7f70460d280964f45f
|
[
"MIT"
] | 4
|
2021-04-09T02:05:42.000Z
|
2021-07-04T07:42:15.000Z
|
research/Issue2/utils.py
|
johnklee/ff_crawler
|
53b056bd94ccf55388d12c7f70460d280964f45f
|
[
"MIT"
] | null | null | null |
import requests as reqlib
import os
import re
import random
import time
import pickle
import abc
import hashlib
import threading
from urllib.parse import urlparse
from purifier import TEAgent
from purifier.logb import getLogger
from enum import IntEnum
from typing import Tuple, List, Dict, Optional
class ScraperTimeout(Exception):
def __init__(self, ex):
self.ex = ex
def __str__(self):
return f"Timeout: {self.ex}"
class ScraperNot200(Exception):
def __init__(self, sc):
self.sc = sc
def __str__(self):
return f"Unexpected Status Code={self.sc}!"
class UnsupportedMIME(Exception):
def __init__(self, mime):
self.mime = mime
def __str__(self):
return f"Unsupported MIME={self.mime}!"
class Scraper(metaclass=abc.ABCMeta):
@abc.abstractmethod
def get(self, url):
pass
class ReqScraper(object):
def __init__(self,
page_cache_path="page_caches",
headers={'User-Agent': 'Mozilla/5.0'},
skip_cache=False,
supported_mime_set={"text/html"}):
self.page_cache_path = page_cache_path
if not os.path.isdir(self.page_cache_path):
os.makedirs(self.page_cache_path)
self.headers = headers
self.logger = getLogger(os.path.basename(self.__class__.__name__))
self.skip_cache = skip_cache
self.supported_mime_set = supported_mime_set
def _get_cache_path(self, url):
test_url_host = urlparse(url).netloc
url_md5 = hashlib.md5(url.encode('utf-8')).hexdigest()
cache_file_name = f"{test_url_host}_{url_md5}.txt"
cache_file_path = os.path.join(self.page_cache_path, cache_file_name)
return cache_file_path
def _del_from_cache(self, url):
cache_file_path = self._get_cache_path(url)
if os.path.isfile(cache_file_path):
self.logger.warning("Removing cache file={cache_file_path}...")
os.remove(cache_file_path)
def _get_from_cache(self, url):
cache_file_path = self._get_cache_path(url)
if os.path.isfile(cache_file_path):
self.logger.debug(f"Return content of {url} from cache...")
with open(cache_file_path, 'r', encoding='utf8') as fo:
return fo.read()
return None
def _save2cache(self, url, html_content):
cache_file_path = self._get_cache_path(url)
with open(cache_file_path, 'w', encoding='utf8') as fw:
fw.write(html_content)
def get(self, url):
if not self.skip_cache:
cache_text = self._get_from_cache(url)
if cache_text is not None:
return cache_text
self.logger.debug(f"Crawling {url}...")
try:
resp = reqlib.get(url, headers=self.headers, timeout=(5, 10))
if resp.ok:
mime = resp.headers['content-type'].split(';')[0].strip()
self.logger.debug(f"URL={url} with MIME={mime}...")
if mime.lower() not in self.supported_mime_set:
raise UnsupportedMIME(mime)
self._save2cache(url, resp.text)
return resp.text
else:
raise ScraperNot200(resp.status_code)
except Exception as e:
raise ScraperTimeout(e)
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKCYAN = '\033[96m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
class ThreadState(IntEnum):
STOPPED = 0
RUNNING = 1
STOPPING = 2
class CrawlAgent(object):
def __init__(self, name, throttling_range=(1, 2)):
self.rs = ReqScraper(page_cache_path=f"{name}_cache")
self.et = TEAgent(
policy_path="policy",
disable_policy=True,
ext_title=True
)
self.logger = getLogger(os.path.basename(self.__class__.__name__))
self.throttling_range = throttling_range
def obsolete_cache(self, url):
self.rs._del_from_cache(url)
def handle(self, url:str, skip_throttling:bool=False) -> Tuple[str, str, List[str]]:
try:
            if not skip_throttling:
wait_in_sec = random.uniform(*self.throttling_range)
self.logger.debug(f"throttling wait {wait_in_sec}s...")
time.sleep(wait_in_sec)
url_content_html = self.rs.get(url)
is_succ, rst, handler = self.et.parse(
"text/html",
url,
url_content_html,
do_ext_link=True
)
            return (rst['title'], rst['text'], rst['all_links'])
except ScraperNot200 as e:
self.logger.warning(f"Fail to handle URL={url}: {str(e)}")
return None, None, None
except UnsupportedMIME as e:
self.logger.warning(f"Fail to handle URL={url}: {str(e)}")
return None, None, None
except ScraperTimeout as e:
time.sleep(2)
self.logger.warning(f"Fail to handle URL={url}: {str(e)}")
return None, None, None
class ExplorerWorker(threading.Thread):
def __init__(
self,
name:str,
url_ptn:str,
src_url:str,
test_run:int=-1,
page_saved_dir:Optional[str]=None):
        super(ExplorerWorker, self).__init__(name=name)
self.name = name
self.url_ptn = url_ptn
self.src_url = src_url
self.test_run = test_run
self.ca = CrawlAgent(name)
self.pc_dict = self._get_pc_dict()
''' Processed result cache: Key as URL; value as bool (True means this URL is crawled successfully)'''
self.state = ThreadState.STOPPED
''' Thread state: 0-> stopped; 1-> running; 2-> stopping'''
self.logger = getLogger(os.path.basename(self.__class__.__name__))
''' Logger object '''
self.page_saved_dir = page_saved_dir if page_saved_dir is not None else f"{self.name}_pages_output"
''' Path or directory to save dump page'''
self.stop_signal = f"STOP_{self.name}"
''' Stop signal file '''
if not os.path.isdir(self.page_saved_dir):
os.makedirs(self.page_saved_dir)
def _get_output_page_path(self, url):
url_host = urlparse(url).netloc
url_md5 = hashlib.md5(url.encode('utf-8')).hexdigest()
page_file_name = f"{url_host}_{url_md5}.txt"
page_file_path = os.path.join(self.page_saved_dir, page_file_name)
return page_file_path
def _get_pc_serialized_file(self) -> str:
return f"{self.name}_pc_dict.pkl"
def _get_pc_dict(self) -> Dict[str, bool]:
pkl_file = self._get_pc_serialized_file()
if os.path.isfile(pkl_file):
with open(pkl_file, 'rb') as fo:
return pickle.load(fo)
else:
return {}
def _serialized(self):
pkl_file = self._get_pc_serialized_file()
with open(pkl_file, 'wb') as fo:
pickle.dump(self.pc_dict, fo)
def run(self):
self.state = ThreadState.RUNNING
url_queue = [self.src_url]
pc = sc = fc = oc = 0
while self.state == ThreadState.RUNNING and url_queue:
if os.path.isfile(self.stop_signal):
os.remove(self.stop_signal)
self.logger.warning("Receive STOP signal!")
break
url = url_queue.pop(0)
pc += 1
if url not in self.pc_dict:
# New URL
self.logger.debug(f"Handling URL={url}...")
title, content, collected_urls = self.ca.handle(url)
if content is None:
self.pc_dict[url] = False
fc += 1
else:
if url != self.src_url:
self.pc_dict[url] = True
sc += 1
self.logger.info(bcolors.BOLD + f"Completed URL={url} ({len(url_queue):,d}/{pc:,d})" + bcolors.ENDC)
next_level_urls = list(filter(lambda u: re.match(self.url_ptn, u) is not None and "#" not in u, collected_urls))
if next_level_urls:
self.logger.debug(f"\tCollected {len(next_level_urls)} next level URL(s)")
url_queue.extend(list(set(next_level_urls) - set(url_queue)))
if content and "?" not in url:
page_output_path = self._get_output_page_path(url)
with open(page_output_path, 'w', encoding='utf8') as fw:
fw.write(f"{url}\n\n")
fw.write(f"{title}\n\n")
fw.write(f"{content}")
self.logger.debug(f"\tSaved page to {page_output_path}!")
else:
# Old URL
if not self.pc_dict[url]:
self.logger.info(f"Skip broken URL={url} in the past...")
continue
title, content, collected_urls = self.ca.handle(url, skip_throttling=True)
if collected_urls:
next_level_urls = list(filter(lambda u: re.match(self.url_ptn, u) is not None, collected_urls))
url_queue.extend(list(set(next_level_urls) - set(url_queue)))
oc += 1
self.logger.info(f"URL={url} is already handled...({len(url_queue):,d}/{pc:,d})")
continue
if self.test_run > 0:
if (sc + fc) > self.test_run:
self.logger.info(f"Exceed test_run={self.test_run} and therefore stop running...")
break
if pc % 1000 == 0:
self.logger.info(bcolors.OKBLUE + bcolors.BOLD + f"{pc} URL completed: sc={sc:,d}; fc={fc:,d}; oc={oc:,d}\n" + bcolors.ENDC)
self._serialized()
self.ca.obsolete_cache(self.src_url)
url_queue.append(self.src_url)
self.logger.warning(f"Serialized explorer result (name={self.name})...")
self._serialized()
self.logger.warning(f"Explorer is stopped! (name={self.name})...")
self.state = ThreadState.STOPPED
def stop(self):
self.logger.warning(f"Stopping explorer worker (name={self.name})...")
if self.state == ThreadState.RUNNING:
self.state = ThreadState.STOPPING
while self.state != ThreadState.STOPPED:
time.sleep(1)
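if __name__ == "__main__":
    # Illustrative usage sketch; the URL pattern and source URL below are hypothetical placeholders.
    worker = ExplorerWorker(name="demo",
                            url_ptn=r"https://example\.com/.*",
                            src_url="https://example.com/",
                            test_run=10)
    worker.start()   # threading.Thread.start() eventually invokes run()
    worker.join()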
| 38.157718
| 141
| 0.540322
| 10,969
| 0.964647
| 0
| 0
| 58
| 0.005101
| 0
| 0
| 1,658
| 0.14581
|
1856d318d47ce3e4786a9a38b7674ba6814094a5
| 1,554
|
py
|
Python
|
Python-CPU/monitor.py
|
cwd0204/Python
|
35413d0cfab0d659d710fd3f752dacef00f4a713
|
[
"MIT"
] | 1
|
2022-01-05T05:49:59.000Z
|
2022-01-05T05:49:59.000Z
|
Python-CPU/monitor.py
|
cwd0204/Python
|
35413d0cfab0d659d710fd3f752dacef00f4a713
|
[
"MIT"
] | null | null | null |
Python-CPU/monitor.py
|
cwd0204/Python
|
35413d0cfab0d659d710fd3f752dacef00f4a713
|
[
"MIT"
] | null | null | null |
# Real-time CPU monitoring
# Author: Charles
# WeChat official account: Charles的皮卡丘
import matplotlib.pyplot as plt
import matplotlib.font_manager as font_manager
import psutil as p
POINTS = 300
fig, ax = plt.subplots()
ax.set_ylim([0, 100])
ax.set_xlim([0, POINTS])
ax.set_autoscale_on(False)
ax.set_xticks([])
ax.set_yticks(range(0, 101, 10))
ax.grid(True)
# Percentage of time spent executing user processes
user = [None] * POINTS
# Percentage of time spent executing kernel processes and interrupts
sys = [None] * POINTS
# Percentage of time the CPU is idle
idle = [None] * POINTS
l_user, = ax.plot(range(POINTS), user, label='User %')
l_sys, = ax.plot(range(POINTS), sys, label='Sys %')
l_idle, = ax.plot(range(POINTS), idle, label='Idle %')
ax.legend(loc='upper center', ncol=4, prop=font_manager.FontProperties(size=10))
bg = fig.canvas.copy_from_bbox(ax.bbox)
def cpu_usage():
t = p.cpu_times()
return [t.user, t.system, t.idle]
before = cpu_usage()
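# Turn the change in cumulative CPU times since the previous sample into percentages
# (the +0.1 guards against division by zero when no time has elapsed).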
def get_cpu_usage():
global before
now = cpu_usage()
delta = [now[i] - before[i] for i in range(len(now))]
total = sum(delta)
before = now
return [(100.0*dt)/(total+0.1) for dt in delta]
def OnTimer(ax):
global user, sys, idle, bg
tmp = get_cpu_usage()
user = user[1:] + [tmp[0]]
sys = sys[1:] + [tmp[1]]
idle = idle[1:] + [tmp[2]]
l_user.set_ydata(user)
l_sys.set_ydata(sys)
l_idle.set_ydata(idle)
while True:
try:
ax.draw_artist(l_user)
ax.draw_artist(l_sys)
ax.draw_artist(l_idle)
break
except:
pass
ax.figure.canvas.draw()
def start_monitor():
timer = fig.canvas.new_timer(interval=100)
timer.add_callback(OnTimer, ax)
timer.start()
plt.show()
if __name__ == '__main__':
start_monitor()
| 20.72
| 80
| 0.689189
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 241
| 0.145006
|
185a8ab47b8d277c20020394a96aac3365fae3e8
| 8,128
|
py
|
Python
|
leaderboards/api_views.py
|
bfrederix/django-improv
|
23ae4b2cc3b7d38aa2a4d6872ea084247a1e34f6
|
[
"Apache-2.0"
] | 1
|
2020-08-07T18:46:19.000Z
|
2020-08-07T18:46:19.000Z
|
leaderboards/api_views.py
|
bfrederix/django-improv
|
23ae4b2cc3b7d38aa2a4d6872ea084247a1e34f6
|
[
"Apache-2.0"
] | null | null | null |
leaderboards/api_views.py
|
bfrederix/django-improv
|
23ae4b2cc3b7d38aa2a4d6872ea084247a1e34f6
|
[
"Apache-2.0"
] | null | null | null |
import datetime
from rest_framework import viewsets
from rest_framework.response import Response
from leaderboards import LEADERBOARD_MAX_PER_PAGE
from leaderboards.models import LeaderboardEntry, Medal, LeaderboardSpan
from leaderboards.serializers import (LeaderboardEntrySerializer, MedalSerializer,
LeaderboardSerializer, LeaderboardSpanSerializer,
LeaderboardEntrySpanSerializer)
from leaderboards import service as leaderboards_service
from users import service as users_service
from channels import service as channels_service
from shows import service as shows_service
from utilities.api import APIObject
class LeaderboardEntryAPIObject(APIObject):
field_list = ['id',
'user_id',
'points',
'wins']
def __init__(self, leaderboard_entry, **kwargs):
super(LeaderboardEntryAPIObject, self).__init__(leaderboard_entry, **kwargs)
self.show = leaderboard_entry.show_id
self.channel_name = leaderboard_entry.channel.name
self.medals = leaderboards_service.fetch_medal_ids_by_leaderboard_entry(leaderboard_entry.id)
# If a user is attached to the entry
if leaderboard_entry.user_id:
# Get their username
user_profile = users_service.fetch_user_profile(leaderboard_entry.user_id)
self.username = user_profile.safe_username
# Get the number of suggestions submitted
self.suggestions = shows_service.fetch_suggestions(user_id=leaderboard_entry.user_id,
show_id=leaderboard_entry.show_id,
count=True)
class LeaderboardEntrySpanAPIObject(APIObject):
def __init__(self, leaderboard_span_entry, **kwargs):
super(LeaderboardEntrySpanAPIObject, self).__init__(leaderboard_span_entry, **kwargs)
self.channel_name = kwargs.get('channel_name')
self.user_id = leaderboard_span_entry['user_id']
user_profile = users_service.fetch_user_profile(self.user_id)
self.username = user_profile.safe_username
self.points = leaderboard_span_entry['points']
self.show_wins = leaderboard_span_entry['show_wins']
self.suggestion_wins = leaderboard_span_entry['suggestion_wins']
class LeaderboardAPIObject(APIObject):
field_list = ['id',
'points',
'suggestion_wins',
'show_wins']
def __init__(self, channel_user, **kwargs):
super(LeaderboardAPIObject, self).__init__(channel_user, **kwargs)
self.channel_name = kwargs.get('channel_name')
user_profile = users_service.fetch_user_profile(channel_user.user_id)
self.user_id = channel_user.user_id
self.username = user_profile.safe_username
class LeaderboardEntryViewSet(viewsets.ViewSet):
"""
API endpoint that returns leaderboard entries
"""
def list(self, request):
kwargs = {}
user_id = self.request.query_params.get('user_id')
channel_id = self.request.query_params.get('channel_id')
show_id = self.request.query_params.get('show_id')
limit = self.request.query_params.get('limit')
# Pagination
page = int(self.request.query_params.get('page', 1))
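        # e.g. assuming LEADERBOARD_MAX_PER_PAGE were 10, page 3 would start at offset 20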
offset = LEADERBOARD_MAX_PER_PAGE * (page - 1)
order_by_show_date = self.request.query_params.get('order_by_show_date')
if user_id:
kwargs['user'] = user_id
if channel_id:
kwargs['channel'] = channel_id
if show_id:
kwargs['show'] = show_id
# Make sure we exclude any entries that don't have users attached
queryset = LeaderboardEntry.objects.filter(**kwargs).exclude(user=None)
# If we are ordering by when the show happened
if order_by_show_date is not None:
queryset = queryset.order_by('-show_date')
# Order by suggestion wins, then points for a show or span
if show_id:
queryset = queryset.order_by('-wins', '-points')
# If there is a limit to the results returned
if limit:
queryset = queryset[:int(limit)]
# Start from the page offset
try:
queryset = queryset[offset:offset+LEADERBOARD_MAX_PER_PAGE]
except IndexError:
try:
queryset = queryset[offset:]
except IndexError:
pass
leaderboard_entry_list = [LeaderboardEntryAPIObject(item) for item in queryset]
serializer = LeaderboardEntrySerializer(leaderboard_entry_list, many=True)
return Response(serializer.data)
class LeaderboardEntrySpanViewSet(viewsets.ViewSet):
"""
API endpoint that returns leaderboard entries by span dates
"""
def list(self, request):
kwargs = {}
kwargs['channel'] = self.request.query_params.get('channel_id')
channel = channels_service.channel_or_404(kwargs['channel'], channel_id=True)
start = self.request.query_params.get('start')
end = self.request.query_params.get('end')
# Convert start and end to datetimes
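        # the query params are expected as YYYYMMDD strings, e.g. start=20200101&end=20200131 (illustrative values)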
start_time = datetime.datetime.strptime(start, "%Y%m%d")
end_time = datetime.datetime.strptime(end, "%Y%m%d")
# Add them to the queryset params
kwargs['show_date__gte'] = start_time
kwargs['show_date__lte'] = end_time
# Make sure we exclude any entries that don't have users attached
queryset = LeaderboardEntry.objects.filter(**kwargs).exclude(user=None)
# Aggregate all the leaderboard entries by the user
leaderboard_aggregate = leaderboards_service.aggregate_leaderboard_entries_by_user(
queryset)
api_kwargs = {'channel_name': channel.name}
# Make an api object out of the user aggregates
leaderboard_entry_list = [LeaderboardEntrySpanAPIObject(item, **api_kwargs) for item in leaderboard_aggregate]
serializer = LeaderboardEntrySpanSerializer(leaderboard_entry_list, many=True)
return Response(serializer.data)
class LeaderboardViewSet(viewsets.ViewSet):
"""
API endpoint for show, channel, or combined leaderboards
"""
def list(self, request):
channel_id = self.request.query_params.get('channel_id')
page = int(self.request.query_params.get('page', 1))
offset = LEADERBOARD_MAX_PER_PAGE * (page - 1)
queryset = channels_service.fetch_channel_users(channel_id,
leaderboard_sort=True)
# Start from the page offset
try:
queryset = queryset[offset:offset+LEADERBOARD_MAX_PER_PAGE]
except IndexError:
try:
queryset = queryset[offset:]
except IndexError:
pass
leaderboard_list = [LeaderboardAPIObject(item) for item in queryset]
serializer = LeaderboardSerializer(leaderboard_list, many=True)
return Response(serializer.data)
class LeaderboardSpanViewSet(viewsets.ViewSet):
"""
API endpoint that allows leaderboard spans to be viewed
"""
def retrieve(self, request, pk=None):
leaderboard_span = leaderboards_service.leaderboard_span_or_404(pk)
serializer = LeaderboardSpanSerializer(leaderboard_span)
return Response(serializer.data)
def list(self, request):
"""
This view should return a list of all leaderboard spans
"""
queryset = LeaderboardSpan.objects.all()
channel_id = self.request.query_params.get('channel_id')
if channel_id:
queryset = queryset.filter(channel=channel_id)
serializer = LeaderboardSpanSerializer(queryset, many=True)
return Response(serializer.data)
class MedalViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows medals to be viewed
"""
model = Medal
serializer_class = MedalSerializer
queryset = Medal.objects.all()
| 42.333333
| 118
| 0.664001
| 7,415
| 0.912279
| 0
| 0
| 0
| 0
| 0
| 0
| 1,415
| 0.17409
|
185ab66623ac277ebae7a53438dfbee88f107a07
| 4,450
|
py
|
Python
|
pyaz/sql/instance_pool/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/sql/instance_pool/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/sql/instance_pool/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
from ... pyaz_utils import _call_az
def show(name, resource_group):
'''
Get the details for an instance pool.
Required Parameters:
- name -- Instance Pool Name
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
'''
return _call_az("az sql instance-pool show", locals())
def list(resource_group=None):
'''
List available instance pools.
Optional Parameters:
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
'''
return _call_az("az sql instance-pool list", locals())
def update(name, resource_group, add=None, force_string=None, remove=None, set=None, tags=None):
'''
Update an instance pool.
Required Parameters:
- name -- Instance Pool Name
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
Optional Parameters:
- add -- Add an object to a list of objects by specifying a path and key value pairs. Example: --add property.listProperty <key=value, string or JSON string>
- force_string -- When using 'set' or 'add', preserve string literals instead of attempting to convert to JSON.
- remove -- Remove a property or an element from a list. Example: --remove property.list <indexToRemove> OR --remove propertyToRemove
- set -- Update an object by specifying a property path and value to set. Example: --set property1.property2=<value>
- tags -- space-separated tags: key[=value] [key[=value] ...]. Use '' to clear existing tags.
'''
return _call_az("az sql instance-pool update", locals())
def delete(name, resource_group, no_wait=None, yes=None):
'''
Delete an instance pool.
Required Parameters:
- name -- Instance Pool Name
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
Optional Parameters:
- no_wait -- Do not wait for the long-running operation to finish.
- yes -- Do not prompt for confirmation.
'''
return _call_az("az sql instance-pool delete", locals())
def create(capacity, family, location, name, resource_group, subnet, tier, license_type=None, no_wait=None, tags=None, vnet_name=None):
'''
Create an instance pool.
Required Parameters:
- capacity -- Capacity of the instance pool in vcores.
- family -- The compute generation component of the sku. Allowed value: Gen5
- location -- Location. Values from: `az account list-locations`. You can configure the default location using `az configure --defaults location=<location>`.
- name -- Instance Pool Name
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
- subnet -- Name or ID of the subnet that allows access to an Instance Pool. If subnet name is provided, --vnet-name must be provided.
- tier -- The edition component of the sku. Allowed value: GeneralPurpose.
Optional Parameters:
- license_type -- The license type to apply for this instance pool.
- no_wait -- Do not wait for the long-running operation to finish.
- tags -- space-separated tags: key[=value] [key[=value] ...]. Use '' to clear existing tags.
- vnet_name -- The virtual network name
'''
return _call_az("az sql instance-pool create", locals())
def wait(name, resource_group, created=None, custom=None, deleted=None, exists=None, interval=None, timeout=None, updated=None):
'''
Wait for an instance pool to reach a desired state.
Required Parameters:
- name -- Instance Pool Name
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
Optional Parameters:
- created -- wait until created with 'provisioningState' at 'Succeeded'
- custom -- Wait until the condition satisfies a custom JMESPath query. E.g. provisioningState!='InProgress', instanceView.statuses[?code=='PowerState/running']
- deleted -- wait until deleted
- exists -- wait until the resource exists
- interval -- polling interval in seconds
- timeout -- maximum wait in seconds
- updated -- wait until updated with provisioningState at 'Succeeded'
'''
return _call_az("az sql instance-pool wait", locals())
| 45.408163
| 164
| 0.702472
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,697
| 0.830787
|
185b8c2212dd3b144fbc0efeca4d07970b4b5805
| 316
|
py
|
Python
|
exercicios/ex090.py
|
Siqueira-Vinicius/Python
|
bd1f7e2bcdfd5481724d32db387f51636bb4ad60
|
[
"MIT"
] | null | null | null |
exercicios/ex090.py
|
Siqueira-Vinicius/Python
|
bd1f7e2bcdfd5481724d32db387f51636bb4ad60
|
[
"MIT"
] | null | null | null |
exercicios/ex090.py
|
Siqueira-Vinicius/Python
|
bd1f7e2bcdfd5481724d32db387f51636bb4ad60
|
[
"MIT"
] | null | null | null |
aluno = {}
aluno['nome'] = str(input('Digite o nome do aluno: '))
aluno['media'] = float(input('Digite a média desse aluno: '))
if aluno['media'] >= 5:
aluno['situação'] = '\033[32mAprovado\033[m'
else:
aluno['situação'] = '\033[31mReprovado\033[m'
for k, v in aluno.items():
print(f'{k} do aluno é {v}')
| 35.111111
| 61
| 0.617089
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 172
| 0.534161
|
185c355337e2e9938d29808ca0f7b31c79694a3f
| 813
|
py
|
Python
|
cntr_div_train_test_images.py
|
globalgood-ag/treecover
|
ecab0ac2cef622b5f72054d5a234237a34c0bd4d
|
[
"MIT"
] | null | null | null |
cntr_div_train_test_images.py
|
globalgood-ag/treecover
|
ecab0ac2cef622b5f72054d5a234237a34c0bd4d
|
[
"MIT"
] | null | null | null |
cntr_div_train_test_images.py
|
globalgood-ag/treecover
|
ecab0ac2cef622b5f72054d5a234237a34c0bd4d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 6 10:57:41 2019
Creates train and test splits at the IMAGE LEVEL to prep for thumbnail extraction in countr_cnn_1
@author: smcguire
"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
## read dataframe of unique images with annotation info
df_unique = pd.read_pickle('./df_unique.pkl')
# create df_test from every 4th image
df_test = df_unique[df_unique.index % 4 == 0]
# create df_train_val from every image not in df_test
df_train_val = df_unique[df_unique.index % 4 != 0]
# reset indexes
df_test = df_test.reset_index(drop=True)
df_train_val = df_train_val.reset_index(drop=True)
# pickle dataframes
df_test.to_pickle('./df_test.pkl')
df_train_val.to_pickle('./df_train_val.pkl')
| 26.225806
| 98
| 0.710947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 427
| 0.525215
|
185c491ee371d020cd3b4bc449367e92f4f7af90
| 1,144
|
py
|
Python
|
SUAVE/SUAVE-2.5.0/trunk/SUAVE/Attributes/Propellants/Aviation_Gasoline.py
|
Vinicius-Tanigawa/Undergraduate-Research-Project
|
e92372f07882484b127d7affe305eeec2238b8a9
|
[
"MIT"
] | null | null | null |
SUAVE/SUAVE-2.5.0/trunk/SUAVE/Attributes/Propellants/Aviation_Gasoline.py
|
Vinicius-Tanigawa/Undergraduate-Research-Project
|
e92372f07882484b127d7affe305eeec2238b8a9
|
[
"MIT"
] | null | null | null |
SUAVE/SUAVE-2.5.0/trunk/SUAVE/Attributes/Propellants/Aviation_Gasoline.py
|
Vinicius-Tanigawa/Undergraduate-Research-Project
|
e92372f07882484b127d7affe305eeec2238b8a9
|
[
"MIT"
] | null | null | null |
## @ingroup Attributes-Propellants
# Aviation_Gasoline.py
#
# Created: Unk 2013, SUAVE TEAM
# Modified: Apr 2015, SUAVE TEAM
# ----------------------------------------------------------------------
# Imports
# ----------------------------------------------------------------------
from .Propellant import Propellant
# ----------------------------------------------------------------------
# Aviation_Gasoline Propellant Class
# ----------------------------------------------------------------------
## @ingroup Attributes-Propellants
class Aviation_Gasoline(Propellant):
"""Contains density and specific energy values for this propellant
Assumptions:
None
Source:
None
"""
def __defaults__(self):
"""This sets the default values.
Assumptions:
None
Source:
Values commonly available
Inputs:
None
Outputs:
None
Properties Used:
None
"""
self.tag='Aviation Gasoline'
self.density = 721.0 # kg/m^3
self.specific_energy = 43.71e6 # J/kg
| 23.346939
| 72
| 0.436189
| 593
| 0.518357
| 0
| 0
| 0
| 0
| 0
| 0
| 885
| 0.773601
|
185eea51530d25c06bcb22494c22d6c4640df3ce
| 4,108
|
py
|
Python
|
write_grok/write_grok.py
|
namedyangfan/Python_practice
|
7f7394d82bb5afc13b039eec286b9485a775ae39
|
[
"MIT"
] | null | null | null |
write_grok/write_grok.py
|
namedyangfan/Python_practice
|
7f7394d82bb5afc13b039eec286b9485a775ae39
|
[
"MIT"
] | null | null | null |
write_grok/write_grok.py
|
namedyangfan/Python_practice
|
7f7394d82bb5afc13b039eec286b9485a775ae39
|
[
"MIT"
] | null | null | null |
import os, glob, shutil
class Write_grok():
def __init__(self, grok_name, grok_directory):
''' modify a given grok file'''
self.grok_directory = grok_directory
self.grok_path = os.path.join(grok_directory,grok_name)
if not os.path.exists(self.grok_path):
print("File not found: {0}".format(self.grok_path))
return None
self.backup_copy = self.grok_path + '.preunsat_original'
if not os.path.exists(self.backup_copy):
shutil.copy(self.grok_path, self.backup_copy)
shutil.move(self.grok_path, self.grok_path + '.backup')
def search_path(self, file_name, ldebug=False):
''' search for file name in the grok folder'''
filename= glob.glob(os.path.join(self.grok_directory,'**',file_name), recursive=True)
if not filename: raise IOError(file_name)
if len(filename)>1: print('Warning: more than one match was found. {}'.format(filename))
if ldebug: print (filename)
file_rela = (os.path.relpath(filename[0], self.grok_directory))
if ldebug: print (file_rela)
return(file_rela)
def add_gw_wells(self, inc_file_name, overwrite=False):
'''add include file below the ref_line'''
ref_line = 'Data Output'
# get the relative path of the include file
inc_file_path = self.search_path(inc_file_name)
with open(self.backup_copy) as fhand, open(self.grok_path, 'w') as fcopy:
line = fhand.readline()
while line:
if ref_line in line:
# locate where command line need to be inserted
output_flag=True
while output_flag:
if inc_file_name in line:
print('file {} is already included'.format(inc_file_name))
output_flag = False
elif 'Simulation Control Parameters' in line:
output_flag = False
                            fcopy.write('include .\\{} \n'.format(inc_file_path))
fcopy.write(line)
line=fhand.readline()
fcopy.write(line)
line=fhand.readline()
if overwrite:
shutil.copy(self.grok_path, self.backup_copy)
def add_target_output_times(self,out_times,target_times=None,overwrite=False):
with open(self.backup_copy) as fhand, open(self.grok_path, 'w') as fcopy:
line = fhand.readline()
while line:
if line.strip().lower().startswith('output times'):
o_time_flag = True
fcopy.write(line)
for t in out_times:
fcopy.write('{0} \n'.format(t))
print('{0} \n'.format(t))
while o_time_flag:
if line.strip().lower().startswith('end'):
o_time_flag = False
fcopy.write(line)
line=fhand.readline()
fcopy.write(line)
line=fhand.readline()
if overwrite:
shutil.copy(self.grok_path, self.backup_copy)
with open(self.backup_copy) as fhand, open(self.grok_path, 'w') as fcopy:
line = fhand.readline()
while line:
if target_times:
if line.strip().lower().startswith('target times'):
tar_time_flag = True
fcopy.write(line)
for t in target_times: fcopy.write('{0} \n'.format(t))
while tar_time_flag:
if line.strip().lower().startswith('end'):
tar_time_flag = False
fcopy.write(line)
line=fhand.readline()
fcopy.write(line)
line=fhand.readline()
if overwrite:
shutil.copy(self.grok_path, self.backup_copy)
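if __name__ == "__main__":
    # Illustrative usage sketch; the file and directory names below are hypothetical placeholders.
    wg = Write_grok('model.grok', '/path/to/grok/model')
    wg.add_gw_wells('gw_wells.inc', overwrite=False)
    wg.add_target_output_times([86400, 172800], target_times=[86400], overwrite=False)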
| 45.644444
| 96
| 0.529211
| 4,083
| 0.993914
| 0
| 0
| 0
| 0
| 0
| 0
| 467
| 0.113681
|
185f0bca3ed3085aa387bfdbe9104d5218249f4a
| 5,752
|
py
|
Python
|
src/tfi/publish.py
|
ajbouh/tfi
|
6e89e8c8f1ca3b285c788cc6b802fc44f9001290
|
[
"MIT"
] | 160
|
2017-09-13T00:32:05.000Z
|
2018-05-21T18:17:32.000Z
|
src/tfi/publish.py
|
tesserai/tfi
|
6e89e8c8f1ca3b285c788cc6b802fc44f9001290
|
[
"MIT"
] | 6
|
2017-09-14T17:54:21.000Z
|
2018-01-27T19:31:18.000Z
|
src/tfi/publish.py
|
ajbouh/tfi
|
6e89e8c8f1ca3b285c788cc6b802fc44f9001290
|
[
"MIT"
] | 11
|
2017-09-13T00:37:08.000Z
|
2018-03-05T08:03:34.000Z
|
import decimal
import hashlib
import json
import requests
import tempfile
import uuid
import os
from tqdm import tqdm
from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor
def sha256_for_file(f, buf_size=65536):
pos = f.tell()
dgst = hashlib.sha256()
while True:
data = f.read(buf_size)
if not data:
break
dgst.update(data)
size = f.tell() - pos
f.seek(pos)
return size, dgst.hexdigest()
namespace = "default"
fission_url = os.environ["FISSION_URL"]
def post(rel_url, data):
response = requests.post(
"%s%s" % (fission_url, rel_url),
data=json.dumps(data),
headers={"Content-Type": "application/json"})
# print("POST", rel_url)
# print(response, response.text)
if response.status_code in [404, 409]:
return response.status_code, None
if response.status_code == 500:
raise Exception(response.text)
return response.status_code, response.json()
def get(rel_url, params=None):
response = requests.get(
"%s%s" % (fission_url, rel_url),
params=params)
if response.status_code == 404:
return response.status_code, None
if response.status_code == 500:
raise Exception(response.text)
return response.status_code, response.json()
def format_bytes(count):
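    # e.g. format_bytes(1536) -> "1.5 KiB", format_bytes(512) -> "512 B"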
label_ix = 0
labels = ["B", "KiB", "MiB", "GiB"]
while label_ix < len(labels) and count / 1024. > 1:
count = count / 1024.
label_ix += 1
count = decimal.Decimal(count)
count = count.to_integral() if count == count.to_integral() else round(count.normalize(), 2)
return "%s %s" % (count, labels[label_ix])
def lazily_define_package(environment, file):
filesize, archive_sha256 = sha256_for_file(file)
base_archive_url = "%s/proxy/storage/v1/archive" % fission_url
status_code, response = get("/v2/packages/%s" % archive_sha256)
if status_code == 200:
print("Already uploaded", flush=True)
return archive_sha256, response
progress = tqdm(
total=filesize,
desc="Uploading",
unit="B",
unit_scale=True,
unit_divisor=1024,
leave=True)
last_bytes_read = 0
def update_progress(monitor):
# Your callback function
nonlocal last_bytes_read
progress.update(monitor.bytes_read - last_bytes_read)
last_bytes_read = monitor.bytes_read
e = MultipartEncoder(fields={'uploadfile': ('uploaded', file, 'text/plain')})
m = MultipartEncoderMonitor(e, update_progress)
archive_response = requests.post(base_archive_url,
data=m,
headers={
"X-File-Size": str(filesize),
'Content-Type': m.content_type})
archive_id = archive_response.json()['id']
print(" done", flush=True)
archive_url = "%s?id=%s" % (base_archive_url, archive_id)
package = {
"metadata": {
"name": archive_sha256,
"namespace": namespace,
},
"spec": {
"environment": environment,
"deployment": {
"type": "url",
"url": archive_url,
"checksum": {
"type": "sha256",
"sum": archive_sha256,
},
},
},
"status": {
"buildstatus": "succeeded",
},
}
return archive_sha256, post("/v2/packages", package)[1]
def lazily_define_function(environment, f):
archive_sha256, package_ref = lazily_define_package(environment, f)
print("Registering ...", end='', flush=True)
function_name = archive_sha256[:8]
status_code, response = get("/v2/functions/%s" % function_name)
if status_code == 200:
return function_name
status_code, r = post("/v2/functions", {
"metadata": {
"name": function_name,
"namespace": namespace,
},
"spec": {
"environment": environment,
"package": {
"functionName": function_name,
"packageref": package_ref,
},
},
})
if status_code == 409 or status_code == 201:
print(" done", flush=True)
return function_name
print(" error", flush=True)
raise Exception(r.text)
def lazily_define_trigger2(function_name, http_method, host, relativeurl):
trigger_name = "%s-%s-%s" % (
host.replace('.', '-'),
relativeurl.replace(':.*', '').replace('{', '').replace('}', '').replace('/', '-'),
http_method.lower())
status_code, response = get("/v2/triggers/http/%s" % trigger_name)
if status_code == 200:
return
status_code, r = post("/v2/triggers/http", {
"metadata": {
"name": trigger_name,
"namespace": namespace,
},
"spec": {
"host": host,
"relativeurl": relativeurl,
"method": http_method,
"functionref": {
"Type": "name",
"Name": function_name,
},
},
})
if status_code == 409 or status_code == 201:
return
raise Exception(r.text)
def publish(environment_name, f):
environment = {
"namespace": namespace,
"name": environment_name,
}
function_name = lazily_define_function(environment, f)
host = "%s.tfi.gcp.tesserai.com" % function_name
lazily_define_trigger2(function_name, "POST", host, "/{path-info:.*}")
lazily_define_trigger2(function_name, "GET", host, "/{path-info:.*}")
lazily_define_trigger2(function_name, "GET", host, "/")
return "http://%s" % host
| 30.433862
| 96
| 0.577712
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 905
| 0.157337
|
185f488bf8a799cdaf7e16f96a249d8d9b0d63bc
| 1,824
|
py
|
Python
|
skeema/intermediate/compiler/class_builder.py
|
HeadHaus/Skeema
|
fc0faf13afad2c95b8943eaa3bfc2cc23b7de003
|
[
"MIT"
] | null | null | null |
skeema/intermediate/compiler/class_builder.py
|
HeadHaus/Skeema
|
fc0faf13afad2c95b8943eaa3bfc2cc23b7de003
|
[
"MIT"
] | null | null | null |
skeema/intermediate/compiler/class_builder.py
|
HeadHaus/Skeema
|
fc0faf13afad2c95b8943eaa3bfc2cc23b7de003
|
[
"MIT"
] | null | null | null |
from __future__ import annotations
import sys
from skeema.intermediate.compiler.parser import Parser
from skeema import ModelMeta
from skeema import util
def private(name):
return f'_{name}'
class ClassBuilder:
"""
ClassBuilder
"""
@staticmethod
def set_class_module(klass, module_name: str):
klass.__module__ = module_name
module = sys.modules[module_name]
module.__dict__[klass.__name__] = klass
@staticmethod
def create_class(class_name: str, base_classes: [str], parameters: [dict], data_members: [dict]):
module_name = 'skeema'
# Populate a dictionary of property accessors
cls_dict = dict()
# Parsing for json
def parse(cls, json_str: str):
return Parser.parse(cls, json_str)
cls_dict['parse'] = classmethod(parse)
def decorate(annotation: str, array: bool) -> str:
if array:
return f'[{annotation}]'
else:
return annotation
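        # e.g. decorate("Address", True) -> "[Address]"; decorate("Address", False) -> "Address" (illustrative class name)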
parameter_annotation_dict = {
name: decorate(annotation, array) for name, annotation, array in
((parameter['name'], parameter['class'], parameter['array']) for parameter in parameters)
}
data_member_dict = {
name: decorate(annotation, array) for name, annotation, array in
((data_member['name'], data_member['class'], data_member['array']) for data_member in data_members)
}
cls = ModelMeta(
class_name,
tuple(util.class_lookup(module_name, base_class) for base_class in base_classes),
cls_dict,
parameters=parameter_annotation_dict,
data_members=data_member_dict
)
ClassBuilder.set_class_module(cls, module_name)
return cls
| 28.5
| 111
| 0.627193
| 1,622
| 0.889254
| 0
| 0
| 1,558
| 0.854167
| 0
| 0
| 173
| 0.094846
|
185f6fd90f53269fc456d4b79fc344aa07fad28a
| 1,064
|
py
|
Python
|
problems/csp/single/LabeledDice.py
|
xcsp3team/pycsp3
|
a11bc370e34cd3fe37faeae9a5df935fcbd7770d
|
[
"MIT"
] | 28
|
2019-12-14T09:25:52.000Z
|
2022-03-24T08:15:13.000Z
|
problems/csp/single/LabeledDice.py
|
xcsp3team/pycsp3
|
a11bc370e34cd3fe37faeae9a5df935fcbd7770d
|
[
"MIT"
] | 7
|
2020-04-15T11:02:07.000Z
|
2022-01-20T12:48:54.000Z
|
problems/csp/single/LabeledDice.py
|
xcsp3team/pycsp3
|
a11bc370e34cd3fe37faeae9a5df935fcbd7770d
|
[
"MIT"
] | 3
|
2020-04-15T08:23:45.000Z
|
2021-12-07T14:02:28.000Z
|
"""
From http://jimorlin.wordpress.com/2009/02/17/colored-letters-labeled-dice-a-logic-puzzle/
There are 12 words as follows: buoy, cave, celt, flub, fork, hemp, judy, junk, limn, quip, swag, visa.
There are 24 different letters that appear in the 12 words.
The question is: can one assign the 24 letters to 4 different cubes so that the four letters of each word appear on different cubes.
There is one letter from each word on each cube.
The puzzle was created by Humphrey Dudley.
Execution:
python3 LabeledDice.py
"""
from pycsp3 import *
words = ["buoy", "cave", "celt", "flub", "fork", "hemp", "judy", "junk", "limn", "quip", "swag", "visa"]
# x[i] is the cube where the ith letter of the alphabet is put
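# (only letters that actually occur in one of the words get a domain 1..4; the rest get None, i.e. no variable)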
x = VarArray(size=26, dom=lambda i: range(1, 5) if i in alphabet_positions("".join(words)) else None)
satisfy(
# the four letters of each word appear on different cubes
[AllDifferent(x[i] for i in alphabet_positions(w)) for w in words],
# each cube is assigned 6 letters
Cardinality(x, occurrences={i: 6 for i in range(1, 5)})
)
| 39.407407
| 133
| 0.710526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 750
| 0.704887
|
1860d4a4ba12e96e49b6739a4f21bf910d68cc1a
| 4,220
|
py
|
Python
|
lib/JumpScale/tools/cuisine/solutions/CuisineCockpit.py
|
Jumpscale/jumpscale_core8
|
f80ac9b1ab99b833ee7adb17700dcf4ef35f3734
|
[
"Apache-2.0"
] | 8
|
2016-04-14T14:04:57.000Z
|
2020-06-09T00:24:34.000Z
|
lib/JumpScale/tools/cuisine/solutions/CuisineCockpit.py
|
Jumpscale/jumpscale_core8
|
f80ac9b1ab99b833ee7adb17700dcf4ef35f3734
|
[
"Apache-2.0"
] | 418
|
2016-01-25T10:30:00.000Z
|
2021-09-08T12:29:13.000Z
|
lib/JumpScale/tools/cuisine/solutions/CuisineCockpit.py
|
Jumpscale/jumpscale_core8
|
f80ac9b1ab99b833ee7adb17700dcf4ef35f3734
|
[
"Apache-2.0"
] | 9
|
2016-04-21T07:21:17.000Z
|
2022-01-24T10:35:54.000Z
|
from JumpScale import j
base = j.tools.cuisine._getBaseClass()
class CuisineCockpit(base):
def __init__(self, executor, cuisine):
self._executor = executor
self._cuisine = cuisine
def configure(self):
C = """
[mode]
prod = false
[oauth]
client_secret = "IqFcD2TYC7WrPpoH2Oi6YTpN102Hj-sdfsdfsdfsdf-sMBcydIacGI"
redirect_uri = "https://f0919f6e.ngrok.io/oauth/callback"
client_id = "OrgTest"
organization = "OrgTest"
jwt_key = ""
[api]
[api.ays]
port = 5000
host = "0.0.0.0"
debug = true
"""
self._cuisine.core.dir_ensure("$cfgDir/cockpit_api")
j.tools.cuisine.local.core.file_write("$cfgDir/cockpit_api/config.toml", C)
def install(self, start=True, branch="master"):
self.install_deps()
self._cuisine.development.git.pullRepo('https://github.com/Jumpscale/jscockpit', branch=branch)
dir_paths = self._cuisine.core.dir_paths
self._cuisine.core.dir_ensure('%s/ays_api/' % dir_paths['appDir'])
self._cuisine.core.file_link('%s/github/jumpscale/jscockpit/api_server' %
dir_paths['codeDir'], '%s/ays_api/api_server' % dir_paths['appDir'])
self._cuisine.core.file_link('%s/github/jumpscale/jscockpit/ays_api' %
dir_paths['codeDir'], '%s/ays_api/ays_api' % dir_paths['appDir'])
self.configure()
if start:
self.start()
def install_all_in_one(self, start=True, branch="master", reset=True, ip="localhost"):
"""
        This will install all the components of the cockpit in one command
        (mongodb, portal, ays_api, ays_daemon).
        Make sure that you don't have uncommitted code in any code repository, because this method will discard it!
"""
# install mongodb, required for portal
self._cuisine.apps.mongodb.build(install=False, start=start, reset=reset)
self._cuisine.apps.mongodb.install(start=start)
# install portal
self._cuisine.apps.portal.install(start=False, installdeps=True, branch=branch)
# add link from portal to API
# 1- copy the nav to the portalbase and then edit it
content = self._cuisine.core.file_read('$codeDir/github/jumpscale/jumpscale_portal8/apps/portalbase/AYS81/.space/nav.wiki')
# 2- fix the ays api endpoint.
if 'REST API:/api' not in content:
content += 'AYS API:http://{ip}:5000/apidocs/index.html?raml=api.raml'.format(ip=ip)
self._cuisine.core.file_write('$appDir/portals/main/base/AYS81/.space/nav.wiki', content=content)
self._cuisine.apps.portal.configure(production=False)
self._cuisine.apps.portal.start()
# install REST API AND ays daemon
self.install(start=start, branch=branch)
# configure base URI for api-console
raml = self._cuisine.core.file_read('$appDir/ays_api/ays_api/apidocs/api.raml')
raml = raml.replace('baseUri: https://localhost:5000', "baseUri: http://{ip}:5000".format(ip=ip))
self._cuisine.core.file_write('$appDir/ays_api/ays_api/apidocs/api.raml', raml)
if start:
# start API and daemon
self.start()
def start(self, name='main'):
# start AYS REST API
cmd = 'jspython api_server'
dir_paths = self._cuisine.core.dir_paths
self._cuisine.processmanager.ensure(cmd=cmd, name='cockpit_%s' % name, path='%s/ays_api' % dir_paths['appDir'])
# start daemon
cmd = 'ays start'
self._cuisine.processmanager.ensure(cmd=cmd, name='cockpit_daemon_%s' % name)
def stop(self, name='main'):
self._cuisine.processmanager.stop('cockpit_%s' % name,)
self._cuisine.processmanager.stop('cockpit_daemon_%s' % name,)
def install_deps(self):
self._cuisine.package.mdupdate()
self._cuisine.package.install('libssl-dev')
deps = """
cryptography
python-jose
wtforms_json
flask_wtf
python-telegram-bot
"""
self._cuisine.development.pip.multiInstall(deps, upgrade=True)
| 39.074074
| 131
| 0.632701
| 4,153
| 0.984123
| 0
| 0
| 0
| 0
| 0
| 0
| 1,859
| 0.440521
|
186207c724d6262ec17c4da5e5a9cf096b45d2c3
| 7,103
|
py
|
Python
|
examples/finetune-bert/02-BERT-sst2-DeepSpeed.py
|
ceshine/pytorch-helper-bot
|
32c88d41fffa41fe35ba21c278eae83d914f3847
|
[
"MIT"
] | 10
|
2019-12-13T23:30:31.000Z
|
2021-12-08T14:21:47.000Z
|
examples/finetune-bert/02-BERT-sst2-DeepSpeed.py
|
ceshine/pytorch-helper-bot
|
32c88d41fffa41fe35ba21c278eae83d914f3847
|
[
"MIT"
] | null | null | null |
examples/finetune-bert/02-BERT-sst2-DeepSpeed.py
|
ceshine/pytorch-helper-bot
|
32c88d41fffa41fe35ba21c278eae83d914f3847
|
[
"MIT"
] | 1
|
2021-11-07T19:00:03.000Z
|
2021-11-07T19:00:03.000Z
|
""" Finetuning BERT using DeepSpeed's ZeRO-Offload
"""
import json
import dataclasses
from pathlib import Path
from functools import partial
import nlp
import torch
import typer
import deepspeed
import numpy as np
from transformers import BertTokenizerFast
from transformers import BertForSequenceClassification
from sklearn.model_selection import train_test_split
from pytorch_helper_bot import (
DeepSpeedBot, MovingAverageStatsTrackerCallback, CheckpointCallback,
LearningRateSchedulerCallback, MultiStageScheduler, Top1Accuracy,
LinearLR, CosineAnnealingScheduler
)
CACHE_DIR = Path("cache/")
CACHE_DIR.mkdir(exist_ok=True)
APP = typer.Typer()
class SST2Dataset(torch.utils.data.Dataset):
def __init__(self, entries_dict):
super().__init__()
self.entries_dict = entries_dict
def __len__(self):
return len(self.entries_dict["label"])
def __getitem__(self, idx):
return (
self.entries_dict["input_ids"][idx],
self.entries_dict["attention_mask"][idx],
self.entries_dict["token_type_ids"][idx],
self.entries_dict["label"][idx]
)
@dataclasses.dataclass
class SST2Bot(DeepSpeedBot):
log_dir = CACHE_DIR / "logs"
def __post_init__(self):
super().__post_init__()
self.loss_format = "%.6f"
@staticmethod
def extract_prediction(output):
return output[0]
class Object(object):
pass
def convert_to_features(tokenizer, example_batch):
# Tokenize contexts and questions (as pairs of inputs)
encodings = tokenizer.batch_encode_plus(
example_batch['sentence'], padding='max_length', max_length=64, truncation=True)
return encodings
@APP.command(
context_settings={"allow_extra_args": True, "ignore_unknown_options": True}
)
def main(arch="bert-base-uncased", config="gpu.json"):
# Reference:
#
# * https://github.com/huggingface/nlp/blob/master/notebooks/Overview.ipynb
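    # A minimal illustrative shape for the JSON config file; only "train_batch_size" is read
    # directly by this script, the remaining keys are assumed, commonly used DeepSpeed options:
    #   {"train_batch_size": 32, "fp16": {"enabled": true}, "zero_optimization": {"stage": 2}}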
with open(config) as fin:
config_params = json.load(fin)
dataset = nlp.load_dataset('glue', "sst2")
print(set([x['label'] for x in dataset["train"]]))
tokenizer = BertTokenizerFast.from_pretrained(arch)
# Format our dataset to outputs torch.Tensor to train a pytorch model
columns = ['input_ids', 'token_type_ids', 'attention_mask', "label"]
for subset in ("train", "validation"):
dataset[subset] = dataset[subset].map(
partial(convert_to_features, tokenizer), batched=True)
dataset[subset].set_format(type='torch', columns=columns)
print(tokenizer.decode(dataset['train'][6]["input_ids"].numpy()))
print(dataset['train'][0]["attention_mask"])
valid_idx, test_idx = train_test_split(
list(range(len(dataset["validation"]))), test_size=0.5, random_state=42)
train_dict = {
"input_ids": dataset['train']["input_ids"],
"attention_mask": dataset['train']["attention_mask"],
"token_type_ids": dataset['train']["token_type_ids"],
"label": dataset['train']["label"]
}
valid_dict = {
"input_ids": dataset['validation']["input_ids"][valid_idx],
"attention_mask": dataset['validation']["attention_mask"][valid_idx],
"token_type_ids": dataset['validation']["token_type_ids"][valid_idx],
"label": dataset['validation']["label"][valid_idx]
}
test_dict = {
"input_ids": dataset['validation']["input_ids"][test_idx],
"attention_mask": dataset['validation']["attention_mask"][test_idx],
"token_type_ids": dataset['validation']["token_type_ids"][test_idx],
"label": dataset['validation']["label"][test_idx]
}
# Instantiate a PyTorch Dataloader around our dataset
train_loader = torch.utils.data.DataLoader(
SST2Dataset(train_dict), batch_size=config_params["train_batch_size"], shuffle=True)
valid_loader = torch.utils.data.DataLoader(
SST2Dataset(valid_dict), batch_size=config_params["train_batch_size"], drop_last=False)
test_loader = torch.utils.data.DataLoader(
SST2Dataset(test_dict), batch_size=config_params["train_batch_size"], drop_last=False)
model = BertForSequenceClassification.from_pretrained(arch)
# torch.nn.init.kaiming_normal_(model.classifier.weight)
# torch.nn.init.constant_(model.classifier.bias, 0)
# torch.nn.init.kaiming_normal_(model.bert.pooler.dense.weight)
# torch.nn.init.constant_(model.bert.pooler.dense.bias, 0);
args = Object()
setattr(args, "local_rank", 0)
setattr(args, "deepspeed_config", config)
if config[:3] == "cpu":
if "optimizer" in config_params:
model, optimizer, _, _ = deepspeed.initialize(
args=args,
model=model,
model_parameters=model.parameters()
)
else:
from deepspeed.ops.adam import DeepSpeedCPUAdam
optimizer = DeepSpeedCPUAdam(model.parameters(), lr=2e-5)
model, optimizer, _, _ = deepspeed.initialize(
args=args,
model=model,
model_parameters=model.parameters(),
optimizer=optimizer
)
else:
model, optimizer, _, _ = deepspeed.initialize(
args=args,
model=model,
model_parameters=model.parameters()
# optimizer=optimizer
)
total_steps = len(train_loader) * 3
# checkpoints = CheckpointCallback(
# keep_n_checkpoints=1,
# checkpoint_dir=CACHE_DIR / "model_cache/",
# monitor_metric="accuracy"
# )
lr_durations = [
int(total_steps*0.2),
int(np.ceil(total_steps*0.8))
]
break_points = [0] + list(np.cumsum(lr_durations))[:-1]
callbacks = [
MovingAverageStatsTrackerCallback(
avg_window=len(train_loader) // 8,
log_interval=len(train_loader) // 10
),
LearningRateSchedulerCallback(
MultiStageScheduler(
[
LinearLR(optimizer, 0.01, lr_durations[0]),
CosineAnnealingScheduler(optimizer, lr_durations[1])
],
start_at_epochs=break_points
)
),
# checkpoints
]
bot = SST2Bot(
model=model,
train_loader=train_loader,
valid_loader=valid_loader,
clip_grad=10.,
optimizer=optimizer, echo=True,
criterion=torch.nn.CrossEntropyLoss(),
callbacks=callbacks,
pbar=False,
use_tensorboard=False,
# use_amp=APEX_AVAILABLE,
metrics=(Top1Accuracy(),)
)
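    # The bot wraps the DeepSpeed engine with a cross-entropy objective, gradient
    # clipping at 10, and top-1 accuracy as the validation metric.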
print(total_steps)
bot.train(
total_steps=total_steps,
checkpoint_interval=len(train_loader) // 2
)
# bot.load_model(checkpoints.best_performers[0][1])
# checkpoints.remove_checkpoints(keep=0)
# TARGET_DIR = CACHE_DIR / "sst2_bert_uncased"
# TARGET_DIR.mkdir(exist_ok=True)
# bot.model.save_pretrained(TARGET_DIR)
bot.eval(valid_loader)
bot.eval(test_loader)
if __name__ == "__main__":
APP()
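# A plausible invocation, assuming APP is a Typer application (as the
# auto-generated --arch/--config options suggest) and this script is saved as
# e.g. sst2_deepspeed.py (hypothetical filename):
#   python sst2_deepspeed.py --arch bert-base-uncased --config gpu.json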
| 32.582569
| 95
| 0.651696
| 749
| 0.105448
| 0
| 0
| 5,609
| 0.789666
| 0
| 0
| 1,822
| 0.256511
|
18620b84b0e67aed4d98fbdd7983e2e41f67ec2d
| 2,118
|
py
|
Python
|
examples/images/autoencoder.py
|
jjpalacio/tflearn
|
e69bc9f341a1d2a90080bb24a686e0e2cf724d63
|
[
"MIT"
] | 10,882
|
2016-03-31T16:03:11.000Z
|
2022-03-26T03:00:27.000Z
|
examples/images/autoencoder.py
|
ciderpark/tflearn
|
5c23566de6e614a36252a5828d107d001a0d0482
|
[
"MIT"
] | 1,079
|
2016-04-02T06:14:16.000Z
|
2022-02-27T10:04:47.000Z
|
examples/images/autoencoder.py
|
ciderpark/tflearn
|
5c23566de6e614a36252a5828d107d001a0d0482
|
[
"MIT"
] | 3,014
|
2016-03-31T16:03:26.000Z
|
2022-03-30T20:36:53.000Z
|
# -*- coding: utf-8 -*-
""" Auto Encoder Example.
Using an auto encoder on MNIST handwritten digits.
References:
Y. LeCun, L. Bottou, Y. Bengio, and P. Haffner. "Gradient-based
learning applied to document recognition." Proceedings of the IEEE,
86(11):2278-2324, November 1998.
Links:
[MNIST Dataset] http://yann.lecun.com/exdb/mnist/
"""
from __future__ import division, print_function, absolute_import
import numpy as np
import matplotlib.pyplot as plt
import tflearn
# Data loading and preprocessing
import tflearn.datasets.mnist as mnist
X, Y, testX, testY = mnist.load_data(one_hot=True)
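# X/testX are flattened 28x28 images (784 floats scaled to [0, 1]); the one-hot
# labels Y/testY are loaded but never used, since the autoencoder reconstructs
# its own input.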
# Building the encoder
encoder = tflearn.input_data(shape=[None, 784])
encoder = tflearn.fully_connected(encoder, 256)
encoder = tflearn.fully_connected(encoder, 64)
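# fully_connected defaults to a linear activation, so the encoder is a purely
# linear 784 -> 256 -> 64 compression.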
# Building the decoder
decoder = tflearn.fully_connected(encoder, 256)
decoder = tflearn.fully_connected(decoder, 784, activation='sigmoid')
# Regression, with mean square error
net = tflearn.regression(decoder, optimizer='adam', learning_rate=0.001,
loss='mean_square', metric=None)
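# Mean-square reconstruction loss; metric=None disables the default accuracy
# metric, which would be meaningless for an autoencoder.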
# Training the auto encoder
model = tflearn.DNN(net, tensorboard_verbose=0)
model.fit(X, X, n_epoch=20, validation_set=(testX, testX),
run_id="auto_encoder", batch_size=256)
# Encoding X[0] for test
print("\nTest encoding of X[0]:")
# New model, re-using the same session, for weight sharing
encoding_model = tflearn.DNN(encoder, session=model.session)
print(encoding_model.predict([X[0]]))
# Testing the image reconstruction on new data (test set)
print("\nVisualizing results after being encoded and decoded:")
testX = tflearn.data_utils.shuffle(testX)[0]
# Applying encoding and decoding over the test set
encode_decode = model.predict(testX)
# Compare original images with their reconstructions
f, a = plt.subplots(2, 10, figsize=(10, 2))
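# Each flattened 784-vector is replicated across three channels and reshaped to
# 28x28x3 so imshow renders it as a grayscale-looking RGB image.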
for i in range(10):
temp = [[ii, ii, ii] for ii in list(testX[i])]
a[0][i].imshow(np.reshape(temp, (28, 28, 3)))
temp = [[ii, ii, ii] for ii in list(encode_decode[i])]
a[1][i].imshow(np.reshape(temp, (28, 28, 3)))
f.show()
plt.draw()
plt.waitforbuttonpress()
| 32.584615
| 72
| 0.72474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 854
| 0.403211
|