hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e9f2022d8957402e8b079abe1da08f467caf510b | 2,431 | py | Python | lmnet/lmnet/datasets/cifar100_distribute.py | toohsk/blueoil | 596922caa939db9c5ecbac3286fbf6f703865ee6 | [
"Apache-2.0"
] | null | null | null | lmnet/lmnet/datasets/cifar100_distribute.py | toohsk/blueoil | 596922caa939db9c5ecbac3286fbf6f703865ee6 | [
"Apache-2.0"
] | 1 | 2018-11-21T07:06:17.000Z | 2018-11-21T07:06:17.000Z | lmnet/lmnet/datasets/cifar100_distribute.py | toohsk/blueoil | 596922caa939db9c5ecbac3286fbf6f703865ee6 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2018 The Blueoil Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import functools
import numpy as np
from lmnet.datasets.cifar100 import Cifar100
from lmnet.datasets.base import DistributionInterface
from lmnet.utils.random import shuffle
| 31.571429 | 106 | 0.626903 |
e9f22bb1ea67ab94d6fe17f1e1dc1a68f58ceef8 | 3,149 | py | Python | gridengine/functional.py | MiqG/gridengine | 457c34b16f2c43b9be985cd822f30305d68afd91 | [
"BSD-3-Clause"
] | 20 | 2015-01-31T16:52:15.000Z | 2019-03-22T20:09:50.000Z | gridengine/functional.py | MiqG/gridengine | 457c34b16f2c43b9be985cd822f30305d68afd91 | [
"BSD-3-Clause"
] | 1 | 2021-11-27T16:33:59.000Z | 2021-11-27T16:33:59.000Z | gridengine/functional.py | MiqG/gridengine | 457c34b16f2c43b9be985cd822f30305d68afd91 | [
"BSD-3-Clause"
] | 7 | 2015-10-27T16:49:52.000Z | 2021-09-22T10:16:25.000Z | import inspect
import functools
from gridengine import job, dispatch, schedulers
# ----------------------------------------------------------------------------
# Partial
# ----------------------------------------------------------------------------
def isexception(x):
"""Test whether the value is an Exception instance"""
return isinstance(x, Exception)
def isnumeric(x):
"""Test whether the value can be represented as a number"""
try:
float(x)
return True
except:
return False
def partial(f, *args, **kwargs):
"""Return a callable partially closed over the input function and arguments
partial is functionally equivalent to functools.partial, however it also
applies a variant of functools.update_wrapper, with:
__doc__ = f.__doc__
__module__ = f.__module__
__name__ = f.__name__ + string_representation_of_closed_arguments
This is useful for running functions with different parameter sets, whilst
being able to identify the variants by name
"""
g = functools.partial(f, *args, **kwargs)
g.__doc__ = f.__doc__
g.__module__ = f.__module__
g.__name__ = '_'.join([f.__name__] + [name(arg) for arg in list(args)+list(kwargs.values())])
return g
# ----------------------------------------------------------------------------
# Map
# ----------------------------------------------------------------------------
def map(f, args, scheduler=schedulers.best_available, reraise=True):
"""Perform a functional-style map operation
Apply a function f to each argument in the iterable args. This is equivalent to
y = [f(x) for x in args]
or
y = map(f, args)
except that each argument in the iterable is assigned to a separate Job
and scheduled to run via the scheduler.
The default scheduler is a schedulers.ProcessScheduler instance. To run map
on a grid engine, simply pass a schedulers.GridEngineScheduler instance.
Args:
f (func): A picklable function
args (iterable): An iterable (list) of arguments to f
Keyword Args:
scheduler: A schedulers.Scheduler instance or class. By default, the
system tries to return the best_available() scheduler. Use this if you
want to set a scheduler specifically.
reraise (bool): Reraise exceptions that occur in any of the jobs. Set this
to False if you want to salvage any good results.
Returns:
List of return values equivalent to the builtin map function
Raises:
Any exception that would occur when applying [f(x) for x in args]
"""
# setup the dispatcher
dispatcher = dispatch.JobDispatcher(scheduler)
# allocate the jobs
jobs = [job.Job(target=f, args=(arg,)) for arg in args]
# run the jobs (guaranteed to return in the same order)
dispatcher.dispatch(jobs)
results = dispatcher.join()
# check for exceptions
if reraise:
for exception in filter(isexception, results):
# an error occurred during execution of one of the jobs, reraise it
raise exception
return results
| 32.463918 | 97 | 0.64719 |
e9f24ec99f076ba98908603ffa1d50f5644d6aa7 | 31,441 | py | Python | Bio/Prosite/__init__.py | nuin/biopython | 045d57b08799ef52c64bd4fa807629b8a7e9715a | [
"PostgreSQL"
] | 2 | 2016-05-09T04:20:06.000Z | 2017-03-07T10:25:53.000Z | Bio/Prosite/__init__.py | nuin/biopython | 045d57b08799ef52c64bd4fa807629b8a7e9715a | [
"PostgreSQL"
] | null | null | null | Bio/Prosite/__init__.py | nuin/biopython | 045d57b08799ef52c64bd4fa807629b8a7e9715a | [
"PostgreSQL"
] | 1 | 2019-08-19T22:05:14.000Z | 2019-08-19T22:05:14.000Z | # Copyright 1999 by Jeffrey Chang. All rights reserved.
# Copyright 2000 by Jeffrey Chang. All rights reserved.
# Revisions Copyright 2007 by Peter Cock. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""
This module provides code to work with the prosite dat file from
Prosite.
http://www.expasy.ch/prosite/
Tested with:
Release 15.0, July 1998
Release 16.0, July 1999
Release 17.0, Dec 2001
Release 19.0, Mar 2006
Functions:
parse Iterates over entries in a Prosite file.
scan_sequence_expasy Scan a sequence for occurrences of Prosite patterns.
index_file Index a Prosite file for a Dictionary.
_extract_record Extract Prosite data from a web page.
_extract_pattern_hits Extract Prosite patterns from a web page.
Classes:
Record Holds Prosite data.
PatternHit Holds data from a hit against a Prosite pattern.
Dictionary Accesses a Prosite file using a dictionary interface.
RecordParser Parses a Prosite record into a Record object.
Iterator Iterates over entries in a Prosite file; DEPRECATED.
_Scanner Scans Prosite-formatted data.
_RecordConsumer Consumes Prosite data to a Record object.
"""
from types import *
import re
import sgmllib
from Bio import File
from Bio import Index
from Bio.ParserSupport import *
# There is probably a cleaner way to write the read/parse functions
# if we don't use the "parser = RecordParser(); parser.parse(handle)"
# approach. Leaving that for the next revision of Bio.Prosite.
def scan_sequence_expasy(seq=None, id=None, exclude_frequent=None):
"""scan_sequence_expasy(seq=None, id=None, exclude_frequent=None) ->
list of PatternHit's
Search a sequence for occurrences of Prosite patterns. You can
specify either a sequence in seq or a SwissProt/trEMBL ID or accession
in id. Only one of those should be given. If exclude_frequent
is true, then the patterns with the high probability of occurring
will be excluded.
"""
from Bio import ExPASy
if (seq and id) or not (seq or id):
raise ValueError("Please specify either a sequence or an id")
handle = ExPASy.scanprosite1(seq, id, exclude_frequent)
return _extract_pattern_hits(handle)
def _extract_pattern_hits(handle):
"""_extract_pattern_hits(handle) -> list of PatternHit's
Extract hits from a web page. Raises a ValueError if there
was an error in the query.
"""
p = parser()
p.feed(handle.read())
if p.broken_message:
raise ValueError(p.broken_message)
return p.hits
def index_file(filename, indexname, rec2key=None):
"""index_file(filename, indexname, rec2key=None)
Index a Prosite file. filename is the name of the file.
indexname is the name of the dictionary. rec2key is an
optional callback that takes a Record and generates a unique key
(e.g. the accession number) for the record. If not specified,
the id name will be used.
"""
import os
if not os.path.exists(filename):
raise ValueError("%s does not exist" % filename)
index = Index.Index(indexname, truncate=1)
index[Dictionary._Dictionary__filename_key] = filename
handle = open(filename)
records = parse(handle)
end = 0L
for record in records:
start = end
end = long(handle.tell())
length = end - start
if rec2key is not None:
key = rec2key(record)
else:
key = record.name
if not key:
raise KeyError("empty key was produced")
elif key in index:
raise KeyError("duplicate key %s found" % key)
index[key] = start, length
# This function can be deprecated once Bio.Prosite.ExPASyDictionary
# is removed.
def _extract_record(handle):
"""_extract_record(handle) -> str
Extract PROSITE data from a web page. Raises a ValueError if no
data was found in the web page.
"""
# All the data appears between tags:
# <pre width = 80>ID NIR_SIR; PATTERN.
# </PRE>
p = parser()
p.feed(handle.read())
if not p.data:
raise ValueError("No data found in web page.")
return "".join(p.data)
| 35.326966 | 256 | 0.580675 |
e9f29e0f95ccd2b1945aff6967594472289887d8 | 21,120 | py | Python | build/lib/mrgaze/pupilometry.py | jmtyszka/mrgaze | 29217eab9ea431686fd200f08bddd6615c45d0d3 | [
"MIT"
] | 18 | 2016-01-22T02:47:45.000Z | 2021-09-23T18:37:51.000Z | build/lib/mrgaze/pupilometry.py | jmtyszka/mrgaze | 29217eab9ea431686fd200f08bddd6615c45d0d3 | [
"MIT"
] | 7 | 2015-05-26T21:33:16.000Z | 2020-05-26T11:47:54.000Z | build/lib/mrgaze/pupilometry.py | jmtyszka/mrgaze | 29217eab9ea431686fd200f08bddd6615c45d0d3 | [
"MIT"
] | 7 | 2016-02-06T00:17:52.000Z | 2021-02-22T03:51:55.000Z | #!/usr/bin/env python
#
# Video pupilometry functions
# - takes calibration and gaze video filenames as input
# - controls calibration and gaze estimation workflow
#
# USAGE : mrgaze.py <Calibration Video> <Gaze Video>
#
# AUTHOR : Mike Tyszka
# PLACE : Caltech
# DATES : 2014-05-07 JMT From scratch
# 2016-02-22 JMT Update print for python3. Remove unused vars, imports
#
# This file is part of mrgaze.
#
# mrgaze is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# mrgaze is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with mrgaze. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2014 California Institute of Technology.
import os
import time
import getpass
import cv2
from mrgaze import media, utils, config, calibrate, report, engine
def LivePupilometry(data_dir, live_eyetracking=False):
"""
Perform pupil boundary ellipse fitting on camera feed
Arguments
----
data_dir : string
Root data directory path.
cfg :
Analysis configuration parameters
Returns
----
pupils : boolean
Completion status (True = successful)
"""
# If user did not provide a root data directory, we use HOME/mrgaze
if data_dir == '':
data_dir = os.path.join(os.getenv("HOME"), 'mrgaze')
# Full video file paths
hostname = os.uname()[1]
username = getpass.getuser()
ss_dir = os.path.join(data_dir, "%s_%s_%s" % (hostname, username, int(time.time())))
else:
ss_dir = data_dir
# Load Configuration
cfg = config.LoadConfig(data_dir)
cfg_ts = time.time()
# Output flags
verbose = cfg.getboolean('OUTPUT', 'verbose')
overwrite = cfg.getboolean('OUTPUT', 'overwrite')
# Video information
# vin_ext = cfg.get('VIDEO', 'inputextension')
vout_ext = cfg.get('VIDEO' ,'outputextension')
# vin_fps = cfg.getfloat('VIDEO', 'inputfps')
# Flag for freeze frame
freeze_frame = False
vid_dir = os.path.join(ss_dir, 'videos')
res_dir = os.path.join(ss_dir, 'results')
vout_path = os.path.join(vid_dir, 'gaze' + vout_ext)
cal_vout_path = os.path.join(vid_dir, 'cal' + vout_ext)
# if we do live eye-tracking, we read in what would be the output of the live eye-tracking
if not live_eyetracking:
vin_path = vout_path
cal_vin_path = cal_vout_path
else:
vin_path = 0
# Raw and filtered pupilometry CSV file paths
cal_pupils_csv = os.path.join(res_dir, 'cal_pupils.csv')
pupils_csv = os.path.join(res_dir, 'gaze_pupils.csv')
# Check that output directory exists
if not os.path.isdir(res_dir):
os.makedirs(res_dir)
print('* %s does not exist - creating' % res_dir)
if not os.path.isdir(vid_dir):
os.makedirs(vid_dir)
print('* %s does not exist - creating' % vid_dir)
# Set up the LBP cascade classifier
LBP_path = os.path.join(utils._package_root(), 'Cascade/cascade.xml')
print(' Loading LBP cascade')
cascade = cv2.CascadeClassifier(LBP_path)
if cascade.empty():
print('* LBP cascade is empty - mrgaze installation problem')
return False
# Check for output CSV existance and overwrite flag
if os.path.isfile(pupils_csv):
print('+ Pupilometry output already exists - checking overwrite flag')
if overwrite:
print('+ Overwrite allowed - continuing')
else:
print('+ Overwrite forbidden - skipping pupilometry')
return True
#
# Camera Input
#
print(' Opening camera stream')
try:
if not live_eyetracking:
vin_stream = cv2.VideoCapture(vin_path)
cal_vin_stream = cv2.VideoCapture(cal_vin_path)
else:
vin_stream = cv2.VideoCapture(vin_path)
cal_vin_stream = vin_stream
except:
print('* Problem opening input video stream - skipping pupilometry')
return False
while not vin_stream.isOpened():
print("Waiting for Camera.")
key = utils._waitKey(500)
if key == 'ESC':
print("User Abort.")
break
if not vin_stream.isOpened():
print('* Video input stream not opened - skipping pupilometry')
return False
if not cal_vin_stream.isOpened():
print('* Calibration video input stream not opened - skipping pupilometry')
return False
# Video FPS from metadata
# TODO: may not work with Quicktime videos
# fps = vin_stream.get(cv2.cv.CV_CAP_PROP_FPS)
# fps = cfg.getfloat('CAMERA', 'fps')
# Desired time between frames in milliseconds
# time_bw_frames = 1000.0 / fps
vin_stream.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 320)
vin_stream.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 240)
vin_stream.set(cv2.cv.CV_CAP_PROP_FPS, 30)
# Total number of frames in video file
# nf = vin_stream.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT)
# print(' Video has %d frames at %0.3f fps' % (nf, vin_fps))
# Read first preprocessed video frame from stream
keep_going, frame_orig = media.LoadVideoFrame(vin_stream, cfg)
if keep_going:
frame, art_power = media.Preproc(frame_orig, cfg)
else:
art_power = 0.0
# Get size of preprocessed frame for output video setup
nx, ny = frame.shape[1], frame.shape[0]
# By default we start in non-calibration mode
# switch between gaze/cal modes by pressing key "c"
do_cal = False
while keep_going:
if do_cal == False:
#
# Output video
#
if live_eyetracking:
print(' Opening output video stream')
# Output video codec (MP4V - poor quality compression)
# TODO : Find a better multiplatform codec
fourcc = cv2.cv.CV_FOURCC('m','p','4','v')
try:
vout_stream = cv2.VideoWriter(vout_path, fourcc, 30, (nx, ny), True)
except:
print('* Problem creating output video stream - skipping pupilometry')
return False
if not vout_stream.isOpened():
print('* Output video not opened - skipping pupilometry')
return False
# Open pupilometry CSV file to write
try:
pupils_stream = open(pupils_csv, 'w')
except:
print('* Problem opening pupilometry CSV file - skipping pupilometry')
return False
#
# Main Video Frame Loop
#
# Print verbose column headers
if verbose:
print('')
print(' %10s %10s %10s %10s %10s' % (
'Time (s)', 'Area', 'Blink', 'Artifact', 'FPS'))
# Init frame counter
fc = 0
# Init processing timer
t0 = time.time()
t = t0
while keep_going:
# check whether config file has been updated, reload of that is the case
if fc % 30 == 0:
cfg_mtime = os.path.getmtime(os.path.join(data_dir, 'mrgaze.cfg'))
if cfg_mtime > cfg_ts:
print("Updating Configuration")
cfg = config.LoadConfig(data_dir)
cfg_ts = time.time()
# Current video time in seconds
t = time.time()
# -------------------------------------
# Pass this frame to pupilometry engine
# -------------------------------------
# b4_engine = time.time()
pupil_ellipse, roi_rect, blink, glint, frame_rgb = engine.PupilometryEngine(frame, cascade, cfg)
# print "Enging took %s ms" % (time.time() - b4_engine)
# Derive pupilometry parameters
px, py, area = engine.PupilometryPars(pupil_ellipse, glint, cfg)
# Write data line to pupilometry CSV file
pupils_stream.write(
'%0.4f,%0.3f,%0.3f,%0.3f,%d,%0.3f,\n' %
(t, area, px, py, blink, art_power)
)
if live_eyetracking:
# Write output video frame
vout_stream.write(frame_orig)
# Read next frame, unless we want to figure out the correct settings for this frame
if not freeze_frame:
keep_going, frame_orig = media.LoadVideoFrame(vin_stream, cfg)
if keep_going:
frame, art_power = media.Preproc(frame_orig, cfg)
else:
art_power = 0.0
# Increment frame counter
fc = fc + 1
# Report processing FPS
if verbose:
if fc % 100 == 0:
pfps = fc / (time.time() - t0)
print(' %10.1f %10.1f %10d %10.3f %10.1f' % (
t, area, blink, art_power, pfps))
t0 = time.time()
fc = 0
# wait whether user pressed esc to exit the experiment
key = utils._waitKey(1)
if key == 'ESC':
# Clean up
if live_eyetracking:
vout_stream.release()
pupils_stream.close()
keep_going = False
elif key == 'c':
# Clean up
if live_eyetracking:
vout_stream.release()
pupils_stream.close()
do_cal = True
print("Starting calibration.")
break
elif key == 'f':
freeze_frame = not freeze_frame
else: # do calibration
#
# Output video
#
if live_eyetracking:
print(' Opening output video stream')
# Output video codec (MP4V - poor quality compression)
# TODO : Find a better multiplatform codec
fourcc = cv2.cv.CV_FOURCC('m','p','4','v')
try:
cal_vout_stream = cv2.VideoWriter(cal_vout_path, fourcc, 30, (nx, ny), True)
except:
print('* Problem creating output video stream - skipping pupilometry')
return False
if not cal_vout_stream.isOpened():
print('* Output video not opened - skipping pupilometry')
return False
# Open pupilometry CSV file to write
try:
cal_pupils_stream = open(cal_pupils_csv, 'w')
except:
print('* Problem opening pupilometry CSV file - skipping pupilometry')
return False
#
# Main Video Frame Loop
#
# Print verbose column headers
if verbose:
print('')
print(' %10s %10s %10s %10s %10s' % (
'Time (s)', 'Area', 'Blink', 'Artifact', 'FPS'))
# Init frame counter
fc = 0
# Init processing timer
t0 = time.time()
t = t0
while keep_going:
# check whether config file has been updated, reload of that is the case
if fc % 30 == 0:
cfg_mtime = os.path.getmtime(os.path.join(data_dir, 'mrgaze.cfg'))
if cfg_mtime > cfg_ts:
print("Updating Configuration")
cfg = config.LoadConfig(data_dir)
cfg_ts = time.time()
# Current video time in seconds
t = time.time()
# -------------------------------------
# Pass this frame to pupilometry engine
# -------------------------------------
# b4_engine = time.time()
pupil_ellipse, roi_rect, blink, glint, frame_rgb = engine.PupilometryEngine(frame, cascade, cfg)
# print "Engine took %s ms" % (time.time() - b4_engine)
# Derive pupilometry parameters
px, py, area = engine.PupilometryPars(pupil_ellipse, glint, cfg)
# Write data line to pupilometry CSV file
cal_pupils_stream.write(
'%0.4f,%0.3f,%0.3f,%0.3f,%d,%0.3f,\n' %
(t, area, px, py, blink, art_power)
)
# Write output video frame
if live_eyetracking:
cal_vout_stream.write(frame_orig)
# Read next frame (if available)
# if verbose:
# b4_frame = time.time()
keep_going, frame_orig = media.LoadVideoFrame(vin_stream, cfg)
if keep_going:
frame, art_power = media.Preproc(frame_orig, cfg)
else:
art_power = 0.0
#if verbose:
# print "Time to load frame: %s" % (time.time() - b4_frame)
# Increment frame counter
fc = fc + 1
# Report processing FPS
if verbose:
if fc % 100 == 0:
pfps = fc / (time.time() - t0)
print(' %10.1f %10.1f %10d %10.3f %10.1f' % (
t, area, blink, art_power, pfps))
t0 = time.time()
fc = 0
# wait whether user pressed esc to exit the experiment
key = utils._waitKey(1)
if key == 'ESC':
keep_going = False
# Clean up
if live_eyetracking:
cal_vout_stream.release()
cal_pupils_stream.close()
elif key == 'v' or not keep_going:
do_cal = False
print("Stopping calibration.")
# Clean up
if live_eyetracking:
cal_vout_stream.release()
cal_pupils_stream.close()
break
print(' Create calibration model')
C, central_fix = calibrate.AutoCalibrate(res_dir, cfg)
if not C.any():
print('* Empty calibration matrix detected - skipping')
try:
print(' Calibrate pupilometry')
calibrate.ApplyCalibration(ss_dir, C, central_fix, cfg)
except UnboundLocalError:
print(' No calibration data found')
cv2.destroyAllWindows()
vin_stream.release()
print('')
print(' Generate Report')
print(' ---------------')
report.WriteReport(ss_dir, cfg)
# Return pupilometry timeseries
return t, px, py, area, blink, art_power
def VideoPupilometry(data_dir, subj_sess, v_stub, cfg):
"""
Perform pupil boundary ellipse fitting on entire video
Arguments
----
data_dir : string
Root data directory path.
subj_sess : string
Subject/Session name used for subdirectory within data_dir
v_stub : string
Video filename stub, eg 'cal' or 'gaze'
cfg :
Analysis configuration parameters
Returns
----
pupils : boolean
Completion status (True = successful)
"""
# Output flags
verbose = cfg.getboolean('OUTPUT', 'verbose')
overwrite = cfg.getboolean('OUTPUT','overwrite')
# Video information
vin_ext = cfg.get('VIDEO', 'inputextension')
vout_ext = cfg.get('VIDEO' ,'outputextension')
vin_fps = cfg.getfloat('VIDEO', 'inputfps')
# Full video file paths
ss_dir = os.path.join(data_dir, subj_sess)
vid_dir = os.path.join(ss_dir, 'videos')
res_dir = os.path.join(ss_dir, 'results')
vin_path = os.path.join(vid_dir, v_stub + vin_ext)
vout_path = os.path.join(res_dir, v_stub + '_pupils' + vout_ext)
# Raw and filtered pupilometry CSV file paths
pupils_csv = os.path.join(res_dir, v_stub + '_pupils.csv')
# Check that input video file exists
if not os.path.isfile(vin_path):
print('* %s does not exist - returning' % vin_path)
return False
# Set up the LBP cascade classifier
LBP_path = os.path.join(utils._package_root(), 'Cascade/cascade.xml')
print(' Loading LBP cascade')
cascade = cv2.CascadeClassifier(LBP_path)
if cascade.empty():
print('* LBP cascade is empty - mrgaze installation problem')
return False
# Check for output CSV existance and overwrite flag
if os.path.isfile(pupils_csv):
print('+ Pupilometry output already exists - checking overwrite flag')
if overwrite:
print('+ Overwrite allowed - continuing')
else:
print('+ Overwrite forbidden - skipping pupilometry')
return True
#
# Input video
#
print(' Opening input video stream')
try:
vin_stream = cv2.VideoCapture(vin_path)
except:
print('* Problem opening input video stream - skipping pupilometry')
return False
if not vin_stream.isOpened():
print('* Video input stream not opened - skipping pupilometry')
return False
# Video FPS from metadata
# TODO: may not work with Quicktime videos
# fps = vin_stream.get(cv2.cv.CV_CAP_PROP_FPS)
# Total number of frames in video file
nf = vin_stream.get(cv2.CAP_PROP_FRAME_COUNT)
print(' Video has %d frames at %0.3f fps' % (nf, vin_fps))
# Read first preprocessed video frame from stream
keep_going, frame_orig = media.LoadVideoFrame(vin_stream, cfg)
if keep_going:
frame, art_power = media.Preproc(frame_orig, cfg)
else:
art_power = 0.0
# Get size of preprocessed frame for output video setup
nx, ny = frame.shape[1], frame.shape[0]
#
# Output video
#
print(' Opening output video stream')
# Output video codec (MP4V - poor quality compression)
fourcc = cv2.VideoWriter_fourcc('m','p','4','v')
try:
vout_stream = cv2.VideoWriter(vout_path, fourcc, 30, (nx, ny), True)
except:
print('* Problem creating output video stream - skipping pupilometry')
return False
if not vout_stream.isOpened():
print('* Output video not opened - skipping pupilometry')
return False
# Open pupilometry CSV file to write
try:
pupils_stream = open(pupils_csv, 'w')
except:
print('* Problem opening pupilometry CSV file - skipping pupilometry')
return False
#
# Main Video Frame Loop
#
# Print verbose column headers
if verbose:
print('')
print(' %10s %10s %10s %10s %10s %10s' % (
'Time (s)', '% Done', 'Area', 'Blink', 'Artifact', 'FPS'))
# Init frame counter
fc = 0
# Init processing timer
t0 = time.time()
while keep_going:
# Current video time in seconds
t = fc / vin_fps
# -------------------------------------
# Pass this frame to pupilometry engine
# -------------------------------------
pupil_ellipse, roi_rect, blink, glint, frame_rgb = engine.PupilometryEngine(frame, cascade, cfg)
# Derive pupilometry parameters
px, py, area = engine.PupilometryPars(pupil_ellipse, glint, cfg)
# Write data line to pupilometry CSV file
pupils_stream.write(
'%0.3f,%0.3f,%0.3f,%0.3f,%d,%0.3f,\n' %
(t, area, px, py, blink, art_power)
)
# Write output video frame
vout_stream.write(frame_rgb)
# Read next frame (if available)
keep_going, frame_orig = media.LoadVideoFrame(vin_stream, cfg)
if keep_going:
frame, art_power = media.Preproc(frame_orig, cfg)
else:
art_power = 0.0
# Increment frame counter
fc = fc + 1
# Report processing FPS
if verbose:
if fc % 100 == 0:
perc_done = fc / float(nf) * 100.0
pfps = fc / (time.time() - t0)
print(' %10.1f %10.1f %10.1f %10d %10.3f %10.1f' % (
t, perc_done, area, blink, art_power, pfps))
# Clean up
cv2.destroyAllWindows()
vin_stream.release()
vout_stream.release()
pupils_stream.close()
# Return pupilometry timeseries
return t, px, py, area, blink, art_power
| 33.051643 | 112 | 0.550284 |
e9f462dbb1b4b480ae079d20eb179ca06f53f704 | 1,927 | py | Python | aws_glue/combine_csv_files/combine_csv_files.py | veben/aws_python_snippets | 39fa3cda8290fb097a5b9e8168829b62ab1af41e | [
"MIT"
] | 1 | 2020-09-08T09:22:25.000Z | 2020-09-08T09:22:25.000Z | aws_glue/combine_csv_files/combine_csv_files.py | veben/aws_python_snippets | 39fa3cda8290fb097a5b9e8168829b62ab1af41e | [
"MIT"
] | null | null | null | aws_glue/combine_csv_files/combine_csv_files.py | veben/aws_python_snippets | 39fa3cda8290fb097a5b9e8168829b62ab1af41e | [
"MIT"
] | 1 | 2020-09-08T09:26:58.000Z | 2020-09-08T09:26:58.000Z | from lib_combination.aws_client.aws_client import get_session_for_profile, run_job, get_job, create_job
from lib_combination.aws_client.aws_client import upload_file_to_s3_bucket
from lib_combination.conf_utils.conf_utils import get_job_name, get_profile_name, get_bucket_name, get_database_name
from lib_combination.file_utils.file_utils import get_local_script_folder_path
if __name__ == "__main__":
main()
| 41.891304 | 116 | 0.70576 |
e9f4dc1139fdd0b79eb9f6a5670984a538e5b297 | 1,062 | py | Python | p850h/rectangle_area.py | l33tdaima/l33tdaima | 0a7a9573dc6b79e22dcb54357493ebaaf5e0aa90 | [
"MIT"
] | 1 | 2020-02-20T12:04:46.000Z | 2020-02-20T12:04:46.000Z | p850h/rectangle_area.py | l33tdaima/l33tdaima | 0a7a9573dc6b79e22dcb54357493ebaaf5e0aa90 | [
"MIT"
] | null | null | null | p850h/rectangle_area.py | l33tdaima/l33tdaima | 0a7a9573dc6b79e22dcb54357493ebaaf5e0aa90 | [
"MIT"
] | null | null | null | from typing import List
# TESTS
for rectangles, expected in [
([[0, 0, 2, 2], [1, 0, 2, 3], [1, 0, 3, 1]], 6),
([[0, 0, 1000000000, 1000000000]], 49),
]:
sol = Solution()
actual = sol.rectangleArea(rectangles)
print("Total area covered by rectangles", rectangles, "->", actual)
assert actual == expected
| 32.181818 | 88 | 0.508475 |
e9f668b9ca060060d4949971143a55425febaef0 | 1,323 | py | Python | hack/examples/python/sentiments/sentiments.py | margarytaSadovets/nuclio | 37bf21900d543a6340edf9374475b104ea963459 | [
"Apache-2.0"
] | 1 | 2018-01-02T18:48:27.000Z | 2018-01-02T18:48:27.000Z | hack/examples/python/sentiments/sentiments.py | ilaykav/nuclio | 23a65b9f5c9e00afccbfbc62cd2a4dd2cc8a75dd | [
"Apache-2.0"
] | null | null | null | hack/examples/python/sentiments/sentiments.py | ilaykav/nuclio | 23a65b9f5c9e00afccbfbc62cd2a4dd2cc8a75dd | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 The Nuclio Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# uses vader lib (will be installed automatically via build commands) to identify sentiments in the body string
# return score result in the form of: {'neg': 0.0, 'neu': 0.323, 'pos': 0.677, 'compound': 0.6369}
#
# @nuclio.configure
#
# function.yaml:
# apiVersion: "nuclio.io/v1beta1"
# kind: "Function"
# spec:
# runtime: "python"
#
# build:
# commands:
# - "pip install requests vaderSentiment"
#
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
| 30.767442 | 111 | 0.721844 |
e9f7cea197f517cec2cbd809a57d3dcde8bc48fa | 1,636 | py | Python | crawler/src/map_client/kakao_map_client.py | HVHO/holiday-pharmacy | e641dca93ed0cc0e3ffa28f54a1da6a86c1cfe22 | [
"MIT"
] | null | null | null | crawler/src/map_client/kakao_map_client.py | HVHO/holiday-pharmacy | e641dca93ed0cc0e3ffa28f54a1da6a86c1cfe22 | [
"MIT"
] | null | null | null | crawler/src/map_client/kakao_map_client.py | HVHO/holiday-pharmacy | e641dca93ed0cc0e3ffa28f54a1da6a86c1cfe22 | [
"MIT"
] | null | null | null | import requests
| 33.387755 | 94 | 0.590465 |
e9f9eaf439178a9738f5c3bed675e41c46a5be64 | 404 | py | Python | main.py | Alenx58/python-mysql-elasticsearch | a5deb16dcfce6d37c9c4a076f7ec6ff84ca967c3 | [
"MIT"
] | 1 | 2021-04-27T06:32:18.000Z | 2021-04-27T06:32:18.000Z | main.py | Alenx58/python-mysql-elasticsearch | a5deb16dcfce6d37c9c4a076f7ec6ff84ca967c3 | [
"MIT"
] | null | null | null | main.py | Alenx58/python-mysql-elasticsearch | a5deb16dcfce6d37c9c4a076f7ec6ff84ca967c3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Author : Alenx.Hai <alenx.hai@gmail.com>
# created time: 2020/12/21-10:49
import asyncio
from src.mysql_elastic import MySQLElasticSearch
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.wait([main()]))
| 21.263158 | 51 | 0.707921 |
e9fbb4ffd34a72b02bcdf9fee23d69719622bfd4 | 397 | py | Python | PythonDesafios/d107/teste.py | adaatii/Python-Curso-em-Video- | 30b37713b3685469558babb93b557b53210f010c | [
"MIT"
] | null | null | null | PythonDesafios/d107/teste.py | adaatii/Python-Curso-em-Video- | 30b37713b3685469558babb93b557b53210f010c | [
"MIT"
] | null | null | null | PythonDesafios/d107/teste.py | adaatii/Python-Curso-em-Video- | 30b37713b3685469558babb93b557b53210f010c | [
"MIT"
] | null | null | null | # Crie um mdulo chamado moeda.py que tenha as funes incorporadas aumentar(), diminuir(), dobro() e metade(). Faa
# tambm um programa que importe esse mdulo e use algumas dessas funes.
import moeda
p = float(input('Digite o preo: '))
print(f'A metade do {p} R${moeda.metade(p)}')
print(f'O dobro de {p} R${moeda.dobro(p)}')
print(f'Aumentando 10%, temos R${moeda.aumentar(p, 10)}')
| 36.090909 | 116 | 0.702771 |
e9fc3b08d76230c48ce220e58abe719b3c7d3fe9 | 3,024 | py | Python | homeassistant/components/homekit/covers.py | mfrueh/home-assistant | 5d64628b5bf4713016883282fd54de9c7d5089d0 | [
"Apache-2.0"
] | null | null | null | homeassistant/components/homekit/covers.py | mfrueh/home-assistant | 5d64628b5bf4713016883282fd54de9c7d5089d0 | [
"Apache-2.0"
] | null | null | null | homeassistant/components/homekit/covers.py | mfrueh/home-assistant | 5d64628b5bf4713016883282fd54de9c7d5089d0 | [
"Apache-2.0"
] | null | null | null | """Class to hold all cover accessories."""
import logging
from homeassistant.components.cover import ATTR_CURRENT_POSITION
from homeassistant.helpers.event import async_track_state_change
from . import TYPES
from .accessories import HomeAccessory, add_preload_service
from .const import (
SERV_WINDOW_COVERING, CHAR_CURRENT_POSITION,
CHAR_TARGET_POSITION, CHAR_POSITION_STATE)
_LOGGER = logging.getLogger(__name__)
| 36.878049 | 75 | 0.67791 |
e9fce1f0a0567c478c06135a1b26bb39e2c00202 | 5,888 | py | Python | plotter/hysplit_reader_long.py | yosukefk/plotter | 16127ee7fc3105c717e92875ee3d61477bd41533 | [
"MIT"
] | null | null | null | plotter/hysplit_reader_long.py | yosukefk/plotter | 16127ee7fc3105c717e92875ee3d61477bd41533 | [
"MIT"
] | 6 | 2021-05-25T15:51:27.000Z | 2021-08-18T20:39:41.000Z | plotter/hysplit_reader_long.py | yosukefk/plotter | 16127ee7fc3105c717e92875ee3d61477bd41533 | [
"MIT"
] | null | null | null | import pandas as pd
import numpy as np
import datetime
import pytz
from pathlib import Path
import warnings
from io import IOBase
from . import calpost_reader
calpost_cat = calpost_reader.calpost_cat
def hysplit_reader_long(f, tslice=slice(None, None), x=None, y=None, z=None,
rdx_map=None):
"""reads hysplit output file, returns dict of numpy arrays
:param FileIO f: either (1)opened hysplit output file, (2) hysplit output filename or (3) list of (1) or (2)
:param slice tslice: slice of time index
:param list x: list of x coords
:param list y: list of y coords
:return: dict, with ['v'] has data as 3d array (t, y, x)
:rtype: dict
"""
print(type(f))
if isinstance(f, IOBase):
raise ValueError('plese pass filename, not FileIO...')
# assume file name passed if 'f' is string
if isinstance(f, (str, Path)):
df = pd.read_csv(f, sep=r'\s+')
return hysplit_reader_long(df, tslice, x, y, z, rdx_map)
# list of files may have different time period and locations. So
# first they are grouped by time perod, then each chunk got read.
# then they got joined with the time stiching routine aware of
# spin-up time
if isinstance(f, list):
lines = [next(pd.read_csv(fn, sep=r'\s+', nrows=1).itertuples()) for fn in f]
# Pandas(Index=0, JDAY=268.208, YR1=19, MO1=9, DA1=25, HR1=5, MN1=0,
# YR2=19, MO2=9, DA2=25, HR2=5, MN2=1, Pol=1, Lev=1, Station=1,
# Value=0.0)
print(lines)
dtes = [datetime.datetime(_.YR1, _.MO1, _.DA1, _.HR1,
_.MN1).replace(tzinfo=pytz.utc).astimezone(pytz.timezone('Etc/GMT+6'))
for _ in lines]
df_fnames = pd.DataFrame({'fname': f, 'datetime': dtes})
df_fnames.to_csv('fnames.csv')
# group the file names by the datetime
dct_fnames = {}
for fn,dte in zip(f, dtes):
dct_fnames.setdefault(dte, []).append(fn)
file_dates = list(dct_fnames.keys())
dat = []
for dte,fnames in dct_fnames.items():
dfs = [pd.read_csv(fn, sep=r'\s+') for fn in fnames]
df = pd.concat(dfs)
dat.append( hysplit_reader_long(df, tslice, x, y, z, rdx_map) )
dat = calpost_cat(dat, use_later_files=True)
dat['ts'] = dat['ts'][tslice]
dat['v'] = dat['v'][tslice]
return dat
# now i should be getting dataframe
df = f
units = '???'
print('dt')
# extremely slow!
#df['Datetime'] = [datetime.datetime(_.YR1, _.MO1, _.DA1, _.HR1,
# _.MN1).replace(tzinfo=pytz.utc).astimezone(pytz.timezone('Etc/GMT+6'))
# for _ in df.itertuples()]
df['Datetime'] = pd.to_datetime(df[['YR1', 'MO1', 'DA1', 'HR1', 'MN1']].assign(
YR1= lambda df: df['YR1'] + 2000).rename(
columns={'YR1':'year', 'MO1':'month', 'DA1': 'day', 'HR1': 'hour', 'MN1': 'minute'}),
utc=True).dt.tz_convert('Etc/GMT+6')
# bad idea!
#df['Datetime_tup'] = [_ for _ in df[['YR1', 'MO1', 'DA1', 'HR1',
# 'MN1']].itertuples(index=False)]
df = df[['Datetime', 'Lev', 'Station', 'Value']]
#grouped = df.groupby(['Datetime', 'Lev', 'Station'])
nrec = len(df.index)
print('set_index')
df = df[['Datetime', 'Lev', 'Station', 'Value']].set_index(
['Datetime', 'Station', 'Lev'] )
print('dt')
ts = df.index.levels[0]
#xxx = pd.DataFrame(ts, columns=('year', 'month', 'day', 'hour',
# 'minute'))
#print(xxx)
#xxx = xxx.assign(year=lambda x: x['year']+2000)
#print(xxx)
#
#ts = pd.to_datetime(
# pd.DataFrame(
# ts,
# columns=('year', 'month', 'day', 'hour', 'minute')
# ).assign(
# year=lambda x: x['year']+2000
# ))
#print(ts)
print('cont')
stations = df.index.levels[1]
nz = len(df.index.levels[2])
nsta = len(df.index.levels[1])
nt = len(df.index.levels[0])
print('nt,nz,nsta,nrec=', nt, nz, nsta, nrec)
# ........ bad idea
#assert nt * nz * nsta == nrec
if not nt * nz * nsta == nrec:
print(f'expected {nt*nz*nsta} rec, got {nrec}, short by {nt*nz*nsta-nrec}')
print(' f:', f)
print(' rng:', df.index.levels[0][0], df.index.levels[0][-1])
print('unstack')
df = df.unstack().unstack()
df.columns = df.columns.droplevel()
if rdx_map:
x = rdx_map.x
y = rdx_map.y
nx = len(x)
ny = len(y)
grid = rdx_map.grid
v = df.to_numpy()
if rdx_map.coverage == 'full, c-order' and nsta==nx*ny:
v = v.reshape(nt, nz, ny, nx)
elif rdx_map.coverage == 'full, f-order' and nsta==nx*ny:
raise NotImplementedError(
'qa first! receptor def = "{}", '.format(rdx_map.coverage))
v = v.reshape(nt, nz, nx, ny)
v = np.swapaxes(v, -1, -2)
elif rdx_map.coverage in ('full, c-order', 'full, f-order', 'full, random', 'patial, random'):
rdx = np.arange(nt*nz) + 1
mymap = rdx_map.get_index(stations).to_numpy()
mymap = mymap[:, ::-1]
vv = np.empty((nt, nz, ny, nx))
vv[...] = np.nan
v = v.reshape(nt , nz, -1)
for tt,t in zip(vv, v):
for zz, z in zip(tt, t):
for ji,p in zip(mymap,z):
zz[tuple(ji)] = p
v = vv
else:
raise ValueError('rdx_map is mandatory for now')
#dct = {'v': v, 'ts': ts, 'units': units, 'df': f, 'name': None}
dct = {'v': v, 'ts': ts, 'units': units, 'name': None}
dct.update( {'x': x, 'y': y, 'grid': grid, })
del df
return dct
| 33.078652 | 112 | 0.529552 |
e9fce58b8db982ac1059efc2000a44b8a6f0d6b6 | 1,094 | py | Python | tests/UserTest/test_user_db.py | brijeshb42/flask-web | a859fb68fe0eedf5ee872767d107f95a4e6f4856 | [
"MIT"
] | 14 | 2015-02-20T18:31:33.000Z | 2020-12-23T02:33:05.000Z | tests/UserTest/test_user_db.py | brijeshb42/flask-web | a859fb68fe0eedf5ee872767d107f95a4e6f4856 | [
"MIT"
] | 2 | 2015-02-21T18:49:12.000Z | 2015-10-06T18:10:30.000Z | tests/UserTest/test_user_db.py | brijeshb42/yapper | a859fb68fe0eedf5ee872767d107f95a4e6f4856 | [
"MIT"
] | 10 | 2015-02-21T11:06:57.000Z | 2022-02-21T01:25:34.000Z | import unittest
from yapper import create_app, db
from yapper.blueprints.user.models import User, Role
| 30.388889 | 69 | 0.637112 |
e9fd5e9401ba6d04c5d4bf4d42d343bc34357a32 | 2,880 | py | Python | CIM16/IEC61970/Generation/Production/StartIgnFuelCurve.py | MaximeBaudette/PyCIM | d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14 | [
"MIT"
] | null | null | null | CIM16/IEC61970/Generation/Production/StartIgnFuelCurve.py | MaximeBaudette/PyCIM | d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14 | [
"MIT"
] | null | null | null | CIM16/IEC61970/Generation/Production/StartIgnFuelCurve.py | MaximeBaudette/PyCIM | d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14 | [
"MIT"
] | 1 | 2021-04-02T18:04:49.000Z | 2021-04-02T18:04:49.000Z | # Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM16.IEC61970.Core.Curve import Curve
| 45 | 309 | 0.720833 |
e9fe99f79d22866cd1c3d457b72379bf7128ed8c | 122,030 | py | Python | Line/beijing_aqi.py | Hansz00/pyecharts-gallery | a0a16d980e9d4f7d355c5ada938614579ee8d461 | [
"MIT"
] | 1 | 2020-08-25T07:41:10.000Z | 2020-08-25T07:41:10.000Z | Line/beijing_aqi.py | Hansz00/pyecharts-gallery | a0a16d980e9d4f7d355c5ada938614579ee8d461 | [
"MIT"
] | null | null | null | Line/beijing_aqi.py | Hansz00/pyecharts-gallery | a0a16d980e9d4f7d355c5ada938614579ee8d461 | [
"MIT"
] | 1 | 2022-03-10T09:05:44.000Z | 2022-03-10T09:05:44.000Z | import pyecharts.options as opts
from pyecharts.charts import Line
"""
Gallery pyecharts 1.1.0
: https://echarts.baidu.com/examples/editor.html?c=line-aqi
:
1dataZoom Y
"""
all_data = [
["2000-06-05", 116],
["2000-06-06", 129],
["2000-06-07", 135],
["2000-06-08", 86],
["2000-06-09", 73],
["2000-06-10", 85],
["2000-06-11", 73],
["2000-06-12", 68],
["2000-06-13", 92],
["2000-06-14", 130],
["2000-06-15", 245],
["2000-06-16", 139],
["2000-06-17", 115],
["2000-06-18", 111],
["2000-06-19", 309],
["2000-06-20", 206],
["2000-06-21", 137],
["2000-06-22", 128],
["2000-06-23", 85],
["2000-06-24", 94],
["2000-06-25", 71],
["2000-06-26", 106],
["2000-06-27", 84],
["2000-06-28", 93],
["2000-06-29", 85],
["2000-06-30", 73],
["2000-07-01", 83],
["2000-07-02", 125],
["2000-07-03", 107],
["2000-07-04", 82],
["2000-07-05", 44],
["2000-07-06", 72],
["2000-07-07", 106],
["2000-07-08", 107],
["2000-07-09", 66],
["2000-07-10", 91],
["2000-07-11", 92],
["2000-07-12", 113],
["2000-07-13", 107],
["2000-07-14", 131],
["2000-07-15", 111],
["2000-07-16", 64],
["2000-07-17", 69],
["2000-07-18", 88],
["2000-07-19", 77],
["2000-07-20", 83],
["2000-07-21", 111],
["2000-07-22", 57],
["2000-07-23", 55],
["2000-07-24", 60],
["2000-07-25", 44],
["2000-07-26", 127],
["2000-07-27", 114],
["2000-07-28", 86],
["2000-07-29", 73],
["2000-07-30", 52],
["2000-07-31", 69],
["2000-08-01", 86],
["2000-08-02", 118],
["2000-08-03", 56],
["2000-08-04", 91],
["2000-08-05", 121],
["2000-08-06", 127],
["2000-08-07", 78],
["2000-08-08", 79],
["2000-08-09", 46],
["2000-08-10", 108],
["2000-08-11", 80],
["2000-08-12", 79],
["2000-08-13", 69],
["2000-08-14", 80],
["2000-08-15", 105],
["2000-08-16", 119],
["2000-08-17", 105],
["2000-08-18", 55],
["2000-08-19", 74],
["2000-08-20", 41],
["2000-08-21", 62],
["2000-08-22", 104],
["2000-08-23", 118],
["2000-08-24", 121],
["2000-08-25", 126],
["2000-08-26", 99],
["2000-08-27", 92],
["2000-08-28", 75],
["2000-08-29", 91],
["2000-08-30", 94],
["2000-08-31", 69],
["2000-09-01", 93],
["2000-09-02", 124],
["2000-09-03", 120],
["2000-09-04", 93],
["2000-09-05", 26],
["2000-09-06", 32],
["2000-09-07", 70],
["2000-09-08", 89],
["2000-09-10", 117],
["2000-09-11", 144],
["2000-09-12", 111],
["2000-09-13", 120],
["2000-09-14", 97],
["2000-09-15", 108],
["2000-09-17", 74],
["2000-09-18", 105],
["2000-09-19", 127],
["2000-09-20", 143],
["2000-09-21", 62],
["2000-09-22", 80],
["2000-09-23", 136],
["2000-09-24", 29],
["2000-09-25", 91],
["2000-09-26", 93],
["2000-09-27", 114],
["2000-09-28", 45],
["2000-09-29", 102],
["2000-09-30", 111],
["2000-10-01", 93],
["2000-10-02", 117],
["2000-10-03", 78],
["2000-10-04", 76],
["2000-10-05", 100],
["2000-10-06", 75],
["2000-10-07", 169],
["2000-10-08", 59],
["2000-10-09", 89],
["2000-10-10", 91],
["2000-10-11", 75],
["2000-10-12", 28],
["2000-10-13", 47],
["2000-10-14", 92],
["2000-10-16", 72],
["2000-10-17", 149],
["2000-10-18", 86],
["2000-10-19", 88],
["2000-10-20", 104],
["2000-10-21", 91],
["2000-10-22", 88],
["2000-10-23", 55],
["2000-10-24", 63],
["2000-10-25", 41],
["2000-10-26", 85],
["2000-10-27", 99],
["2000-10-28", 121],
["2000-10-29", 96],
["2000-10-30", 90],
["2000-11-01", 80],
["2000-11-02", 116],
["2000-11-03", 207],
["2000-11-04", 306],
["2000-11-05", 283],
["2000-11-06", 200],
["2000-11-07", 93],
["2000-11-08", 49],
["2000-11-09", 78],
["2000-11-10", 40],
["2000-11-11", 74],
["2000-11-12", 67],
["2000-11-13", 118],
["2000-11-14", 196],
["2000-11-15", 101],
["2000-11-16", 59],
["2000-11-17", 83],
["2000-11-18", 83],
["2000-11-19", 124],
["2000-11-20", 57],
["2000-11-21", 78],
["2000-11-22", 113],
["2000-11-23", 172],
["2000-11-24", 129],
["2000-11-25", 103],
["2000-11-26", 75],
["2000-11-27", 125],
["2000-11-28", 121],
["2000-11-29", 204],
["2000-11-30", 141],
["2000-12-01", 106],
["2000-12-02", 146],
["2000-12-03", 95],
["2000-12-04", 149],
["2000-12-05", 71],
["2000-12-07", 157],
["2000-12-08", 141],
["2000-12-09", 197],
["2000-12-10", 43],
["2000-12-11", 81],
["2000-12-12", 109],
["2000-12-13", 118],
["2000-12-15", 115],
["2000-12-16", 92],
["2000-12-17", 123],
["2000-12-18", 147],
["2000-12-19", 59],
["2000-12-20", 103],
["2000-12-21", 146],
["2000-12-22", 137],
["2000-12-23", 74],
["2000-12-24", 64],
["2000-12-25", 67],
["2000-12-26", 107],
["2000-12-27", 101],
["2000-12-28", 79],
["2000-12-29", 137],
["2000-12-30", 165],
["2000-12-31", 81],
["2001-01-01", 100],
["2001-01-02", 126],
["2001-01-03", 56],
["2001-01-05", 108],
["2001-01-06", 88],
["2001-01-07", 78],
["2001-01-08", 105],
["2001-01-09", 77],
["2001-01-10", 105],
["2001-01-11", 93],
["2001-01-12", 107],
["2001-01-13", 128],
["2001-01-14", 53],
["2001-01-15", 81],
["2001-01-16", 128],
["2001-01-17", 179],
["2001-01-18", 225],
["2001-01-19", 116],
["2001-01-20", 153],
["2001-01-21", 161],
["2001-01-22", 149],
["2001-01-23", 115],
["2001-01-24", 136],
["2001-01-25", 101],
["2001-01-26", 109],
["2001-01-27", 108],
["2001-01-28", 86],
["2001-01-29", 101],
["2001-01-30", 109],
["2001-01-31", 139],
["2001-02-01", 110],
["2001-02-02", 113],
["2001-02-03", 130],
["2001-02-04", 62],
["2001-02-05", 88],
["2001-02-06", 105],
["2001-02-07", 87],
["2001-02-08", 140],
["2001-02-09", 116],
["2001-02-10", 100],
["2001-02-11", 83],
["2001-02-12", 102],
["2001-02-13", 106],
["2001-02-14", 157],
["2001-02-15", 131],
["2001-02-16", 77],
["2001-02-17", 101],
["2001-02-18", 148],
["2001-02-19", 227],
["2001-02-20", 105],
["2001-02-21", 155],
["2001-02-22", 293],
["2001-02-23", 99],
["2001-02-24", 57],
["2001-02-25", 97],
["2001-02-26", 104],
["2001-02-27", 117],
["2001-02-28", 125],
["2001-03-01", 216],
["2001-03-02", 149],
["2001-03-03", 256],
["2001-03-04", 172],
["2001-03-05", 113],
["2001-03-06", 338],
["2001-03-07", 57],
["2001-03-08", 48],
["2001-03-10", 111],
["2001-03-11", 87],
["2001-03-12", 175],
["2001-03-13", 186],
["2001-03-14", 201],
["2001-03-15", 76],
["2001-03-16", 131],
["2001-03-17", 127],
["2001-03-18", 128],
["2001-03-19", 152],
["2001-03-20", 144],
["2001-03-21", 162],
["2001-03-22", 500],
["2001-03-24", 358],
["2001-03-25", 128],
["2001-03-26", 54],
["2001-03-27", 57],
["2001-03-28", 54],
["2001-03-29", 80],
["2001-03-30", 71],
["2001-03-31", 73],
["2001-04-01", 139],
["2001-04-02", 224],
["2001-04-03", 107],
["2001-04-04", 150],
["2001-04-05", 180],
["2001-04-06", 77],
["2001-04-07", 95],
["2001-04-08", 194],
["2001-04-09", 143],
["2001-04-10", 205],
["2001-04-11", 129],
["2001-04-12", 64],
["2001-04-13", 61],
["2001-04-14", 79],
["2001-04-15", 121],
["2001-04-16", 130],
["2001-04-17", 150],
["2001-04-18", 205],
["2001-04-19", 154],
["2001-04-20", 81],
["2001-04-21", 140],
["2001-04-22", 119],
["2001-04-23", 156],
["2001-04-24", 72],
["2001-04-25", 108],
["2001-04-26", 124],
["2001-04-27", 94],
["2001-04-28", 157],
["2001-04-29", 100],
["2001-04-30", 158],
["2001-05-01", 277],
["2001-05-02", 332],
["2001-05-03", 303],
["2001-05-04", 238],
["2001-05-05", 500],
["2001-05-06", 99],
["2001-05-07", 93],
["2001-05-08", 104],
["2001-05-09", 74],
["2001-05-10", 68],
["2001-05-11", 90],
["2001-05-12", 114],
["2001-05-13", 142],
["2001-05-14", 126],
["2001-05-15", 185],
["2001-05-16", 402],
["2001-05-17", 189],
["2001-05-17", 189],
["2001-05-17", 189],
["2001-05-18", 112],
["2001-05-19", 137],
["2001-05-20", 158],
["2001-05-21", 158],
["2001-05-22", 116],
["2001-05-23", 132],
["2001-05-24", 110],
["2001-05-25", 82],
["2001-05-26", 56],
["2001-05-27", 54],
["2001-05-28", 71],
["2001-05-29", 101],
["2001-05-30", 57],
["2001-05-31", 88],
["2001-06-01", 99],
["2001-06-02", 84],
["2001-06-03", 139],
["2001-06-04", 132],
["2001-06-05", 141],
["2001-06-07", 159],
["2001-06-08", 131],
["2001-06-09", 180],
["2001-06-10", 164],
["2001-06-11", 134],
["2001-06-12", 163],
["2001-06-13", 105],
["2001-06-14", 74],
["2001-06-15", 50],
["2001-06-16", 60],
["2001-06-17", 82],
["2001-06-18", 111],
["2001-06-19", 89],
["2001-06-20", 81],
["2001-06-21", 76],
["2001-06-22", 70],
["2001-06-23", 74],
["2001-06-24", 99],
["2001-06-25", 91],
["2001-06-26", 113],
["2001-06-27", 93],
["2001-06-28", 69],
["2001-06-29", 74],
["2001-06-30", 75],
["2001-07-01", 108],
["2001-07-02", 115],
["2001-07-03", 86],
["2001-07-04", 67],
["2001-07-05", 68],
["2001-07-06", 74],
["2001-07-07", 69],
["2001-07-08", 95],
["2001-07-09", 99],
["2001-07-10", 92],
["2001-07-11", 84],
["2001-07-12", 77],
["2001-07-13", 69],
["2001-07-14", 62],
["2001-07-15", 83],
["2001-07-16", 101],
["2001-07-17", 98],
["2001-07-18", 89],
["2001-07-19", 82],
["2001-07-20", 105],
["2001-07-21", 79],
["2001-07-22", 48],
["2001-07-23", 119],
["2001-07-24", 126],
["2001-07-25", 44],
["2001-07-26", 42],
["2001-07-27", 86],
["2001-07-28", 68],
["2001-07-29", 93],
["2001-07-30", 89],
["2001-07-31", 76],
["2001-08-01", 54],
["2001-08-02", 53],
["2001-08-03", 35],
["2001-08-04", 65],
["2001-08-05", 108],
["2001-08-06", 114],
["2001-08-07", 90],
["2001-08-08", 63],
["2001-08-09", 79],
["2001-08-10", 102],
["2001-08-11", 100],
["2001-08-12", 107],
["2001-08-13", 81],
["2001-08-14", 79],
["2001-08-15", 116],
["2001-08-16", 98],
["2001-08-17", 96],
["2001-08-18", 94],
["2001-08-19", 63],
["2001-08-20", 39],
["2001-08-21", 81],
["2001-08-22", 73],
["2001-08-23", 66],
["2001-08-24", 52],
["2001-08-25", 64],
["2001-08-26", 61],
["2001-08-27", 83],
["2001-08-28", 85],
["2001-08-29", 99],
["2001-08-30", 97],
["2001-08-31", 93],
["2001-09-01", 86],
["2001-09-02", 105],
["2001-09-03", 98],
["2001-09-04", 109],
["2001-09-05", 92],
["2001-09-06", 68],
["2001-09-07", 92],
["2001-09-08", 72],
["2001-09-09", 64],
["2001-09-10", 88],
["2001-09-11", 97],
["2001-09-12", 102],
["2001-09-13", 103],
["2001-09-14", 120],
["2001-09-15", 94],
["2001-09-16", 95],
["2001-09-17", 93],
["2001-09-18", 56],
["2001-09-19", 98],
["2001-09-20", 81],
["2001-09-21", 100],
["2001-09-22", 75],
["2001-09-23", 84],
["2001-09-24", 91],
["2001-09-25", 70],
["2001-09-26", 96],
["2001-09-27", 128],
["2001-09-28", 92],
["2001-09-29", 107],
["2001-09-30", 95],
["2001-10-01", 63],
["2001-10-02", 115],
["2001-10-03", 69],
["2001-10-04", 47],
["2001-10-05", 86],
["2001-10-06", 122],
["2001-10-07", 104],
["2001-10-08", 122],
["2001-10-09", 49],
["2001-10-10", 36],
["2001-10-11", 83],
["2001-10-12", 107],
["2001-10-13", 126],
["2001-10-14", 126],
["2001-10-15", 78],
["2001-10-16", 72],
["2001-10-17", 76],
["2001-10-18", 87],
["2001-10-19", 143],
["2001-10-20", 259],
["2001-10-21", 183],
["2001-10-22", 276],
["2001-10-23", 232],
["2001-10-24", 167],
["2001-10-25", 105],
["2001-10-26", 129],
["2001-10-27", 140],
["2001-10-28", 61],
["2001-10-29", 85],
["2001-10-30", 155],
["2001-11-01", 38],
["2001-11-02", 106],
["2001-11-03", 134],
["2001-11-04", 57],
["2001-11-05", 51],
["2001-11-06", 68],
["2001-11-07", 129],
["2001-11-08", 158],
["2001-11-09", 85],
["2001-11-10", 121],
["2001-11-11", 161],
["2001-11-12", 94],
["2001-11-13", 58],
["2001-11-14", 57],
["2001-11-15", 71],
["2001-11-16", 105],
["2001-11-17", 66],
["2001-11-18", 117],
["2001-11-19", 87],
["2001-11-20", 88],
["2001-11-21", 131],
["2001-11-22", 151],
["2001-11-23", 310],
["2001-11-24", 161],
["2001-11-25", 23],
["2001-11-26", 52],
["2001-11-27", 82],
["2001-11-28", 128],
["2001-11-29", 115],
["2001-11-30", 63],
["2001-12-02", 102],
["2001-12-03", 96],
["2001-12-04", 107],
["2001-12-05", 89],
["2001-12-06", 59],
["2001-12-07", 100],
["2001-12-08", 136],
["2001-12-09", 137],
["2001-12-10", 119],
["2001-12-11", 112],
["2001-12-12", 186],
["2001-12-13", 192],
["2001-12-14", 83],
["2001-12-15", 97],
["2001-12-16", 113],
["2001-12-18", 89],
["2001-12-19", 106],
["2001-12-20", 119],
["2001-12-21", 62],
["2001-12-22", 79],
["2001-12-23", 58],
["2001-12-24", 61],
["2001-12-25", 64],
["2001-12-26", 108],
["2001-12-27", 101],
["2001-12-28", 82],
["2001-12-29", 85],
["2001-12-30", 98],
["2001-12-31", 132],
["2002-01-01", 88],
["2002-01-02", 97],
["2002-01-03", 116],
["2002-01-04", 111],
["2002-01-05", 81],
["2002-01-06", 78],
["2002-01-07", 138],
["2002-01-08", 100],
["2002-01-09", 157],
["2002-01-10", 349],
["2002-01-11", 196],
["2002-01-12", 190],
["2002-01-13", 100],
["2002-01-14", 103],
["2002-01-15", 160],
["2002-01-16", 97],
["2002-01-17", 103],
["2002-01-18", 123],
["2002-01-19", 137],
["2002-01-20", 268],
["2002-01-21", 52],
["2002-01-22", 44],
["2002-01-23", 66],
["2002-01-24", 106],
["2002-01-25", 94],
["2002-01-26", 96],
["2002-01-27", 58],
["2002-01-28", 62],
["2002-01-29", 56],
["2002-01-30", 62],
["2002-01-31", 109],
["2002-02-01", 96],
["2002-02-02", 95],
["2002-02-03", 126],
["2002-02-04", 161],
["2002-02-05", 138],
["2002-02-06", 106],
["2002-02-07", 99],
["2002-02-08", 113],
["2002-02-09", 80],
["2002-02-10", 90],
["2002-02-11", 86],
["2002-02-12", 142],
["2002-02-13", 93],
["2002-02-14", 125],
["2002-02-15", 135],
["2002-02-16", 138],
["2002-02-17", 111],
["2002-02-18", 70],
["2002-02-19", 101],
["2002-02-20", 153],
["2002-02-21", 146],
["2002-02-22", 97],
["2002-02-23", 82],
["2002-02-24", 99],
["2002-02-25", 131],
["2002-02-26", 88],
["2002-02-27", 74],
["2002-02-28", 96],
["2002-03-01", 133],
["2002-03-02", 105],
["2002-03-03", 86],
["2002-03-04", 105],
["2002-03-05", 89],
["2002-03-06", 70],
["2002-03-07", 87],
["2002-03-08", 109],
["2002-03-09", 161],
["2002-03-10", 83],
["2002-03-11", 129],
["2002-03-12", 107],
["2002-03-13", 89],
["2002-03-14", 186],
["2002-03-15", 108],
["2002-03-16", 500],
["2002-03-17", 188],
["2002-03-18", 102],
["2002-03-19", 139],
["2002-03-20", 155],
["2002-03-21", 500],
["2002-03-22", 370],
["2002-03-23", 164],
["2002-03-24", 105],
["2002-03-25", 156],
["2002-03-26", 180],
["2002-03-27", 105],
["2002-03-28", 126],
["2002-03-29", 120],
["2002-03-30", 122],
["2002-03-31", 118],
["2002-04-01", 188],
["2002-04-02", 260],
["2002-04-03", 296],
["2002-04-04", 118],
["2002-04-05", 132],
["2002-04-06", 80],
["2002-04-07", 500],
["2002-04-08", 500],
["2002-04-09", 253],
["2002-04-10", 67],
["2002-04-11", 110],
["2002-04-13", 133],
["2002-04-14", 246],
["2002-04-15", 324],
["2002-04-16", 225],
["2002-04-17", 120],
["2002-04-18", 121],
["2002-04-19", 131],
["2002-04-20", 148],
["2002-04-21", 174],
["2002-04-22", 106],
["2002-04-23", 32],
["2002-04-24", 86],
["2002-04-25", 92],
["2002-04-26", 117],
["2002-04-27", 110],
["2002-04-28", 90],
["2002-04-29", 86],
["2002-04-30", 106],
["2002-05-01", 84],
["2002-05-02", 76],
["2002-05-03", 92],
["2002-05-04", 85],
["2002-05-05", 79],
["2002-05-07", 92],
["2002-05-08", 99],
["2002-05-09", 105],
["2002-05-10", 105],
["2002-05-11", 78],
["2002-05-12", 125],
["2002-05-13", 113],
["2002-05-14", 90],
["2002-05-15", 89],
["2002-05-16", 99],
["2002-05-17", 94],
["2002-05-18", 109],
["2002-05-19", 105],
["2002-05-20", 115],
["2002-05-21", 110],
["2002-05-22", 54],
["2002-05-23", 76],
["2002-05-24", 83],
["2002-05-25", 75],
["2002-05-26", 89],
["2002-05-27", 97],
["2002-05-28", 113],
["2002-05-29", 106],
["2002-05-30", 86],
["2002-05-31", 108],
["2002-06-01", 115],
["2002-06-02", 106],
["2002-06-03", 99],
["2002-06-04", 151],
["2002-06-05", 118],
["2002-06-06", 139],
["2002-06-07", 161],
["2002-06-08", 77],
["2002-06-09", 72],
["2002-06-10", 36],
["2002-06-11", 81],
["2002-06-12", 67],
["2002-06-13", 56],
["2002-06-14", 73],
["2002-06-15", 75],
["2002-06-16", 80],
["2002-06-17", 122],
["2002-06-19", 142],
["2002-06-20", 77],
["2002-06-21", 68],
["2002-06-22", 77],
["2002-06-23", 50],
["2002-06-24", 51],
["2002-06-25", 40],
["2002-06-26", 46],
["2002-06-27", 65],
["2002-06-28", 110],
["2002-06-29", 104],
["2002-06-30", 85],
["2002-07-01", 126],
["2002-07-02", 88],
["2002-07-03", 112],
["2002-07-04", 108],
["2002-07-05", 98],
["2002-07-06", 88],
["2002-07-07", 68],
["2002-07-08", 87],
["2002-07-09", 83],
["2002-07-10", 87],
["2002-07-11", 127],
["2002-07-12", 111],
["2002-07-13", 108],
["2002-07-14", 91],
["2002-07-15", 89],
["2002-07-16", 75],
["2002-07-17", 88],
["2002-07-18", 76],
["2002-07-19", 62],
["2002-07-20", 55],
["2002-07-21", 66],
["2002-07-22", 67],
["2002-07-23", 62],
["2002-07-24", 113],
["2002-07-25", 81],
["2002-07-26", 66],
["2002-07-27", 86],
["2002-07-28", 47],
["2002-07-29", 44],
["2002-07-30", 79],
["2002-07-31", 137],
["2002-08-01", 160],
["2002-08-02", 89],
["2002-08-03", 96],
["2002-08-04", 63],
["2002-08-05", 53],
["2002-08-06", 50],
["2002-08-07", 44],
["2002-08-08", 74],
["2002-08-09", 64],
["2002-08-10", 72],
["2002-08-11", 94],
["2002-08-12", 71],
["2002-08-13", 124],
["2002-08-14", 129],
["2002-08-15", 155],
["2002-08-16", 156],
["2002-08-17", 125],
["2002-08-18", 130],
["2002-08-19", 66],
["2002-08-20", 91],
["2002-08-21", 114],
["2002-08-22", 112],
["2002-08-23", 102],
["2002-08-24", 72],
["2002-08-25", 76],
["2002-08-26", 77],
["2002-08-27", 86],
["2002-08-28", 92],
["2002-08-29", 108],
["2002-08-30", 100],
["2002-08-31", 122],
["2002-09-01", 164],
["2002-09-02", 111],
["2002-09-03", 52],
["2002-09-04", 70],
["2002-09-05", 59],
["2002-09-06", 82],
["2002-09-07", 96],
["2002-09-08", 92],
["2002-09-09", 124],
["2002-09-10", 98],
["2002-09-11", 45],
["2002-09-12", 37],
["2002-09-13", 81],
["2002-09-14", 90],
["2002-09-15", 98],
["2002-09-16", 97],
["2002-09-17", 111],
["2002-09-18", 125],
["2002-09-19", 83],
["2002-09-20", 41],
["2002-09-21", 87],
["2002-09-22", 56],
["2002-09-23", 72],
["2002-09-25", 182],
["2002-09-26", 183],
["2002-09-27", 70],
["2002-09-28", 44],
["2002-09-29", 62],
["2002-09-30", 100],
["2002-10-01", 121],
["2002-10-02", 62],
["2002-10-03", 70],
["2002-10-04", 99],
["2002-10-05", 89],
["2002-10-06", 52],
["2002-10-07", 37],
["2002-10-08", 64],
["2002-10-09", 135],
["2002-10-10", 232],
["2002-10-11", 365],
["2002-10-12", 198],
["2002-10-13", 53],
["2002-10-14", 121],
["2002-10-15", 83],
["2002-10-16", 100],
["2002-10-17", 169],
["2002-10-18", 75],
["2002-10-20", 72],
["2002-10-21", 51],
["2002-10-22", 50],
["2002-10-23", 95],
["2002-10-24", 88],
["2002-10-26", 59],
["2002-10-27", 30],
["2002-10-28", 48],
["2002-10-29", 109],
["2002-10-30", 146],
["2002-10-31", 76],
["2002-11-01", 33],
["2002-11-02", 52],
["2002-11-03", 54],
["2002-11-04", 70],
["2002-11-05", 107],
["2002-11-06", 96],
["2002-11-07", 76],
["2002-11-08", 37],
["2002-11-09", 94],
["2002-11-10", 182],
["2002-11-11", 452],
["2002-11-12", 66],
["2002-11-13", 56],
["2002-11-14", 80],
["2002-11-15", 85],
["2002-11-16", 104],
["2002-11-17", 43],
["2002-11-18", 52],
["2002-11-19", 115],
["2002-11-20", 143],
["2002-11-21", 75],
["2002-11-22", 110],
["2002-11-23", 134],
["2002-11-24", 129],
["2002-11-25", 153],
["2002-11-26", 54],
["2002-11-27", 114],
["2002-11-28", 145],
["2002-11-29", 87],
["2002-11-30", 138],
["2002-12-01", 198],
["2002-12-02", 273],
["2002-12-03", 395],
["2002-12-04", 498],
["2002-12-05", 97],
["2002-12-06", 112],
["2002-12-07", 97],
["2002-12-08", 86],
["2002-12-09", 97],
["2002-12-10", 99],
["2002-12-12", 151],
["2002-12-13", 135],
["2002-12-14", 193],
["2002-12-15", 153],
["2002-12-16", 95],
["2002-12-17", 91],
["2002-12-18", 137],
["2002-12-19", 98],
["2002-12-20", 77],
["2002-12-21", 95],
["2002-12-22", 96],
["2002-12-23", 83],
["2002-12-24", 71],
["2002-12-25", 53],
["2002-12-26", 69],
["2002-12-27", 75],
["2002-12-28", 106],
["2002-12-29", 90],
["2002-12-30", 106],
["2002-12-31", 64],
["2003-01-01", 105],
["2003-01-02", 100],
["2003-01-03", 69],
["2003-01-04", 55],
["2003-01-05", 65],
["2003-01-06", 112],
["2003-01-07", 83],
["2003-01-08", 131],
["2003-01-09", 151],
["2003-01-10", 93],
["2003-01-11", 97],
["2003-01-12", 104],
["2003-01-13", 92],
["2003-01-14", 53],
["2003-01-15", 105],
["2003-01-16", 159],
["2003-01-17", 106],
["2003-01-18", 89],
["2003-01-19", 88],
["2003-01-20", 87],
["2003-01-21", 99],
["2003-01-22", 117],
["2003-01-23", 72],
["2003-01-24", 109],
["2003-01-25", 91],
["2003-01-26", 100],
["2003-01-27", 48],
["2003-01-28", 58],
["2003-01-29", 65],
["2003-01-30", 105],
["2003-01-31", 87],
["2003-02-01", 148],
["2003-02-02", 109],
["2003-02-03", 96],
["2003-02-04", 87],
["2003-02-05", 56],
["2003-02-06", 105],
["2003-02-07", 126],
["2003-02-08", 164],
["2003-02-09", 113],
["2003-02-10", 54],
["2003-02-11", 47],
["2003-02-12", 93],
["2003-02-13", 83],
["2003-02-14", 91],
["2003-02-15", 135],
["2003-02-16", 65],
["2003-02-17", 100],
["2003-02-18", 147],
["2003-02-19", 56],
["2003-02-20", 89],
["2003-02-21", 107],
["2003-02-22", 99],
["2003-02-23", 124],
["2003-02-24", 152],
["2003-02-25", 115],
["2003-02-26", 87],
["2003-02-27", 76],
["2003-02-28", 93],
["2003-03-01", 172],
["2003-03-02", 235],
["2003-03-03", 65],
["2003-03-04", 55],
["2003-03-05", 93],
["2003-03-06", 96],
["2003-03-07", 127],
["2003-03-08", 71],
["2003-03-09", 88],
["2003-03-10", 81],
["2003-03-11", 115],
["2003-03-12", 54],
["2003-03-13", 94],
["2003-03-14", 92],
["2003-03-15", 98],
["2003-03-17", 73],
["2003-03-18", 69],
["2003-03-19", 156],
["2003-03-20", 93],
["2003-03-21", 37],
["2003-03-22", 92],
["2003-03-23", 114],
["2003-03-24", 124],
["2003-03-25", 108],
["2003-03-26", 106],
["2003-03-27", 39],
["2003-03-28", 66],
["2003-03-29", 126],
["2003-03-30", 282],
["2003-03-31", 136],
["2003-04-01", 92],
["2003-04-02", 54],
["2003-04-03", 81],
["2003-04-04", 89],
["2003-04-05", 115],
["2003-04-06", 108],
["2003-04-07", 100],
["2003-04-08", 55],
["2003-04-09", 75],
["2003-04-10", 88],
["2003-04-11", 94],
["2003-04-12", 143],
["2003-04-13", 62],
["2003-04-14", 138],
["2003-04-15", 187],
["2003-04-16", 157],
["2003-04-17", 154],
["2003-04-18", 56],
["2003-04-19", 54],
["2003-04-20", 57],
["2003-04-21", 46],
["2003-04-22", 82],
["2003-04-24", 179],
["2003-04-25", 138],
["2003-04-26", 147],
["2003-04-28", 147],
["2003-04-29", 106],
["2003-04-30", 95],
["2003-05-01", 107],
["2003-05-02", 102],
["2003-05-03", 120],
["2003-05-04", 117],
["2003-05-05", 87],
["2003-05-06", 71],
["2003-05-07", 58],
["2003-05-08", 95],
["2003-05-09", 117],
["2003-05-10", 142],
["2003-05-11", 104],
["2003-05-12", 124],
["2003-05-13", 100],
["2003-05-14", 82],
["2003-05-15", 77],
["2003-05-16", 70],
["2003-05-17", 34],
["2003-05-18", 60],
["2003-05-19", 83],
["2003-05-20", 107],
["2003-05-21", 126],
["2003-05-22", 93],
["2003-05-23", 100],
["2003-05-24", 96],
["2003-05-25", 87],
["2003-05-26", 116],
["2003-05-27", 111],
["2003-05-28", 90],
["2003-05-29", 68],
["2003-05-30", 96],
["2003-05-31", 86],
["2003-06-01", 131],
["2003-06-02", 110],
["2003-06-03", 119],
["2003-06-04", 126],
["2003-06-05", 67],
["2003-06-06", 86],
["2003-06-07", 81],
["2003-06-08", 104],
["2003-06-09", 71],
["2003-06-10", 35],
["2003-06-11", 57],
["2003-06-12", 56],
["2003-06-13", 57],
["2003-06-14", 40],
["2003-06-15", 72],
["2003-06-16", 96],
["2003-06-17", 137],
["2003-06-18", 180],
["2003-06-19", 171],
["2003-06-20", 167],
["2003-06-21", 173],
["2003-06-22", 124],
["2003-06-23", 79],
["2003-06-24", 29],
["2003-06-25", 76],
["2003-06-26", 96],
["2003-06-27", 89],
["2003-06-28", 67],
["2003-06-29", 51],
["2003-06-30", 92],
["2003-07-01", 94],
["2003-07-02", 100],
["2003-07-03", 129],
["2003-07-04", 128],
["2003-07-05", 44],
["2003-07-06", 64],
["2003-07-07", 59],
["2003-07-08", 75],
["2003-07-09", 41],
["2003-07-10", 85],
["2003-07-11", 91],
["2003-07-12", 125],
["2003-07-13", 108],
["2003-07-14", 116],
["2003-07-15", 135],
["2003-07-16", 111],
["2003-07-17", 95],
["2003-07-18", 79],
["2003-07-19", 75],
["2003-07-20", 104],
["2003-07-21", 82],
["2003-07-22", 80],
["2003-07-23", 99],
["2003-07-24", 110],
["2003-07-25", 96],
["2003-07-26", 163],
["2003-07-27", 126],
["2003-07-28", 69],
["2003-07-29", 98],
["2003-07-30", 68],
["2003-07-31", 75],
["2003-08-01", 109],
["2003-08-02", 75],
["2003-08-03", 102],
["2003-08-04", 115],
["2003-08-05", 110],
["2003-08-06", 93],
["2003-08-07", 80],
["2003-08-08", 65],
["2003-08-09", 64],
["2003-08-10", 64],
["2003-08-11", 58],
["2003-08-13", 123],
["2003-08-14", 87],
["2003-08-15", 88],
["2003-08-16", 89],
["2003-08-17", 86],
["2003-08-18", 91],
["2003-08-19", 132],
["2003-08-20", 85],
["2003-08-21", 96],
["2003-08-22", 90],
["2003-08-23", 78],
["2003-08-24", 79],
["2003-08-25", 76],
["2003-08-26", 84],
["2003-08-27", 88],
["2003-08-28", 57],
["2003-08-29", 44],
["2003-08-30", 78],
["2003-08-31", 95],
["2003-09-01", 93],
["2003-09-02", 86],
["2003-09-03", 108],
["2003-09-04", 124],
["2003-09-05", 70],
["2003-09-06", 113],
["2003-09-07", 82],
["2003-09-08", 111],
["2003-09-09", 59],
["2003-09-10", 60],
["2003-09-11", 89],
["2003-09-12", 132],
["2003-09-13", 133],
["2003-09-14", 112],
["2003-09-15", 69],
["2003-09-16", 132],
["2003-09-17", 75],
["2003-09-18", 37],
["2003-09-19", 37],
["2003-09-20", 79],
["2003-09-21", 89],
["2003-09-22", 121],
["2003-09-23", 74],
["2003-09-24", 88],
["2003-09-26", 66],
["2003-09-27", 32],
["2003-09-28", 73],
["2003-09-29", 92],
["2003-09-30", 57],
["2003-10-01", 67],
["2003-10-02", 34],
["2003-10-03", 45],
["2003-10-05", 115],
["2003-10-06", 153],
["2003-10-07", 127],
["2003-10-08", 116],
["2003-10-09", 152],
["2003-10-10", 130],
["2003-10-11", 24],
["2003-10-12", 17],
["2003-10-13", 60],
["2003-10-14", 56],
["2003-10-15", 51],
["2003-10-16", 56],
["2003-10-17", 80],
["2003-10-18", 56],
["2003-10-19", 98],
["2003-10-20", 145],
["2003-10-21", 121],
["2003-10-22", 41],
["2003-10-23", 86],
["2003-10-24", 121],
["2003-10-25", 69],
["2003-10-26", 116],
["2003-10-27", 165],
["2003-10-29", 120],
["2003-10-30", 171],
["2003-10-31", 289],
["2003-11-01", 500],
["2003-11-02", 181],
["2003-11-03", 28],
["2003-11-04", 92],
["2003-11-05", 146],
["2003-11-06", 44],
["2003-11-07", 22],
["2003-11-08", 25],
["2003-11-09", 51],
["2003-11-10", 74],
["2003-11-11", 51],
["2003-11-12", 106],
["2003-11-13", 149],
["2003-11-14", 213],
["2003-11-15", 130],
["2003-11-16", 32],
["2003-11-17", 116],
["2003-11-18", 162],
["2003-11-19", 173],
["2003-11-20", 118],
["2003-11-21", 20],
["2003-11-22", 85],
["2003-11-23", 161],
["2003-11-24", 186],
["2003-11-25", 147],
["2003-11-26", 57],
["2003-11-27", 88],
["2003-11-28", 107],
["2003-11-29", 159],
["2003-11-30", 147],
["2003-12-01", 153],
["2003-12-02", 135],
["2003-12-03", 99],
["2003-12-04", 92],
["2003-12-05", 109],
["2003-12-06", 99],
["2003-12-07", 57],
["2003-12-08", 64],
["2003-12-09", 79],
["2003-12-10", 143],
["2003-12-11", 93],
["2003-12-12", 52],
["2003-12-13", 95],
["2003-12-14", 141],
["2003-12-15", 59],
["2003-12-16", 109],
["2003-12-17", 58],
["2003-12-18", 60],
["2003-12-19", 52],
["2003-12-20", 71],
["2003-12-21", 110],
["2003-12-22", 107],
["2003-12-23", 114],
["2003-12-24", 98],
["2003-12-25", 96],
["2003-12-26", 48],
["2003-12-27", 89],
["2003-12-28", 130],
["2003-12-29", 90],
["2003-12-30", 106],
["2003-12-31", 111],
["2004-01-01", 128],
["2004-01-02", 83],
["2004-01-03", 60],
["2004-01-04", 109],
["2004-01-05", 137],
["2004-01-06", 147],
["2004-01-07", 99],
["2004-01-08", 73],
["2004-01-09", 126],
["2004-01-10", 73],
["2004-01-11", 72],
["2004-01-12", 87],
["2004-01-13", 85],
["2004-01-14", 115],
["2004-01-15", 121],
["2004-01-16", 97],
["2004-01-17", 109],
["2004-01-18", 74],
["2004-01-19", 52],
["2004-01-20", 49],
["2004-01-21", 41],
["2004-01-22", 64],
["2004-01-23", 80],
["2004-01-24", 38],
["2004-01-25", 58],
["2004-01-26", 106],
["2004-01-27", 57],
["2004-01-28", 106],
["2004-01-29", 111],
["2004-01-31", 118],
["2004-02-01", 109],
["2004-02-02", 53],
["2004-02-03", 50],
["2004-02-04", 59],
["2004-02-06", 56],
["2004-02-07", 68],
["2004-02-08", 52],
["2004-02-09", 68],
["2004-02-10", 130],
["2004-02-11", 95],
["2004-02-12", 103],
["2004-02-13", 124],
["2004-02-14", 95],
["2004-02-15", 92],
["2004-02-16", 95],
["2004-02-17", 135],
["2004-02-18", 242],
["2004-02-19", 451],
["2004-02-20", 140],
["2004-02-21", 109],
["2004-02-23", 88],
["2004-02-24", 164],
["2004-02-25", 145],
["2004-02-26", 46],
["2004-02-27", 85],
["2004-02-28", 125],
["2004-02-29", 54],
["2004-03-01", 83],
["2004-03-02", 73],
["2004-03-03", 60],
["2004-03-04", 85],
["2004-03-05", 73],
["2004-03-06", 51],
["2004-03-07", 56],
["2004-03-08", 108],
["2004-03-09", 179],
["2004-03-10", 446],
["2004-03-11", 84],
["2004-03-13", 104],
["2004-03-14", 87],
["2004-03-15", 143],
["2004-03-16", 206],
["2004-03-17", 77],
["2004-03-19", 114],
["2004-03-20", 87],
["2004-03-21", 92],
["2004-03-22", 165],
["2004-03-23", 104],
["2004-03-24", 33],
["2004-03-25", 88],
["2004-03-26", 137],
["2004-03-27", 151],
["2004-03-28", 338],
["2004-03-29", 239],
["2004-03-30", 139],
["2004-03-31", 79],
["2004-04-01", 123],
["2004-04-02", 64],
["2004-04-03", 51],
["2004-04-05", 133],
["2004-04-06", 93],
["2004-04-07", 39],
["2004-04-08", 111],
["2004-04-09", 145],
["2004-04-10", 193],
["2004-04-11", 131],
["2004-04-12", 131],
["2004-04-13", 108],
["2004-04-14", 95],
["2004-04-15", 141],
["2004-04-16", 186],
["2004-04-17", 156],
["2004-04-18", 260],
["2004-04-19", 138],
["2004-04-20", 133],
["2004-04-21", 107],
["2004-04-22", 143],
["2004-04-23", 61],
["2004-04-24", 109],
["2004-04-25", 151],
["2004-04-26", 63],
["2004-04-27", 63],
["2004-04-28", 79],
["2004-04-29", 138],
["2004-04-30", 47],
["2004-05-01", 67],
["2004-05-02", 84],
["2004-05-03", 95],
["2004-05-04", 73],
["2004-05-05", 89],
["2004-05-06", 91],
["2004-05-07", 152],
["2004-05-08", 189],
["2004-05-09", 92],
["2004-05-10", 97],
["2004-05-11", 107],
["2004-05-12", 81],
["2004-05-13", 89],
["2004-05-14", 93],
["2004-05-15", 92],
["2004-05-16", 50],
["2004-05-17", 61],
["2004-05-18", 66],
["2004-05-19", 77],
["2004-05-21", 56],
["2004-05-22", 65],
["2004-05-23", 86],
["2004-05-24", 134],
["2004-05-25", 141],
["2004-05-26", 30],
["2004-05-27", 83],
["2004-05-28", 111],
["2004-05-29", 56],
["2004-05-30", 66],
["2004-05-31", 56],
["2004-06-01", 100],
["2004-06-02", 109],
["2004-06-03", 118],
["2004-06-04", 107],
["2004-06-05", 74],
["2004-06-06", 58],
["2004-06-07", 88],
["2004-06-08", 100],
["2004-06-09", 109],
["2004-06-10", 125],
["2004-06-11", 114],
["2004-06-12", 110],
["2004-06-13", 118],
["2004-06-14", 135],
["2004-06-15", 147],
["2004-06-16", 99],
["2004-06-17", 29],
["2004-06-18", 75],
["2004-06-19", 73],
["2004-06-20", 97],
["2004-06-21", 102],
["2004-06-22", 93],
["2004-06-23", 78],
["2004-06-24", 58],
["2004-06-25", 61],
["2004-06-26", 100],
["2004-06-27", 106],
["2004-06-28", 139],
["2004-06-29", 152],
["2004-06-30", 49],
["2004-07-01", 46],
["2004-07-02", 85],
["2004-07-03", 97],
["2004-07-04", 58],
["2004-07-05", 56],
["2004-07-06", 59],
["2004-07-07", 74],
["2004-07-08", 63],
["2004-07-09", 59],
["2004-07-10", 91],
["2004-07-11", 70],
["2004-07-12", 53],
["2004-07-13", 55],
["2004-07-14", 67],
["2004-07-15", 97],
["2004-07-16", 123],
["2004-07-17", 118],
["2004-07-18", 100],
["2004-07-19", 80],
["2004-07-20", 135],
["2004-07-21", 67],
["2004-07-22", 70],
["2004-07-23", 105],
["2004-07-24", 55],
["2004-07-25", 78],
["2004-07-26", 78],
["2004-07-27", 59],
["2004-07-28", 111],
["2004-07-29", 78],
["2004-07-30", 30],
["2004-07-31", 78],
["2004-08-01", 91],
["2004-08-02", 119],
["2004-08-03", 95],
["2004-08-04", 73],
["2004-08-05", 76],
["2004-08-06", 89],
["2004-08-07", 117],
["2004-08-08", 145],
["2004-08-09", 143],
["2004-08-10", 84],
["2004-08-11", 84],
["2004-08-12", 51],
["2004-08-13", 31],
["2004-08-14", 83],
["2004-08-15", 76],
["2004-08-16", 51],
["2004-08-17", 67],
["2004-08-18", 75],
["2004-08-19", 68],
["2004-08-20", 80],
["2004-08-21", 99],
["2004-08-22", 70],
["2004-08-23", 60],
["2004-08-24", 105],
["2004-08-25", 122],
["2004-08-26", 100],
["2004-08-27", 125],
["2004-08-28", 70],
["2004-08-29", 57],
["2004-08-30", 79],
["2004-08-31", 68],
["2004-09-01", 61],
["2004-09-02", 67],
["2004-09-03", 77],
["2004-09-04", 64],
["2004-09-05", 96],
["2004-09-06", 101],
["2004-09-07", 24],
["2004-09-08", 61],
["2004-09-09", 80],
["2004-09-10", 85],
["2004-09-11", 88],
["2004-09-12", 95],
["2004-09-13", 101],
["2004-09-14", 140],
["2004-09-15", 34],
["2004-09-16", 81],
["2004-09-17", 89],
["2004-09-18", 86],
["2004-09-19", 71],
["2004-09-20", 94],
["2004-09-21", 40],
["2004-09-22", 84],
["2004-09-23", 122],
["2004-09-24", 197],
["2004-09-25", 179],
["2004-09-26", 111],
["2004-09-27", 114],
["2004-09-29", 134],
["2004-09-30", 141],
["2004-10-01", 17],
["2004-10-02", 59],
["2004-10-03", 83],
["2004-10-04", 118],
["2004-10-05", 153],
["2004-10-06", 166],
["2004-10-07", 325],
["2004-10-08", 402],
["2004-10-09", 263],
["2004-10-10", 374],
["2004-10-11", 127],
["2004-10-12", 37],
["2004-10-13", 62],
["2004-10-14", 67],
["2004-10-15", 99],
["2004-10-16", 116],
["2004-10-17", 110],
["2004-10-18", 126],
["2004-10-19", 149],
["2004-10-20", 110],
["2004-10-21", 56],
["2004-10-22", 59],
["2004-10-23", 97],
["2004-10-24", 146],
["2004-10-25", 142],
["2004-10-26", 34],
["2004-10-27", 79],
["2004-10-28", 154],
["2004-10-29", 191],
["2004-10-30", 219],
["2004-10-31", 157],
["2004-11-01", 35],
["2004-11-02", 39],
["2004-11-03", 124],
["2004-11-04", 164],
["2004-11-05", 56],
["2004-11-06", 92],
["2004-11-07", 133],
["2004-11-08", 173],
["2004-11-09", 86],
["2004-11-10", 77],
["2004-11-11", 62],
["2004-11-12", 45],
["2004-11-13", 93],
["2004-11-14", 160],
["2004-11-15", 54],
["2004-11-16", 67],
["2004-11-17", 65],
["2004-11-18", 99],
["2004-11-19", 97],
["2004-11-20", 47],
["2004-11-21", 93],
["2004-11-22", 165],
["2004-11-23", 156],
["2004-11-24", 89],
["2004-11-25", 41],
["2004-11-26", 53],
["2004-11-27", 89],
["2004-11-28", 99],
["2004-11-29", 81],
["2004-11-30", 139],
["2004-12-01", 275],
["2004-12-02", 270],
["2004-12-03", 330],
["2004-12-04", 97],
["2004-12-05", 37],
["2004-12-06", 97],
["2004-12-07", 89],
["2004-12-08", 170],
["2004-12-09", 248],
["2004-12-10", 97],
["2004-12-11", 181],
["2004-12-12", 123],
["2004-12-13", 89],
["2004-12-14", 198],
["2004-12-15", 305],
["2004-12-16", 86],
["2004-12-17", 92],
["2004-12-18", 143],
["2004-12-19", 82],
["2004-12-20", 23],
["2004-12-21", 81],
["2004-12-22", 88],
["2004-12-23", 75],
["2004-12-24", 99],
["2004-12-25", 150],
["2004-12-26", 97],
["2004-12-27", 44],
["2004-12-28", 49],
["2004-12-29", 61],
["2004-12-30", 80],
["2004-12-31", 45],
["2005-01-01", 63],
["2005-01-02", 118],
["2005-01-03", 100],
["2005-01-04", 52],
["2005-01-05", 104],
["2005-01-06", 147],
["2005-01-07", 48],
["2005-01-08", 56],
["2005-01-09", 44],
["2005-01-10", 96],
["2005-01-11", 67],
["2005-01-12", 52],
["2005-01-13", 83],
["2005-01-14", 65],
["2005-01-15", 67],
["2005-01-16", 87],
["2005-01-17", 111],
["2005-01-18", 47],
["2005-01-19", 55],
["2005-01-20", 57],
["2005-01-21", 85],
["2005-01-22", 119],
["2005-01-23", 174],
["2005-01-24", 143],
["2005-01-25", 95],
["2005-01-26", 115],
["2005-01-27", 173],
["2005-01-28", 163],
["2005-01-29", 95],
["2005-01-30", 50],
["2005-01-31", 69],
["2005-02-01", 69],
["2005-02-02", 47],
["2005-02-03", 96],
["2005-02-04", 79],
["2005-02-05", 46],
["2005-02-06", 68],
["2005-02-07", 71],
["2005-02-08", 68],
["2005-02-09", 84],
["2005-02-10", 38],
["2005-02-11", 71],
["2005-02-12", 102],
["2005-02-13", 122],
["2005-02-14", 153],
["2005-02-15", 150],
["2005-02-16", 69],
["2005-02-17", 105],
["2005-02-18", 60],
["2005-02-19", 42],
["2005-02-20", 47],
["2005-02-21", 87],
["2005-02-22", 102],
["2005-02-23", 30],
["2005-02-24", 55],
["2005-02-25", 46],
["2005-02-26", 64],
["2005-02-27", 95],
["2005-02-28", 61],
["2005-03-01", 64],
["2005-03-02", 74],
["2005-03-03", 57],
["2005-03-04", 46],
["2005-03-05", 58],
["2005-03-06", 114],
["2005-03-07", 108],
["2005-03-08", 82],
["2005-03-09", 80],
["2005-03-10", 110],
["2005-03-11", 67],
["2005-03-12", 59],
["2005-03-13", 36],
["2005-03-14", 69],
["2005-03-15", 99],
["2005-03-16", 120],
["2005-03-17", 109],
["2005-03-18", 52],
["2005-03-19", 96],
["2005-03-20", 119],
["2005-03-21", 94],
["2005-03-22", 151],
["2005-03-23", 90],
["2005-03-24", 63],
["2005-03-25", 99],
["2005-03-26", 133],
["2005-03-27", 161],
["2005-03-28", 141],
["2005-03-29", 48],
["2005-03-30", 122],
["2005-03-31", 113],
["2005-04-01", 83],
["2005-04-02", 82],
["2005-04-03", 82],
["2005-04-04", 116],
["2005-04-05", 332],
["2005-04-06", 352],
["2005-04-07", 156],
["2005-04-08", 100],
["2005-04-09", 64],
["2005-04-10", 64],
["2005-04-11", 95],
["2005-04-12", 92],
["2005-04-13", 90],
["2005-04-14", 179],
["2005-04-15", 88],
["2005-04-16", 213],
["2005-04-17", 143],
["2005-04-18", 159],
["2005-04-19", 132],
["2005-04-20", 173],
["2005-04-21", 69],
["2005-04-22", 58],
["2005-04-23", 107],
["2005-04-24", 106],
["2005-04-25", 73],
["2005-04-26", 115],
["2005-04-27", 122],
["2005-04-28", 418],
["2005-04-29", 98],
["2005-04-30", 138],
["2005-05-01", 183],
["2005-05-02", 122],
["2005-05-03", 139],
["2005-05-04", 160],
["2005-05-05", 97],
["2005-05-06", 48],
["2005-05-07", 80],
["2005-05-08", 130],
["2005-05-09", 63],
["2005-05-10", 62],
["2005-05-11", 86],
["2005-05-12", 110],
["2005-05-13", 81],
["2005-05-14", 85],
["2005-05-15", 113],
["2005-05-16", 83],
["2005-05-17", 49],
["2005-05-18", 51],
["2005-05-19", 53],
["2005-05-20", 80],
["2005-05-21", 120],
["2005-05-22", 46],
["2005-05-23", 59],
["2005-05-24", 82],
["2005-05-25", 88],
["2005-05-26", 107],
["2005-05-27", 83],
["2005-05-28", 120],
["2005-05-29", 100],
["2005-05-30", 109],
["2005-05-31", 95],
["2005-06-01", 93],
["2005-06-02", 54],
["2005-06-03", 58],
["2005-06-04", 77],
["2005-06-05", 75],
["2005-06-06", 53],
["2005-06-07", 86],
["2005-06-08", 96],
["2005-06-09", 81],
["2005-06-10", 85],
["2005-06-11", 136],
["2005-06-12", 106],
["2005-06-13", 94],
["2005-06-14", 69],
["2005-06-15", 56],
["2005-06-16", 83],
["2005-06-17", 79],
["2005-06-18", 92],
["2005-06-19", 116],
["2005-06-20", 131],
["2005-06-21", 113],
["2005-06-22", 116],
["2005-06-23", 120],
["2005-06-24", 148],
["2005-06-25", 141],
["2005-06-26", 79],
["2005-06-27", 52],
["2005-06-28", 84],
["2005-06-29", 86],
["2005-06-30", 100],
["2005-07-01", 97],
["2005-07-02", 76],
["2005-07-03", 87],
["2005-07-04", 64],
["2005-07-05", 63],
["2005-07-06", 70],
["2005-07-07", 89],
["2005-07-08", 98],
["2005-07-09", 91],
["2005-07-10", 79],
["2005-07-11", 69],
["2005-07-12", 81],
["2005-07-13", 93],
["2005-07-14", 93],
["2005-07-15", 97],
["2005-07-17", 150],
["2005-07-18", 103],
["2005-07-19", 114],
["2005-07-20", 125],
["2005-07-21", 104],
["2005-07-22", 79],
["2005-07-23", 51],
["2005-07-24", 23],
["2005-07-25", 75],
["2005-07-26", 109],
["2005-07-27", 73],
["2005-07-28", 63],
["2005-07-29", 57],
["2005-07-30", 95],
["2005-07-31", 79],
["2005-08-01", 81],
["2005-08-02", 68],
["2005-08-03", 72],
["2005-08-04", 46],
["2005-08-05", 63],
["2005-08-06", 86],
["2005-08-07", 71],
["2005-08-08", 72],
["2005-08-09", 62],
["2005-08-10", 60],
["2005-08-11", 146],
["2005-08-12", 141],
["2005-08-13", 63],
["2005-08-14", 98],
["2005-08-15", 100],
["2005-08-16", 46],
["2005-08-17", 26],
["2005-08-18", 53],
["2005-08-19", 59],
["2005-08-20", 79],
["2005-08-21", 110],
["2005-08-22", 91],
["2005-08-23", 97],
["2005-08-24", 90],
["2005-08-25", 85],
["2005-08-26", 110],
["2005-08-27", 94],
["2005-08-28", 154],
["2005-08-29", 136],
["2005-08-30", 113],
["2005-08-31", 152],
["2005-09-01", 118],
["2005-09-02", 42],
["2005-09-03", 68],
["2005-09-04", 80],
["2005-09-05", 90],
["2005-09-06", 99],
["2005-09-07", 98],
["2005-09-08", 83],
["2005-09-09", 141],
["2005-09-10", 164],
["2005-09-11", 182],
["2005-09-12", 107],
["2005-09-13", 76],
["2005-09-14", 62],
["2005-09-15", 104],
["2005-09-16", 78],
["2005-09-17", 73],
["2005-09-18", 66],
["2005-09-19", 99],
["2005-09-20", 92],
["2005-09-21", 71],
["2005-09-22", 60],
["2005-09-23", 110],
["2005-09-24", 112],
["2005-09-25", 134],
["2005-09-26", 168],
["2005-09-27", 97],
["2005-09-28", 115],
["2005-09-29", 100],
["2005-09-30", 47],
["2005-10-01", 88],
["2005-10-02", 72],
["2005-10-03", 70],
["2005-10-04", 77],
["2005-10-05", 103],
["2005-10-06", 136],
["2005-10-07", 82],
["2005-10-08", 42],
["2005-10-09", 93],
["2005-10-10", 167],
["2005-10-11", 152],
["2005-10-12", 183],
["2005-10-13", 155],
["2005-10-14", 50],
["2005-10-15", 73],
["2005-10-16", 120],
["2005-10-17", 57],
["2005-10-18", 96],
["2005-10-19", 94],
["2005-10-20", 151],
["2005-10-21", 96],
["2005-10-22", 92],
["2005-10-23", 135],
["2005-10-24", 139],
["2005-10-25", 99],
["2005-10-26", 176],
["2005-10-27", 156],
["2005-10-28", 24],
["2005-10-29", 48],
["2005-10-30", 54],
["2005-10-31", 97],
["2005-11-01", 134],
["2005-11-02", 252],
["2005-11-03", 334],
["2005-11-04", 330],
["2005-11-05", 472],
["2005-11-06", 191],
["2005-11-07", 141],
["2005-11-08", 45],
["2005-11-09", 104],
["2005-11-10", 156],
["2005-11-11", 79],
["2005-11-12", 95],
["2005-11-13", 70],
["2005-11-14", 80],
["2005-11-15", 60],
["2005-11-16", 104],
["2005-11-17", 160],
["2005-11-18", 184],
["2005-11-19", 126],
["2005-11-20", 91],
["2005-11-21", 73],
["2005-11-22", 134],
["2005-11-23", 76],
["2005-11-24", 108],
["2005-11-25", 127],
["2005-11-26", 131],
["2005-11-27", 163],
["2005-11-28", 220],
["2005-11-29", 73],
["2005-11-30", 154],
["2005-12-01", 97],
["2005-12-02", 58],
["2005-12-03", 99],
["2005-12-04", 61],
["2005-12-05", 60],
["2005-12-06", 37],
["2005-12-07", 39],
["2005-12-08", 72],
["2005-12-09", 121],
["2005-12-10", 99],
["2005-12-11", 44],
["2005-12-12", 49],
["2005-12-13", 40],
["2005-12-14", 53],
["2005-12-15", 50],
["2005-12-16", 49],
["2005-12-17", 44],
["2005-12-18", 77],
["2005-12-19", 129],
["2005-12-20", 114],
["2005-12-21", 57],
["2005-12-22", 86],
["2005-12-23", 120],
["2005-12-24", 102],
["2005-12-25", 146],
["2005-12-26", 61],
["2005-12-27", 57],
["2005-12-28", 122],
["2005-12-29", 113],
["2005-12-30", 157],
["2005-12-31", 76],
["2006-01-01", 108],
["2006-01-02", 100],
["2006-01-03", 119],
["2006-01-04", 69],
["2006-01-05", 53],
["2006-01-06", 54],
["2006-01-07", 62],
["2006-01-08", 100],
["2006-01-09", 103],
["2006-01-10", 147],
["2006-01-11", 100],
["2006-01-12", 110],
["2006-01-13", 98],
["2006-01-14", 107],
["2006-01-15", 252],
["2006-01-16", 243],
["2006-01-17", 116],
["2006-01-18", 110],
["2006-01-19", 181],
["2006-01-20", 273],
["2006-01-21", 310],
["2006-01-22", 136],
["2006-01-23", 110],
["2006-01-24", 146],
["2006-01-25", 119],
["2006-01-26", 157],
["2006-01-27", 153],
["2006-01-28", 69],
["2006-01-29", 143],
["2006-01-30", 54],
["2006-01-31", 65],
["2006-02-01", 83],
["2006-02-02", 75],
["2006-02-03", 56],
["2006-02-04", 73],
["2006-02-05", 114],
["2006-02-06", 138],
["2006-02-07", 61],
["2006-02-08", 34],
["2006-02-09", 70],
["2006-02-10", 93],
["2006-02-11", 99],
["2006-02-12", 110],
["2006-02-13", 228],
["2006-02-14", 178],
["2006-02-15", 64],
["2006-02-16", 93],
["2006-02-17", 59],
["2006-02-18", 87],
["2006-02-19", 95],
["2006-02-20", 133],
["2006-02-21", 215],
["2006-02-22", 75],
["2006-02-23", 93],
["2006-02-24", 74],
["2006-02-25", 112],
["2006-02-26", 54],
["2006-02-27", 81],
["2006-02-28", 83],
["2006-03-01", 62],
["2006-03-02", 49],
["2006-03-03", 89],
["2006-03-04", 154],
["2006-03-05", 99],
["2006-03-06", 80],
["2006-03-07", 90],
["2006-03-08", 98],
["2006-03-09", 91],
["2006-03-10", 408],
["2006-03-11", 95],
["2006-03-12", 85],
["2006-03-13", 90],
["2006-03-14", 88],
["2006-03-15", 109],
["2006-03-16", 91],
["2006-03-17", 135],
["2006-03-18", 256],
["2006-03-19", 84],
["2006-03-20", 226],
["2006-03-21", 197],
["2006-03-22", 181],
["2006-03-23", 66],
["2006-03-24", 97],
["2006-03-25", 206],
["2006-03-26", 99],
["2006-03-27", 347],
["2006-03-28", 98],
["2006-03-29", 124],
["2006-03-30", 92],
["2006-03-31", 96],
["2006-04-01", 183],
["2006-04-02", 122],
["2006-04-03", 187],
["2006-04-04", 162],
["2006-04-05", 99],
["2006-04-06", 78],
["2006-04-07", 158],
["2006-04-08", 186],
["2006-04-09", 500],
["2006-04-10", 500],
["2006-04-11", 166],
["2006-04-12", 95],
["2006-04-13", 60],
["2006-04-14", 149],
["2006-04-15", 128],
["2006-04-16", 84],
["2006-04-17", 500],
["2006-04-18", 168],
["2006-04-19", 319],
["2006-04-20", 79],
["2006-04-21", 123],
["2006-04-22", 145],
["2006-04-23", 203],
["2006-04-24", 94],
["2006-04-25", 128],
["2006-04-26", 210],
["2006-04-27", 98],
["2006-04-28", 99],
["2006-04-29", 131],
["2006-04-30", 165],
["2006-05-01", 432],
["2006-05-02", 94],
["2006-05-03", 92],
["2006-05-04", 147],
["2006-05-05", 95],
["2006-05-06", 93],
["2006-05-07", 138],
["2006-05-08", 123],
["2006-05-09", 79],
["2006-05-10", 71],
["2006-05-11", 61],
["2006-05-12", 63],
["2006-05-13", 44],
["2006-05-14", 93],
["2006-05-15", 95],
["2006-05-16", 98],
["2006-05-17", 500],
["2006-05-18", 168],
["2006-05-19", 240],
["2006-05-20", 82],
["2006-05-21", 96],
["2006-05-22", 96],
["2006-05-23", 95],
["2006-05-24", 84],
["2006-05-25", 91],
["2006-05-26", 78],
["2006-05-27", 32],
["2006-05-28", 51],
["2006-05-29", 84],
["2006-05-30", 98],
["2006-05-31", 118],
["2006-06-01", 96],
["2006-06-02", 112],
["2006-06-03", 69],
["2006-06-04", 100],
["2006-06-05", 137],
["2006-06-06", 147],
["2006-06-07", 86],
["2006-06-08", 65],
["2006-06-09", 92],
["2006-06-10", 39],
["2006-06-11", 61],
["2006-06-12", 96],
["2006-06-13", 77],
["2006-06-14", 43],
["2006-06-15", 78],
["2006-06-16", 86],
["2006-06-17", 50],
["2006-06-18", 68],
["2006-06-19", 97],
["2006-06-20", 84],
["2006-06-21", 152],
["2006-06-22", 118],
["2006-06-23", 123],
["2006-06-24", 76],
["2006-06-25", 68],
["2006-06-26", 84],
["2006-06-27", 75],
["2006-06-28", 90],
["2006-06-29", 66],
["2006-06-30", 42],
["2006-07-01", 57],
["2006-07-02", 52],
["2006-07-03", 81],
["2006-07-04", 75],
["2006-07-05", 97],
["2006-07-06", 60],
["2006-07-07", 65],
["2006-07-08", 67],
["2006-07-09", 82],
["2006-07-10", 99],
["2006-07-11", 66],
["2006-07-12", 72],
["2006-07-13", 44],
["2006-07-14", 78],
["2006-07-15", 70],
["2006-07-16", 69],
["2006-07-17", 58],
["2006-07-18", 43],
["2006-07-19", 55],
["2006-07-20", 74],
["2006-07-21", 76],
["2006-07-22", 36],
["2006-07-23", 72],
["2006-07-24", 61],
["2006-07-25", 46],
["2006-07-26", 50],
["2006-07-27", 65],
["2006-07-28", 98],
["2006-07-29", 115],
["2006-07-30", 138],
["2006-07-31", 88],
["2006-08-01", 47],
["2006-08-02", 39],
["2006-08-03", 61],
["2006-08-04", 64],
["2006-08-05", 74],
["2006-08-06", 100],
["2006-08-07", 82],
["2006-08-08", 84],
["2006-08-09", 64],
["2006-08-10", 89],
["2006-08-11", 75],
["2006-08-12", 98],
["2006-08-13", 69],
["2006-08-14", 27],
["2006-08-15", 70],
["2006-08-16", 84],
["2006-08-17", 91],
["2006-08-18", 85],
["2006-08-19", 97],
["2006-08-20", 77],
["2006-08-21", 45],
["2006-08-22", 69],
["2006-08-23", 67],
["2006-08-24", 99],
["2006-08-25", 131],
["2006-08-26", 69],
["2006-08-27", 66],
["2006-08-28", 93],
["2006-08-29", 62],
["2006-08-30", 59],
["2006-08-31", 64],
["2006-09-01", 89],
["2006-09-02", 100],
["2006-09-03", 109],
["2006-09-04", 28],
["2006-09-05", 71],
["2006-09-06", 87],
["2006-09-07", 112],
["2006-09-08", 71],
["2006-09-09", 37],
["2006-09-10", 67],
["2006-09-11", 86],
["2006-09-12", 89],
["2006-09-13", 100],
["2006-09-14", 107],
["2006-09-15", 109],
["2006-09-16", 116],
["2006-09-17", 134],
["2006-09-18", 100],
["2006-09-19", 132],
["2006-09-20", 151],
["2006-09-21", 99],
["2006-09-22", 95],
["2006-09-23", 118],
["2006-09-24", 121],
["2006-09-25", 119],
["2006-09-26", 49],
["2006-09-27", 91],
["2006-09-28", 98],
["2006-09-29", 70],
["2006-09-30", 100],
["2006-10-01", 139],
["2006-10-02", 152],
["2006-10-03", 143],
["2006-10-04", 73],
["2006-10-05", 99],
["2006-10-06", 194],
["2006-10-07", 100],
["2006-10-08", 91],
["2006-10-09", 77],
["2006-10-10", 131],
["2006-10-11", 65],
["2006-10-12", 73],
["2006-10-13", 121],
["2006-10-14", 135],
["2006-10-15", 100],
["2006-10-16", 146],
["2006-10-17", 59],
["2006-10-18", 86],
["2006-10-19", 121],
["2006-10-20", 71],
["2006-10-21", 97],
["2006-10-22", 87],
["2006-10-23", 46],
["2006-10-24", 99],
["2006-10-25", 139],
["2006-10-26", 64],
["2006-10-27", 99],
["2006-10-28", 163],
["2006-10-29", 77],
["2006-10-30", 130],
["2006-10-31", 154],
["2006-11-01", 96],
["2006-11-02", 98],
["2006-11-03", 140],
["2006-11-04", 180],
["2006-11-05", 48],
["2006-11-06", 93],
["2006-11-07", 94],
["2006-11-08", 148],
["2006-11-09", 61],
["2006-11-10", 89],
["2006-11-11", 72],
["2006-11-12", 136],
["2006-11-13", 98],
["2006-11-14", 37],
["2006-11-15", 78],
["2006-11-16", 99],
["2006-11-17", 100],
["2006-11-18", 129],
["2006-11-19", 147],
["2006-11-20", 249],
["2006-11-21", 414],
["2006-11-22", 97],
["2006-11-23", 74],
["2006-11-24", 153],
["2006-11-25", 124],
["2006-11-26", 129],
["2006-11-27", 47],
["2006-11-28", 58],
["2006-11-29", 61],
["2006-11-30", 96],
["2006-12-01", 88],
["2006-12-02", 49],
["2006-12-03", 66],
["2006-12-04", 111],
["2006-12-05", 94],
["2006-12-06", 78],
["2006-12-07", 86],
["2006-12-08", 97],
["2006-12-09", 81],
["2006-12-10", 105],
["2006-12-11", 256],
["2006-12-12", 500],
["2006-12-13", 88],
["2006-12-14", 118],
["2006-12-15", 94],
["2006-12-16", 76],
["2006-12-17", 52],
["2006-12-18", 100],
["2006-12-19", 140],
["2006-12-20", 180],
["2006-12-21", 180],
["2006-12-22", 88],
["2006-12-23", 95],
["2006-12-24", 85],
["2006-12-25", 136],
["2006-12-26", 160],
["2006-12-27", 80],
["2006-12-28", 47],
["2006-12-29", 90],
["2006-12-30", 157],
["2006-12-31", 139],
["2007-01-01", 158],
["2007-01-02", 150],
["2007-01-03", 133],
["2007-01-04", 170],
["2007-01-05", 322],
["2007-01-06", 73],
["2007-01-07", 47],
["2007-01-08", 45],
["2007-01-09", 86],
["2007-01-10", 98],
["2007-01-11", 75],
["2007-01-12", 56],
["2007-01-13", 74],
["2007-01-14", 102],
["2007-01-15", 170],
["2007-01-16", 64],
["2007-01-17", 84],
["2007-01-18", 52],
["2007-01-19", 93],
["2007-01-20", 147],
["2007-01-21", 98],
["2007-01-22", 58],
["2007-01-23", 96],
["2007-01-24", 118],
["2007-01-25", 140],
["2007-01-26", 68],
["2007-01-27", 55],
["2007-01-28", 55],
["2007-01-29", 114],
["2007-01-30", 85],
["2007-01-31", 76],
["2007-02-01", 50],
["2007-02-02", 100],
["2007-02-03", 115],
["2007-02-04", 93],
["2007-02-05", 175],
["2007-02-06", 67],
["2007-02-07", 110],
["2007-02-08", 99],
["2007-02-09", 67],
["2007-02-10", 61],
["2007-02-11", 55],
["2007-02-12", 103],
["2007-02-13", 181],
["2007-02-14", 74],
["2007-02-15", 75],
["2007-02-16", 97],
["2007-02-17", 98],
["2007-02-18", 115],
["2007-02-19", 99],
["2007-02-20", 160],
["2007-02-21", 200],
["2007-02-22", 173],
["2007-02-23", 78],
["2007-02-24", 75],
["2007-02-25", 123],
["2007-02-26", 169],
["2007-02-27", 172],
["2007-02-28", 108],
["2007-03-01", 98],
["2007-03-02", 85],
["2007-03-03", 87],
["2007-03-04", 28],
["2007-03-05", 34],
["2007-03-06", 35],
["2007-03-07", 51],
["2007-03-08", 54],
["2007-03-09", 105],
["2007-03-10", 75],
["2007-03-11", 34],
["2007-03-12", 68],
["2007-03-13", 133],
["2007-03-14", 157],
["2007-03-15", 106],
["2007-03-16", 78],
["2007-03-17", 100],
["2007-03-18", 121],
["2007-03-19", 119],
["2007-03-21", 138],
["2007-03-22", 145],
["2007-03-23", 202],
["2007-03-24", 192],
["2007-03-25", 79],
["2007-03-26", 78],
["2007-03-27", 84],
["2007-03-28", 98],
["2007-03-29", 99],
["2007-03-30", 66],
["2007-03-31", 103],
["2007-04-01", 63],
["2007-04-02", 48],
["2007-04-03", 40],
["2007-04-04", 95],
["2007-04-05", 110],
["2007-04-06", 148],
["2007-04-07", 46],
["2007-04-08", 43],
["2007-04-09", 96],
["2007-04-10", 133],
["2007-04-11", 88],
["2007-04-12", 107],
["2007-04-13", 55],
["2007-04-14", 74],
["2007-04-15", 72],
["2007-04-16", 81],
["2007-04-17", 74],
["2007-04-18", 100],
["2007-04-19", 173],
["2007-04-20", 155],
["2007-04-21", 62],
["2007-04-22", 58],
["2007-04-23", 81],
["2007-04-24", 78],
["2007-04-25", 72],
["2007-04-26", 90],
["2007-04-27", 113],
["2007-04-28", 115],
["2007-04-29", 190],
["2007-04-30", 151],
["2007-05-01", 61],
["2007-05-02", 87],
["2007-05-03", 96],
["2007-05-04", 97],
["2007-05-05", 123],
["2007-05-06", 91],
["2007-05-07", 139],
["2007-05-08", 147],
["2007-05-09", 98],
["2007-05-10", 116],
["2007-05-11", 116],
["2007-05-12", 99],
["2007-05-13", 100],
["2007-05-14", 72],
["2007-05-15", 97],
["2007-05-16", 100],
["2007-05-17", 84],
["2007-05-18", 58],
["2007-05-19", 60],
["2007-05-20", 98],
["2007-05-21", 82],
["2007-05-22", 116],
["2007-05-23", 60],
["2007-05-24", 169],
["2007-05-25", 250],
["2007-05-26", 98],
["2007-05-27", 118],
["2007-05-28", 96],
["2007-05-29", 98],
["2007-05-30", 126],
["2007-05-31", 119],
["2007-06-01", 75],
["2007-06-02", 107],
["2007-06-03", 99],
["2007-06-04", 84],
["2007-06-05", 99],
["2007-06-06", 146],
["2007-06-07", 195],
["2007-06-08", 194],
["2007-06-09", 134],
["2007-06-10", 97],
["2007-06-11", 136],
["2007-06-12", 168],
["2007-06-13", 142],
["2007-06-14", 52],
["2007-06-15", 91],
["2007-06-16", 98],
["2007-06-17", 123],
["2007-06-18", 138],
["2007-06-19", 202],
["2007-06-20", 151],
["2007-06-21", 123],
["2007-06-22", 85],
["2007-06-23", 121],
["2007-06-24", 97],
["2007-06-25", 72],
["2007-06-26", 98],
["2007-06-27", 135],
["2007-06-28", 52],
["2007-06-29", 95],
["2007-06-30", 87],
["2007-07-01", 28],
["2007-07-02", 77],
["2007-07-03", 99],
["2007-07-04", 82],
["2007-07-06", 145],
["2007-07-07", 80],
["2007-07-08", 75],
["2007-07-09", 115],
["2007-07-10", 58],
["2007-07-11", 65],
["2007-07-12", 78],
["2007-07-13", 74],
["2007-07-14", 83],
["2007-07-15", 93],
["2007-07-16", 96],
["2007-07-17", 169],
["2007-07-18", 98],
["2007-07-19", 47],
["2007-07-20", 76],
["2007-07-21", 98],
["2007-07-22", 99],
["2007-07-23", 117],
["2007-07-24", 99],
["2007-07-25", 119],
["2007-07-26", 151],
["2007-07-27", 150],
["2007-07-28", 98],
["2007-07-29", 80],
["2007-07-30", 138],
["2007-07-31", 26],
["2007-08-01", 52],
["2007-08-02", 42],
["2007-08-03", 70],
["2007-08-04", 85],
["2007-08-05", 98],
["2007-08-06", 107],
["2007-08-07", 93],
["2007-08-08", 88],
["2007-08-09", 86],
["2007-08-10", 79],
["2007-08-11", 74],
["2007-08-12", 66],
["2007-08-13", 56],
["2007-08-14", 76],
["2007-08-15", 86],
["2007-08-16", 115],
["2007-08-17", 91],
["2007-08-18", 93],
["2007-08-19", 95],
["2007-08-20", 95],
["2007-08-21", 116],
["2007-08-22", 88],
["2007-08-23", 77],
["2007-08-24", 83],
["2007-08-25", 95],
["2007-08-26", 78],
["2007-08-27", 49],
["2007-08-28", 78],
["2007-08-29", 64],
["2007-08-30", 75],
["2007-08-31", 98],
["2007-09-01", 108],
["2007-09-02", 95],
["2007-09-03", 73],
["2007-09-04", 77],
["2007-09-05", 94],
["2007-09-06", 100],
["2007-09-07", 98],
["2007-09-08", 94],
["2007-09-09", 98],
["2007-09-10", 142],
["2007-09-11", 171],
["2007-09-12", 133],
["2007-09-13", 97],
["2007-09-14", 58],
["2007-09-15", 66],
["2007-09-16", 99],
["2007-09-17", 138],
["2007-09-18", 60],
["2007-09-19", 24],
["2007-09-20", 62],
["2007-09-21", 79],
["2007-09-22", 99],
["2007-09-23", 97],
["2007-09-24", 98],
["2007-09-25", 95],
["2007-09-26", 80],
["2007-09-27", 40],
["2007-09-28", 63],
["2007-09-29", 80],
["2007-09-30", 64],
["2007-10-01", 75],
["2007-10-02", 52],
["2007-10-03", 78],
["2007-10-04", 94],
["2007-10-05", 34],
["2007-10-06", 48],
["2007-10-07", 28],
["2007-10-08", 22],
["2007-10-09", 44],
["2007-10-10", 69],
["2007-10-11", 88],
["2007-10-12", 119],
["2007-10-13", 95],
["2007-10-14", 35],
["2007-10-15", 53],
["2007-10-16", 66],
["2007-10-17", 95],
["2007-10-18", 82],
["2007-10-19", 49],
["2007-10-20", 60],
["2007-10-21", 98],
["2007-10-22", 100],
["2007-10-23", 89],
["2007-10-24", 96],
["2007-10-25", 143],
["2007-10-26", 184],
["2007-10-27", 179],
["2007-10-28", 27],
["2007-10-29", 47],
["2007-10-30", 121],
["2007-10-31", 95],
["2007-11-01", 18],
["2007-11-02", 83],
["2007-11-03", 57],
["2007-11-04", 76],
["2007-11-05", 119],
["2007-11-06", 172],
["2007-11-07", 253],
["2007-11-08", 186],
["2007-11-09", 35],
["2007-11-10", 54],
["2007-11-11", 90],
["2007-11-12", 159],
["2007-11-13", 153],
["2007-11-14", 58],
["2007-11-15", 31],
["2007-11-16", 76],
["2007-11-17", 112],
["2007-11-18", 45],
["2007-11-19", 88],
["2007-11-20", 83],
["2007-11-21", 98],
["2007-11-22", 144],
["2007-11-23", 119],
["2007-11-24", 117],
["2007-11-25", 269],
["2007-11-26", 55],
["2007-11-27", 85],
["2007-11-28", 100],
["2007-11-29", 81],
["2007-11-30", 78],
["2007-12-01", 136],
["2007-12-02", 96],
["2007-12-03", 71],
["2007-12-04", 68],
["2007-12-05", 88],
["2007-12-06", 129],
["2007-12-07", 54],
["2007-12-08", 77],
["2007-12-09", 118],
["2007-12-11", 110],
["2007-12-12", 46],
["2007-12-13", 56],
["2007-12-14", 91],
["2007-12-15", 59],
["2007-12-16", 89],
["2007-12-17", 78],
["2007-12-18", 104],
["2007-12-19", 155],
["2007-12-20", 153],
["2007-12-21", 114],
["2007-12-22", 166],
["2007-12-23", 98],
["2007-12-24", 124],
["2007-12-25", 280],
["2007-12-26", 269],
["2007-12-27", 421],
["2007-12-28", 500],
["2007-12-29", 156],
["2007-12-30", 72],
["2007-12-31", 58],
["2008-01-01", 32],
["2008-01-02", 57],
["2008-01-03", 75],
["2008-01-04", 90],
["2008-01-05", 147],
["2008-01-06", 146],
["2008-01-07", 115],
["2008-01-08", 121],
["2008-01-09", 94],
["2008-01-10", 95],
["2008-01-11", 113],
["2008-01-12", 46],
["2008-01-13", 39],
["2008-01-14", 87],
["2008-01-15", 119],
["2008-01-16", 72],
["2008-01-17", 80],
["2008-01-18", 122],
["2008-01-19", 149],
["2008-01-20", 134],
["2008-01-21", 66],
["2008-01-22", 79],
["2008-01-23", 51],
["2008-01-24", 50],
["2008-01-25", 54],
["2008-01-26", 67],
["2008-01-27", 70],
["2008-01-28", 77],
["2008-01-29", 48],
["2008-01-30", 44],
["2008-01-31", 45],
["2008-02-01", 57],
["2008-02-02", 64],
["2008-02-03", 52],
["2008-02-04", 65],
["2008-02-05", 83],
["2008-02-06", 35],
["2008-02-08", 37],
["2008-02-09", 38],
["2008-02-10", 64],
["2008-02-11", 61],
["2008-02-12", 64],
["2008-02-13", 55],
["2008-02-14", 55],
["2008-02-15", 68],
["2008-02-16", 69],
["2008-02-17", 70],
["2008-02-18", 72],
["2008-02-19", 111],
["2008-02-20", 88],
["2008-02-21", 152],
["2008-02-22", 160],
["2008-02-23", 85],
["2008-02-25", 65],
["2008-02-26", 78],
["2008-02-27", 75],
["2008-02-28", 84],
["2008-02-29", 82],
["2008-03-01", 82],
["2008-03-02", 126],
["2008-03-03", 46],
["2008-03-04", 55],
["2008-03-05", 86],
["2008-03-06", 80],
["2008-03-08", 129],
["2008-03-09", 158],
["2008-03-10", 238],
["2008-03-11", 174],
["2008-03-12", 128],
["2008-03-13", 99],
["2008-03-14", 82],
["2008-03-15", 110],
["2008-03-16", 72],
["2008-03-17", 126],
["2008-03-18", 304],
["2008-03-19", 286],
["2008-03-20", 147],
["2008-03-21", 98],
["2008-03-22", 120],
["2008-03-23", 69],
["2008-03-24", 76],
["2008-03-25", 52],
["2008-03-26", 46],
["2008-03-27", 55],
["2008-03-28", 74],
["2008-03-29", 59],
["2008-03-30", 81],
["2008-03-31", 53],
["2008-04-01", 90],
["2008-04-02", 63],
["2008-04-03", 55],
["2008-04-04", 88],
["2008-04-05", 145],
["2008-04-06", 161],
["2008-04-07", 131],
["2008-04-08", 177],
["2008-04-09", 93],
["2008-04-10", 94],
["2008-04-11", 65],
["2008-04-12", 79],
["2008-04-13", 71],
["2008-04-14", 98],
["2008-04-15", 129],
["2008-04-16", 173],
["2008-04-17", 159],
["2008-04-18", 139],
["2008-04-19", 138],
["2008-04-20", 97],
["2008-04-21", 19],
["2008-04-22", 32],
["2008-04-23", 43],
["2008-04-24", 76],
["2008-04-25", 100],
["2008-04-26", 72],
["2008-04-27", 79],
["2008-04-28", 94],
["2008-04-29", 176],
["2008-04-30", 155],
["2008-05-01", 140],
["2008-05-02", 144],
["2008-05-03", 185],
["2008-05-04", 32],
["2008-05-05", 81],
["2008-05-06", 134],
["2008-05-07", 138],
["2008-05-08", 95],
["2008-05-09", 89],
["2008-05-10", 62],
["2008-05-11", 54],
["2008-05-12", 24],
["2008-05-13", 57],
["2008-05-14", 87],
["2008-05-15", 77],
["2008-05-16", 107],
["2008-05-17", 117],
["2008-05-18", 91],
["2008-05-19", 83],
["2008-05-20", 112],
["2008-05-21", 408],
["2008-05-22", 153],
["2008-05-23", 186],
["2008-05-24", 161],
["2008-05-25", 121],
["2008-05-26", 138],
["2008-05-27", 463],
["2008-05-28", 253],
["2008-05-29", 395],
["2008-05-30", 95],
["2008-05-31", 115],
["2008-06-01", 92],
["2008-06-02", 50],
["2008-06-03", 74],
["2008-06-05", 78],
["2008-06-06", 94],
["2008-06-07", 81],
["2008-06-08", 126],
["2008-06-09", 97],
["2008-06-10", 100],
["2008-06-11", 80],
["2008-06-12", 89],
["2008-06-13", 105],
["2008-06-14", 96],
["2008-06-15", 93],
["2008-06-16", 84],
["2008-06-17", 55],
["2008-06-18", 61],
["2008-06-19", 120],
["2008-06-20", 165],
["2008-06-21", 81],
["2008-06-22", 125],
["2008-06-23", 81],
["2008-06-24", 75],
["2008-06-25", 109],
["2008-06-26", 87],
["2008-06-27", 88],
["2008-06-28", 89],
["2008-06-30", 98],
["2008-07-01", 72],
["2008-07-02", 61],
["2008-07-03", 92],
["2008-07-04", 100],
["2008-07-05", 66],
["2008-07-06", 39],
["2008-07-07", 69],
["2008-07-08", 98],
["2008-07-09", 62],
["2008-07-10", 85],
["2008-07-11", 112],
["2008-07-12", 74],
["2008-07-13", 59],
["2008-07-14", 84],
["2008-07-15", 31],
["2008-07-16", 66],
["2008-07-17", 77],
["2008-07-18", 66],
["2008-07-19", 64],
["2008-07-20", 55],
["2008-07-21", 64],
["2008-07-22", 66],
["2008-07-23", 89],
["2008-07-24", 113],
["2008-07-25", 109],
["2008-07-26", 118],
["2008-07-27", 113],
["2008-07-28", 96],
["2008-07-29", 90],
["2008-07-30", 43],
["2008-07-31", 69],
["2008-08-01", 27],
["2008-08-02", 34],
["2008-08-03", 35],
["2008-08-04", 83],
["2008-08-05", 88],
["2008-08-06", 85],
["2008-08-07", 95],
["2008-08-08", 94],
["2008-08-09", 78],
["2008-08-10", 82],
["2008-08-11", 37],
["2008-08-12", 32],
["2008-08-13", 60],
["2008-08-14", 61],
["2008-08-15", 17],
["2008-08-16", 23],
["2008-08-16", 84],
["2008-08-17", 42],
["2008-08-18", 25],
["2008-08-19", 42],
["2008-08-20", 53],
["2008-08-21", 60],
["2008-08-22", 36],
["2008-08-23", 41],
["2008-08-24", 45],
["2008-08-25", 67],
["2008-08-26", 64],
["2008-08-27", 56],
["2008-08-28", 79],
["2008-08-29", 110],
["2008-08-30", 64],
["2008-08-31", 24],
["2008-09-01", 25],
["2008-09-02", 37],
["2008-09-03", 72],
["2008-09-04", 57],
["2008-09-05", 58],
["2008-09-06", 59],
["2008-09-07", 86],
["2008-09-08", 49],
["2008-09-09", 64],
["2008-09-10", 51],
["2008-09-11", 46],
["2008-09-12", 58],
["2008-09-13", 57],
["2008-09-14", 56],
["2008-09-15", 58],
["2008-09-16", 63],
["2008-09-17", 62],
["2008-09-19", 66],
["2008-09-20", 59],
["2008-09-21", 88],
["2008-09-22", 59],
["2008-09-23", 12],
["2008-09-24", 26],
["2008-09-25", 30],
["2008-09-26", 17],
["2008-09-28", 71],
["2008-09-29", 83],
["2008-09-30", 106],
["2008-10-01", 104],
["2008-10-02", 126],
["2008-10-03", 108],
["2008-10-04", 63],
["2008-10-05", 49],
["2008-10-06", 25],
["2008-10-07", 58],
["2008-10-08", 75],
["2008-10-09", 47],
["2008-10-10", 58],
["2008-10-11", 44],
["2008-10-12", 59],
["2008-10-13", 92],
["2008-10-14", 114],
["2008-10-15", 85],
["2008-10-16", 61],
["2008-10-17", 93],
["2008-10-18", 174],
["2008-10-19", 86],
["2008-10-20", 86],
["2008-10-21", 134],
["2008-10-22", 111],
["2008-10-23", 43],
["2008-10-24", 14],
["2008-10-25", 58],
["2008-10-26", 32],
["2008-10-27", 32],
["2008-10-28", 67],
["2008-10-29", 80],
["2008-10-30", 58],
["2008-10-31", 79],
["2008-11-01", 71],
["2008-11-02", 60],
["2008-11-03", 54],
["2008-11-04", 68],
["2008-11-05", 109],
["2008-11-06", 97],
["2008-11-07", 55],
["2008-11-08", 65],
["2008-11-09", 86],
["2008-11-10", 94],
["2008-11-11", 131],
["2008-11-12", 186],
["2008-11-13", 161],
["2008-11-14", 34],
["2008-11-15", 120],
["2008-11-16", 54],
["2008-11-17", 46],
["2008-11-18", 28],
["2008-11-19", 40],
["2008-11-20", 103],
["2008-11-21", 52],
["2008-11-22", 91],
["2008-11-23", 95],
["2008-11-24", 97],
["2008-11-25", 59],
["2008-11-26", 89],
["2008-11-27", 40],
["2008-11-28", 77],
["2008-11-29", 53],
["2008-11-30", 84],
["2008-12-01", 146],
["2008-12-02", 87],
["2008-12-03", 144],
["2008-12-04", 51],
["2008-12-05", 59],
["2008-12-06", 51],
["2008-12-07", 112],
["2008-12-08", 169],
["2008-12-09", 246],
["2008-12-10", 162],
["2008-12-11", 96],
["2008-12-12", 154],
["2008-12-13", 57],
["2008-12-14", 86],
["2008-12-15", 109],
["2008-12-16", 135],
["2008-12-17", 134],
["2008-12-18", 46],
["2008-12-19", 98],
["2008-12-20", 45],
["2008-12-21", 67],
["2008-12-22", 49],
["2008-12-23", 89],
["2008-12-24", 115],
["2008-12-25", 55],
["2008-12-26", 66],
["2008-12-27", 129],
["2008-12-28", 134],
["2008-12-29", 69],
["2008-12-30", 36],
["2008-12-31", 29],
["2009-01-01", 42],
["2009-01-02", 79],
["2009-01-03", 90],
["2009-01-04", 69],
["2009-01-05", 64],
["2009-01-06", 71],
["2009-01-07", 56],
["2009-01-08", 100],
["2009-01-09", 32],
["2009-01-10", 54],
["2009-01-11", 51],
["2009-01-12", 36],
["2009-01-13", 59],
["2009-01-14", 43],
["2009-01-15", 72],
["2009-01-16", 90],
["2009-01-17", 74],
["2009-01-18", 97],
["2009-01-19", 76],
["2009-01-20", 137],
["2009-01-21", 109],
["2009-01-22", 117],
["2009-01-23", 97],
["2009-01-24", 67],
["2009-01-25", 48],
["2009-01-26", 88],
["2009-01-27", 95],
["2009-01-28", 129],
["2009-01-29", 135],
["2009-01-30", 131],
["2009-01-31", 133],
["2009-02-01", 91],
["2009-02-02", 107],
["2009-02-03", 87],
["2009-02-04", 80],
["2009-02-05", 98],
["2009-02-06", 78],
["2009-02-07", 90],
["2009-02-08", 71],
["2009-02-09", 112],
["2009-02-10", 307],
["2009-02-11", 89],
["2009-02-12", 139],
["2009-02-13", 82],
["2009-02-14", 72],
["2009-02-15", 53],
["2009-02-16", 55],
["2009-02-17", 56],
["2009-02-19", 64],
["2009-02-20", 99],
["2009-02-21", 86],
["2009-02-22", 80],
["2009-02-23", 59],
["2009-02-24", 84],
["2009-02-25", 36],
["2009-02-26", 68],
["2009-02-27", 96],
["2009-02-28", 67],
["2009-03-01", 93],
["2009-03-02", 59],
["2009-03-03", 98],
["2009-03-04", 161],
["2009-03-05", 96],
["2009-03-06", 19],
["2009-03-07", 73],
["2009-03-08", 119],
["2009-03-09", 64],
["2009-03-10", 74],
["2009-03-11", 85],
["2009-03-12", 88],
["2009-03-13", 99],
["2009-03-14", 81],
["2009-03-15", 119],
["2009-03-16", 100],
["2009-03-17", 169],
["2009-03-18", 268],
["2009-03-19", 195],
["2009-03-20", 80],
["2009-03-21", 82],
["2009-03-22", 77],
["2009-03-23", 64],
["2009-03-24", 59],
["2009-03-25", 44],
["2009-03-26", 58],
["2009-03-27", 79],
["2009-03-28", 69],
["2009-03-29", 69],
["2009-03-30", 71],
["2009-03-31", 51],
["2009-04-01", 27],
["2009-04-02", 72],
["2009-04-03", 91],
["2009-04-04", 96],
["2009-04-05", 72],
["2009-04-06", 53],
["2009-04-07", 94],
["2009-04-08", 140],
["2009-04-09", 117],
["2009-04-10", 115],
["2009-04-11", 113],
["2009-04-12", 122],
["2009-04-13", 148],
["2009-04-14", 75],
["2009-04-15", 81],
["2009-04-16", 69],
["2009-04-17", 84],
["2009-04-18", 116],
["2009-04-19", 97],
["2009-04-20", 63],
["2009-04-21", 34],
["2009-04-22", 59],
["2009-04-23", 70],
["2009-04-24", 77],
["2009-04-25", 54],
["2009-04-26", 34],
["2009-04-27", 57],
["2009-04-28", 78],
["2009-04-29", 73],
["2009-04-30", 95],
["2009-05-01", 95],
["2009-05-02", 54],
["2009-05-03", 82],
["2009-05-04", 96],
["2009-05-05", 106],
["2009-05-06", 100],
["2009-05-07", 109],
["2009-05-08", 125],
["2009-05-09", 106],
["2009-05-10", 57],
["2009-05-11", 72],
["2009-05-12", 75],
["2009-05-13", 63],
["2009-05-14", 91],
["2009-05-15", 64],
["2009-05-16", 81],
["2009-05-17", 78],
["2009-05-18", 90],
["2009-05-19", 97],
["2009-05-20", 98],
["2009-05-21", 85],
["2009-05-22", 27],
["2009-05-23", 65],
["2009-05-24", 95],
["2009-05-25", 128],
["2009-05-26", 81],
["2009-05-27", 105],
["2009-05-28", 94],
["2009-05-29", 59],
["2009-05-30", 45],
["2009-05-31", 56],
["2009-06-01", 79],
["2009-06-02", 55],
["2009-06-03", 61],
["2009-06-04", 71],
["2009-06-05", 68],
["2009-06-06", 67],
["2009-06-07", 63],
["2009-06-08", 77],
["2009-06-09", 34],
["2009-06-10", 21],
["2009-06-11", 66],
["2009-06-12", 60],
["2009-06-13", 58],
["2009-06-14", 61],
["2009-06-15", 70],
["2009-06-16", 89],
["2009-06-17", 75],
["2009-06-18", 104],
["2009-06-20", 165],
["2009-06-21", 98],
["2009-06-22", 42],
["2009-06-23", 60],
["2009-06-24", 67],
["2009-06-25", 81],
["2009-06-26", 104],
["2009-06-27", 116],
["2009-06-28", 96],
["2009-06-29", 90],
["2009-06-30", 48],
["2009-07-01", 30],
["2009-07-02", 51],
["2009-07-03", 73],
["2009-07-04", 103],
["2009-07-05", 110],
["2009-07-06", 70],
["2009-07-07", 93],
["2009-07-08", 85],
["2009-07-09", 48],
["2009-07-10", 79],
["2009-07-11", 94],
["2009-07-12", 72],
["2009-07-13", 104],
["2009-07-14", 57],
["2009-07-15", 71],
["2009-07-16", 100],
["2009-07-17", 60],
["2009-07-18", 45],
["2009-07-19", 74],
["2009-07-20", 69],
["2009-07-21", 60],
["2009-07-22", 101],
["2009-07-23", 64],
["2009-07-24", 36],
["2009-07-25", 29],
["2009-07-26", 59],
["2009-07-27", 81],
["2009-07-28", 79],
["2009-07-29", 107],
["2009-07-30", 109],
["2009-07-31", 71],
["2009-08-01", 89],
["2009-08-02", 59],
["2009-08-03", 75],
["2009-08-04", 97],
["2009-08-05", 74],
["2009-08-06", 58],
["2009-08-07", 74],
["2009-08-08", 75],
["2009-08-09", 81],
["2009-08-10", 60],
["2009-08-11", 75],
["2009-08-12", 68],
["2009-08-13", 82],
["2009-08-14", 123],
["2009-08-15", 115],
["2009-08-16", 113],
["2009-08-17", 63],
["2009-08-18", 76],
["2009-08-19", 77],
["2009-08-20", 38],
["2009-08-21", 62],
["2009-08-22", 58],
["2009-08-23", 71],
["2009-08-24", 97],
["2009-08-25", 90],
["2009-08-26", 97],
["2009-08-27", 69],
["2009-08-28", 36],
["2009-08-29", 61],
["2009-08-30", 69],
["2009-08-31", 78],
["2009-09-01", 88],
["2009-09-02", 98],
["2009-09-03", 109],
["2009-09-04", 99],
["2009-09-05", 92],
["2009-09-06", 32],
["2009-09-07", 20],
["2009-09-08", 51],
["2009-09-09", 66],
["2009-09-10", 77],
["2009-09-11", 69],
["2009-09-12", 42],
["2009-09-13", 65],
["2009-09-14", 91],
["2009-09-15", 72],
["2009-09-16", 93],
["2009-09-17", 117],
["2009-09-18", 121],
["2009-09-19", 75],
["2009-09-20", 101],
["2009-09-21", 111],
["2009-09-22", 79],
["2009-09-23", 90],
["2009-09-24", 108],
["2009-09-25", 130],
["2009-09-26", 98],
["2009-09-27", 66],
["2009-09-28", 74],
["2009-09-29", 97],
["2009-09-30", 112],
["2009-10-01", 88],
["2009-10-02", 22],
["2009-10-03", 29],
["2009-10-04", 44],
["2009-10-05", 69],
["2009-10-06", 83],
["2009-10-07", 74],
["2009-10-08", 72],
["2009-10-09", 88],
["2009-10-10", 73],
["2009-10-11", 94],
["2009-10-12", 108],
["2009-10-13", 37],
["2009-10-14", 42],
["2009-10-15", 72],
["2009-10-16", 114],
["2009-10-17", 57],
["2009-10-18", 92],
["2009-10-19", 90],
["2009-10-20", 77],
["2009-10-21", 76],
["2009-10-22", 100],
["2009-10-23", 111],
["2009-10-24", 141],
["2009-10-25", 147],
["2009-10-26", 77],
["2009-10-27", 68],
["2009-10-28", 100],
["2009-10-29", 137],
["2009-10-30", 120],
["2009-10-31", 51],
["2009-11-01", 48],
["2009-11-02", 12],
["2009-11-03", 66],
["2009-11-04", 111],
["2009-11-05", 136],
["2009-11-06", 186],
["2009-11-07", 276],
["2009-11-08", 259],
["2009-11-09", 84],
["2009-11-10", 20],
["2009-11-11", 34],
["2009-11-12", 53],
["2009-11-13", 59],
["2009-11-14", 53],
["2009-11-15", 26],
["2009-11-16", 29],
["2009-11-17", 35],
["2009-11-18", 66],
["2009-11-19", 47],
["2009-11-20", 74],
["2009-11-21", 63],
["2009-11-22", 121],
["2009-11-23", 149],
["2009-11-24", 184],
["2009-11-25", 79],
["2009-11-26", 107],
["2009-11-27", 132],
["2009-11-28", 99],
["2009-11-29", 167],
["2009-11-30", 117],
["2009-12-01", 86],
["2009-12-02", 133],
["2009-12-03", 36],
["2009-12-04", 99],
["2009-12-05", 62],
["2009-12-06", 94],
["2009-12-07", 141],
["2009-12-08", 186],
["2009-12-10", 167],
["2009-12-11", 147],
["2009-12-12", 31],
["2009-12-13", 80],
["2009-12-14", 96],
["2009-12-15", 49],
["2009-12-16", 55],
["2009-12-17", 45],
["2009-12-18", 42],
["2009-12-19", 44],
["2009-12-20", 48],
["2009-12-21", 63],
["2009-12-22", 94],
["2009-12-23", 93],
["2009-12-24", 133],
["2009-12-25", 500],
["2009-12-26", 96],
["2009-12-27", 94],
["2009-12-28", 89],
["2009-12-29", 160],
["2009-12-30", 55],
["2009-12-31", 55],
["2010-01-01", 91],
["2010-01-02", 105],
["2010-01-03", 90],
["2010-01-04", 49],
["2010-01-05", 47],
["2010-01-06", 59],
["2010-01-07", 64],
["2010-01-08", 80],
["2010-01-09", 100],
["2010-01-10", 60],
["2010-01-11", 52],
["2010-01-12", 30],
["2010-01-13", 54],
["2010-01-14", 76],
["2010-01-15", 58],
["2010-01-16", 85],
["2010-01-17", 124],
["2010-01-18", 143],
["2010-01-19", 183],
["2010-01-20", 140],
["2010-01-21", 24],
["2010-01-22", 57],
["2010-01-23", 78],
["2010-01-24", 66],
["2010-01-25", 99],
["2010-01-26", 76],
["2010-01-27", 128],
["2010-01-28", 63],
["2010-01-29", 43],
["2010-01-30", 58],
["2010-01-31", 56],
["2010-02-01", 65],
["2010-02-02", 61],
["2010-02-03", 54],
["2010-02-04", 54],
["2010-02-05", 63],
["2010-02-06", 70],
["2010-02-07", 61],
["2010-02-08", 87],
["2010-02-09", 109],
["2010-02-10", 50],
["2010-02-11", 23],
["2010-02-12", 31],
["2010-02-14", 137],
["2010-02-15", 38],
["2010-02-16", 52],
["2010-02-17", 94],
["2010-02-18", 58],
["2010-02-19", 98],
["2010-02-20", 87],
["2010-02-21", 118],
["2010-02-22", 82],
["2010-02-23", 92],
["2010-02-24", 152],
["2010-02-25", 153],
["2010-02-26", 76],
["2010-02-27", 65],
["2010-02-28", 80],
["2010-03-01", 56],
["2010-03-02", 72],
["2010-03-03", 113],
["2010-03-04", 140],
["2010-03-05", 97],
["2010-03-06", 27],
["2010-03-07", 71],
["2010-03-08", 68],
["2010-03-09", 25],
["2010-03-10", 56],
["2010-03-11", 89],
["2010-03-12", 98],
["2010-03-13", 76],
["2010-03-14", 90],
["2010-03-16", 77],
["2010-03-17", 66],
["2010-03-18", 76],
["2010-03-19", 145],
["2010-03-20", 500],
["2010-03-21", 136],
["2010-03-22", 245],
["2010-03-23", 157],
["2010-03-24", 92],
["2010-03-25", 60],
["2010-03-26", 83],
["2010-03-27", 110],
["2010-03-28", 82],
["2010-03-29", 100],
["2010-03-30", 159],
["2010-03-31", 94],
["2010-04-01", 99],
["2010-04-02", 63],
["2010-04-03", 73],
["2010-04-04", 147],
["2010-04-05", 125],
["2010-04-06", 56],
["2010-04-07", 77],
["2010-04-08", 147],
["2010-04-09", 163],
["2010-04-10", 69],
["2010-04-11", 77],
["2010-04-12", 66],
["2010-04-13", 61],
["2010-04-14", 59],
["2010-04-15", 93],
["2010-04-16", 147],
["2010-04-17", 94],
["2010-04-18", 109],
["2010-04-19", 150],
["2010-04-20", 74],
["2010-04-21", 60],
["2010-04-22", 31],
["2010-04-23", 40],
["2010-04-24", 72],
["2010-04-25", 100],
["2010-04-26", 45],
["2010-04-27", 50],
["2010-04-28", 52],
["2010-04-29", 46],
["2010-04-30", 54],
["2010-05-01", 90],
["2010-05-02", 116],
["2010-05-03", 97],
["2010-05-04", 149],
["2010-05-05", 119],
["2010-05-06", 17],
["2010-05-07", 86],
["2010-05-08", 145],
["2010-05-09", 144],
["2010-05-10", 146],
["2010-05-11", 58],
["2010-05-12", 59],
["2010-05-13", 78],
["2010-05-14", 95],
["2010-05-15", 133],
["2010-05-16", 121],
["2010-05-17", 52],
["2010-05-18", 53],
["2010-05-19", 61],
["2010-05-20", 75],
["2010-05-22", 127],
["2010-05-23", 122],
["2010-05-24", 91],
["2010-05-25", 46],
["2010-05-26", 76],
["2010-05-27", 82],
["2010-05-28", 63],
["2010-05-29", 84],
["2010-05-30", 39],
["2010-05-31", 58],
["2010-06-01", 69],
["2010-06-02", 68],
["2010-06-03", 83],
["2010-06-04", 88],
["2010-06-05", 96],
["2010-06-06", 114],
["2010-06-07", 118],
["2010-06-08", 98],
["2010-06-09", 86],
["2010-06-10", 64],
["2010-06-11", 58],
["2010-06-12", 81],
["2010-06-13", 82],
["2010-06-14", 66],
["2010-06-15", 95],
["2010-06-16", 77],
["2010-06-17", 56],
["2010-06-18", 47],
["2010-06-19", 77],
["2010-06-20", 71],
["2010-06-21", 71],
["2010-06-23", 77],
["2010-06-24", 83],
["2010-06-25", 99],
["2010-06-26", 112],
["2010-06-27", 93],
["2010-06-28", 94],
["2010-06-29", 123],
["2010-06-30", 100],
["2010-07-01", 118],
["2010-07-02", 40],
["2010-07-03", 63],
["2010-07-04", 86],
["2010-07-05", 66],
["2010-07-06", 54],
["2010-07-07", 73],
["2010-07-08", 80],
["2010-07-09", 74],
["2010-07-10", 59],
["2010-07-11", 68],
["2010-07-12", 73],
["2010-07-13", 84],
["2010-07-14", 78],
["2010-07-15", 89],
["2010-07-16", 115],
["2010-07-17", 84],
["2010-07-18", 87],
["2010-07-19", 121],
["2010-07-21", 63],
["2010-07-22", 90],
["2010-07-23", 123],
["2010-07-24", 88],
["2010-07-25", 100],
["2010-07-26", 121],
["2010-07-27", 139],
["2010-07-28", 100],
["2010-07-29", 119],
["2010-07-30", 113],
["2010-07-31", 92],
["2010-08-01", 48],
["2010-08-02", 68],
["2010-08-03", 83],
["2010-08-04", 98],
["2010-08-05", 26],
["2010-08-06", 31],
["2010-08-07", 71],
["2010-08-08", 57],
["2010-08-09", 94],
["2010-08-10", 90],
["2010-08-11", 94],
["2010-08-12", 64],
["2010-08-13", 83],
["2010-08-13", 83],
["2010-08-14", 84],
["2010-08-15", 57],
["2010-08-16", 66],
["2010-08-17", 94],
["2010-08-18", 137],
["2010-08-19", 73],
["2010-08-20", 76],
["2010-08-21", 56],
["2010-08-22", 23],
["2010-08-23", 54],
["2010-08-24", 87],
["2010-08-25", 65],
["2010-08-26", 66],
["2010-08-27", 52],
["2010-08-28", 55],
["2010-08-29", 76],
["2010-08-30", 79],
["2010-08-31", 78],
["2010-09-01", 67],
["2010-09-02", 54],
["2010-09-03", 73],
["2010-09-04", 64],
["2010-09-05", 80],
["2010-09-06", 87],
["2010-09-07", 95],
["2010-09-08", 67],
["2010-09-09", 89],
["2010-09-10", 75],
["2010-09-11", 49],
["2010-09-12", 67],
["2010-09-13", 84],
["2010-09-14", 97],
["2010-09-15", 134],
["2010-09-16", 122],
["2010-09-17", 62],
["2010-09-18", 19],
["2010-09-19", 50],
["2010-09-20", 60],
["2010-09-21", 23],
["2010-09-22", 24],
["2010-09-23", 52],
["2010-09-24", 72],
["2010-09-25", 93],
["2010-09-26", 84],
["2010-09-27", 57],
["2010-09-28", 32],
["2010-09-29", 65],
["2010-09-30", 92],
["2010-10-01", 125],
["2010-10-02", 88],
["2010-10-03", 17],
["2010-10-04", 36],
["2010-10-05", 63],
["2010-10-06", 95],
["2010-10-07", 186],
["2010-10-08", 192],
["2010-10-09", 177],
["2010-10-10", 202],
["2010-10-11", 70],
["2010-10-12", 27],
["2010-10-13", 65],
["2010-10-14", 58],
["2010-10-15", 30],
["2010-10-16", 80],
["2010-10-17", 65],
["2010-10-18", 80],
["2010-10-19", 50],
["2010-10-20", 66],
["2010-10-21", 83],
["2010-10-22", 95],
["2010-10-23", 103],
["2010-10-24", 96],
["2010-10-25", 17],
["2010-10-26", 15],
["2010-10-27", 63],
["2010-10-28", 92],
["2010-10-29", 67],
["2010-10-30", 62],
["2010-10-31", 70],
["2010-11-01", 65],
["2010-11-02", 36],
["2010-11-03", 86],
["2010-11-04", 81],
["2010-11-05", 86],
["2010-11-06", 107],
["2010-11-07", 142],
["2010-11-08", 34],
["2010-11-09", 34],
["2010-11-10", 85],
["2010-11-11", 139],
["2010-11-12", 51],
["2010-11-13", 66],
["2010-11-14", 39],
["2010-11-15", 34],
["2010-11-16", 96],
["2010-11-17", 122],
["2010-11-18", 243],
["2010-11-19", 313],
["2010-11-20", 165],
["2010-11-21", 192],
["2010-11-22", 37],
["2010-11-23", 100],
["2010-11-24", 141],
["2010-11-25", 42],
["2010-11-26", 88],
["2010-11-27", 130],
["2010-11-28", 72],
["2010-11-29", 143],
["2010-11-30", 132],
["2010-12-01", 177],
["2010-12-02", 199],
["2010-12-03", 52],
["2010-12-04", 97],
["2010-12-05", 125],
["2010-12-06", 37],
["2010-12-07", 65],
["2010-12-08", 81],
["2010-12-09", 97],
["2010-12-10", 176],
["2010-12-11", 50],
["2010-12-12", 85],
["2010-12-13", 72],
["2010-12-14", 31],
["2010-12-15", 53],
["2010-12-16", 92],
["2010-12-17", 105],
["2010-12-18", 156],
["2010-12-19", 182],
["2010-12-20", 100],
["2010-12-21", 165],
["2010-12-22", 222],
["2010-12-23", 30],
["2010-12-24", 40],
["2010-12-25", 57],
["2010-12-26", 66],
["2010-12-27", 82],
["2010-12-28", 70],
["2010-12-29", 63],
["2010-12-30", 67],
["2010-12-31", 47],
["2011-01-01", 34],
["2011-01-02", 41],
["2011-01-03", 82],
["2011-01-04", 96],
["2011-01-05", 55],
["2011-01-06", 35],
["2011-01-07", 36],
["2011-01-08", 78],
["2011-01-09", 35],
["2011-01-10", 34],
["2011-01-11", 67],
["2011-01-12", 49],
["2011-01-13", 90],
["2011-01-14", 73],
["2011-01-16", 35],
["2011-01-17", 62],
["2011-01-18", 30],
["2011-01-19", 39],
["2011-01-20", 36],
["2011-01-21", 61],
["2011-01-22", 76],
["2011-01-23", 50],
["2011-01-24", 35],
["2011-01-25", 61],
["2011-01-26", 41],
["2011-01-27", 59],
["2011-01-28", 41],
["2011-01-29", 30],
["2011-01-30", 25],
["2011-01-31", 48],
["2011-02-01", 53],
["2011-02-02", 58],
["2011-02-03", 83],
["2011-02-04", 111],
["2011-02-05", 75],
["2011-02-06", 84],
["2011-02-07", 77],
["2011-02-09", 83],
["2011-02-10", 58],
["2011-02-11", 58],
["2011-02-12", 21],
["2011-02-13", 53],
["2011-02-14", 41],
["2011-02-15", 74],
["2011-02-16", 146],
["2011-02-17", 132],
["2011-02-18", 115],
["2011-02-19", 112],
["2011-02-20", 100],
["2011-02-21", 333],
["2011-02-22", 270],
["2011-02-23", 208],
["2011-02-25", 56],
["2011-02-26", 56],
["2011-02-27", 60],
["2011-02-28", 30],
["2011-03-01", 21],
["2011-03-02", 33],
["2011-03-03", 34],
["2011-03-04", 59],
["2011-03-05", 77],
["2011-03-06", 65],
["2011-03-07", 26],
["2011-03-08", 41],
["2011-03-09", 33],
["2011-03-10", 64],
["2011-03-11", 58],
["2011-03-12", 135],
["2011-03-13", 197],
["2011-03-14", 54],
["2011-03-15", 56],
["2011-03-16", 72],
["2011-03-17", 98],
["2011-03-18", 161],
["2011-03-19", 123],
["2011-03-20", 250],
["2011-03-21", 121],
["2011-03-22", 67],
["2011-03-23", 51],
["2011-03-24", 51],
["2011-03-25", 48],
["2011-03-26", 78],
["2011-03-27", 41],
["2011-03-28", 71],
["2011-03-29", 86],
["2011-03-30", 98],
["2011-03-31", 140],
["2011-04-01", 137],
["2011-04-02", 38],
["2011-04-03", 59],
["2011-04-04", 67],
["2011-04-05", 88],
["2011-04-06", 95],
["2011-04-07", 96],
["2011-04-08", 70],
["2011-04-09", 108],
["2011-04-10", 142],
["2011-04-11", 53],
["2011-04-12", 88],
["2011-04-13", 157],
["2011-04-14", 138],
["2011-04-15", 98],
["2011-04-16", 128],
["2011-04-17", 164],
["2011-04-18", 99],
["2011-04-19", 83],
["2011-04-20", 127],
["2011-04-21", 154],
["2011-04-22", 44],
["2011-04-23", 49],
["2011-04-24", 26],
["2011-04-25", 76],
["2011-04-26", 111],
["2011-04-27", 60],
["2011-04-28", 76],
["2011-04-29", 119],
["2011-04-30", 141],
["2011-05-01", 500],
["2011-05-02", 85],
["2011-05-03", 60],
["2011-05-04", 79],
["2011-05-05", 87],
["2011-05-06", 99],
["2011-05-07", 57],
["2011-05-08", 74],
["2011-05-09", 53],
["2011-05-10", 50],
["2011-05-11", 80],
["2011-05-12", 197],
["2011-05-13", 52],
["2011-05-14", 70],
["2011-05-15", 76],
["2011-05-16", 90],
["2011-05-17", 91],
["2011-05-18", 155],
["2011-05-19", 64],
["2011-05-20", 59],
["2011-05-21", 54],
["2011-05-22", 83],
["2011-05-23", 98],
["2011-05-24", 94],
["2011-05-25", 75],
["2011-05-26", 86],
["2011-05-27", 65],
["2011-05-28", 102],
["2011-05-29", 98],
["2011-05-30", 75],
["2011-05-31", 47],
["2011-06-01", 28],
["2011-06-02", 75],
["2011-06-03", 75],
["2011-06-04", 66],
["2011-06-05", 79],
["2011-06-06", 83],
["2011-06-07", 98],
["2011-06-08", 51],
["2011-06-10", 105],
["2011-06-11", 75],
["2011-06-12", 28],
["2011-06-13", 71],
["2011-06-14", 99],
["2011-06-15", 107],
["2011-06-16", 77],
["2011-06-17", 81],
["2011-06-18", 97],
["2011-06-19", 119],
["2011-06-20", 122],
["2011-06-21", 130],
["2011-06-22", 128],
["2011-06-23", 123],
["2011-06-24", 24],
["2011-06-25", 38],
["2011-06-26", 57],
["2011-06-27", 56],
["2011-06-28", 90],
["2011-06-29", 129],
["2011-06-30", 99],
["2011-07-01", 94],
["2011-07-02", 71],
["2011-07-03", 71],
["2011-07-04", 55],
["2011-07-05", 80],
["2011-07-06", 115],
["2011-07-07", 73],
["2011-07-08", 42],
["2011-07-09", 37],
["2011-07-10", 75],
["2011-07-11", 112],
["2011-07-12", 88],
["2011-07-13", 83],
["2011-07-14", 83],
["2011-07-15", 65],
["2011-07-16", 65],
["2011-07-17", 67],
["2011-07-18", 65],
["2011-07-19", 83],
["2011-07-20", 42],
["2011-07-21", 53],
["2011-07-22", 71],
["2011-07-23", 148],
["2011-07-24", 159],
["2011-07-25", 19],
["2011-07-26", 28],
["2011-07-27", 52],
["2011-07-28", 92],
["2011-07-29", 113],
["2011-07-30", 21],
["2011-07-31", 54],
["2011-08-01", 78],
["2011-08-02", 94],
["2011-08-03", 69],
["2011-08-04", 82],
["2011-08-05", 98],
["2011-08-06", 91],
["2011-08-07", 74],
["2011-08-08", 77],
["2011-08-09", 108],
["2011-08-10", 58],
["2011-08-11", 68],
["2011-08-12", 90],
["2011-08-13", 93],
["2011-08-14", 78],
["2011-08-15", 73],
["2011-08-16", 29],
["2011-08-17", 58],
["2011-08-18", 28],
["2011-08-19", 65],
["2011-08-20", 72],
["2011-08-21", 80],
["2011-08-22", 78],
["2011-08-23", 88],
["2011-08-24", 95],
["2011-08-25", 80],
["2011-08-26", 61],
["2011-08-27", 63],
["2011-08-28", 65],
["2011-08-29", 80],
["2011-08-30", 99],
["2011-08-31", 117],
["2011-09-01", 89],
["2011-09-02", 54],
["2011-09-03", 69],
["2011-09-04", 77],
["2011-09-05", 76],
["2011-09-06", 76],
["2011-09-07", 126],
["2011-09-08", 48],
["2011-09-09", 39],
["2011-09-10", 35],
["2011-09-11", 24],
["2011-09-12", 61],
["2011-09-13", 81],
["2011-09-14", 87],
["2011-09-15", 93],
["2011-09-16", 52],
["2011-09-17", 22],
["2011-09-18", 35],
["2011-09-19", 45],
["2011-09-20", 50],
["2011-09-21", 52],
["2011-09-22", 58],
["2011-09-24", 96],
["2011-09-25", 125],
["2011-09-26", 160],
["2011-09-27", 121],
["2011-09-28", 128],
["2011-09-29", 94],
["2011-09-30", 30],
["2011-10-01", 56],
["2011-10-02", 33],
["2011-10-03", 47],
["2011-10-04", 79],
["2011-10-05", 157],
["2011-10-06", 61],
["2011-10-07", 84],
["2011-10-08", 106],
["2011-10-09", 159],
["2011-10-10", 137],
["2011-10-11", 87],
["2011-10-12", 130],
["2011-10-13", 98],
["2011-10-14", 32],
["2011-10-15", 33],
["2011-10-16", 31],
["2011-10-17", 35],
["2011-10-18", 72],
["2011-10-19", 87],
["2011-10-20", 149],
["2011-10-21", 146],
["2011-10-22", 139],
["2011-10-23", 155],
["2011-10-24", 19],
["2011-10-25", 28],
["2011-10-26", 78],
["2011-10-27", 129],
["2011-10-29", 97],
["2011-10-30", 147],
["2011-10-31", 131],
["2011-11-01", 128],
["2011-11-02", 53],
["2011-11-03", 68],
["2011-11-04", 82],
["2011-11-05", 60],
["2011-11-06", 52],
["2011-11-07", 63],
["2011-11-08", 73],
["2011-11-09", 49],
["2011-11-10", 60],
["2011-11-11", 84],
["2011-11-12", 99],
["2011-11-13", 65],
["2011-11-14", 73],
["2011-11-15", 124],
["2011-11-16", 128],
["2011-11-17", 97],
["2011-11-18", 62],
["2011-11-19", 36],
["2011-11-20", 27],
["2011-11-21", 80],
["2011-11-22", 131],
["2011-11-23", 40],
["2011-11-24", 68],
["2011-11-25", 120],
["2011-11-26", 142],
["2011-11-27", 135],
["2011-11-28", 109],
["2011-11-29", 66],
["2011-11-30", 81],
["2011-12-01", 71],
["2011-12-02", 144],
["2011-12-03", 97],
["2011-12-04", 80],
["2011-12-05", 193],
["2011-12-06", 131],
["2011-12-07", 111],
["2011-12-08", 17],
["2011-12-09", 19],
["2011-12-10", 23],
["2011-12-11", 77],
["2011-12-12", 56],
["2011-12-13", 76],
["2011-12-14", 84],
["2011-12-15", 19],
["2011-12-16", 27],
["2011-12-17", 63],
["2011-12-18", 63],
["2011-12-19", 53],
["2011-12-20", 70],
["2011-12-21", 67],
["2011-12-22", 31],
["2011-12-23", 61],
["2011-12-24", 27],
["2011-12-25", 59],
["2011-12-26", 69],
["2011-12-27", 100],
["2011-12-28", 114],
["2011-12-29", 81],
["2011-12-30", 75],
["2011-12-31", 109],
["2012-01-01", 81],
["2012-01-02", 74],
["2012-01-03", 35],
["2012-01-04", 30],
["2012-01-05", 63],
["2012-01-06", 95],
["2012-01-07", 65],
["2012-01-08", 89],
["2012-01-09", 102],
["2012-01-10", 161],
["2012-01-11", 25],
["2012-01-12", 86],
["2012-01-13", 79],
["2012-01-14", 60],
["2012-01-15", 70],
["2012-01-16", 106],
["2012-01-17", 111],
["2012-01-18", 193],
["2012-01-19", 269],
["2012-01-20", 131],
["2012-01-21", 21],
["2012-01-22", 23],
["2012-01-23", 149],
["2012-01-24", 49],
["2012-01-25", 45],
["2012-01-26", 78],
["2012-01-27", 67],
["2012-01-28", 74],
["2012-01-29", 62],
["2012-01-30", 66],
["2012-01-31", 92],
["2012-02-01", 30],
["2012-02-02", 26],
["2012-02-03", 60],
["2012-02-04", 52],
["2012-02-05", 84],
["2012-02-06", 112],
["2012-02-07", 64],
["2012-02-08", 34],
["2012-02-09", 58],
["2012-02-10", 49],
["2012-02-11", 73],
["2012-02-12", 75],
["2012-02-13", 100],
["2012-02-14", 125],
["2012-02-15", 62],
["2012-02-16", 61],
["2012-02-17", 34],
["2012-02-18", 29],
["2012-02-19", 68],
["2012-02-20", 73],
["2012-02-21", 118],
["2012-02-22", 118],
["2012-02-23", 73],
["2012-02-24", 73],
["2012-02-25", 57],
["2012-02-26", 57],
["2012-02-27", 95],
["2012-02-28", 152],
["2012-02-29", 118],
["2012-03-01", 142],
["2012-03-02", 111],
["2012-03-03", 68],
["2012-03-04", 90],
["2012-03-05", 97],
["2012-03-06", 63],
["2012-03-07", 38],
["2012-03-08", 31],
["2012-03-09", 65],
["2012-03-10", 78],
["2012-03-11", 36],
["2012-03-12", 62],
["2012-03-13", 104],
["2012-03-14", 57],
["2012-03-15", 64],
["2012-03-16", 109],
["2012-03-17", 144],
["2012-03-18", 61],
["2012-03-19", 57],
["2012-03-20", 81],
["2012-03-21", 105],
["2012-03-22", 146],
["2012-03-23", 55],
["2012-03-24", 56],
["2012-03-25", 30],
["2012-03-26", 90],
["2012-03-27", 112],
["2012-03-28", 65],
["2012-03-29", 90],
["2012-03-30", 76],
["2012-03-31", 159],
["2012-04-01", 78],
["2012-04-02", 103],
["2012-04-03", 73],
["2012-04-03", 73],
["2012-04-04", 73],
["2012-04-05", 64],
["2012-04-06", 70],
["2012-04-07", 71],
["2012-04-08", 119],
["2012-04-09", 118],
["2012-04-10", 138],
["2012-04-11", 41],
["2012-04-12", 69],
["2012-04-13", 81],
["2012-04-14", 100],
["2012-04-15", 109],
["2012-04-16", 84],
["2012-04-17", 100],
["2012-04-18", 140],
["2012-04-19", 98],
["2012-04-20", 133],
["2012-04-21", 81],
["2012-04-22", 102],
["2012-04-23", 140],
["2012-04-24", 133],
["2012-04-25", 32],
["2012-04-26", 60],
["2012-04-27", 147],
["2012-04-28", 164],
["2012-04-29", 473],
["2012-04-30", 268],
["2012-05-01", 208],
["2012-05-02", 111],
["2012-05-03", 106],
["2012-05-04", 100],
["2012-05-05", 99],
["2012-05-06", 100],
["2012-05-07", 100],
["2012-05-08", 111],
["2012-05-09", 107],
["2012-05-10", 129],
["2012-05-11", 133],
["2012-05-12", 90],
["2012-05-13", 96],
["2012-05-14", 64],
["2012-05-15", 58],
["2012-05-16", 58],
["2012-05-17", 78],
["2012-05-18", 84],
["2012-05-19", 143],
["2012-05-20", 85],
["2012-05-21", 97],
["2012-05-22", 109],
["2012-05-23", 64],
["2012-05-24", 69],
["2012-05-25", 63],
["2012-05-26", 90],
["2012-05-27", 88],
["2012-05-28", 133],
["2012-05-29", 116],
["2012-05-30", 29],
["2012-05-31", 64],
["2012-06-01", 54],
["2012-06-02", 90],
["2012-06-03", 112],
["2012-06-04", 80],
["2012-06-05", 65],
["2012-06-06", 98],
["2012-06-07", 71],
["2012-06-08", 77],
["2012-06-09", 91],
["2012-06-10", 32],
["2012-06-11", 50],
["2012-06-12", 58],
["2012-06-13", 62],
["2012-06-14", 50],
["2012-06-15", 22],
["2012-06-16", 33],
["2012-06-17", 69],
["2012-06-18", 137],
["2012-06-19", 132],
["2012-06-20", 105],
["2012-06-21", 112],
["2012-06-22", 84],
["2012-06-23", 81],
["2012-06-24", 95],
["2012-06-25", 49],
["2012-06-26", 65],
["2012-06-27", 55],
["2012-06-28", 54],
["2012-06-29", 60],
["2012-06-30", 46],
["2012-07-01", 70],
["2012-07-02", 69],
["2012-07-03", 59],
["2012-07-04", 71],
["2012-07-05", 70],
["2012-07-06", 59],
["2012-07-07", 86],
["2012-07-08", 84],
["2012-07-09", 64],
["2012-07-10", 50],
["2012-07-11", 44],
["2012-07-12", 46],
["2012-07-13", 31],
["2012-07-14", 48],
["2012-07-15", 53],
["2012-07-16", 70],
["2012-07-17", 78],
["2012-07-18", 71],
["2012-07-19", 82],
["2012-07-20", 111],
["2012-07-21", 131],
["2012-07-22", 15],
["2012-07-24", 60],
["2012-07-25", 72],
["2012-07-26", 55],
["2012-07-26", 55],
["2012-07-27", 50],
["2012-07-28", 56],
["2012-07-29", 57],
["2012-07-30", 30],
["2012-07-31", 28],
["2012-08-01", 20],
["2012-08-02", 17],
["2012-08-03", 53],
["2012-08-04", 40],
["2012-08-05", 48],
["2012-08-06", 60],
["2012-08-07", 59],
["2012-08-08", 68],
["2012-08-09", 43],
["2012-08-10", 72],
["2012-08-11", 80],
["2012-08-12", 41],
["2012-08-13", 36],
["2012-08-14", 62],
["2012-08-15", 60],
["2012-08-16", 68],
["2012-08-17", 83],
["2012-08-18", 110],
["2012-08-19", 84],
["2012-08-20", 92],
["2012-08-21", 25],
["2012-08-22", 40],
["2012-08-23", 74],
["2012-08-24", 94],
["2012-08-25", 92],
["2012-08-26", 117],
["2012-08-27", 100],
["2012-08-28", 59],
["2012-08-29", 84],
["2012-08-30", 135],
["2012-08-31", 150],
["2012-09-01", 128],
["2012-09-02", 52],
["2012-09-03", 15],
["2012-09-04", 22],
["2012-09-05", 50],
["2012-09-06", 70],
["2012-09-07", 77],
["2012-09-08", 40],
["2012-09-09", 79],
["2012-09-10", 96],
["2012-09-11", 93],
["2012-09-12", 44],
["2012-09-13", 28],
["2012-09-14", 31],
["2012-09-15", 50],
["2012-09-16", 65],
["2012-09-17", 63],
["2012-09-18", 61],
["2012-09-19", 56],
["2012-09-21", 128],
["2012-09-22", 93],
["2012-09-23", 85],
["2012-09-24", 74],
["2012-09-25", 78],
["2012-09-26", 26],
["2012-09-27", 65],
["2012-09-28", 15],
["2012-09-29", 24],
["2012-09-30", 38],
["2012-10-01", 52],
["2012-10-02", 78],
["2012-10-03", 108],
["2012-10-04", 28],
["2012-10-05", 41],
["2012-10-06", 74],
["2012-10-07", 83],
["2012-10-08", 123],
["2012-10-09", 140],
["2012-10-10", 18],
["2012-10-11", 73],
["2012-10-12", 121],
["2012-10-13", 97],
["2012-10-14", 40],
["2012-10-15", 83],
["2012-10-16", 78],
["2012-10-17", 23],
["2012-10-18", 65],
["2012-10-19", 79],
["2012-10-20", 139],
["2012-10-21", 81],
["2012-10-22", 26],
["2012-10-23", 54],
["2012-10-24", 89],
["2012-10-25", 90],
["2012-10-26", 163],
["2012-10-27", 154],
["2012-10-28", 22],
["2012-10-29", 59],
["2012-10-30", 36],
["2012-10-31", 51],
["2012-11-01", 67],
["2012-11-02", 103],
["2012-11-03", 135],
["2012-11-04", 20],
["2012-11-05", 16],
["2012-11-06", 48],
["2012-11-07", 80],
["2012-11-08", 62],
["2012-11-09", 93],
["2012-11-10", 82],
["2012-11-11", 17],
["2012-11-12", 27],
["2012-11-13", 30],
["2012-11-14", 26],
["2012-11-15", 71],
["2012-11-16", 92],
["2012-11-17", 47],
["2012-11-18", 96],
["2012-11-19", 55],
["2012-11-20", 74],
["2012-11-21", 123],
["2012-11-22", 156],
["2012-11-23", 22],
["2012-11-24", 80],
["2012-11-25", 133],
["2012-11-26", 44],
["2012-11-27", 105],
["2012-11-28", 151],
["2012-11-29", 54],
["2012-12-01", 50],
["2012-12-02", 96],
["2012-12-03", 123],
["2012-12-04", 50],
["2012-12-05", 64],
["2012-12-06", 50],
["2012-12-07", 73],
["2012-12-08", 53],
["2012-12-09", 38],
["2012-12-10", 53],
["2012-12-11", 86],
["2012-12-12", 103],
["2012-12-13", 130],
["2012-12-14", 107],
["2012-12-15", 114],
["2012-12-16", 108],
["2012-12-17", 45],
["2012-12-18", 22],
["2012-12-19", 72],
["2012-12-20", 121],
["2012-12-21", 120],
["2012-12-22", 24],
["2012-12-23", 36],
["2012-12-24", 53],
["2012-12-25", 58],
["2012-12-26", 67],
["2012-12-28", 137],
["2012-12-29", 94],
["2012-12-30", 38],
["2012-12-31", 57],
["2013-01-01", 71],
["2013-01-02", 27],
["2013-01-03", 35],
["2013-01-04", 57],
["2013-01-05", 79],
["2013-01-06", 58],
["2013-01-07", 105],
["2013-01-08", 124],
["2013-01-09", 32],
["2013-01-10", 87],
["2013-01-11", 232],
["2013-01-12", 174],
["2013-01-13", 498],
["2013-01-14", 184],
["2014-01-01", 85],
["2014-01-02", 158],
["2014-01-03", 74],
["2014-01-04", 165],
["2014-01-05", 113],
["2014-01-06", 190],
["2014-01-07", 122],
["2014-01-10", 95],
["2014-01-11", 159],
["2014-01-12", 52],
["2014-01-13", 117],
["2014-01-14", 113],
["2014-01-15", 180],
["2014-01-16", 403],
["2014-01-17", 209],
["2014-01-18", 113],
["2014-01-19", 149],
["2014-01-21", 68],
["2014-01-22", 162],
["2014-01-23", 276],
["2014-01-24", 195],
["2014-01-26", 77],
["2014-01-27", 114],
["2014-01-28", 67],
["2014-01-29", 165],
["2014-01-30", 93],
["2014-01-31", 188],
["2014-02-01", 178],
["2014-02-02", 85],
["2014-02-05", 119],
["2014-02-06", 158],
["2014-02-07", 124],
["2014-02-08", 84],
["2014-02-10", 53],
["2014-02-11", 142],
["2014-02-12", 150],
["2014-02-13", 242],
["2014-02-14", 329],
["2014-02-15", 429],
["2014-02-16", 348],
["2014-02-17", 118],
["2014-02-18", 98],
["2014-02-19", 92],
["2014-02-20", 270],
["2014-02-21", 311],
["2014-02-22", 311],
["2014-02-23", 255],
["2014-02-24", 313],
["2014-02-25", 404],
["2014-02-28", 113],
["2014-03-01", 68],
["2014-03-02", 189],
["2014-03-03", 268],
["2014-03-04", 67],
["2014-03-07", 70],
["2014-03-08", 179],
["2014-03-09", 127],
["2014-03-10", 110],
["2014-03-11", 195],
["2014-03-13", 69],
["2014-03-14", 64],
["2014-03-15", 133],
["2014-03-16", 145],
["2014-03-17", 142],
["2014-03-18", 85],
["2014-03-19", 73],
["2014-03-21", 62],
["2014-03-22", 86],
["2014-03-23", 186],
["2014-03-24", 271],
["2014-03-25", 255],
["2014-03-26", 331],
["2014-03-27", 285],
["2014-03-28", 169],
["2014-03-29", 63],
["2014-03-30", 77],
["2014-03-31", 183],
["2014-04-01", 147],
["2014-04-02", 133],
["2014-04-03", 66],
["2014-04-04", 91],
["2014-04-05", 68],
["2014-04-06", 98],
["2014-04-07", 135],
["2014-04-08", 223],
["2014-04-09", 156],
["2014-04-10", 246],
["2014-04-11", 83],
["2014-04-12", 133],
["2014-04-13", 212],
["2014-04-14", 270],
["2014-04-15", 109],
["2014-04-16", 90],
["2014-04-17", 124],
["2014-04-18", 182],
["2014-04-19", 84],
["2014-04-20", 84],
["2014-04-21", 73],
["2014-04-22", 85],
["2014-04-23", 156],
["2014-04-24", 156],
["2014-04-25", 163],
["2014-04-26", 69],
["2014-04-27", 74],
["2014-04-28", 83],
["2014-04-29", 122],
["2014-04-30", 139],
["2014-05-01", 156],
["2014-05-03", 93],
["2014-05-04", 57],
["2014-05-05", 54],
["2014-05-06", 105],
["2014-05-07", 82],
["2014-05-08", 104],
["2014-05-09", 84],
["2014-05-10", 69],
["2014-05-12", 74],
["2014-05-13", 86],
["2014-05-14", 59],
["2014-05-15", 122],
["2014-05-16", 92],
["2014-05-17", 124],
["2014-05-18", 171],
["2014-05-19", 146],
["2014-05-20", 113],
["2014-05-21", 170],
["2014-05-22", 183],
["2014-05-23", 140],
["2014-05-24", 104],
["2014-05-25", 91],
["2014-05-26", 77],
["2014-05-27", 107],
["2014-05-28", 121],
["2014-05-29", 120],
["2014-05-30", 192],
["2014-05-31", 177],
["2014-06-01", 130],
["2014-06-02", 90],
["2014-06-03", 117],
["2014-06-04", 124],
["2014-06-05", 157],
["2014-06-06", 103],
["2014-06-07", 51],
["2014-06-08", 70],
["2014-06-09", 87],
["2014-06-10", 95],
["2014-06-11", 74],
["2014-06-12", 90],
["2014-06-13", 116],
["2014-06-14", 165],
["2014-06-15", 178],
["2014-06-16", 178],
["2014-06-17", 104],
["2014-06-18", 116],
["2014-06-19", 116],
["2014-06-20", 84],
["2014-06-21", 96],
["2014-06-22", 91],
["2014-06-23", 115],
["2014-06-24", 161],
["2014-06-25", 138],
["2014-06-26", 163],
["2014-06-27", 68],
["2014-06-28", 77],
["2014-06-29", 161],
["2014-06-30", 185],
["2014-07-01", 172],
["2014-07-02", 80],
["2014-07-03", 248],
["2014-07-04", 237],
["2014-07-05", 165],
["2014-07-06", 256],
["2014-07-07", 216],
["2014-07-08", 134],
["2014-07-09", 63],
["2014-07-10", 114],
["2014-07-11", 77],
["2014-07-12", 80],
["2014-07-13", 64],
["2014-07-14", 156],
["2014-07-15", 140],
["2014-07-16", 133],
["2014-07-17", 186],
["2014-07-18", 182],
["2014-07-19", 106],
["2014-07-20", 119],
["2014-07-21", 68],
["2014-07-22", 54],
["2014-07-23", 82],
["2014-07-24", 90],
["2014-07-25", 134],
["2014-07-26", 188],
["2014-07-27", 194],
["2014-07-28", 159],
["2014-07-29", 159],
["2014-07-30", 169],
["2014-07-31", 244],
["2014-08-01", 199],
["2014-08-02", 163],
["2014-08-03", 149],
["2014-08-05", 80],
["2014-08-06", 67],
["2014-08-07", 162],
["2014-08-08", 140],
["2014-08-09", 143],
["2014-08-10", 125],
["2014-08-11", 76],
["2014-08-12", 119],
["2014-08-13", 70],
["2014-08-14", 104],
["2014-08-15", 109],
["2014-08-16", 159],
["2014-08-17", 124],
["2014-08-18", 135],
["2014-08-19", 150],
["2014-08-20", 164],
["2014-08-21", 169],
["2014-08-22", 83],
["2014-08-23", 155],
["2014-08-24", 75],
["2014-08-25", 59],
["2014-08-26", 78],
["2014-08-27", 136],
["2014-08-28", 103],
["2014-08-29", 104],
["2014-08-30", 176],
["2014-08-31", 89],
["2014-09-01", 127],
["2014-09-03", 54],
["2014-09-04", 100],
["2014-09-05", 140],
["2014-09-06", 186],
["2014-09-07", 200],
["2014-09-08", 61],
["2014-09-09", 109],
["2014-09-10", 111],
["2014-09-11", 114],
["2014-09-12", 97],
["2014-09-13", 94],
["2014-09-14", 66],
["2014-09-15", 54],
["2014-09-16", 87],
["2014-09-17", 80],
["2014-09-18", 84],
["2014-09-19", 117],
["2014-09-20", 168],
["2014-09-21", 129],
["2014-09-22", 127],
["2014-09-23", 64],
["2014-09-24", 60],
["2014-09-25", 144],
["2014-09-26", 170],
["2014-09-27", 58],
["2014-09-28", 87],
["2014-09-29", 70],
["2014-09-30", 53],
["2014-10-01", 92],
["2014-10-02", 78],
["2014-10-03", 123],
["2014-10-04", 95],
["2014-10-05", 54],
["2014-10-06", 68],
["2014-10-07", 200],
["2014-10-08", 314],
["2014-10-09", 379],
["2014-10-10", 346],
["2014-10-11", 233],
["2014-10-14", 80],
["2014-10-15", 73],
["2014-10-16", 76],
["2014-10-17", 132],
["2014-10-18", 211],
["2014-10-19", 289],
["2014-10-20", 250],
["2014-10-21", 82],
["2014-10-22", 99],
["2014-10-23", 163],
["2014-10-24", 267],
["2014-10-25", 353],
["2014-10-26", 78],
["2014-10-27", 72],
["2014-10-28", 88],
["2014-10-29", 140],
["2014-10-30", 206],
["2014-10-31", 204],
["2014-11-01", 65],
["2014-11-03", 59],
["2014-11-04", 150],
["2014-11-05", 79],
["2014-11-07", 63],
["2014-11-08", 93],
["2014-11-09", 80],
["2014-11-10", 95],
["2014-11-11", 59],
["2014-11-13", 65],
["2014-11-14", 77],
["2014-11-15", 143],
["2014-11-16", 98],
["2014-11-17", 64],
["2014-11-18", 93],
["2014-11-19", 282],
["2014-11-23", 155],
["2014-11-24", 94],
["2014-11-25", 196],
["2014-11-26", 293],
["2014-11-27", 83],
["2014-11-28", 114],
["2014-11-29", 276],
["2014-12-01", 54],
["2014-12-02", 65],
["2014-12-03", 51],
["2014-12-05", 62],
["2014-12-06", 89],
["2014-12-07", 65],
["2014-12-08", 82],
["2014-12-09", 276],
["2014-12-10", 153],
["2014-12-11", 52],
["2014-12-13", 69],
["2014-12-14", 113],
["2014-12-15", 82],
["2014-12-17", 99],
["2014-12-19", 53],
["2014-12-22", 103],
["2014-12-23", 100],
["2014-12-25", 73],
["2014-12-26", 155],
["2014-12-27", 243],
["2014-12-28", 155],
["2014-12-29", 125],
["2014-12-30", 65],
["2015-01-01", 65],
["2015-01-02", 79],
["2015-01-03", 200],
["2015-01-04", 226],
["2015-01-05", 122],
["2015-01-06", 60],
["2015-01-07", 85],
["2015-01-08", 190],
["2015-01-09", 105],
["2015-01-10", 208],
["2015-01-11", 59],
["2015-01-12", 160],
["2015-01-13", 211],
["2015-01-14", 265],
["2015-01-15", 386],
["2015-01-16", 118],
["2015-01-17", 89],
["2015-01-18", 94],
["2015-01-19", 77],
["2015-01-20", 113],
["2015-01-22", 143],
["2015-01-23", 257],
["2015-01-24", 117],
["2015-01-25", 185],
["2015-01-26", 119],
["2015-01-28", 65],
["2015-01-29", 87],
["2015-01-31", 60],
["2015-02-01", 108],
["2015-02-02", 188],
["2015-02-03", 143],
["2015-02-05", 62],
["2015-02-06", 100],
["2015-02-09", 152],
["2015-02-10", 166],
["2015-02-11", 55],
["2015-02-12", 59],
["2015-02-13", 175],
["2015-02-14", 293],
["2015-02-15", 326],
["2015-02-16", 153],
["2015-02-18", 73],
["2015-02-19", 267],
["2015-02-20", 183],
["2015-02-21", 394],
["2015-02-22", 158],
["2015-02-23", 86],
["2015-02-24", 207],
]
(
Line()
.add_xaxis(xaxis_data=[item[0] for item in all_data])
.add_yaxis(
series_name="",
y_axis=[item[1] for item in all_data],
yaxis_index=0,
is_smooth=True,
is_symbol_show=False,
)
.set_global_opts(
title_opts=opts.TitleOpts(title="Beijing AQI"),
tooltip_opts=opts.TooltipOpts(trigger="axis"),
datazoom_opts=[
opts.DataZoomOpts(yaxis_index=0),
opts.DataZoomOpts(type_="inside", yaxis_index=0),
],
visualmap_opts=opts.VisualMapOpts(
pos_top="10",
pos_right="10",
is_piecewise=True,
pieces=[
{"gt": 0, "lte": 50, "color": "#096"},
{"gt": 50, "lte": 100, "color": "#ffde33"},
{"gt": 100, "lte": 150, "color": "#ff9933"},
{"gt": 150, "lte": 200, "color": "#cc0033"},
{"gt": 200, "lte": 300, "color": "#660099"},
{"gt": 300, "color": "#7e0023"},
],
out_of_range={"color": "#999"},
),
xaxis_opts=opts.AxisOpts(type_="category"),
yaxis_opts=opts.AxisOpts(
type_="value",
name_location="start",
min_=0,
max_=500,
is_scale=True,
axistick_opts=opts.AxisTickOpts(is_inside=False),
),
)
.set_series_opts(
markline_opts=opts.MarkLineOpts(
data=[
{"yAxis": 50},
{"yAxis": 100},
{"yAxis": 150},
{"yAxis": 200},
{"yAxis": 300},
],
label_opts=opts.LabelOpts(position="end"),
)
)
.render("beijing_aqi.html")
)
| 24.410882 | 63 | 0.426575 |
e9ff12848b4786dd9b5181f046c3b8596891ad5d | 1,416 | py | Python | test_scripts/test_stack_and_visualize.py | jakevdp/spheredb | e5e5ff8b8902459b3f38a1a413a712ac1695accc | [
"BSD-3-Clause"
] | 1 | 2021-08-29T06:01:28.000Z | 2021-08-29T06:01:28.000Z | test_scripts/test_stack_and_visualize.py | jakevdp/spheredb | e5e5ff8b8902459b3f38a1a413a712ac1695accc | [
"BSD-3-Clause"
] | null | null | null | test_scripts/test_stack_and_visualize.py | jakevdp/spheredb | e5e5ff8b8902459b3f38a1a413a712ac1695accc | [
"BSD-3-Clause"
] | 2 | 2018-08-03T20:27:35.000Z | 2021-08-29T06:01:30.000Z | """
Stacking and Visualizing
------------------------
This script does the following:
1. Input LSST images, warp to sparse matrix, store as scidb arrays.
This tests the warping of a single LSST exposure into a sparse matrix
representation of a HEALPix grid.
"""
import os
import sys
import glob
import matplotlib.pyplot as plt
import numpy as np
sys.path.append(os.path.abspath('..'))
from spheredb.scidb_tools import HPXPixels3D, find_index_bounds
filenames = glob.glob("/home/jakevdp/research/LSST_IMGS/*/R*/S*.fits")
print "total number of files:", len(filenames)
HPX_data = HPXPixels3D(input_files=filenames[:20],
name='LSSTdata', force_reload=False)
times = HPX_data.unique_times()
xlim, ylim, tlim = HPX_data.index_bounds()
for time in times[:2]:
tslice = HPX_data.time_slice(time)
tslice_arr = tslice.arr[xlim[0]:xlim[1],
ylim[0]:ylim[1]].toarray()
fig, ax = plt.subplots()
im = ax.imshow(np.log(tslice_arr), cmap=plt.cm.binary)
ax.set_xlim(400, 440)
ax.set_ylim(860, 820)
fig.colorbar(im, ax=ax)
ax.set_title("time = {0}".format(time))
coadd = HPX_data.coadd().arr[xlim[0]:xlim[1],
ylim[0]:ylim[1]].toarray()
fig, ax = plt.subplots()
im = ax.imshow(np.log(coadd), cmap=plt.cm.binary)
ax.set_xlim(400, 440)
ax.set_ylim(860, 820)
fig.colorbar(im, ax=ax)
ax.set_title("coadd")
plt.show()
| 27.230769 | 70 | 0.664548 |
18007f3ffa7e153ffa5c57f5301a0d773f024cb8 | 307 | py | Python | Problem/PeopleFund/concatenate.py | yeojin-dev/coding-test | 30ce8507838beaa9232c6fc6c62a7dcb62d51464 | [
"MIT"
] | 2 | 2018-07-11T08:13:06.000Z | 2018-07-11T08:47:12.000Z | Problem/PeopleFund/concatenate.py | yeojin-dev/coding-test | 30ce8507838beaa9232c6fc6c62a7dcb62d51464 | [
"MIT"
] | null | null | null | Problem/PeopleFund/concatenate.py | yeojin-dev/coding-test | 30ce8507838beaa9232c6fc6c62a7dcb62d51464 | [
"MIT"
] | null | null | null | import numpy as np
sizes = list(map(int, input().split()))
arr1 = list()
arr2 = list()
for _ in range(sizes[0]):
arr1.append(list(map(int, input().split())))
for _ in range(sizes[1]):
arr2.append(list(map(int, input().split())))
print(np.concatenate((np.array(arr1), np.array(arr2)), axis=0))
| 19.1875 | 63 | 0.635179 |
18012d97d113307f75b71fea1cea0948b4e7a4b1 | 28,941 | py | Python | tests/test_splitname.py | goerz/bibdeskparser | 4f60f9960f6f0156c2f3c89033065c4e121800ab | [
"BSD-3-Clause"
] | null | null | null | tests/test_splitname.py | goerz/bibdeskparser | 4f60f9960f6f0156c2f3c89033065c4e121800ab | [
"BSD-3-Clause"
] | null | null | null | tests/test_splitname.py | goerz/bibdeskparser | 4f60f9960f6f0156c2f3c89033065c4e121800ab | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from bibdeskparser.customization import InvalidName, splitname
splitname_test_cases = (
(
r'Per Brinch Hansen',
{'first': ['Per', 'Brinch'], 'von': [], 'last': ['Hansen'], 'jr': []},
),
(
r'Brinch Hansen, Per',
{'first': ['Per'], 'von': [], 'last': ['Brinch', 'Hansen'], 'jr': []},
),
(
r'Brinch Hansen,, Per',
{'first': ['Per'], 'von': [], 'last': ['Brinch', 'Hansen'], 'jr': []},
),
(
r"Charles Louis Xavier Joseph de la Vall{\'e}e Poussin",
{
'first': ['Charles', 'Louis', 'Xavier', 'Joseph'],
'von': ['de', 'la'],
'last': [r'Vall{\'e}e', 'Poussin'],
'jr': [],
},
),
(
r'D[onald] E. Knuth',
{'first': ['D[onald]', 'E.'], 'von': [], 'last': ['Knuth'], 'jr': []},
),
(
r'A. {Delgado de Molina}',
{
'first': ['A.'],
'von': [],
'last': ['{Delgado de Molina}'],
'jr': [],
},
),
(
r"M. Vign{\'e}",
{'first': ['M.'], 'von': [], 'last': [r"Vign{\'e}"], 'jr': []},
),
###############################################################################
#
# Test cases from
# http://maverick.inria.fr/~Xavier.Decoret/resources/xdkbibtex/bibtex_summary.html
#
###############################################################################
(r'AA BB', {'first': ['AA'], 'von': [], 'last': ['BB'], 'jr': []}),
(r'AA', {'first': [], 'von': [], 'last': ['AA'], 'jr': []}),
(r'AA bb', {'first': ['AA'], 'von': [], 'last': ['bb'], 'jr': []}),
(r'aa', {'first': [], 'von': [], 'last': ['aa'], 'jr': []}),
(r'AA bb CC', {'first': ['AA'], 'von': ['bb'], 'last': ['CC'], 'jr': []}),
(
r'AA bb CC dd EE',
{'first': ['AA'], 'von': ['bb', 'CC', 'dd'], 'last': ['EE'], 'jr': []},
),
(
r'AA 1B cc dd',
{'first': ['AA', '1B'], 'von': ['cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA 1b cc dd',
{'first': ['AA'], 'von': ['1b', 'cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA {b}B cc dd',
{'first': ['AA', '{b}B'], 'von': ['cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA {b}b cc dd',
{'first': ['AA'], 'von': ['{b}b', 'cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA {B}b cc dd',
{'first': ['AA'], 'von': ['{B}b', 'cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA {B}B cc dd',
{'first': ['AA', '{B}B'], 'von': ['cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA \BB{b} cc dd',
{'first': ['AA', r'\BB{b}'], 'von': ['cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA \bb{b} cc dd',
{'first': ['AA'], 'von': [r'\bb{b}', 'cc'], 'last': ['dd'], 'jr': []},
),
(
r'AA {bb} cc DD',
{'first': ['AA', '{bb}'], 'von': ['cc'], 'last': ['DD'], 'jr': []},
),
(
r'AA bb {cc} DD',
{'first': ['AA'], 'von': ['bb'], 'last': ['{cc}', 'DD'], 'jr': []},
),
(
r'AA {bb} CC',
{'first': ['AA', '{bb}'], 'von': [], 'last': ['CC'], 'jr': []},
),
(r'bb CC, AA', {'first': ['AA'], 'von': ['bb'], 'last': ['CC'], 'jr': []}),
(r'bb CC, aa', {'first': ['aa'], 'von': ['bb'], 'last': ['CC'], 'jr': []}),
(
r'bb CC dd EE, AA',
{'first': ['AA'], 'von': ['bb', 'CC', 'dd'], 'last': ['EE'], 'jr': []},
),
(r'bb, AA', {'first': ['AA'], 'von': [], 'last': ['bb'], 'jr': []}),
(
r'bb CC,XX, AA',
{'first': ['AA'], 'von': ['bb'], 'last': ['CC'], 'jr': ['XX']},
),
(
r'bb CC,xx, AA',
{'first': ['AA'], 'von': ['bb'], 'last': ['CC'], 'jr': ['xx']},
),
(r'BB,, AA', {'first': ['AA'], 'von': [], 'last': ['BB'], 'jr': []}),
(
r"Paul \'Emile Victor",
{
'first': ['Paul', r"\'Emile"],
'von': [],
'last': ['Victor'],
'jr': [],
},
),
(
r"Paul {\'E}mile Victor",
{
'first': ['Paul', r"{\'E}mile"],
'von': [],
'last': ['Victor'],
'jr': [],
},
),
(
r"Paul \'emile Victor",
{'first': ['Paul'], 'von': [r"\'emile"], 'last': ['Victor'], 'jr': []},
),
(
r"Paul {\'e}mile Victor",
{
'first': ['Paul'],
'von': [r"{\'e}mile"],
'last': ['Victor'],
'jr': [],
},
),
(
r"Victor, Paul \'Emile",
{
'first': ['Paul', r"\'Emile"],
'von': [],
'last': ['Victor'],
'jr': [],
},
),
(
r"Victor, Paul {\'E}mile",
{
'first': ['Paul', r"{\'E}mile"],
'von': [],
'last': ['Victor'],
'jr': [],
},
),
(
r"Victor, Paul \'emile",
{
'first': ['Paul', r"\'emile"],
'von': [],
'last': ['Victor'],
'jr': [],
},
),
(
r"Victor, Paul {\'e}mile",
{
'first': ['Paul', r"{\'e}mile"],
'von': [],
'last': ['Victor'],
'jr': [],
},
),
(
r'Dominique Galouzeau de Villepin',
{
'first': ['Dominique', 'Galouzeau'],
'von': ['de'],
'last': ['Villepin'],
'jr': [],
},
),
(
r'Dominique {G}alouzeau de Villepin',
{
'first': ['Dominique'],
'von': ['{G}alouzeau', 'de'],
'last': ['Villepin'],
'jr': [],
},
),
(
r'Galouzeau de Villepin, Dominique',
{
'first': ['Dominique'],
'von': ['Galouzeau', 'de'],
'last': ['Villepin'],
'jr': [],
},
),
###############################################################################
#
# Test cases from pybtex
# See file /pybtex/tests/parse_name_test.py in the pybtex source.
#
###############################################################################
(
r'A. E. Siegman',
{'first': ['A.', 'E.'], 'von': [], 'last': ['Siegman'], 'jr': []},
),
(
r'A. G. W. Cameron',
{
'first': ['A.', 'G.', 'W.'],
'von': [],
'last': ['Cameron'],
'jr': [],
},
),
(r'A. Hoenig', {'first': ['A.'], 'von': [], 'last': ['Hoenig'], 'jr': []}),
(
r'A. J. Van Haagen',
{
'first': ['A.', 'J.', 'Van'],
'von': [],
'last': ['Haagen'],
'jr': [],
},
),
(
r'A. S. Berdnikov',
{'first': ['A.', 'S.'], 'von': [], 'last': ['Berdnikov'], 'jr': []},
),
(
r'A. Trevorrow',
{'first': ['A.'], 'von': [], 'last': ['Trevorrow'], 'jr': []},
),
(
r'Adam H. Lewenberg',
{'first': ['Adam', 'H.'], 'von': [], 'last': ['Lewenberg'], 'jr': []},
),
(
r'Addison-Wesley Publishing Company',
{
'first': ['Addison-Wesley', 'Publishing'],
'von': [],
'last': ['Company'],
'jr': [],
},
),
(
r'Advogato (Raph Levien)',
{
'first': ['Advogato', '(Raph'],
'von': [],
'last': ['Levien)'],
'jr': [],
},
),
(
r'Andrea de Leeuw van Weenen',
{
'first': ['Andrea'],
'von': ['de', 'Leeuw', 'van'],
'last': ['Weenen'],
'jr': [],
},
),
(
r'Andreas Geyer-Schulz',
{'first': ['Andreas'], 'von': [], 'last': ['Geyer-Schulz'], 'jr': []},
),
(
r'Andr{\'e} Heck',
{'first': [r'Andr{\'e}'], 'von': [], 'last': ['Heck'], 'jr': []},
),
(
r'Anne Br{\"u}ggemann-Klein',
{
'first': ['Anne'],
'von': [],
'last': [r'Br{\"u}ggemann-Klein'],
'jr': [],
},
),
(r'Anonymous', {'first': [], 'von': [], 'last': ['Anonymous'], 'jr': []}),
(r'B. Beeton', {'first': ['B.'], 'von': [], 'last': ['Beeton'], 'jr': []}),
(
r'B. Hamilton Kelly',
{'first': ['B.', 'Hamilton'], 'von': [], 'last': ['Kelly'], 'jr': []},
),
(
r'B. V. Venkata Krishna Sastry',
{
'first': ['B.', 'V.', 'Venkata', 'Krishna'],
'von': [],
'last': ['Sastry'],
'jr': [],
},
),
(
r'Benedict L{\o}fstedt',
{'first': ['Benedict'], 'von': [], 'last': [r'L{\o}fstedt'], 'jr': []},
),
(
r'Bogus{\l}aw Jackowski',
{'first': ['Bogus{\l}aw'], 'von': [], 'last': ['Jackowski'], 'jr': []},
),
(
r'Christina A. L.\ Thiele',
{
'first': ['Christina', 'A.', 'L.\\'],
'von': [],
'last': ['Thiele'],
'jr': [],
},
),
(
r"D. Men'shikov",
{'first': ['D.'], 'von': [], 'last': ["Men'shikov"], 'jr': []},
),
(
r'Darko \v{Z}ubrini{\'c}',
{
'first': ['Darko'],
'von': [],
'last': [r'\v{Z}ubrini{\'c}'],
'jr': [],
},
),
(
r'Dunja Mladeni{\'c}',
{'first': ['Dunja'], 'von': [], 'last': [r'Mladeni{\'c}'], 'jr': []},
),
(
r'Edwin V. {Bell, II}',
{
'first': ['Edwin', 'V.'],
'von': [],
'last': ['{Bell, II}'],
'jr': [],
},
),
(
r'Frank G. {Bennett, Jr.}',
{
'first': ['Frank', 'G.'],
'von': [],
'last': ['{Bennett, Jr.}'],
'jr': [],
},
),
(
r'Fr{\'e}d{\'e}ric Boulanger',
{
'first': [r'Fr{\'e}d{\'e}ric'],
'von': [],
'last': ['Boulanger'],
'jr': [],
},
),
(
r'Ford, Jr., Henry',
{'first': ['Henry'], 'von': [], 'last': ['Ford'], 'jr': ['Jr.']},
),
(
r'mr Ford, Jr., Henry',
{'first': ['Henry'], 'von': ['mr'], 'last': ['Ford'], 'jr': ['Jr.']},
),
(r'Fukui Rei', {'first': ['Fukui'], 'von': [], 'last': ['Rei'], 'jr': []}),
(
r'G. Gr{\"a}tzer',
{'first': ['G.'], 'von': [], 'last': [r'Gr{\"a}tzer'], 'jr': []},
),
(
r'George Gr{\"a}tzer',
{'first': ['George'], 'von': [], 'last': [r'Gr{\"a}tzer'], 'jr': []},
),
(
r'Georgia K. M. Tobin',
{
'first': ['Georgia', 'K.', 'M.'],
'von': [],
'last': ['Tobin'],
'jr': [],
},
),
(
r'Gilbert van den Dobbelsteen',
{
'first': ['Gilbert'],
'von': ['van', 'den'],
'last': ['Dobbelsteen'],
'jr': [],
},
),
(
r'Gy{\"o}ngyi Bujdos{\'o}',
{
'first': [r'Gy{\"o}ngyi'],
'von': [],
'last': [r'Bujdos{\'o}'],
'jr': [],
},
),
(
r'Helmut J{\"u}rgensen',
{'first': ['Helmut'], 'von': [], 'last': [r'J{\"u}rgensen'], 'jr': []},
),
(
r'Herbert Vo{\ss}',
{'first': ['Herbert'], 'von': [], 'last': ['Vo{\ss}'], 'jr': []},
),
(
r"H{\'a}n Th{\^e}\llap{\raise 0.5ex\hbox{\'{\relax}}} Th{\'a}nh",
{
'first': [
r'H{\'a}n',
r"Th{\^e}\llap{\raise 0.5ex\hbox{\'{\relax}}}",
],
'von': [],
'last': [r"Th{\'a}nh"],
'jr': [],
},
),
(
r"H{\`a}n Th\^e\llap{\raise0.5ex\hbox{\'{\relax}}} Th{\`a}nh",
{
'first': [r'H{\`a}n', r"Th\^e\llap{\raise0.5ex\hbox{\'{\relax}}}"],
'von': [],
'last': [r"Th{\`a}nh"],
'jr': [],
},
),
(
r'J. Vesel{\'y}',
{'first': ['J.'], 'von': [], 'last': [r'Vesel{\'y}'], 'jr': []},
),
(
r'Javier Rodr\'{\i}guez Laguna',
{
'first': ['Javier', r'Rodr\'{\i}guez'],
'von': [],
'last': ['Laguna'],
'jr': [],
},
),
(
r'Ji\v{r}\'{\i} Vesel{\'y}',
{
'first': [r'Ji\v{r}\'{\i}'],
'von': [],
'last': [r'Vesel{\'y}'],
'jr': [],
},
),
(
r'Ji\v{r}\'{\i} Zlatu{\v{s}}ka',
{
'first': [r'Ji\v{r}\'{\i}'],
'von': [],
'last': [r'Zlatu{\v{s}}ka'],
'jr': [],
},
),
(
r'Ji\v{r}{\'\i} Vesel{\'y}',
{
'first': [r'Ji\v{r}{\'\i}'],
'von': [],
'last': [r'Vesel{\'y}'],
'jr': [],
},
),
(
r'Ji\v{r}{\'{\i}}Zlatu{\v{s}}ka',
{
'first': [],
'von': [],
'last': [r'Ji\v{r}{\'{\i}}Zlatu{\v{s}}ka'],
'jr': [],
},
),
(
r'Jim Hef{}feron',
{'first': ['Jim'], 'von': [], 'last': ['Hef{}feron'], 'jr': []},
),
(
r'J{\"o}rg Knappen',
{'first': [r'J{\"o}rg'], 'von': [], 'last': ['Knappen'], 'jr': []},
),
(
r'J{\"o}rgen L. Pind',
{
'first': [r'J{\"o}rgen', 'L.'],
'von': [],
'last': ['Pind'],
'jr': [],
},
),
(
r'J{\'e}r\^ome Laurens',
{'first': [r'J{\'e}r\^ome'], 'von': [], 'last': ['Laurens'], 'jr': []},
),
(
r'J{{\"o}}rg Knappen',
{'first': [r'J{{\"o}}rg'], 'von': [], 'last': ['Knappen'], 'jr': []},
),
(
r'K. Anil Kumar',
{'first': ['K.', 'Anil'], 'von': [], 'last': ['Kumar'], 'jr': []},
),
(
r'Karel Hor{\'a}k',
{'first': ['Karel'], 'von': [], 'last': [r'Hor{\'a}k'], 'jr': []},
),
(
r'Karel P\'{\i}{\v{s}}ka',
{
'first': ['Karel'],
'von': [],
'last': [r'P\'{\i}{\v{s}}ka'],
'jr': [],
},
),
(
r'Karel P{\'\i}{\v{s}}ka',
{
'first': ['Karel'],
'von': [],
'last': [r'P{\'\i}{\v{s}}ka'],
'jr': [],
},
),
(
r'Karel Skoup\'{y}',
{'first': ['Karel'], 'von': [], 'last': [r'Skoup\'{y}'], 'jr': []},
),
(
r'Karel Skoup{\'y}',
{'first': ['Karel'], 'von': [], 'last': [r'Skoup{\'y}'], 'jr': []},
),
(
r'Kent McPherson',
{'first': ['Kent'], 'von': [], 'last': ['McPherson'], 'jr': []},
),
(
r'Klaus H{\"o}ppner',
{'first': ['Klaus'], 'von': [], 'last': [r'H{\"o}ppner'], 'jr': []},
),
(
r'Lars Hellstr{\"o}m',
{'first': ['Lars'], 'von': [], 'last': [r'Hellstr{\"o}m'], 'jr': []},
),
(
r'Laura Elizabeth Jackson',
{
'first': ['Laura', 'Elizabeth'],
'von': [],
'last': ['Jackson'],
'jr': [],
},
),
(
r'M. D{\'{\i}}az',
{'first': ['M.'], 'von': [], 'last': [r'D{\'{\i}}az'], 'jr': []},
),
(
r'M/iche/al /O Searc/oid',
{
'first': [r'M/iche/al', r'/O'],
'von': [],
'last': [r'Searc/oid'],
'jr': [],
},
),
(
r'Marek Ry{\'c}ko',
{'first': ['Marek'], 'von': [], 'last': [r'Ry{\'c}ko'], 'jr': []},
),
(
r'Marina Yu. Nikulina',
{
'first': ['Marina', 'Yu.'],
'von': [],
'last': ['Nikulina'],
'jr': [],
},
),
(
r'Max D{\'{\i}}az',
{'first': ['Max'], 'von': [], 'last': [r'D{\'{\i}}az'], 'jr': []},
),
(
r'Merry Obrecht Sawdey',
{
'first': ['Merry', 'Obrecht'],
'von': [],
'last': ['Sawdey'],
'jr': [],
},
),
(
r'Miroslava Mis{\'a}kov{\'a}',
{
'first': ['Miroslava'],
'von': [],
'last': [r'Mis{\'a}kov{\'a}'],
'jr': [],
},
),
(
r'N. A. F. M. Poppelier',
{
'first': ['N.', 'A.', 'F.', 'M.'],
'von': [],
'last': ['Poppelier'],
'jr': [],
},
),
(
r'Nico A. F. M. Poppelier',
{
'first': ['Nico', 'A.', 'F.', 'M.'],
'von': [],
'last': ['Poppelier'],
'jr': [],
},
),
(
r'Onofrio de Bari',
{'first': ['Onofrio'], 'von': ['de'], 'last': ['Bari'], 'jr': []},
),
(
r'Pablo Rosell-Gonz{\'a}lez',
{
'first': ['Pablo'],
'von': [],
'last': [r'Rosell-Gonz{\'a}lez'],
'jr': [],
},
),
(
r'Paco La Bruna',
{'first': ['Paco', 'La'], 'von': [], 'last': ['Bruna'], 'jr': []},
),
(
r'Paul Franchi-Zannettacci',
{
'first': ['Paul'],
'von': [],
'last': ['Franchi-Zannettacci'],
'jr': [],
},
),
(
r'Pavel \v{S}eve\v{c}ek',
{
'first': ['Pavel'],
'von': [],
'last': [r'\v{S}eve\v{c}ek'],
'jr': [],
},
),
(
r'Petr Ol{\v{s}}ak',
{'first': ['Petr'], 'von': [], 'last': [r'Ol{\v{s}}ak'], 'jr': []},
),
(
r'Petr Ol{\v{s}}{\'a}k',
{'first': ['Petr'], 'von': [], 'last': [r'Ol{\v{s}}{\'a}k'], 'jr': []},
),
(
r'Primo\v{z} Peterlin',
{'first': [r'Primo\v{z}'], 'von': [], 'last': ['Peterlin'], 'jr': []},
),
(
r'Prof. Alban Grimm',
{'first': ['Prof.', 'Alban'], 'von': [], 'last': ['Grimm'], 'jr': []},
),
(
r'P{\'e}ter Husz{\'a}r',
{
'first': [r'P{\'e}ter'],
'von': [],
'last': [r'Husz{\'a}r'],
'jr': [],
},
),
(
r'P{\'e}ter Szab{\'o}',
{'first': [r'P{\'e}ter'], 'von': [], 'last': [r'Szab{\'o}'], 'jr': []},
),
(
r'Rafa{\l}\.Zbikowski',
{'first': [], 'von': [], 'last': [r'Rafa{\l}\.Zbikowski'], 'jr': []},
),
(
r'Rainer Sch{\"o}pf',
{'first': ['Rainer'], 'von': [], 'last': [r'Sch{\"o}pf'], 'jr': []},
),
(
r'T. L. (Frank) Pappas',
{
'first': ['T.', 'L.', '(Frank)'],
'von': [],
'last': ['Pappas'],
'jr': [],
},
),
(
r'TUG 2004 conference',
{
'first': ['TUG', '2004'],
'von': [],
'last': ['conference'],
'jr': [],
},
),
(
r'TUG {\sltt DVI} Driver Standards Committee',
{
'first': ['TUG', '{\sltt DVI}', 'Driver', 'Standards'],
'von': [],
'last': ['Committee'],
'jr': [],
},
),
(
r'TUG {\sltt xDVIx} Driver Standards Committee',
{
'first': ['TUG'],
'von': ['{\sltt xDVIx}'],
'last': ['Driver', 'Standards', 'Committee'],
'jr': [],
},
),
(
r'University of M{\"u}nster',
{
'first': ['University'],
'von': ['of'],
'last': [r'M{\"u}nster'],
'jr': [],
},
),
(
r'Walter van der Laan',
{
'first': ['Walter'],
'von': ['van', 'der'],
'last': ['Laan'],
'jr': [],
},
),
(
r'Wendy G. McKay',
{'first': ['Wendy', 'G.'], 'von': [], 'last': ['McKay'], 'jr': []},
),
(
r'Wendy McKay',
{'first': ['Wendy'], 'von': [], 'last': ['McKay'], 'jr': []},
),
(
r'W{\l}odek Bzyl',
{'first': [r'W{\l}odek'], 'von': [], 'last': ['Bzyl'], 'jr': []},
),
(
r'\LaTeX Project Team',
{
'first': [r'\LaTeX', 'Project'],
'von': [],
'last': ['Team'],
'jr': [],
},
),
(
r'\rlap{Lutz Birkhahn}',
{'first': [], 'von': [], 'last': [r'\rlap{Lutz Birkhahn}'], 'jr': []},
),
(
r'{Jim Hef{}feron}',
{'first': [], 'von': [], 'last': ['{Jim Hef{}feron}'], 'jr': []},
),
(
r'{Kristoffer H\o{}gsbro Rose}',
{
'first': [],
'von': [],
'last': ['{Kristoffer H\o{}gsbro Rose}'],
'jr': [],
},
),
(
r'{TUG} {Working} {Group} on a {\TeX} {Directory} {Structure}',
{
'first': ['{TUG}', '{Working}', '{Group}'],
'von': ['on', 'a'],
'last': [r'{\TeX}', '{Directory}', '{Structure}'],
'jr': [],
},
),
(
r'{The \TUB{} Team}',
{'first': [], 'von': [], 'last': [r'{The \TUB{} Team}'], 'jr': []},
),
(
r'{\LaTeX} project team',
{
'first': [r'{\LaTeX}'],
'von': ['project'],
'last': ['team'],
'jr': [],
},
),
(
r'{\NTG{} \TeX{} future working group}',
{
'first': [],
'von': [],
'last': [r'{\NTG{} \TeX{} future working group}'],
'jr': [],
},
),
(
r'{{\LaTeX\,3} Project Team}',
{
'first': [],
'von': [],
'last': [r'{{\LaTeX\,3} Project Team}'],
'jr': [],
},
),
(
r'Johansen Kyle, Derik Mamania M.',
{
'first': ['Derik', 'Mamania', 'M.'],
'von': [],
'last': ['Johansen', 'Kyle'],
'jr': [],
},
),
(
r"Johannes Adam Ferdinand Alois Josef Maria Marko d'Aviano Pius von und zu Liechtenstein",
{
'first': [
'Johannes',
'Adam',
'Ferdinand',
'Alois',
'Josef',
'Maria',
'Marko',
],
'von': ["d'Aviano", 'Pius', 'von', 'und', 'zu'],
'last': ['Liechtenstein'],
'jr': [],
},
),
(
r"Brand\~{a}o, F",
{'first': ['F'], 'von': [], 'last': ['Brand\\', '{a}o'], 'jr': []},
),
)
if __name__ == '__main__':
unittest.main()
| 27.174648 | 98 | 0.338171 |
1801df02ecd58a8f78ca27f271870b89690c5eb0 | 1,349 | py | Python | db_model.py | Build-Week-Saltiest-Hacker/machine-learning | 1822e2ecdca8279bc49095f6da527152e298b95d | [
"MIT"
] | null | null | null | db_model.py | Build-Week-Saltiest-Hacker/machine-learning | 1822e2ecdca8279bc49095f6da527152e298b95d | [
"MIT"
] | null | null | null | db_model.py | Build-Week-Saltiest-Hacker/machine-learning | 1822e2ecdca8279bc49095f6da527152e298b95d | [
"MIT"
] | null | null | null | # schema for SQL database
from data import app, db
| 31.372093 | 74 | 0.604151 |
1804da1fa980c8e71b8a65bd6282db015d7cd076 | 2,608 | py | Python | acl/utils.py | stjordanis/aspect-document-similarity | ca17e0a8730caa224b0efe8909b1e5a87bb456ea | [
"MIT"
] | 47 | 2020-10-14T09:28:39.000Z | 2022-03-01T01:54:32.000Z | acl/utils.py | stjordanis/aspect-document-similarity | ca17e0a8730caa224b0efe8909b1e5a87bb456ea | [
"MIT"
] | 2 | 2021-11-21T20:07:10.000Z | 2022-02-10T09:25:40.000Z | acl/utils.py | stjordanis/aspect-document-similarity | ca17e0a8730caa224b0efe8909b1e5a87bb456ea | [
"MIT"
] | 8 | 2020-11-07T08:43:01.000Z | 2022-02-15T05:45:13.000Z | import re
import logging
logger = logging.getLogger(__name__)
def get_text_from_doc(doc) -> str:
    """
    Build document text from title + abstract.

    :param doc: S2 paper (dict-like; 'title' and 'abstract' keys optional)
    :return: Document text; a non-empty abstract is appended after a newline
    """
    text = ''
    if 'title' in doc:
        text += doc['title']
    # Bug fix: 'title' was guarded with `in`, but 'abstract' was read
    # unconditionally, raising KeyError for docs without that key.
    # Use .get so a missing abstract behaves like an empty one.
    if doc.get('abstract'):
        text += '\n' + doc['abstract']
    return text
def get_text_from_doc_id(doc_id: str, doc_index) -> str:
    """Return title + abstract text for the paper stored under *doc_id*.

    :param doc_id: S2-id
    :param doc_index: S2-id to S2-paper data
    :raises ValueError: when the id is not present in the index
    :return: Document text
    """
    if doc_id not in doc_index:
        raise ValueError(f'Document not found in index: {doc_id}')
    return get_text_from_doc(doc_index[doc_id])
# resolve 'and' titles and filter for out-of-index docs
| 26.612245 | 84 | 0.536426 |
1805a8be23b715a568fd9d510dee5510be26a4d2 | 995 | py | Python | build-a-django-content-aggregator/source_code_step_2/podcasts/tests.py | syberflea/materials | 54f44725b40edf00c1b523d7a85b34a85014d7eb | [
"MIT"
] | 3,682 | 2018-05-07T19:45:24.000Z | 2022-03-31T15:19:10.000Z | build-a-django-content-aggregator/source_code_step_2/podcasts/tests.py | sribarrow/materials | c17c4a4d6f8487e59eac1df8c88ca92b73d6d2a5 | [
"MIT"
] | 148 | 2018-05-15T21:18:49.000Z | 2022-03-21T11:25:39.000Z | build-a-django-content-aggregator/source_code_step_2/podcasts/tests.py | sribarrow/materials | c17c4a4d6f8487e59eac1df8c88ca92b73d6d2a5 | [
"MIT"
] | 5,535 | 2018-05-25T23:36:08.000Z | 2022-03-31T16:55:52.000Z | from django.test import TestCase
from django.utils import timezone
from .models import Episode
| 34.310345 | 78 | 0.639196 |
18062b275cb72a752756840a4bbb8ef63a17377e | 4,114 | py | Python | superset/migrations/versions/070c043f2fdb_add_granularity_to_charts_where_missing.py | razzius/superset | 93f59e055e8312fb28687bc9fc22342b4be68d0e | [
"Apache-2.0"
] | 18,621 | 2017-06-19T09:57:44.000Z | 2021-01-05T06:28:21.000Z | superset/migrations/versions/070c043f2fdb_add_granularity_to_charts_where_missing.py | changeiot/superset | 299b5dc64448d04abe6b35ee85fbd2b938c781bc | [
"Apache-2.0"
] | 9,043 | 2017-07-05T16:10:48.000Z | 2021-01-05T17:58:01.000Z | superset/migrations/versions/070c043f2fdb_add_granularity_to_charts_where_missing.py | changeiot/superset | 299b5dc64448d04abe6b35ee85fbd2b938c781bc | [
"Apache-2.0"
] | 5,527 | 2017-07-06T01:39:43.000Z | 2021-01-05T06:01:11.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""add granularity to charts where missing
Revision ID: 070c043f2fdb
Revises: 41ce8799acc3
Create Date: 2021-02-04 09:34:13.608891
"""
# revision identifiers, used by Alembic.
revision = "070c043f2fdb"
down_revision = "41ce8799acc3"
import json
from alembic import op
from sqlalchemy import and_, Boolean, Column, Integer, String, Text
from sqlalchemy.ext.declarative import declarative_base
from superset import db
Base = declarative_base()
def upgrade():
    """
    Adds the granularity param to charts without it populated. This is required for
    time range filtering to work properly. Uses the following approach:
    - Find all charts without a granularity or granularity_sqla param.
    - Get the dataset that backs the chart.
    - If the dataset has the main dttm column set, use it.
    - Otherwise, find all the dttm columns in the dataset and use the first one (this
      matches the behavior of Explore view on the frontend)
    - If no dttm columns exist in the dataset, don't change the chart.
    """
    bind = op.get_bind()
    session = db.Session(bind=bind)
    slices_changed = 0
    # Only sqla-backed charts whose params JSON does not already mention a
    # "granularity..." key are candidates (the LIKE filter is approximate
    # and re-checked below on the parsed JSON).
    for slc in (
        session.query(Slice)
        .filter(
            and_(
                Slice.datasource_type == "table", Slice.params.notlike('%"granularity%')
            )
        )
        .all()
    ):
        try:
            params = json.loads(slc.params)
            # Re-check precisely on the decoded params dict.
            if "granularity" in params or "granularity_sqla" in params:
                continue
            table = session.query(SqlaTable).get(slc.datasource_id)
            if not table:
                continue
            # Prefer the dataset's designated main datetime column.
            if table.main_dttm_col:
                params["granularity"] = table.main_dttm_col
                slc.params = json.dumps(params, sort_keys=True)
                print(f"Set granularity for slice {slc.id} to {table.main_dttm_col}")
                slices_changed += 1
                continue
            # Otherwise fall back to the dataset's first temporal column,
            # mirroring what Explore view does on the frontend.
            table_columns = (
                session.query(TableColumn)
                .filter(TableColumn.table_id == table.id)
                .filter(TableColumn.is_dttm == True)
                .all()
            )
            if len(table_columns):
                params["granularity"] = table_columns[0].column_name
                slc.params = json.dumps(params, sort_keys=True)
                print(
                    f"Set granularity for slice {slc.id} to {table_columns[0].column_name}"
                )
                slices_changed += 1
        except Exception as e:
            # Best-effort migration: a malformed params blob on one chart
            # must not abort the upgrade for the remaining charts.
            print(e)
            print(f"Parsing params for slice {slc.id} failed.")
            pass
    print(f"{slices_changed} slices altered")
    session.commit()
    session.close()
def downgrade():
    """No-op: the data changes made by ``upgrade`` cannot be reversed."""
| 30.474074 | 91 | 0.645357 |
1806f8b39a3aeab210ed874956e25e9bd4d01444 | 325 | py | Python | AtCoder/ABC/B/page-13/090B.py | Nishi05/Competitive-programming | e59a6755b706d9d5c1f359f4511d92c114e6a94e | [
"MIT"
] | null | null | null | AtCoder/ABC/B/page-13/090B.py | Nishi05/Competitive-programming | e59a6755b706d9d5c1f359f4511d92c114e6a94e | [
"MIT"
] | null | null | null | AtCoder/ABC/B/page-13/090B.py | Nishi05/Competitive-programming | e59a6755b706d9d5c1f359f4511d92c114e6a94e | [
"MIT"
] | null | null | null | # A B
# 0 10
#
#
a, b = map(int, input().split())
cnt = 0
# Count the palindromic integers in the inclusive range [a, b].
for i in range(a, b + 1):
    s = str(i)
    # The original compared the first half of s against the first half of
    # its reversal; for strings that is equivalent to a full palindrome
    # test, so compare the whole string directly (this also drops the
    # redundant int(len(str(s))/2) on a value that is already a str).
    if s == s[::-1]:
        cnt += 1
print(cnt)
| 23.214286 | 42 | 0.609231 |
18070effada07af1c287eb2501ebc5c7848149ff | 2,499 | py | Python | __init__.py | kotn3l/blender-flver | 3476d720337a6d7a28bd55f9b112524c0f61581d | [
"MIT"
] | 11 | 2020-04-28T03:21:13.000Z | 2022-03-23T13:18:33.000Z | __init__.py | kotn3l/blender-flver | 3476d720337a6d7a28bd55f9b112524c0f61581d | [
"MIT"
] | 2 | 2021-06-28T07:44:42.000Z | 2022-03-18T00:47:42.000Z | __init__.py | elizagamedev/blender-flver | 25cc152de19acb4028035d3ed389706df25e094a | [
"MIT"
] | 2 | 2021-12-23T13:31:57.000Z | 2022-03-16T06:30:13.000Z | bl_info = {
"name": "Import Fromsoft FLVER models",
"description":
"Import models from various Fromsoft games such as Dark Souls",
"author": "Eliza Velasquez",
"version": (0, 1, 0),
"blender": (2, 80, 0),
"category": "Import-Export",
"location": "File > Import",
"warning": "",
"support": "COMMUNITY",
"wiki_url": "", # TODO: wiki url
"tracker_url": "", # TODO: tracker url
}
_submodules = {
"importer",
"flver",
"reader",
}
# Reload submodules on addon reload
if "bpy" in locals():
import importlib
for submodule in _submodules:
if submodule in locals():
importlib.reload(locals()[submodule])
import bpy
from . import importer
from bpy_extras.io_utils import ImportHelper
from bpy.props import StringProperty, BoolProperty
| 30.47561 | 74 | 0.620648 |
1809819c2d6283b15f8fc4c9f611ea65d6e320d3 | 32,193 | py | Python | plugin.video.vstream/resources/lib/gui/gui.py | akuala/REPO.KUALA | ea9a157025530d2ce8fa0d88431c46c5352e89d4 | [
"Apache-2.0"
] | 2 | 2018-11-02T19:55:30.000Z | 2020-08-14T02:22:20.000Z | plugin.video.vstream/resources/lib/gui/gui.py | akuala/REPO.KUALA | ea9a157025530d2ce8fa0d88431c46c5352e89d4 | [
"Apache-2.0"
] | null | null | null | plugin.video.vstream/resources/lib/gui/gui.py | akuala/REPO.KUALA | ea9a157025530d2ce8fa0d88431c46c5352e89d4 | [
"Apache-2.0"
] | 3 | 2019-12-17T20:47:00.000Z | 2021-02-11T19:03:59.000Z | # -*- coding: utf-8 -*-
# https://github.com/Kodi-vStream/venom-xbmc-addons
from resources.lib.gui.contextElement import cContextElement
from resources.lib.gui.guiElement import cGuiElement
from resources.lib.db import cDb
from resources.lib.handler.outputParameterHandler import cOutputParameterHandler
from resources.lib.handler.inputParameterHandler import cInputParameterHandler
from resources.lib.handler.pluginHandler import cPluginHandler
from resources.lib.parser import cParser
from resources.lib.util import cUtil, QuotePlus
from resources.lib.comaddon import listitem, addon, dialog, isKrypton, window, xbmc
import re, xbmcplugin
| 42.192661 | 178 | 0.671916 |
1809e4f7973197265ce5a6a201169c2856659885 | 1,555 | py | Python | src/jt/rubicon/java/_typemanager.py | karpierz/jtypes.rubicon | 8f8196e47de93183eb9728fec0d08725fc368ee0 | [
"BSD-3-Clause"
] | 2 | 2018-11-29T06:19:05.000Z | 2018-12-09T09:47:55.000Z | src/jt/rubicon/java/_typemanager.py | karpierz/jtypes.rubicon | 8f8196e47de93183eb9728fec0d08725fc368ee0 | [
"BSD-3-Clause"
] | null | null | null | src/jt/rubicon/java/_typemanager.py | karpierz/jtypes.rubicon | 8f8196e47de93183eb9728fec0d08725fc368ee0 | [
"BSD-3-Clause"
] | null | null | null | # Copyright (c) 2016-2019, Adam Karpierz
# Licensed under the BSD license
# http://opensource.org/licenses/BSD-3-Clause
from ...jvm.lib.compat import *
from ...jvm.lib import annotate
from ...jvm.lib import public
from ._typehandler import * # noqa
| 28.272727 | 82 | 0.652733 |
180ba7fe8e58c4e3cae590b1f061d367ca5c9d22 | 63,592 | py | Python | rest/models.py | istarnes/restit | 24d2805ab68696cab7718cc1164b7f716582ffb7 | [
"0BSD"
] | null | null | null | rest/models.py | istarnes/restit | 24d2805ab68696cab7718cc1164b7f716582ffb7 | [
"0BSD"
] | null | null | null | rest/models.py | istarnes/restit | 24d2805ab68696cab7718cc1164b7f716582ffb7 | [
"0BSD"
] | null | null | null | import os
from django.conf import settings
from django.core.exceptions import FieldDoesNotExist
from hashids import Hashids
import hashlib
import string
from datetime import datetime, date, timedelta
from decimal import Decimal
TWOPLACES = Decimal(10) ** -2
from django.db import models
from django.apps import apps
get_model = apps.get_model
from django.http import Http404
from django.core.exceptions import ValidationError
import threading
from rest import helpers as rest_helpers
from rest.uberdict import UberDict
from rest import search
from rest.privpub import PrivatePublicEncryption
import importlib
# Registry of graph/render helpers. Entries start as None and are filled
# in lazily (see the RestModel._setupGraphHelpers() calls in the functions
# below) — presumably to avoid import cycles; confirm against rest.views.
GRAPH_HELPERS = UberDict()
GRAPH_HELPERS.restGet = None
GRAPH_HELPERS.get_request = None
GRAPH_HELPERS.views = None
# Optional field-encryption support: enabled only when a private key file
# exists at <ROOT>/config/encrypt_key.pem, otherwise ENCRYPTER stays None.
ENCRYPTER_KEY_FILE = os.path.join(settings.ROOT, "config", "encrypt_key.pem")
ENCRYPTER = None
if os.path.exists(ENCRYPTER_KEY_FILE):
    ENCRYPTER = PrivatePublicEncryption(private_key_file=ENCRYPTER_KEY_FILE)
def requestHasPerms(request, perms, group=None):
    """
    Check whether the request's member may perform *perms*.

    Returns an (allowed, error_message, status_code) triple:
    (True, None, None) on success, otherwise (False, reason, code).
    Side effect: caches the resolved member/group pair on the request.
    """
    if not request.user.is_authenticated:
        return False, "auth required", 401
    # Resolve and cache the member/group pair on the request the first time.
    if not hasattr(request, 'member'):
        request.member, request.group = request.user.__class__.getMemberGroup(request, False, False)
    # A member-level permission grants access outright.
    if request.member.hasPerm(perms):
        return True, None, None
    # Otherwise fall back to group-level permission, defaulting to the
    # group already attached to the request when none was passed in.
    if group is None and hasattr(request, "group"):
        group = request.group
    if group and request.member.hasGroupPerm(group, perms):
        return True, None, None
    # NOTE(review): 402 is an unusual code for "permission denied" (403 is
    # conventional); kept as-is because callers may depend on it.
    return False, "permission denied", 402
def toGraph(self, request=None, graph="basic"):
    """Serialize this instance with the named graph, returning a plain
    structure instead of an HttpResponse."""
    RestModel._setupGraphHelpers()
    active_request = request or GRAPH_HELPERS.get_request()
    graph_spec = self.getGraph(graph)
    return GRAPH_HELPERS.restGet(active_request, self, return_httpresponse=False, **graph_spec)
def restGetGenericModel(self, field):
    """Resolve the "app.Model" reference stored in *field* to a model class.

    Returns None (logging where useful) when the field is missing, empty,
    not of the "app.Model" form, or names an unknown model.
    """
    if not hasattr(self, field):
        rest_helpers.log_print("model has no field: {0}".format(field))
        return None
    ref = getattr(self, field)
    if not ref or "." not in ref:
        return None
    app_name, model_name = ref.split(".")
    resolved = RestModel.getModel(app_name, model_name)
    if not resolved:
        rest_helpers.log_print("GENERIC MODEL DOES NOT EXIST: {0}".format(ref))
    return resolved
def restGetGenericRelation(self, field):
    """Fetch the concrete instance behind the generic relation *field*,
    using the companion "<field>_id" attribute as the primary key."""
    model = self.restGetGenericModel(field)
    if not model:
        return None
    pk_value = getattr(self, "{0}_id".format(field))
    return model.rw_objects().filter(pk=pk_value).first()
def saveFields(self, allow_null=True, **kwargs):
    """
    Helper method to save a list of fields.

    Each keyword argument is written through restSaveField and the model is
    persisted only when at least one field actually changed.

    :param allow_null: when False, None values are skipped instead of saved
    """
    # Reset the change tracker; judging by the check below, restSaveField
    # is expected to populate self._changed__ — confirm in restSaveField.
    self._changed__ = UberDict()
    for key, value in list(kwargs.items()):
        if value is None and not allow_null:
            continue
        self.restSaveField(key, value)
    # Only hit the database when something was recorded as changed.
    if len(self._changed__):
        self.save()
def saveMediaFile(self, file, name, file_name=None, is_base64=False, group=None):
    """
    Generic method to save a media file.

    Creates a medialib.MediaItem from *file* (base64 data, an http(s) URL,
    or a file object), stores it on this model under attribute *name*,
    persists both objects, and returns the MediaItem.
    """
    if file_name is None:
        file_name = name
    MediaItem = RestModel.getModel("medialib", "MediaItem")
    # make sure we set the name base64_data
    if is_base64:
        mi = MediaItem(name=file_name, base64_data=file, group=group)
    # NOTE(review): "type(file) in [str, str]" is a redundant py2 leftover
    # (was probably [str, unicode]); isinstance(file, str) would suffice.
    elif type(file) in [str, str] and (file.startswith("https:") or file.startswith("http:")):
        mi = MediaItem(name=name, downloadurl=file, group=group)
    else:
        mi = MediaItem(name=name, newfile=file, group=group)
    mi.save()
    setattr(self, name, mi)
    self.save()
    return mi
def restStatus(self, request, status, **kwargs):
    """Emit a standard REST status response through the lazily-bound
    GRAPH_HELPERS registry."""
    RestModel._setupGraphHelpers()
    responder = GRAPH_HELPERS.restStatus
    return responder(request, status, **kwargs)
def restGet(self, request, graph=None, as_dict=False):
    """Render this instance for a REST response.

    The graph name is taken from the argument, then from the request's
    DATA ("graph" key), then falls back to "default". When *as_dict* is
    True a plain structure is returned instead of an HttpResponse.
    """
    RestModel._setupGraphHelpers()
    if not request:
        request = self.getActiveRequest()
    if not graph:
        graph = request.DATA.get("graph", "default") if request else "default"
    return GRAPH_HELPERS.restGet(
        request, self,
        return_httpresponse=not as_dict,
        **self.getGraph(graph))
def toDict(self, graph=None):
    """Render this instance as a plain dict for *graph* (or the default)."""
    RestModel._setupGraphHelpers()
    rendered = self.restGet(None, graph=graph, as_dict=True)
    return rendered
| 40.146465 | 152 | 0.544754 |
180d1820c70ce1e075a46251cae4f2ab29f2929f | 803 | py | Python | examples/rp_analytics.py | eirrgang/radical.pilot | ceccd1867dd172935d602ff4c33a5ed4467e0dc8 | [
"MIT"
] | 47 | 2015-03-16T01:08:11.000Z | 2022-02-02T10:36:39.000Z | examples/rp_analytics.py | eirrgang/radical.pilot | ceccd1867dd172935d602ff4c33a5ed4467e0dc8 | [
"MIT"
] | 1,856 | 2015-01-02T09:32:20.000Z | 2022-03-31T21:45:06.000Z | examples/rp_analytics.py | eirrgang/radical.pilot | ceccd1867dd172935d602ff4c33a5ed4467e0dc8 | [
"MIT"
] | 28 | 2015-06-10T18:15:14.000Z | 2021-11-07T04:36:45.000Z | #!/usr/bin/env python3
__copyright__ = 'Copyright 2013-2016, http://radical.rutgers.edu'
__license__ = 'MIT'
import sys
import radical.utils as ru
import radical.pilot as rp
rpu = rp.utils
# ------------------------------------------------------------------------------
#
if __name__ == '__main__':
    # A RADICAL-Pilot session id is required on the command line.
    if len(sys.argv) <= 1:
        # Bug fix: the usage string previously printed a literal "%s";
        # interpolate the program name into the message.
        print("\n\tusage: %s <session_id>\n" % sys.argv[0])
        sys.exit(1)
    sid = sys.argv[1]
    # Fetch the session's profile files, skipping any already downloaded.
    profiles = rpu.fetch_profiles(sid=sid, skip_existing=True)
    for p in profiles:
        print(p)
    profs = ru.read_profiles(profiles)
    for p in profs:
        print(type(p))
    # Merge the per-component profiles into a single event sequence.
    prof = ru.combine_profiles(profs)
    print(len(prof))
    for entry in prof:
        print(entry)
# ------------------------------------------------------------------------------
| 18.25 | 80 | 0.495641 |
180d3a3f60ca987d84a73cb66042ea85d5cffea9 | 758 | py | Python | tests/contrib/django/testapp/middleware.py | mvas/apm-agent-python | f4582e90eb5308b915ca51e2e98620fc22af09ec | [
"BSD-3-Clause"
] | null | null | null | tests/contrib/django/testapp/middleware.py | mvas/apm-agent-python | f4582e90eb5308b915ca51e2e98620fc22af09ec | [
"BSD-3-Clause"
] | null | null | null | tests/contrib/django/testapp/middleware.py | mvas/apm-agent-python | f4582e90eb5308b915ca51e2e98620fc22af09ec | [
"BSD-3-Clause"
] | null | null | null | try:
from django.utils.deprecation import MiddlewareMixin
except ImportError:
# no-op class for Django < 1.10
| 27.071429 | 56 | 0.740106 |
180dd0f316d9175e1decc0de1732de58c97bdcf4 | 3,874 | py | Python | run.py | Yvonne-Ouma/Password-Locker | b16f8e9ee36d3cb70eefb58bf7be2de1bb1948fc | [
"MIT"
] | null | null | null | run.py | Yvonne-Ouma/Password-Locker | b16f8e9ee36d3cb70eefb58bf7be2de1bb1948fc | [
"MIT"
] | null | null | null | run.py | Yvonne-Ouma/Password-Locker | b16f8e9ee36d3cb70eefb58bf7be2de1bb1948fc | [
"MIT"
] | null | null | null | #!/usr/bin/env python3.6
from user import User
from credential import Credential
def createUser(userName, password):
    """Build and return a new User with the given login name and password."""
    return User(userName, password)
def saveUsers(user):
    """Persist *user* by delegating to its own saveUser method."""
    user.saveUser()
def saveCredential(credential):
    """Store a newly created credential in the credential list."""
    Credential.saveCredential(credential)
def delCredential(credential):
    """Remove *credential* from the stored credentials."""
    credential.deleteCredential()
def findCredential(name):
    """Look up and return the credential saved under *name*."""
    return Credential.find_by_name(name)
def check_existingCredentials(name):
    """Return True when a credential named *name* already exists."""
    return Credential.credential_exist(name)
def displayCredentials():
    """Return every credential that has been saved."""
    return Credential.displayCredentials()
if __name__ == '__main__':
    # Entry point; main() is defined elsewhere in this module.
    main()
| 27.28169 | 177 | 0.558596 |
180e054f46ac36903917c85a5ca1fbddc3d6ad0b | 844 | py | Python | soundrts/constants.py | ctoth/soundrts | 1a1271182d53c16d3e29f5dc8f8e987415a9467b | [
"BSD-3-Clause"
] | null | null | null | soundrts/constants.py | ctoth/soundrts | 1a1271182d53c16d3e29f5dc8f8e987415a9467b | [
"BSD-3-Clause"
] | null | null | null | soundrts/constants.py | ctoth/soundrts | 1a1271182d53c16d3e29f5dc8f8e987415a9467b | [
"BSD-3-Clause"
] | null | null | null | # constants used in more than one module
# Some of them might find a better home later.
from lib.nofloat import PRECISION
MAIN_METASERVER_URL = open("cfg/metaserver.txt").read().strip()
# old value used by some features (stats, ...)
METASERVER_URL = "http://jlpo.free.fr/soundrts/metaserver/"
# simulation
VIRTUAL_TIME_INTERVAL = 300 # milliseconds
COLLISION_RADIUS = 175 # millimeters # 350 / 2
USE_RANGE_MARGIN = 175 # millimeters
ORDERS_QUEUE_LIMIT = 10
MAX_NB_OF_RESOURCE_TYPES = 10
DEFAULT_MINIMAL_DAMAGE = int(.17 * PRECISION)
# used for packing the orders
NEWLINE_REPLACEMENT = ";"
SPACE_REPLACEMENT = ","
# minimal interval (in seconds) between 2 sounds
ALERT_LIMIT = .5
FOOTSTEP_LIMIT = .1
# don't play events after this limit (in seconds)
EVENT_LIMIT = 3
# use the profiler (warning: will slow down the game)
PROFILE = False
| 25.575758 | 63 | 0.755924 |
180efba78897c0fa073f01ffc1050d72acb958e1 | 9,104 | py | Python | Modules/Attention/Steps.py | ishine/GST_Tacotron | 0c3d8e51042dc5d49abc842b59a13ea70f927f9d | [
"MIT"
] | 21 | 2020-02-23T03:35:27.000Z | 2021-11-01T11:08:18.000Z | Modules/Attention/Steps.py | ishine/GST_Tacotron | 0c3d8e51042dc5d49abc842b59a13ea70f927f9d | [
"MIT"
] | 6 | 2020-03-14T15:43:38.000Z | 2021-07-06T09:06:57.000Z | Modules/Attention/Steps.py | ishine/GST_Tacotron | 0c3d8e51042dc5d49abc842b59a13ea70f927f9d | [
"MIT"
] | 7 | 2020-03-07T11:33:09.000Z | 2021-11-28T16:19:01.000Z | import tensorflow as tf
import numpy as np
'''
TF 2.0's basic attention layers(Attention and AdditiveAttention) calculate parallelly.
TO USE MONOTONIC FUNCTION, ATTENTION MUST KNOW 'n-1 ALIGNMENT'.
Thus, this parallel versions do not support the monotonic function.
''' | 39.755459 | 161 | 0.611819 |
180f8229eeb538cba11111f51d0cfaabcfe979dc | 14,002 | py | Python | test.py | gmberton/deep-visual-geo-localization-benchmark | 7ac395411b7eeff99da66675dedc5372839e5632 | [
"MIT"
] | 1 | 2022-03-25T06:48:16.000Z | 2022-03-25T06:48:16.000Z | test.py | gmberton/deep-visual-geo-localization-benchmark | 7ac395411b7eeff99da66675dedc5372839e5632 | [
"MIT"
] | null | null | null | test.py | gmberton/deep-visual-geo-localization-benchmark | 7ac395411b7eeff99da66675dedc5372839e5632 | [
"MIT"
] | null | null | null |
import faiss
import torch
import logging
import numpy as np
from tqdm import tqdm
from torch.utils.data import DataLoader
from torch.utils.data.dataset import Subset
def test_efficient_ram_usage(args, eval_ds, model, test_method="hard_resize"):
    """This function gives the same output as test(), but uses much less RAM.
    This can be useful when testing with large descriptors (e.g. NetVLAD) on large datasets (e.g. San Francisco).
    Obviously it is slower than test(), and can't be used with PCA.

    Returns (recalls, recalls_str): recalls holds one percentage per value
    in args.recall_values; recalls_str is a printable summary of them.
    """
    model = model.eval()
    # For the crop-based methods every query yields 5 descriptors, hence
    # the 5x row count in the distance matrix.
    if test_method == 'nearest_crop' or test_method == "maj_voting":
        distances = np.empty([eval_ds.queries_num * 5, eval_ds.database_num], dtype=np.float32)
    else:
        distances = np.empty([eval_ds.queries_num, eval_ds.database_num], dtype=np.float32)
    with torch.no_grad():
        if test_method == 'nearest_crop' or test_method == 'maj_voting':
            queries_features = np.ones((eval_ds.queries_num * 5, args.features_dim), dtype="float32")
        else:
            queries_features = np.ones((eval_ds.queries_num, args.features_dim), dtype="float32")
        logging.debug("Extracting queries features for evaluation/testing")
        queries_infer_batch_size = 1 if test_method == "single_query" else args.infer_batch_size
        eval_ds.test_method = test_method
        queries_subset_ds = Subset(eval_ds, list(range(eval_ds.database_num, eval_ds.database_num+eval_ds.queries_num)))
        queries_dataloader = DataLoader(dataset=queries_subset_ds, num_workers=args.num_workers,
                                        batch_size=queries_infer_batch_size, pin_memory=(args.device=="cuda"))
        for inputs, indices in tqdm(queries_dataloader, ncols=100):
            if test_method == "five_crops" or test_method == "nearest_crop" or test_method == 'maj_voting':
                inputs = torch.cat(tuple(inputs)) # shape = 5*bs x 3 x 480 x 480
            features = model(inputs.to(args.device))
            if test_method == "five_crops": # Compute mean along the 5 crops
                features = torch.stack(torch.split(features, 5)).mean(1)
            if test_method == "nearest_crop" or test_method == 'maj_voting':
                start_idx = (indices[0] - eval_ds.database_num) * 5
                end_idx = start_idx + indices.shape[0] * 5
                indices = np.arange(start_idx, end_idx)
                queries_features[indices, :] = features.cpu().numpy()
            else:
                queries_features[indices.numpy()-eval_ds.database_num, :] = features.cpu().numpy()
        # NOTE(review): .cuda() hard-codes the GPU here even though
        # args.device is used everywhere else — confirm intended.
        queries_features = torch.tensor(queries_features).type(torch.float32).cuda()
        logging.debug("Extracting database features for evaluation/testing")
        # For database use "hard_resize", although it usually has no effect because database images have same resolution
        eval_ds.test_method = "hard_resize"
        database_subset_ds = Subset(eval_ds, list(range(eval_ds.database_num)))
        database_dataloader = DataLoader(dataset=database_subset_ds, num_workers=args.num_workers,
                                         batch_size=args.infer_batch_size, pin_memory=(args.device=="cuda"))
        for inputs, indices in tqdm(database_dataloader, ncols=100):
            inputs = inputs.to(args.device)
            features = model(inputs)
            for pn, (index, pred_feature) in enumerate(zip(indices, features)):
                # Squared-L2 distance of every query feature to this
                # single database feature, one matrix column at a time.
                distances[:, index] = ((queries_features-pred_feature)**2).sum(1).cpu().numpy()
        del features, queries_features, pred_feature
    predictions = distances.argsort(axis=1)[:, :max(args.recall_values)]
    # NOTE(review): the crop-merging below hard-codes 20 candidates,
    # implicitly assuming max(args.recall_values) == 20 — confirm.
    if test_method == 'nearest_crop':
        distances = np.array([distances[row, index] for row, index in enumerate(predictions)])
        distances = np.reshape(distances, (eval_ds.queries_num, 20 * 5))
        predictions = np.reshape(predictions, (eval_ds.queries_num, 20 * 5))
        for q in range(eval_ds.queries_num):
            # sort predictions by distance
            sort_idx = np.argsort(distances[q])
            predictions[q] = predictions[q, sort_idx]
            # remove duplicated predictions, i.e. keep only the closest ones
            _, unique_idx = np.unique(predictions[q], return_index=True)
            # unique_idx is sorted based on the unique values, sort it again
            predictions[q, :20] = predictions[q, np.sort(unique_idx)][:20]
        predictions = predictions[:, :20] # keep only the closer 20 predictions for each
    elif test_method == 'maj_voting':
        distances = np.array([distances[row, index] for row, index in enumerate(predictions)])
        distances = np.reshape(distances, (eval_ds.queries_num, 5, 20))
        predictions = np.reshape(predictions, (eval_ds.queries_num, 5, 20))
        for q in range(eval_ds.queries_num):
            # votings, modify distances in-place
            top_n_voting('top1', predictions[q], distances[q], args.majority_weight)
            top_n_voting('top5', predictions[q], distances[q], args.majority_weight)
            top_n_voting('top10', predictions[q], distances[q], args.majority_weight)
            # flatten dist and preds from 5, 20 -> 20*5
            # and then proceed as usual to keep only first 20
            dists = distances[q].flatten()
            preds = predictions[q].flatten()
            # sort predictions by distance
            sort_idx = np.argsort(dists)
            preds = preds[sort_idx]
            # remove duplicated predictions, i.e. keep only the closest ones
            _, unique_idx = np.unique(preds, return_index=True)
            # unique_idx is sorted based on the unique values, sort it again
            # here the row corresponding to the first crop is used as a
            # 'buffer' for each query, and in the end the dimension
            # relative to crops is eliminated
            predictions[q, 0, :20] = preds[np.sort(unique_idx)][:20]
        predictions = predictions[:, 0, :20] # keep only the closer 20 predictions for each query
    del distances
    #### For each query, check if the predictions are correct
    positives_per_query = eval_ds.get_positives()
    # args.recall_values by default is [1, 5, 10, 20]
    recalls = np.zeros(len(args.recall_values))
    for query_index, pred in enumerate(predictions):
        for i, n in enumerate(args.recall_values):
            if np.any(np.in1d(pred[:n], positives_per_query[query_index])):
                recalls[i:] += 1
                break
    recalls = recalls / eval_ds.queries_num * 100
    recalls_str = ", ".join([f"R@{val}: {rec:.1f}" for val, rec in zip(args.recall_values, recalls)])
    return recalls, recalls_str
def test(args, eval_ds, model, test_method="hard_resize", pca=None):
    """Compute features of the given dataset and compute the recalls.

    Extracts database and query descriptors, runs a faiss L2 search, and
    returns (recalls, recalls_str) with one percentage per value in
    args.recall_values. Optionally applies a fitted *pca* to all features.
    """
    assert test_method in ["hard_resize", "single_query", "central_crop", "five_crops",
                           "nearest_crop", "maj_voting"], f"test_method can't be {test_method}"
    # Optional low-RAM code path (slower, incompatible with PCA).
    if args.efficient_ram_testing:
        return test_efficient_ram_usage(args, eval_ds, model, test_method)
    model = model.eval()
    with torch.no_grad():
        logging.debug("Extracting database features for evaluation/testing")
        # For database use "hard_resize", although it usually has no effect because database images have same resolution
        eval_ds.test_method = "hard_resize"
        database_subset_ds = Subset(eval_ds, list(range(eval_ds.database_num)))
        database_dataloader = DataLoader(dataset=database_subset_ds, num_workers=args.num_workers,
                                         batch_size=args.infer_batch_size, pin_memory=(args.device=="cuda"))
        # Crop-based methods store 5 descriptors per query, hence the
        # larger feature buffer in that case.
        if test_method == "nearest_crop" or test_method == 'maj_voting':
            all_features = np.empty((5 * eval_ds.queries_num + eval_ds.database_num, args.features_dim), dtype="float32")
        else:
            all_features = np.empty((len(eval_ds), args.features_dim), dtype="float32")
        for inputs, indices in tqdm(database_dataloader, ncols=100):
            features = model(inputs.to(args.device))
            features = features.cpu().numpy()
            if pca != None:
                features = pca.transform(features)
            all_features[indices.numpy(), :] = features
        logging.debug("Extracting queries features for evaluation/testing")
        queries_infer_batch_size = 1 if test_method == "single_query" else args.infer_batch_size
        eval_ds.test_method = test_method
        queries_subset_ds = Subset(eval_ds, list(range(eval_ds.database_num, eval_ds.database_num+eval_ds.queries_num)))
        queries_dataloader = DataLoader(dataset=queries_subset_ds, num_workers=args.num_workers,
                                        batch_size=queries_infer_batch_size, pin_memory=(args.device=="cuda"))
        for inputs, indices in tqdm(queries_dataloader, ncols=100):
            if test_method == "five_crops" or test_method == "nearest_crop" or test_method == 'maj_voting':
                inputs = torch.cat(tuple(inputs)) # shape = 5*bs x 3 x 480 x 480
            features = model(inputs.to(args.device))
            if test_method == "five_crops": # Compute mean along the 5 crops
                features = torch.stack(torch.split(features, 5)).mean(1)
            features = features.cpu().numpy()
            if pca != None:
                features = pca.transform(features)
            if test_method == "nearest_crop" or test_method == 'maj_voting': # store the features of all 5 crops
                start_idx = eval_ds.database_num + (indices[0] - eval_ds.database_num) * 5
                end_idx = start_idx + indices.shape[0] * 5
                indices = np.arange(start_idx, end_idx)
                all_features[indices, :] = features
            else:
                all_features[indices.numpy(), :] = features
    queries_features = all_features[eval_ds.database_num:]
    database_features = all_features[:eval_ds.database_num]
    # Exact L2 nearest-neighbor search over the database descriptors.
    faiss_index = faiss.IndexFlatL2(args.features_dim)
    faiss_index.add(database_features)
    del database_features, all_features
    logging.debug("Calculating recalls")
    distances, predictions = faiss_index.search(queries_features, max(args.recall_values))
    # NOTE(review): the crop-merging below hard-codes 20 candidates,
    # implicitly assuming max(args.recall_values) == 20 — confirm.
    if test_method == 'nearest_crop':
        distances = np.reshape(distances, (eval_ds.queries_num, 20 * 5))
        predictions = np.reshape(predictions, (eval_ds.queries_num, 20 * 5))
        for q in range(eval_ds.queries_num):
            # sort predictions by distance
            sort_idx = np.argsort(distances[q])
            predictions[q] = predictions[q, sort_idx]
            # remove duplicated predictions, i.e. keep only the closest ones
            _, unique_idx = np.unique(predictions[q], return_index=True)
            # unique_idx is sorted based on the unique values, sort it again
            predictions[q, :20] = predictions[q, np.sort(unique_idx)][:20]
        predictions = predictions[:, :20] # keep only the closer 20 predictions for each query
    elif test_method == 'maj_voting':
        distances = np.reshape(distances, (eval_ds.queries_num, 5, 20))
        predictions = np.reshape(predictions, (eval_ds.queries_num, 5, 20))
        for q in range(eval_ds.queries_num):
            # votings, modify distances in-place
            top_n_voting('top1', predictions[q], distances[q], args.majority_weight)
            top_n_voting('top5', predictions[q], distances[q], args.majority_weight)
            top_n_voting('top10', predictions[q], distances[q], args.majority_weight)
            # flatten dist and preds from 5, 20 -> 20*5
            # and then proceed as usual to keep only first 20
            dists = distances[q].flatten()
            preds = predictions[q].flatten()
            # sort predictions by distance
            sort_idx = np.argsort(dists)
            preds = preds[sort_idx]
            # remove duplicated predictions, i.e. keep only the closest ones
            _, unique_idx = np.unique(preds, return_index=True)
            # unique_idx is sorted based on the unique values, sort it again
            # here the row corresponding to the first crop is used as a
            # 'buffer' for each query, and in the end the dimension
            # relative to crops is eliminated
            predictions[q, 0, :20] = preds[np.sort(unique_idx)][:20]
        predictions = predictions[:, 0, :20] # keep only the closer 20 predictions for each query
    #### For each query, check if the predictions are correct
    positives_per_query = eval_ds.get_positives()
    # args.recall_values by default is [1, 5, 10, 20]
    recalls = np.zeros(len(args.recall_values))
    for query_index, pred in enumerate(predictions):
        for i, n in enumerate(args.recall_values):
            if np.any(np.in1d(pred[:n], positives_per_query[query_index])):
                recalls[i:] += 1
                break
    # Divide by the number of queries*100, so the recalls are in percentages
    recalls = recalls / eval_ds.queries_num * 100
    recalls_str = ", ".join([f"R@{val}: {rec:.1f}" for val, rec in zip(args.recall_values, recalls)])
    return recalls, recalls_str
| 54.909804 | 121 | 0.644337 |
18107664baf7404f0465f06470c192a8803624ac | 355 | py | Python | insertionsort.py | emcd123/Matroids | f1ab7a5164a60b753ba429ef7ba9ce36517d4439 | [
"MIT"
] | null | null | null | insertionsort.py | emcd123/Matroids | f1ab7a5164a60b753ba429ef7ba9ce36517d4439 | [
"MIT"
] | null | null | null | insertionsort.py | emcd123/Matroids | f1ab7a5164a60b753ba429ef7ba9ce36517d4439 | [
"MIT"
] | 1 | 2021-11-21T18:03:07.000Z | 2021-11-21T18:03:07.000Z | import random
li=[]
for i in range(10):#creating a random list using code from blackboard
li=li+[random.randrange(0,50)]
print(li)
print(insertionSort(li))
#print(li)
| 18.684211 | 69 | 0.630986 |
1810948fff7ddb4956a7253f2de040223223f990 | 1,442 | py | Python | python-packages/hyperopt-0.0.2/hyperopt/tests/test_fmin.py | rajegannathan/grasp-lift-eeg-cat-dog-solution-updated | ee45bee6f96cdb6d91184abc16f41bba1546c943 | [
"BSD-3-Clause"
] | 2 | 2017-08-13T14:09:32.000Z | 2018-07-16T23:39:00.000Z | python-packages/hyperopt-0.0.2/hyperopt/tests/test_fmin.py | rajegannathan/grasp-lift-eeg-cat-dog-solution-updated | ee45bee6f96cdb6d91184abc16f41bba1546c943 | [
"BSD-3-Clause"
] | null | null | null | python-packages/hyperopt-0.0.2/hyperopt/tests/test_fmin.py | rajegannathan/grasp-lift-eeg-cat-dog-solution-updated | ee45bee6f96cdb6d91184abc16f41bba1546c943 | [
"BSD-3-Clause"
] | 2 | 2018-04-02T06:45:11.000Z | 2018-07-16T23:39:02.000Z | import nose.tools
from hyperopt import fmin, rand, tpe, hp, Trials, exceptions, space_eval
| 23.258065 | 72 | 0.489598 |
1810ed3f25b77f5724cfa46b09080dd25d3ba89c | 737 | py | Python | aaweb/__init__.py | cpelite/astorian-airways | 55498f308de7a4b8159519e191b492675ec5612a | [
"CC0-1.0"
] | null | null | null | aaweb/__init__.py | cpelite/astorian-airways | 55498f308de7a4b8159519e191b492675ec5612a | [
"CC0-1.0"
] | null | null | null | aaweb/__init__.py | cpelite/astorian-airways | 55498f308de7a4b8159519e191b492675ec5612a | [
"CC0-1.0"
] | 3 | 2020-04-14T20:46:50.000Z | 2021-03-11T19:07:20.000Z | # -*- coding: utf-8 -*-
import os
from datetime import timedelta
from flask import Flask, session
default_timezone = 'Europe/Berlin'
app = Flask(__name__, static_folder='../static', static_url_path='/static', template_folder="../templates/")
app.permanent_session_lifetime = timedelta(minutes=60)
app.config.update(
SESSION_COOKIE_NAME = "AAsession",
ERROR_LOG_FILE = "%s/app.log" % os.environ.get('OPENSHIFT_LOG_DIR', 'logs')
)
# VIEWS
import aaweb.views
import aaweb.forms
# API
import aaweb.api
# additional functionalities
import aaweb.error
import aaweb.log
| 20.472222 | 108 | 0.738128 |
1812cc808e8b51d1262a39abd3b6e4c2337c6ac5 | 1,528 | py | Python | Examples/Segmentation/WatershedSegmentation1.py | nalinimsingh/ITK_4D | 95a2eacaeaffe572889832ef0894239f89e3f303 | [
"Apache-2.0"
] | 3 | 2018-10-01T20:46:17.000Z | 2019-12-17T19:39:50.000Z | Examples/Segmentation/WatershedSegmentation1.py | nalinimsingh/ITK_4D | 95a2eacaeaffe572889832ef0894239f89e3f303 | [
"Apache-2.0"
] | null | null | null | Examples/Segmentation/WatershedSegmentation1.py | nalinimsingh/ITK_4D | 95a2eacaeaffe572889832ef0894239f89e3f303 | [
"Apache-2.0"
] | 4 | 2018-05-17T16:34:54.000Z | 2020-09-24T02:12:40.000Z | #==========================================================================
#
# Copyright Insight Software Consortium
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==========================================================================*/
import InsightToolkit as itk
import sys
reader = itk.itkImageFileReaderF2_New()
reader.SetFileName( sys.argv[1] )
diffusion = itk.itkGradientAnisotropicDiffusionImageFilterF2F2_New()
diffusion.SetInput(reader.GetOutput())
diffusion.SetTimeStep(0.0625)
diffusion.SetConductanceParameter(9.0)
diffusion.SetNumberOfIterations( 5 );
gradient = itk.itkGradientMagnitudeImageFilterF2F2_New()
gradient.SetInput(diffusion.GetOutput())
watershed = itk.itkWatershedImageFilterF2_New()
watershed.SetInput(gradient.GetOutput())
watershed.SetThreshold(0.01)
watershed.SetLevel(0.2)
writer = itk.itkImageFileWriterUL2_New()
writer.SetFileName( sys.argv[2] )
writer.SetInput( watershed.GetOutput() )
writer.Update()
| 33.217391 | 78 | 0.676702 |
1815ed2b6c358f6414fe0404d22b0c279e749b59 | 1,520 | py | Python | study_roadmaps/python_sample_examples/gluon/3_aux_functions/train.py | Shreyashwaghe/monk_v1 | 4ee4d9483e8ffac9b73a41f3c378e5abf5fc799b | [
"Apache-2.0"
] | 7 | 2020-07-26T08:37:29.000Z | 2020-10-30T10:23:11.000Z | study_roadmaps/python_sample_examples/gluon/3_aux_functions/train.py | mursalfk/monk_v1 | 62f34a52f242772186ffff7e56764e958fbcd920 | [
"Apache-2.0"
] | 9 | 2020-01-28T21:40:39.000Z | 2022-02-10T01:24:06.000Z | study_roadmaps/python_sample_examples/gluon/3_aux_functions/train.py | mursalfk/monk_v1 | 62f34a52f242772186ffff7e56764e958fbcd920 | [
"Apache-2.0"
] | 1 | 2020-10-07T12:57:44.000Z | 2020-10-07T12:57:44.000Z | import os
import sys
sys.path.append("../../../monk/");
import psutil
from gluon_prototype import prototype
gtf = prototype(verbose=1);
gtf.Prototype("sample-project-1", "sample-experiment-1");
gtf.Default(dataset_path="../../../monk/system_check_tests/datasets/dataset_cats_dogs_train",
model_name="resnet18_v1", freeze_base_network=True, num_epochs=2);
######################################################## Summary #####################################################
gtf.Summary()
###########################################################################################################################
##################################################### EDA - Find Num images per class #####################################
gtf.EDA(show_img=True, save_img=True);
###########################################################################################################################
##################################################### EDA - Find Missing and corrupted images #####################################
gtf.EDA(check_missing=True, check_corrupt=True);
###########################################################################################################################
##################################################### Estimate Training Time #####################################
gtf.Estimate_Train_Time(num_epochs=50);
########################################################################################################################### | 33.043478 | 131 | 0.309211 |
18173f17dd015c09e3b1cfc44c736b20bfea7170 | 126 | py | Python | ppa-mirror/config.py | elprup/ppa-mirror | 29e8a5027bbb698fcb36a250484b08ea945f65cf | [
"MIT"
] | null | null | null | ppa-mirror/config.py | elprup/ppa-mirror | 29e8a5027bbb698fcb36a250484b08ea945f65cf | [
"MIT"
] | null | null | null | ppa-mirror/config.py | elprup/ppa-mirror | 29e8a5027bbb698fcb36a250484b08ea945f65cf | [
"MIT"
] | 1 | 2021-03-04T13:43:34.000Z | 2021-03-04T13:43:34.000Z | cache_root = '/home/ubuntu/ppa-mirror/cache/'
mirror_root = '/home/ubuntu/ppa-mirror/repo'
http_proxy = "188.112.194.222:8080" | 42 | 45 | 0.746032 |
181aa4e686c7e2eb75b68979882bfaab2af06de9 | 3,031 | py | Python | downloader.py | tuxetuxe/downloader | 76a1ac01189a6946b15ac6f58661931551dfc0ef | [
"Apache-2.0"
] | 3 | 2016-11-09T13:02:46.000Z | 2020-06-04T10:38:11.000Z | downloader.py | tuxetuxe/downloader | 76a1ac01189a6946b15ac6f58661931551dfc0ef | [
"Apache-2.0"
] | null | null | null | downloader.py | tuxetuxe/downloader | 76a1ac01189a6946b15ac6f58661931551dfc0ef | [
"Apache-2.0"
] | null | null | null | import sys, getopt
import sched
import time
import csv
from pprint import pprint
import urllib, urllib2
from random import randint
import threading
proxies_file = ""
targets_file = ""
proxies = []
targets = []
scheduler = sched.scheduler(time.time, time.sleep)
if __name__ == "__main__":
main(sys.argv[1:]) | 25.470588 | 88 | 0.626856 |
181b018a34f9e83a9ca0468d516a71155390ba8b | 1,799 | py | Python | backend/api/views/utils.py | pm5/Disfactory | 2cceec2544b1bd5bb624882be626494d54a08119 | [
"MIT"
] | null | null | null | backend/api/views/utils.py | pm5/Disfactory | 2cceec2544b1bd5bb624882be626494d54a08119 | [
"MIT"
] | null | null | null | backend/api/views/utils.py | pm5/Disfactory | 2cceec2544b1bd5bb624882be626494d54a08119 | [
"MIT"
] | null | null | null | import random
from django.conf import settings
from django.db.models import Prefetch
from django.db.models.functions.math import Radians, Cos, ACos, Sin
from ..models import Factory, ReportRecord, Image, Document
def _get_nearby_factories(latitude, longitude, radius):
"""Return nearby factories based on position and search range."""
# ref: https://stackoverflow.com/questions/574691/mysql-great-circle-distance-haversine-formula
distance = 6371 * ACos(
Cos(Radians(latitude)) * Cos(Radians("lat")) * Cos(Radians("lng") - Radians(longitude))
+ Sin(Radians(latitude)) * Sin(Radians("lat"))
)
radius_km = radius
ids = Factory.objects.annotate(distance=distance).only("id").filter(distance__lt=radius_km).order_by("id")
if len(ids) > settings.MAX_FACTORY_PER_GET:
ids = _sample(ids, settings.MAX_FACTORY_PER_GET)
return (
Factory.objects.filter(id__in=[obj.id for obj in ids])
.prefetch_related(Prefetch('report_records', queryset=ReportRecord.objects.only("created_at").all()))
.prefetch_related(Prefetch('images', queryset=Image.objects.only("id").all()))
.prefetch_related(Prefetch('documents', queryset=Document.objects.only('created_at', 'display_status').all()))
.all()
)
| 36.714286 | 125 | 0.692051 |
181cfdf188f95cef8715790def585eab0fdb4f44 | 886 | py | Python | tests/test_pyros_schemas/test_decorators.py | pyros-dev/pyros-schemas | a460920260ee77a1b5b6d5c0b97df52f1572ff79 | [
"MIT"
] | 3 | 2018-01-01T17:10:16.000Z | 2018-11-15T15:41:46.000Z | tests/test_pyros_schemas/test_decorators.py | pyros-dev/pyros-schemas | a460920260ee77a1b5b6d5c0b97df52f1572ff79 | [
"MIT"
] | 7 | 2018-02-02T10:05:55.000Z | 2018-02-17T15:15:46.000Z | tests/test_pyros_schemas/test_decorators.py | pyros-dev/pyros-schemas | a460920260ee77a1b5b6d5c0b97df52f1572ff79 | [
"MIT"
] | 2 | 2017-09-27T09:46:31.000Z | 2018-02-02T09:37:13.000Z | from __future__ import absolute_import
from __future__ import print_function
import pytest
import std_srvs.srv as std_srvs
# public decorators
from pyros_schemas.ros import with_service_schemas
#
# Testing with_service_schemas decorator
#
# Just in case we run this directly
if __name__ == '__main__':
pytest.main([
'test_decorators.py::test_decorated_service'
])
| 22.717949 | 70 | 0.72912 |
181dd4525734f8cc34fa28f835971bb355463f95 | 516 | py | Python | src/removeElement.py | ianxin/algorithm | 22214b6c81bee926f5a1c74c9417b2e7edd3ceed | [
"MIT"
] | 2 | 2018-03-13T08:59:14.000Z | 2018-03-13T08:59:25.000Z | src/removeElement.py | ianxin/Algorithm | 22214b6c81bee926f5a1c74c9417b2e7edd3ceed | [
"MIT"
] | null | null | null | src/removeElement.py | ianxin/Algorithm | 22214b6c81bee926f5a1c74c9417b2e7edd3ceed | [
"MIT"
] | null | null | null | """
@param: A: A list of integers
@param: elem: An integer
@return: The new length after remove
"""
#list
#list list
| 23.454545 | 40 | 0.484496 |
181e8052c8ceced20aed0b9306fa76476c4461fb | 2,057 | py | Python | setup.py | codespider/flagon | d94a50844025ea88fd67dc7651c4a860c3be6d1a | [
"MIT"
] | 3 | 2018-08-29T19:01:10.000Z | 2018-09-14T16:07:30.000Z | setup.py | codespider/flagon | d94a50844025ea88fd67dc7651c4a860c3be6d1a | [
"MIT"
] | 8 | 2018-08-24T08:56:09.000Z | 2018-09-15T11:13:27.000Z | setup.py | codespider/flagon | d94a50844025ea88fd67dc7651c4a860c3be6d1a | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
import io
from collections import OrderedDict
with io.open('README.rst', 'rt', encoding='utf8') as f:
readme = f.read()
setup(
name='Flask-Wired',
version=get_version(),
license='MIT',
author='Karthikkannan Maruthamuthu',
author_email='karthikkannan@gmail.com',
maintainer='Karthikkannan Maruthamuthu',
maintainer_email='karthikkannan@gmail.com',
description='Package for Flask wiring.',
long_description=readme,
url='https://github.com/treebohotels/Flask-Wired',
project_urls=OrderedDict((
('Documentation', 'https://github.com/treebohotels/Flask-Wired'),
('Code', 'https://github.com/treebohotels/Flask-Wired'),
('Issue tracker', 'https://github.com/treebohotels/Flask-Wired/issues'),
)),
package_dir={'': '.'},
packages=find_packages(".", exclude=['tests', 'sample_app']),
include_package_data=True,
zip_safe=False,
platforms='any',
python_requires='>=3.6',
install_requires=[
'Flask==1.0.2',
'Flask-Script==2.0.6',
'Flask-Migrate==2.2.1',
'flask-marshmallow==0.9.0',
'Flask-SQLAlchemy==2.3.2',
'marshmallow-sqlalchemy==0.14.1',
'psycopg2==2.7.5',
],
entry_points={
},
test_suite="tests",
classifiers=[
'Development Status :: 1 - Planning',
'Environment :: Web Environment',
'Framework :: Flask',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| 31.646154 | 80 | 0.616918 |
181ed57e3eb39153ad141aa8f03aeb15ee7f7127 | 510 | py | Python | idManager/view/authentication_view.py | lgarciasbr/idm-api | 3517d29d55eb2a06fb5b4b21359b6cf6d11529a0 | [
"Apache-2.0"
] | 2 | 2018-01-14T22:43:43.000Z | 2018-01-14T22:43:48.000Z | idManager/view/authentication_view.py | lgarciasbr/idm-api | 3517d29d55eb2a06fb5b4b21359b6cf6d11529a0 | [
"Apache-2.0"
] | null | null | null | idManager/view/authentication_view.py | lgarciasbr/idm-api | 3517d29d55eb2a06fb5b4b21359b6cf6d11529a0 | [
"Apache-2.0"
] | null | null | null | from flask import jsonify
| 25.5 | 90 | 0.721569 |
181efed1a7997edb4c8e051cadb0058f5afd1105 | 604 | py | Python | setup.py | TheSriram/deuce | 9e8a7a342275aa02d0a59953b5a8c96ffb760b51 | [
"Apache-2.0"
] | null | null | null | setup.py | TheSriram/deuce | 9e8a7a342275aa02d0a59953b5a8c96ffb760b51 | [
"Apache-2.0"
] | null | null | null | setup.py | TheSriram/deuce | 9e8a7a342275aa02d0a59953b5a8c96ffb760b51 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
REQUIRES = ['six', 'pecan', 'setuptools >= 1.1.6',
'cassandra-driver', 'pymongo']
setup(
name='deuce',
version='0.1',
description='Deuce - Block-level de-duplication as-a-service',
author='Rackspace',
author_email='',
install_requires=REQUIRES,
test_suite='deuce',
zip_safe=False,
include_package_data=True,
packages=find_packages(exclude=['tests'])
)
| 25.166667 | 66 | 0.680464 |
1820ae4e6fd68c69f37f4266bffb6793e643a89a | 6,580 | py | Python | script.py | rahulkmr1/heroku-python-script | 053be38dc8c6c6ab9929ca5af772d19c57f5e498 | [
"MIT"
] | null | null | null | script.py | rahulkmr1/heroku-python-script | 053be38dc8c6c6ab9929ca5af772d19c57f5e498 | [
"MIT"
] | null | null | null | script.py | rahulkmr1/heroku-python-script | 053be38dc8c6c6ab9929ca5af772d19c57f5e498 | [
"MIT"
] | null | null | null | import telepot
import time
import requests
from bs4 import BeautifulSoup as bs
import cPickle
import csv
RAHUL_ID = 931906767
# You can leave this bit out if you're using a paid PythonAnywhere account
# proxy_url = "http://proxy.server:3128"
# telepot.api._pools = {
# 'default': urllib3.ProxyManager(proxy_url=proxy_url, num_pools=3, maxsize=10, retries=False, timeout=30),
# }
# telepot.api._onetime_pool_spec = (urllib3.ProxyManager, dict(proxy_url=proxy_url, num_pools=1, maxsize=1, retries=False, timeout=30))
# end of the stuff that's only needed for free accounts
########################
login_url = 'https://www.placement.iitbhu.ac.in/accounts/login/'
client = requests.session()
login = client.get(login_url)
login = bs(login.content, "html.parser")
payload = {
"login": "rahul.kumar.cse15@itbhu.ac.in",
"password": "rahulkmr",
"csrfmiddlewaretoken": login.input['value']
}
result = client.post(
login_url,
data = payload,
headers = dict(referer=login_url)
)
forum = client.get("https://www.placement.iitbhu.ac.in/forum/c/notice-board/2019-20/")
soup = bs(forum.content, "html.parser")
#load last message delivred to users
try:
with open("posts", "rb") as f:
posts = cPickle.load(f);
except Exception as e:
print e
posts = soup.findAll("td", "topic-name")
for i in range(len(posts)):
posts[i] = posts[i].a
posts.pop(0)
posts.pop(0)
updated = soup.findAll('td','topic-last-post')
# updated.pop()
# updated.pop(0)
#########################
bot = telepot.Bot('940251504:AAG19YYQYtkiEOCrW0fZETvmYQSskElARcc')
# chat_ids = {RAHUL_ID}
with open("IDs", "rb") as f:
chat_ids = cPickle.load(f)
print '#################No of IDs loaded: ', len(chat_ids)
####### Commands ########
#########################
command = {'/add':add_cmd, '/remove':remove_cmd, '/all':allPosts, '/recent':top}
bot.message_loop(handle)
print ('Listening ...')
# for chat_id in chat_ids:
# bot.sendMessage(chat_id, text='Server started', parse_mode="HTML")
bot.sendMessage(RAHUL_ID, text='Server started', parse_mode="HTML")
# Keep the program running.
if __name__ == '__main__':
main()
| 25.019011 | 173 | 0.655623 |
1824cd98e77d7661e6eb7f082d5655ec1a45fa19 | 1,607 | py | Python | examples/4-tensorflow-mnist/tensorflow_mnist/train.py | awcchungster/baklava | ad301afd7aa163ccf662efe08d00eeff68cdb667 | [
"Apache-2.0"
] | 3 | 2021-08-24T03:10:14.000Z | 2022-01-07T20:53:37.000Z | examples/4-tensorflow-mnist/tensorflow_mnist/train.py | awcchungster/baklava | ad301afd7aa163ccf662efe08d00eeff68cdb667 | [
"Apache-2.0"
] | 5 | 2021-07-15T20:19:26.000Z | 2021-08-18T23:26:46.000Z | examples/4-tensorflow-mnist/tensorflow_mnist/train.py | LaudateCorpus1/baklava | 0e029097983db6cea00a7d779b887b149975fbc4 | [
"Apache-2.0"
] | 5 | 2021-07-03T17:46:15.000Z | 2022-02-24T08:05:39.000Z | """
Train
=====
Defines functions which train models and write model artifacts to disk.
"""
from __future__ import print_function
import os
import tempfile
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
from tensorflow_mnist import model, paths
def train(path):
"""
Train a decision tree classifier using a floating point feature matrix and
a categorical classification target.
Arguments:
path (str): The path indicating where to save the final model artifacts
"""
# Construct the model graph
graph, x, y, step, initializer, accuracy, prediction = model.build()
# Start a training session
with tf.Session(graph=graph) as sess:
# Initialize the graph
sess.run(initializer)
# Train the model for 1000 steps
mnist = input_data.read_data_sets(tempfile.mkdtemp(), one_hot=True)
for _ in range(1000):
batch_xs, batch_ys = mnist.train.next_batch(100)
sess.run(step, feed_dict={x: batch_xs, y: batch_ys})
# Display accuracy measurement
print(sess.run(accuracy, feed_dict={x: mnist.test.images,
y: mnist.test.labels}))
# Save the variable data to disk
os.makedirs(path)
saver = tf.train.Saver()
saver.save(sess, path)
print('Success!')
def main():
"""
Load features and labels, train the neural network, and serialize model
artifact.
Note: This is the training entrypoint used by baklava!
"""
path = paths.model('mnist')
train(path)
| 26.344262 | 79 | 0.653391 |
18257b1e23725fb3440c7a7dd07da911552a0f1a | 16,942 | py | Python | google/cloud/binaryauthorization/v1/binaryauthorization-v1-py/google/cloud/binaryauthorization_v1/types/resources.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 7 | 2021-02-21T10:39:41.000Z | 2021-12-07T07:31:28.000Z | google/cloud/binaryauthorization/v1/binaryauthorization-v1-py/google/cloud/binaryauthorization_v1/types/resources.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 6 | 2021-02-02T23:46:11.000Z | 2021-11-15T01:46:02.000Z | google/cloud/binaryauthorization/v1/binaryauthorization-v1-py/google/cloud/binaryauthorization_v1/types/resources.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 4 | 2021-01-28T23:25:45.000Z | 2021-08-30T01:55:16.000Z | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.cloud.binaryauthorization.v1',
manifest={
'Policy',
'AdmissionWhitelistPattern',
'AdmissionRule',
'Attestor',
'UserOwnedGrafeasNote',
'PkixPublicKey',
'AttestorPublicKey',
},
)
__all__ = tuple(sorted(__protobuf__.manifest))
| 37.986547 | 156 | 0.656416 |
1825d71ce3841cab87835439bc5331f28ba2643a | 4,841 | py | Python | builtinPlugins/plugin_spending.py | jscherer26/Icarra | 5bc8b298ae21dcde7e8e2253b9ed9db95fd0d164 | [
"BSD-3-Clause"
] | 1 | 2021-11-09T04:36:57.000Z | 2021-11-09T04:36:57.000Z | builtinPlugins/plugin_spending.py | jscherer26/Icarra | 5bc8b298ae21dcde7e8e2253b9ed9db95fd0d164 | [
"BSD-3-Clause"
] | null | null | null | builtinPlugins/plugin_spending.py | jscherer26/Icarra | 5bc8b298ae21dcde7e8e2253b9ed9db95fd0d164 | [
"BSD-3-Clause"
] | 2 | 2020-03-28T02:55:19.000Z | 2021-11-09T04:37:08.000Z | # Copyright (c) 2006-2010, Jesse Liesch
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE IMPLIED
# DISCLAIMED. IN NO EVENT SHALL JESSE LIESCH BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import copy
from editGrid import *
from plugin import *
from portfolio import *
import appGlobal
| 33.157534 | 148 | 0.736625 |
18299c6187e63ee39b775b8ca8e59d659c576c75 | 5,913 | py | Python | pyro_examples/dpgmm_full.py | hanyas/pyro_examples | 7c8784bd9ac498cfaf2983da158a8209db21966e | [
"MIT"
] | 1 | 2021-01-05T04:58:10.000Z | 2021-01-05T04:58:10.000Z | pyro_examples/dpgmm_full.py | hanyas/pyro_examples | 7c8784bd9ac498cfaf2983da158a8209db21966e | [
"MIT"
] | null | null | null | pyro_examples/dpgmm_full.py | hanyas/pyro_examples | 7c8784bd9ac498cfaf2983da158a8209db21966e | [
"MIT"
] | null | null | null | import torch
from torch.distributions import Gamma
import torch.nn.functional as F
import matplotlib.pyplot as plt
from tqdm import tqdm
from pyro.distributions import *
import pyro
from pyro.optim import Adam
from pyro.infer import SVI, Trace_ELBO, Predictive
assert pyro.__version__.startswith('1')
pyro.enable_validation(True)
pyro.set_rng_seed(1337)
torch.set_num_threads(1)
# device = torch.device("cuda:0") if torch.cuda.is_available() else torch.device("cpu")
device = torch.device("cpu")
data = torch.cat((MultivariateNormal(-2 * torch.ones(2), 0.1 * torch.eye(2)).sample([25]),
MultivariateNormal(2 * torch.ones(2), 0.1 * torch.eye(2)).sample([25]),
MultivariateNormal(torch.tensor([0., 0.]), 0.1 * torch.eye(2)).sample([25])))
data = data.to(device)
N = data.shape[0]
D = data.shape[1]
T = 5
optim = Adam({"lr": 0.01})
svi = SVI(model, guide, optim, loss=Trace_ELBO(num_particles=35))
alpha = 0.1 * torch.ones(1, device=device)
elbo = train(5000)
plt.figure()
plt.plot(elbo)
| 33.596591 | 121 | 0.641468 |
1829f18c9a4a6999de1f057e3d27520859bfe66b | 539 | py | Python | calplus/tests/unit/v1/test_utils.py | nghiadt16/CALplus | 68c108e6abf7eeac4937b870dc7462dd6ee2fcc3 | [
"Apache-2.0"
] | null | null | null | calplus/tests/unit/v1/test_utils.py | nghiadt16/CALplus | 68c108e6abf7eeac4937b870dc7462dd6ee2fcc3 | [
"Apache-2.0"
] | 4 | 2017-04-05T16:14:07.000Z | 2018-12-14T14:19:15.000Z | calplus/tests/unit/v1/test_utils.py | nghiadt16/CALplus | 68c108e6abf7eeac4937b870dc7462dd6ee2fcc3 | [
"Apache-2.0"
] | 2 | 2017-04-18T16:53:58.000Z | 2018-12-04T05:42:51.000Z | from calplus.tests import base
from calplus.v1 import utils
| 22.458333 | 44 | 0.641929 |
182a6b769a1cd6d38014902642d94977a040e698 | 4,213 | py | Python | luna_pathology/cli/load_slide.py | msk-mind-apps/luna-pathology | f0e17ccfeb3dc93de150aed5bbef9fcd7443d6d0 | [
"Apache-2.0"
] | null | null | null | luna_pathology/cli/load_slide.py | msk-mind-apps/luna-pathology | f0e17ccfeb3dc93de150aed5bbef9fcd7443d6d0 | [
"Apache-2.0"
] | 3 | 2021-07-21T20:28:37.000Z | 2021-08-02T18:52:32.000Z | luna_pathology/cli/load_slide.py | msk-mind-apps/luna-pathology | f0e17ccfeb3dc93de150aed5bbef9fcd7443d6d0 | [
"Apache-2.0"
] | null | null | null |
# General imports
import os, json, logging
import click
from pathlib import Path
import yaml
# From common
from luna_core.common.custom_logger import init_logger
from luna_core.common.DataStore import DataStore_v2
from luna_core.common.Node import Node
from luna_core.common.config import ConfigSet
from luna_core.common.sparksession import SparkConfig
def load_slide_with_datastore(app_config, datastore_id, method_data):
"""Load a slide to the datastore from the whole slide image table.
Args:
app_config (string): path to application configuration file.
datastore_id (string): datastore name. usually a slide id.
method_data (dict): method parameters including input, output details.
Returns:
None
"""
logger = logging.getLogger(f"[datastore={datastore_id}]")
# Do some setup
cfg = ConfigSet("APP_CFG", config_file=app_config)
datastore = DataStore_v2(method_data["datastore_path"])
method_id = method_data["job_tag"]
# fetch patient_id column
patient_id_column = method_data.get("patient_id_column_name", None)
if patient_id_column == "": patient_id_column = None
try:
spark = SparkConfig().spark_session("APP_CFG", "query_slide")
slide_id = datastore_id
if patient_id_column:
# assumes if patient_id column, source is parquet from dremio
# right now has nested row-type into dict, todo: account for map type representation of dict in dremio
df = spark.read.parquet(method_data['table_path'])\
.where(f"UPPER(slide_id)='{slide_id}'")\
.select("path", "metadata", patient_id_column)\
.toPandas()
if not len(df) == 1:
print(df)
raise ValueError(f"Resulting query record is not singular, multiple scan's exist given the container address {slide_id}")
record = df.loc[0]
properties = record['metadata']
properties['patient_id'] = str(record[patient_id_column])
else:
df = spark.read.format("delta").load(method_data['table_path'])\
.where(f"UPPER(slide_id)='{slide_id}'")\
.select("path", "metadata")\
.toPandas()
if not len(df) == 1:
print(df)
raise ValueError(f"Resulting query record is not singular, multiple scan's exist given the container address {slide_id}")
record = df.loc[0]
properties = record['metadata']
spark.stop()
except Exception as e:
logger.exception (f"{e}, stopping job execution...")
raise e
# Put results in the data store
data_path = Path(record['path'].split(':')[-1])
print(data_path)
datastore.put(data_path, datastore_id, method_id, "WholeSlideImage", symlink=True)
with open(os.path.join(method_data["datastore_path"], datastore_id, method_id, "WholeSlideImage", "metadata.json"), "w") as fp:
json.dump(properties, fp)
if __name__ == "__main__":
cli()
| 36.318966 | 137 | 0.657963 |
182ab8edcc4ae73b49deea3cf51426229fb8e5ad | 442 | py | Python | classifiers/CornerDetector.py | Vivek2018/OSM_Building-Detection-Custom-Repo | 278b1f5a46e49cb547162d495979056c36945e43 | [
"MIT"
] | null | null | null | classifiers/CornerDetector.py | Vivek2018/OSM_Building-Detection-Custom-Repo | 278b1f5a46e49cb547162d495979056c36945e43 | [
"MIT"
] | null | null | null | classifiers/CornerDetector.py | Vivek2018/OSM_Building-Detection-Custom-Repo | 278b1f5a46e49cb547162d495979056c36945e43 | [
"MIT"
] | null | null | null | import numpy as np
import cv2
from matplotlib import pyplot as plt
image = cv2.imread('champaigneditedcompressed.png')
kernel = np.ones((20, 20), np.float32) / 25
img = cv2.filter2D(image, -1, kernel)
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
corners = cv2.goodFeaturesToTrack(gray,10,0.01,10)
corners = np.int0(corners)
print(corners)
for i in corners:
x,y = i.ravel()
cv2.circle(img,(x,y),3,255,-1)
plt.imshow(img),plt.show()
| 23.263158 | 51 | 0.714932 |
182bb85b10503c8fb7bd8a2c09551b2160fe497c | 25,581 | py | Python | ECUSimulation/io_processing/surveillance_handler.py | arturmrowca/IVNS | 8915142d16debe4af780a9eb6859e44dea2ca7e6 | [
"MIT"
] | null | null | null | ECUSimulation/io_processing/surveillance_handler.py | arturmrowca/IVNS | 8915142d16debe4af780a9eb6859e44dea2ca7e6 | [
"MIT"
] | null | null | null | ECUSimulation/io_processing/surveillance_handler.py | arturmrowca/IVNS | 8915142d16debe4af780a9eb6859e44dea2ca7e6 | [
"MIT"
] | 2 | 2018-08-04T07:43:51.000Z | 2018-12-14T14:59:46.000Z | '''
Created on 12 Jun, 2015
@author: artur.mrowca
'''
from enum import Enum
from PyQt5.Qt import QObject
from PyQt5 import QtCore
from tools.ecu_logging import ECULogger
import copy
| 46.008993 | 176 | 0.692741 |
182bd0de90019e26f6a862933d6591b76c148320 | 1,994 | py | Python | breadp/checks/pid.py | tgweber/breadp | 12b97b9d2d997b1345a8e026690d57b3286a04d0 | [
"Apache-2.0"
] | null | null | null | breadp/checks/pid.py | tgweber/breadp | 12b97b9d2d997b1345a8e026690d57b3286a04d0 | [
"Apache-2.0"
] | null | null | null | breadp/checks/pid.py | tgweber/breadp | 12b97b9d2d997b1345a8e026690d57b3286a04d0 | [
"Apache-2.0"
] | null | null | null | ################################################################################
# Copyright: Tobias Weber 2019
#
# Apache 2.0 License
#
# This file contains all code related to pid check objects
#
################################################################################
import re
import requests
from breadp.checks import Check
from breadp.checks.result import BooleanResult
| 28.898551 | 80 | 0.533099 |
182cba7e9952331f563ef145511a6c92d1f0f8eb | 495 | py | Python | tests/infrastructure/persistence/test_holiday_dynamo_repository.py | gabrielleandro0801/holidays-importer | 4a698ded80048ee37161b1f1ff4b4af64f085ab7 | [
"MIT"
] | null | null | null | tests/infrastructure/persistence/test_holiday_dynamo_repository.py | gabrielleandro0801/holidays-importer | 4a698ded80048ee37161b1f1ff4b4af64f085ab7 | [
"MIT"
] | null | null | null | tests/infrastructure/persistence/test_holiday_dynamo_repository.py | gabrielleandro0801/holidays-importer | 4a698ded80048ee37161b1f1ff4b4af64f085ab7 | [
"MIT"
] | null | null | null | from unittest import TestCase
from src.domain.holiday import Holiday
import src.infrastructure.persistence.holiday_dynamo_repository as repository
HOLIDAY = Holiday(
date='2021-12-25',
name='Natal',
category='NATIONAL'
)
| 29.117647 | 83 | 0.779798 |
182e6f7b7c70dcc5da411a03395acac1d83ee9e9 | 3,136 | py | Python | src/models/Models.py | nbrutti/uol-export | c79a1a6b5c68e61a85952a60b935943aec27cdda | [
"MIT"
] | null | null | null | src/models/Models.py | nbrutti/uol-export | c79a1a6b5c68e61a85952a60b935943aec27cdda | [
"MIT"
] | null | null | null | src/models/Models.py | nbrutti/uol-export | c79a1a6b5c68e61a85952a60b935943aec27cdda | [
"MIT"
] | null | null | null | from config.defs import *
import peewee
db = peewee.SqliteDatabase(DATABASE_NAME)
### Relacionamentos ###
db.create_tables([Partida, Substituicao, Penalti, CartaoAmarelo, CartaoVermelho, GolContra, Gol, Time])
db.create_tables([PartidasSubstituicoes, PartidasPenaltis, PartidasCartoesAmarelos, PartidasCartoesVermelhos, PartidasGolsContra, PartidasGols]) | 24.888889 | 144 | 0.748724 |
182eadd7acbf4364e0c9b88cd120533f1ae8e1e3 | 1,165 | py | Python | quantnn/__init__.py | simonpf/qrnn | 1de11ce8cede6b4b3de0734bcc8c198c10226188 | [
"MIT"
] | null | null | null | quantnn/__init__.py | simonpf/qrnn | 1de11ce8cede6b4b3de0734bcc8c198c10226188 | [
"MIT"
] | 3 | 2022-01-11T08:41:03.000Z | 2022-02-11T14:25:09.000Z | quantnn/__init__.py | simonpf/qrnn | 1de11ce8cede6b4b3de0734bcc8c198c10226188 | [
"MIT"
] | 5 | 2020-12-11T03:18:32.000Z | 2022-02-14T10:32:09.000Z | r"""
=======
quantnn
=======
The quantnn package provides functionality for probabilistic modeling and prediction
using deep neural networks.
The two main features of the quantnn package are implemented by the
:py:class:`~quantnn.qrnn.QRNN` and :py:class:`~quantnn.qrnn.DRNN` classes, which implement
quantile regression neural networks (QRNNs) and density regression neural networks (DRNNs),
respectively.
The modules :py:mod:`quantnn.quantiles` and :py:mod:`quantnn.density` provide generic
(backend agnostic) functions to manipulate probabilistic predictions.
"""
import logging as _logging
import os
from rich.logging import RichHandler
from quantnn.neural_network_model import set_default_backend, get_default_backend
from quantnn.qrnn import QRNN
from quantnn.drnn import DRNN
from quantnn.quantiles import (
cdf,
pdf,
posterior_mean,
probability_less_than,
probability_larger_than,
sample_posterior,
sample_posterior_gaussian,
quantile_loss,
)
_LOG_LEVEL = os.environ.get("QUANTNN_LOG_LEVEL", "WARNING").upper()
_logging.basicConfig(
level=_LOG_LEVEL, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()]
)
| 29.871795 | 91 | 0.775107 |
182f0fecd4c6abc4561282446bbffe0f48f4cc60 | 805 | py | Python | habitat_baselines/motion_planning/robot_target.py | srama2512/habitat-api | bc85d0961cef3b4a08bc9263869606109fb6ff0a | [
"MIT"
] | 355 | 2020-08-18T03:48:26.000Z | 2022-03-30T00:22:50.000Z | habitat_baselines/motion_planning/robot_target.py | srama2512/habitat-api | bc85d0961cef3b4a08bc9263869606109fb6ff0a | [
"MIT"
] | 328 | 2020-08-12T21:25:09.000Z | 2022-03-31T10:39:21.000Z | habitat_baselines/motion_planning/robot_target.py | srama2512/habitat-api | bc85d0961cef3b4a08bc9263869606109fb6ff0a | [
"MIT"
] | 159 | 2020-08-12T22:23:36.000Z | 2022-03-30T22:56:52.000Z | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import attr
import magnum as mn
import numpy as np
| 23.676471 | 75 | 0.70559 |
18323906f8da6c858e162af77f828aa7dc3d5141 | 1,314 | py | Python | leetcode/445.Add_Two_Numbers_II/python/add_two_numbers_v1.py | realXuJiang/research_algorithms | 8f2876288cb607b9eddb2aa75f51a1d574b51ec4 | [
"Apache-2.0"
] | 1 | 2019-08-12T09:32:30.000Z | 2019-08-12T09:32:30.000Z | leetcode/445.Add_Two_Numbers_II/python/add_two_numbers_v1.py | realXuJiang/research_algorithms | 8f2876288cb607b9eddb2aa75f51a1d574b51ec4 | [
"Apache-2.0"
] | null | null | null | leetcode/445.Add_Two_Numbers_II/python/add_two_numbers_v1.py | realXuJiang/research_algorithms | 8f2876288cb607b9eddb2aa75f51a1d574b51ec4 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
#-*- coding: utf-8 -*-
if __name__ == "__main__":
tn = TwoNumbers()
l1 = tn.builderListNode(1234)
l2 = tn.builderListNode(34)
tn.printLS(tn.addTwoNumbers(l1, l2))
| 22.655172 | 42 | 0.47793 |
1833d1d97b94601d7c7672bd7240b57d03e2cddf | 3,961 | py | Python | recsys/util/feature_helper.py | manazhao/tf_recsys | 6053712d11165c068e5d618989f716b2a0f60186 | [
"Apache-2.0"
] | 1 | 2019-04-20T15:05:37.000Z | 2019-04-20T15:05:37.000Z | recsys/util/feature_helper.py | manazhao/tf_recsys | 6053712d11165c068e5d618989f716b2a0f60186 | [
"Apache-2.0"
] | null | null | null | recsys/util/feature_helper.py | manazhao/tf_recsys | 6053712d11165c068e5d618989f716b2a0f60186 | [
"Apache-2.0"
] | null | null | null | import logging
import tensorflow as tf
import recsys.util.proto.config_pb2 as config
# Constructs a tf.Example with feature dictionary where key is feature name and
# value is tf.train.Feature
# Reads batched features and labels from given files, and consumes them through
# callback function "consum_batch_fn".
# feature_spec: dictionary specifying the type of each feature.
# input_config: settings for generating batched features and labels.
# consume_batch_fn: callback function that defines the consumption of the
# batched features and labels. | 39.61 | 124 | 0.789447 |
18343ff0759e4173734193d8fad780c280807cc1 | 1,894 | py | Python | components/handlers/star_modules.py | nus-mtp/another-cs-study-planner | 02b52871a34f580b779ede08750f2d4e887bcf65 | [
"MIT"
] | 1 | 2017-04-30T17:59:08.000Z | 2017-04-30T17:59:08.000Z | components/handlers/star_modules.py | nus-mtp/another-cs-study-planner | 02b52871a34f580b779ede08750f2d4e887bcf65 | [
"MIT"
] | 87 | 2017-02-13T09:06:13.000Z | 2017-04-14T09:23:08.000Z | components/handlers/star_modules.py | nus-mtp/another-cs-study-planner | 02b52871a34f580b779ede08750f2d4e887bcf65 | [
"MIT"
] | 1 | 2017-04-11T05:26:00.000Z | 2017-04-11T05:26:00.000Z | '''
This module handles starring of modules.
'''
import web
from app import RENDER
from components import model, session
| 33.22807 | 90 | 0.594509 |
183882e7bff2e8589b66d5bada377b9d753cd440 | 27,362 | py | Python | src/features/smarterdb.py | cnr-ibba/SMARTER-database | 837f7d514c33e458ad0e39e26784c761df29e004 | [
"MIT"
] | null | null | null | src/features/smarterdb.py | cnr-ibba/SMARTER-database | 837f7d514c33e458ad0e39e26784c761df29e004 | [
"MIT"
] | 44 | 2021-05-25T16:00:34.000Z | 2022-03-12T01:12:45.000Z | src/features/smarterdb.py | cnr-ibba/SMARTER-database | 837f7d514c33e458ad0e39e26784c761df29e004 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 23 16:21:35 2021
@author: Paolo Cozzi <paolo.cozzi@ibba.cnr.it>
"""
import os
import logging
import pathlib
import pycountry
import mongoengine
from enum import Enum
from typing import Union
from pymongo import database, ReturnDocument
from dotenv import find_dotenv, load_dotenv
from .utils import get_project_dir
SPECIES2CODE = {
"Sheep": "OA",
"Goat": "CH"
}
SMARTERDB = "smarter"
DB_ALIAS = "smarterdb"
# Get an instance of a logger
logger = logging.getLogger(__name__)
def get_or_create_breed(
species: str, name: str, code: str, aliases: list = []):
logger.debug(f"Checking: '{species}':'{name}':'{code}'")
# get a breed object relying on parameters
qs = Breed.objects(species=species, name=name, code=code)
modified = False
if qs.count() == 1:
breed = qs.get()
logger.debug(f"Got {breed}")
for alias in aliases:
if alias not in breed.aliases:
# track for update
modified = True
logger.info(f"Adding '{alias}' to '{breed}' aliases")
breed.aliases.append(alias)
elif qs.count() == 0:
logger.debug("Create a new breed object")
modified = True
breed = Breed(
species=species,
name=name,
code=code,
aliases=aliases,
n_individuals=0
)
else:
# should never see this relying on collection unique keys
raise SmarterDBException(
f"Got {qs.count()} results for '{species}':'{name}':'{code}'")
if modified:
logger.debug(f"Save '{breed}' to database")
breed.save()
return breed, modified
def get_or_create_sample(
SampleSpecies: Union[SampleGoat, SampleSheep],
original_id: str,
dataset: Dataset,
type_: str,
breed: Breed,
country: str,
chip_name: str = None,
sex: SEX = None,
alias: str = None) -> Union[SampleGoat, SampleSheep]:
"""Get or create a sample providing attributes (search for original_id in
provided dataset
Args:
SampleSpecies: (Union[SampleGoat, SampleSheep]): the class required
for insert/update
original_id (str): The original_id in the dataset
dataset (Dataset): the dataset instance used to register sample
type_ (str): "background" or "foreground"
breed (Breed): A breed instance
country (str): Country as a string
chip_name (str): the chip name
sex (SEX): A SEX instance
alias (str): an original_id alias
Returns:
Union[SampleGoat, SampleSheep]: a SampleSpecies instance
"""
created = False
# search for sample in database
qs = SampleSpecies.objects(
original_id=original_id, dataset=dataset)
if qs.count() == 1:
logger.debug(f"Sample '{original_id}' found in database")
sample = qs.get()
elif qs.count() == 0:
# insert sample into database
logger.info(f"Registering sample '{original_id}' in database")
sample = SampleSpecies(
original_id=original_id,
country=country,
species=dataset.species,
breed=breed.name,
breed_code=breed.code,
dataset=dataset,
type_=type_,
chip_name=chip_name,
sex=sex,
alias=alias
)
sample.save()
# incrementing breed n_individuals counter
breed.n_individuals += 1
breed.save()
created = True
else:
raise SmarterDBException(
f"Got {qs.count()} results for '{original_id}'")
return sample, created
def get_sample_type(dataset: Dataset):
"""
test if foreground or background dataset
Args:
dataset (Dataset): the dataset instance used to register sample
Returns:
str: sample type ("background" or "foreground")
"""
type_ = None
for sampletype in SAMPLETYPE:
if sampletype.value in dataset.type_:
logger.debug(
f"Found {sampletype.value} in {dataset.type_}")
type_ = sampletype.value
break
return type_
def __eq__(self, other):
if super().__eq__(other):
return True
else:
# check by positions
for attribute in ["chrom", "position"]:
if getattr(self, attribute) != getattr(other, attribute):
return False
# check genotype equality
if self.illumina_top != other.illumina_top:
return False
return True
def __check_coding(self, genotype: list, coding: str, missing: str):
"""Internal method to check genotype coding"""
# get illumina data as an array
data = getattr(self, coding).split("/")
for allele in genotype:
# mind to missing values. If missing can't be equal to illumina_top
if allele in missing:
continue
if allele not in data:
return False
return True
def is_top(self, genotype: list, missing: list = ["0", "-"]) -> bool:
"""Return True if genotype is compatible with illumina TOP coding
Args:
genotype (list): a list of two alleles (ex ['A','C'])
missing (str): missing allele string (def "0")
Returns:
bool: True if in top coordinates
"""
return self.__check_coding(genotype, "illumina_top", missing)
def is_forward(self, genotype: list, missing: list = ["0", "-"]) -> bool:
"""Return True if genotype is compatible with illumina FORWARD coding
Args:
genotype (list): a list of two alleles (ex ['A','C'])
missing (str): missing allele string (def "0")
Returns:
bool: True if in top coordinates
"""
return self.__check_coding(genotype, "illumina_forward", missing)
def is_ab(self, genotype: list, missing: list = ["0", "-"]) -> bool:
"""Return True if genotype is compatible with illumina AB coding
Args:
genotype (list): a list of two alleles (ex ['A','B'])
missing (str): missing allele string (def "-")
Returns:
bool: True if in top coordinates
"""
for allele in genotype:
# mind to missing valies
if allele not in ["A", "B"] + missing:
return False
return True
def is_affymetrix(
self, genotype: list, missing: list = ["0", "-"]) -> bool:
"""Return True if genotype is compatible with affymetrix coding
Args:
genotype (list): a list of two alleles (ex ['A','C'])
missing (str): missing allele string (def "0")
Returns:
bool: True if in top coordinates
"""
return self.__check_coding(genotype, "affymetrix_ab", missing)
def forward2top(self, genotype: list, missing: list = ["0", "-"]) -> list:
"""Convert an illumina forward SNP in a illumina top snp
Args:
genotype (list): a list of two alleles (ex ['A','C'])
missing (str): missing allele string (def "0")
Returns:
list: The genotype in top format
"""
# get illumina data as an array
forward = self.illumina_forward.split("/")
top = self.illumina_top.split("/")
result = []
for allele in genotype:
# mind to missing values
if allele in missing:
result.append("0")
elif allele not in forward:
raise SmarterDBException(
f"{genotype} is not in forward coding")
else:
result.append(top[forward.index(allele)])
return result
def ab2top(self, genotype: list, missing: list = ["0", "-"]) -> list:
"""Convert an illumina ab SNP in a illumina top snp
Args:
genotype (list): a list of two alleles (ex ['A','B'])
missing (str): missing allele string (def "-")
Returns:
list: The genotype in top format
"""
# get illumina data as a dict
top = self.illumina_top.split("/")
top = {"A": top[0], "B": top[1]}
result = []
for allele in genotype:
# mind to missing values
if allele in missing:
result.append("0")
elif allele not in ["A", "B"]:
raise SmarterDBException(
f"{genotype} is not in ab coding")
else:
result.append(top[allele])
return result
def affy2top(self, genotype: list, missing: list = ["0", "-"]) -> list:
"""Convert an affymetrix SNP in a illumina top snp
Args:
genotype (list): a list of two alleles (ex ['A','C'])
missing (str): missing allele string (def "0")
Returns:
list: The genotype in top format
"""
# get illumina data as an array
affymetrix = self.affymetrix_ab.split("/")
top = self.illumina_top.split("/")
result = []
for allele in genotype:
# mind to missing values
if allele in missing:
result.append("0")
elif allele not in affymetrix:
raise SmarterDBException(
f"{genotype} is not in affymetrix coding")
else:
result.append(top[affymetrix.index(allele)])
return result
class VariantSpecies(mongoengine.Document):
rs_id = mongoengine.StringField()
chip_name = mongoengine.ListField(mongoengine.StringField())
name = mongoengine.StringField(unique=True)
# sequence should model both illumina or affymetrix sequences
sequence = mongoengine.DictField()
locations = mongoengine.ListField(
mongoengine.EmbeddedDocumentField(Location))
# HINT: should sender be a Location attribute?
sender = mongoengine.StringField()
# Affymetryx specific fields
# more probe could be assigned to the same SNP
probeset_id = mongoengine.ListField(mongoengine.StringField())
affy_snp_id = mongoengine.StringField()
cust_id = mongoengine.StringField()
# abstract class with custom indexes
# TODO: need a index for position (chrom, position, version)
meta = {
'abstract': True,
'indexes': [
{
'fields': [
"locations.chrom",
"locations.position"
],
},
'probeset_id',
'rs_id'
]
}
def save(self, *args, **kwargs):
"""Custom save method. Deal with variant name before save"""
if not self.name and self.affy_snp_id:
logger.debug(f"Set variant name to {self.affy_snp_id}")
self.name = self.affy_snp_id
# default save method
super(VariantSpecies, self).save(*args, **kwargs)
def get_location_index(self, version: str, imported_from='SNPchiMp v.3'):
"""Returns location index for assembly version and imported source
Args:
version (str): assembly version (ex: 'Oar_v3.1')
imported_from (str): coordinates source (ex: 'SNPchiMp v.3')
Returns:
int: the index of the location requested
"""
for index, location in enumerate(self.locations):
if (location.version == version and
location.imported_from == imported_from):
return index
raise SmarterDBException(
f"Location '{version}' '{imported_from}' is not in locations"
)
def get_location(self, version: str, imported_from='SNPchiMp v.3'):
"""Returns location for assembly version and imported source
Args:
version (str): assembly version (ex: 'Oar_v3.1')
imported_from (str): coordinates source (ex: 'SNPchiMp v.3')
Returns:
Location: the genomic coordinates
"""
locations = list(filter(custom_filter, self.locations))
if len(locations) != 1:
raise SmarterDBException(
"Couldn't determine a unique location for "
f"'{self.name}' '{version}' '{imported_from}'")
return locations[0]
class VariantSheep(VariantSpecies):
meta = {
'db_alias': DB_ALIAS,
'collection': 'variantSheep'
}
class VariantGoat(VariantSpecies):
meta = {
'db_alias': DB_ALIAS,
'collection': 'variantGoat'
}
| 28.472425 | 121 | 0.604853 |
1838c0e9c32271122443074ccc035f2557452781 | 6,143 | py | Python | test/utils/multi_objective/test_box_decomposition.py | SamuelMarks/botorch | 7801e2f56dc447322b2b6c92cab683d8900e4c7f | [
"MIT"
] | 2 | 2021-01-11T18:16:27.000Z | 2021-11-30T09:34:44.000Z | test/utils/multi_objective/test_box_decomposition.py | SamuelMarks/botorch | 7801e2f56dc447322b2b6c92cab683d8900e4c7f | [
"MIT"
] | 17 | 2020-12-11T20:07:22.000Z | 2022-03-27T16:46:42.000Z | test/utils/multi_objective/test_box_decomposition.py | SamuelMarks/botorch | 7801e2f56dc447322b2b6c92cab683d8900e4c7f | [
"MIT"
] | null | null | null | #! /usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import annotations
import torch
from botorch.exceptions.errors import BotorchError, BotorchTensorDimensionError
from botorch.utils.multi_objective.box_decomposition import NondominatedPartitioning
from botorch.utils.testing import BotorchTestCase
| 40.414474 | 86 | 0.453524 |
183903f43cbf11f71276277d26afb62e4bb54ab6 | 34,139 | py | Python | tests/pyupgrade_test.py | sloria/pyupgrade | 18c625150c7118d05e6f15facf77a0423b764230 | [
"MIT"
] | null | null | null | tests/pyupgrade_test.py | sloria/pyupgrade | 18c625150c7118d05e6f15facf77a0423b764230 | [
"MIT"
] | null | null | null | tests/pyupgrade_test.py | sloria/pyupgrade | 18c625150c7118d05e6f15facf77a0423b764230 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import ast
import sys
import pytest
from pyupgrade import _fix_dict_set
from pyupgrade import _fix_escape_sequences
from pyupgrade import _fix_format_literals
from pyupgrade import _fix_fstrings
from pyupgrade import _fix_new_style_classes
from pyupgrade import _fix_percent_format
from pyupgrade import _fix_six
from pyupgrade import _fix_super
from pyupgrade import _fix_tokens
from pyupgrade import _fix_unicode_literals
from pyupgrade import _imports_unicode_literals
from pyupgrade import _is_bytestring
from pyupgrade import _percent_to_format
from pyupgrade import _simplify_conversion_flag
from pyupgrade import main
from pyupgrade import parse_format
from pyupgrade import parse_percent_format
from pyupgrade import unparse_parsed_string
def _has_16806_bug():
# See https://bugs.python.org/issue16806
return ast.parse('"""\n"""').body[0].value.col_offset == -1
def test_main_trivial():
assert main(()) == 0
| 27.982787 | 79 | 0.42365 |
1839ffd1101b5584269c5f29639d17cc7d6a6e7c | 194 | py | Python | Preprocessing/preprocessing.py | nadineazhalia/CSH4H3-TEXT-MINING | 77b2ffb862314d664f575757a40038cc69f86c60 | [
"Apache-2.0"
] | null | null | null | Preprocessing/preprocessing.py | nadineazhalia/CSH4H3-TEXT-MINING | 77b2ffb862314d664f575757a40038cc69f86c60 | [
"Apache-2.0"
] | null | null | null | Preprocessing/preprocessing.py | nadineazhalia/CSH4H3-TEXT-MINING | 77b2ffb862314d664f575757a40038cc69f86c60 | [
"Apache-2.0"
] | null | null | null | file_berita = open("berita.txt", "r")
berita = file_berita.read()
berita = berita.split()
berita = [x.lower() for x in berita]
berita = list(set(berita))
berita = sorted(berita)
print (berita) | 21.555556 | 37 | 0.695876 |
183a36737605defc576589d45932fdf08d365a08 | 2,139 | py | Python | demo_scripts/charts/bar_chart_index_translator_demo.py | webclinic017/qf-lib | 96463876719bba8a76c8269cef76addf3a2d836d | [
"Apache-2.0"
] | 198 | 2019-08-16T15:09:23.000Z | 2022-03-30T12:44:00.000Z | demo_scripts/charts/bar_chart_index_translator_demo.py | webclinic017/qf-lib | 96463876719bba8a76c8269cef76addf3a2d836d | [
"Apache-2.0"
] | 13 | 2021-01-07T10:15:19.000Z | 2022-03-29T13:01:47.000Z | demo_scripts/charts/bar_chart_index_translator_demo.py | webclinic017/qf-lib | 96463876719bba8a76c8269cef76addf3a2d836d | [
"Apache-2.0"
] | 29 | 2019-08-16T15:21:28.000Z | 2022-02-23T09:53:49.000Z | # Copyright 2016-present CERN European Organization for Nuclear Research
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import matplotlib.pyplot as plt
import pandas as pd
from qf_lib.common.enums.orientation import Orientation
from qf_lib.plotting.charts.bar_chart import BarChart
from qf_lib.plotting.decorators.data_element_decorator import DataElementDecorator
from qf_lib.plotting.helpers.index_translator import IndexTranslator
index = ['constant', 'b', 'c', 'd']
# index = [0, 4, 5, 6]
labels_to_locations_dict = {
'constant': 0,
'b': 4,
'c': 5,
'd': 6
}
colors = ['orange'] + ['forestgreen'] * 3
if __name__ == '__main__':
main()
| 38.196429 | 120 | 0.71482 |
183c49112552415248f084e0c358b6ea11192708 | 2,771 | py | Python | tests/request/test_parameter_invalid.py | Colin-b/pyxelrest | 5c8db40d1537d0f9c29acd928ec9519b6bb557ec | [
"MIT"
] | 7 | 2018-12-07T10:08:53.000Z | 2021-03-24T07:52:36.000Z | tests/request/test_parameter_invalid.py | Colin-b/pyxelrest | 5c8db40d1537d0f9c29acd928ec9519b6bb557ec | [
"MIT"
] | 76 | 2018-12-07T10:29:48.000Z | 2021-11-17T00:54:24.000Z | tests/request/test_parameter_invalid.py | Colin-b/pyxelrest | 5c8db40d1537d0f9c29acd928ec9519b6bb557ec | [
"MIT"
] | null | null | null | import pytest
from responses import RequestsMock
from tests import loader
| 30.119565 | 123 | 0.339589 |
183cd22d8adcd570cdd6c5eceb4ba00ee9152282 | 61 | py | Python | src/yookassa_payout/domain/response/__init__.py | yoomoney/yookassa-payout-sdk-python | f6953e97573bb4a4ee6f830f726a6fcfdf504e2a | [
"MIT"
] | 5 | 2021-03-11T14:38:25.000Z | 2021-08-13T10:41:50.000Z | src/yookassa_payout/domain/common/__init__.py | yoomoney/yookassa-payout-sdk-python | f6953e97573bb4a4ee6f830f726a6fcfdf504e2a | [
"MIT"
] | 2 | 2021-02-15T18:18:34.000Z | 2021-08-13T13:49:46.000Z | src/yookassa_payout/domain/request/__init__.py | yoomoney/yookassa-payout-sdk-python | f6953e97573bb4a4ee6f830f726a6fcfdf504e2a | [
"MIT"
] | 1 | 2022-01-29T08:47:02.000Z | 2022-01-29T08:47:02.000Z | """Package for YooKassa Payout API Python Client Library."""
| 30.5 | 60 | 0.754098 |
183d4dac8cfc4c8ac345fb08043e4248c6a0257b | 467 | py | Python | tests/integration/test_entry_point.py | jacksmith15/delfino | 38972e0e0e610c2694462306250a51537a04b1e9 | [
"MIT"
] | null | null | null | tests/integration/test_entry_point.py | jacksmith15/delfino | 38972e0e0e610c2694462306250a51537a04b1e9 | [
"MIT"
] | null | null | null | tests/integration/test_entry_point.py | jacksmith15/delfino | 38972e0e0e610c2694462306250a51537a04b1e9 | [
"MIT"
] | null | null | null | import toml
from delfino.constants import ENTRY_POINT, PYPROJECT_TOML_FILENAME
from delfino.models.pyproject_toml import PyprojectToml
from tests.constants import PROJECT_ROOT
| 33.357143 | 66 | 0.807281 |
183ecccecd1a87d9ecdaf239b0b8acab5f9e8ed2 | 6,888 | py | Python | gamble/gamble.py | lookma/simple-coin-gamble | 8f1684e62b62f28a176458606ed193c812d97bc7 | [
"MIT"
] | null | null | null | gamble/gamble.py | lookma/simple-coin-gamble | 8f1684e62b62f28a176458606ed193c812d97bc7 | [
"MIT"
] | null | null | null | gamble/gamble.py | lookma/simple-coin-gamble | 8f1684e62b62f28a176458606ed193c812d97bc7 | [
"MIT"
] | null | null | null | from random import randint
from typing import Callable, List, Optional
class RoundResults:
def __init__(self, players: List[Player]) -> None:
self.__total_amounts: List[float] = []
self.__number_of_winners: List[int] = []
self.__number_of_losers: List[int] = []
self.__number_of_total_losses: List[int] = []
self.__winner_percentages: List[float] = []
self.__min_amounts: List[float] = []
self.__max_amounts: List[float] = []
self.__avg_amounts: List[float] = []
self.add_round(players)
| 30.477876 | 99 | 0.620499 |
18412368254bcf43c33a2c706aa24bebe16b5a08 | 16 | py | Python | roomai/games/__init__.py | tonyxxq/RoomAI | 5f28e31e659dd7808127c3c3cc386e6892a93982 | [
"MIT"
] | 1 | 2018-11-29T01:57:18.000Z | 2018-11-29T01:57:18.000Z | roomai/models/texasholdem/__init__.py | tonyxxq/RoomAI | 5f28e31e659dd7808127c3c3cc386e6892a93982 | [
"MIT"
] | null | null | null | roomai/models/texasholdem/__init__.py | tonyxxq/RoomAI | 5f28e31e659dd7808127c3c3cc386e6892a93982 | [
"MIT"
] | null | null | null | #!/bin/python
| 4 | 13 | 0.5625 |
1842a50616fbef1cfe0cb3f52da633c9ff6caecd | 1,285 | py | Python | config.py | SevenMoGod/movenet.pytorch | 95ec8535245228aa4335243e68722810e50bcaf8 | [
"MIT"
] | 87 | 2021-11-13T11:05:55.000Z | 2022-03-30T11:00:45.000Z | config.py | Dyian-snow/movenet.pytorch | 95ec8535245228aa4335243e68722810e50bcaf8 | [
"MIT"
] | 18 | 2021-11-16T01:13:19.000Z | 2022-03-31T16:04:31.000Z | config.py | Dyian-snow/movenet.pytorch | 95ec8535245228aa4335243e68722810e50bcaf8 | [
"MIT"
] | 28 | 2021-11-13T11:22:05.000Z | 2022-03-29T10:02:09.000Z | """
@Fire
https://github.com/fire717
"""
cfg = {
##### Global Setting
'GPU_ID': '0',
"num_workers":8,
"random_seed":42,
"cfg_verbose":True,
"save_dir": "output/",
"num_classes": 17,
"width_mult":1.0,
"img_size": 192,
##### Train Setting
'img_path':"./data/croped/imgs",
'train_label_path':'./data/croped/train2017.json',
'val_label_path':'./data/croped/val2017.json',
'balance_data':False,
'log_interval':10,
'save_best_only': True,
'pin_memory': True,
##### Train Hyperparameters
'learning_rate':0.001,#1.25e-4
'batch_size':64,
'epochs':120,
'optimizer':'Adam', #Adam SGD
'scheduler':'MultiStepLR-70,100-0.1', #default SGDR-5-2 CVPR step-4-0.8 MultiStepLR
'weight_decay' : 5.e-4,#0.0001,
'class_weight': None,#[1., 1., 1., 1., 1., 1., 1., ]
'clip_gradient': 5,#1,
##### Test
'test_img_path':"./data/croped/imgs",
#"../data/eval/imgs",
#"../data/eval/imgs",
#"../data/all/imgs"
#"../data/true/mypc/crop_upper1"
#../data/coco/small_dataset/imgs
#"../data/testimg"
'exam_label_path':'../data/all/data_all_new.json',
'eval_img_path':'../data/eval/imgs',
'eval_label_path':'../data/eval/mypc.json',
}
| 21.416667 | 91 | 0.568872 |
18433079856714742d377305353f6075edaf8a57 | 11,038 | py | Python | uart.py | WRansohoff/nmigen_uart_test | d520d3b72698a901f63e3485aadca620f1444350 | [
"MIT"
] | null | null | null | uart.py | WRansohoff/nmigen_uart_test | d520d3b72698a901f63e3485aadca620f1444350 | [
"MIT"
] | null | null | null | uart.py | WRansohoff/nmigen_uart_test | d520d3b72698a901f63e3485aadca620f1444350 | [
"MIT"
] | null | null | null | from nmigen import *
from nmigen.back.pysim import *
# Function to calculate a clock divider which creates the
# desired output frequency from a given input frequency.
# Verifies that the divider is a positive integer, and that
# the resulting signal doesn't deviate more than expected.
# Basic work-in-progress UART modules.
# - TX / RX only, no flow control or USART.
# - Samples during the middle of the clock period, no oversampling.
# - 8-bit words, 1 stop bit, no parity bit.
# - Receives bits LSB-first only.
# - Configurable baud rate.
# UART receiver.
# UART transmitter
# Combined UART interface with both TX and RX modules.
#
# Simple UART testbench.
#
# Helper UART test method to simulate receiving a byte.
def uart_rx_byte( uart, val ):
# Simulate a "start bit".
yield uart.rx.eq( 0 )
# Wait one cycle.
for i in range( uart.clk_div ):
yield Tick()
# Simulate the byte with one cycle between each bit.
for i in range( 8 ):
if val & ( 1 << i ):
yield uart.rx.eq( 1 )
else:
yield uart.rx.eq( 0 )
for j in range( uart.clk_div ):
yield Tick()
# Simulate the "stop bit", and wait one cycle.
yield uart.rx.eq( 1 )
for i in range( uart.clk_div ):
yield Tick()
# Helper UART test method to simulate transmitting a buffered byte.
# UART 'receive' testbench.
# UART 'transmit' testbench.
# Create a UART module and run tests on it.
# (The baud rate is set to a high value to speed up the simulation.)
if __name__ == "__main__":
#uart_rx = UART_RX( 24000000, 9600 )
#uart_tx = UART_TX( 24000000, 9600 )
uart_rx = UART_RX( 24000000, 1000000 )
uart_tx = UART_TX( 24000000, 1000000 )
uart = UART( uart_rx, uart_tx )
# Run the UART tests.
with Simulator( uart, vcd_file = open( 'test.vcd', 'w' ) ) as sim:
# Run the UART test with a 24MHz clock.
sim.add_clock( 24e-6 )
sim.add_sync_process( proc_rx )
sim.add_sync_process( proc_tx )
sim.run()
| 34.820189 | 74 | 0.603642 |
184359b6c6261d67915a09440ec8b6d1a0cc0927 | 5,853 | py | Python | edk2basetools/FMMT/core/GuidTools.py | YuweiChen1110/edk2-basetools | cfd05c928492b7ffd1329634cfcb089db995eeca | [
"BSD-2-Clause-Patent"
] | null | null | null | edk2basetools/FMMT/core/GuidTools.py | YuweiChen1110/edk2-basetools | cfd05c928492b7ffd1329634cfcb089db995eeca | [
"BSD-2-Clause-Patent"
] | null | null | null | edk2basetools/FMMT/core/GuidTools.py | YuweiChen1110/edk2-basetools | cfd05c928492b7ffd1329634cfcb089db995eeca | [
"BSD-2-Clause-Patent"
] | null | null | null | ## @file
# This file is used to define the FMMT dependent external tool management class.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import glob
import logging
import os
import shutil
import sys
import tempfile
import uuid
from edk2basetools.FMMT.PI.Common import *
from edk2basetools.FMMT.utils.FmmtLogger import FmmtLogger as logger
import subprocess
guidtools = GUIDTools()
| 38.254902 | 160 | 0.571843 |
18444ea5a0cd3e04e2706a71502de539bb9fa0dc | 1,709 | py | Python | python/tests/test_tree_intersection.py | Yonatan1P/data-structures-and-algorithms | ddd647d52a3182ca01032bfdb72f94ea22a0e76b | [
"MIT"
] | 1 | 2020-12-16T22:38:12.000Z | 2020-12-16T22:38:12.000Z | python/tests/test_tree_intersection.py | Yonatan1P/data-structures-and-algorithms | ddd647d52a3182ca01032bfdb72f94ea22a0e76b | [
"MIT"
] | 1 | 2020-11-14T05:37:48.000Z | 2020-11-14T05:37:48.000Z | python/tests/test_tree_intersection.py | Yonatan1P/data-structures-and-algorithms | ddd647d52a3182ca01032bfdb72f94ea22a0e76b | [
"MIT"
] | null | null | null | from challenges.tree_intersection.tree_intersection import find_intersection
from challenges.tree.tree import BinarySearchTree
| 21.632911 | 76 | 0.627853 |
1847f0a48843e1e83cb2f45be72c476d66e2ca39 | 562 | py | Python | setup.py | rif/imgdup | fe59c6b4b8c06699d48f887bc7a90acea48aa8f2 | [
"MIT"
] | 14 | 2016-02-10T04:53:42.000Z | 2021-08-08T17:39:55.000Z | setup.py | rif/imgdup | fe59c6b4b8c06699d48f887bc7a90acea48aa8f2 | [
"MIT"
] | null | null | null | setup.py | rif/imgdup | fe59c6b4b8c06699d48f887bc7a90acea48aa8f2 | [
"MIT"
] | 2 | 2017-11-01T14:02:46.000Z | 2019-02-20T10:55:52.000Z | from setuptools import setup, find_packages
# Package metadata collected in one mapping, then handed to setuptools.
PACKAGE_METADATA = dict(
    name="imgdup",
    version="1.3",
    packages=find_packages(),
    scripts=['imgdup.py'],
    install_requires=['pillow>=2.8.1'],
    # metadata for upload to PyPI
    author="Radu Ioan Fericean",
    author_email="radu@fericean.ro",
    description="Visual similarity image finder and cleaner (image deduplication tool)",
    license="MIT",
    keywords="deduplication duplicate images image visual finder",
    url="https://github.com/rif/imgdup",  # project home page, if any
)

setup(**PACKAGE_METADATA)
| 31.222222 | 90 | 0.663701 |
184a025720245d69fec4505befed933cb56ea1a7 | 178 | py | Python | exercicio13.py | LuizHps18/infosatc-lp-avaliativo-01 | 0b891d74a98705182175a53e023b6cbbe8cc880a | [
"MIT"
] | null | null | null | exercicio13.py | LuizHps18/infosatc-lp-avaliativo-01 | 0b891d74a98705182175a53e023b6cbbe8cc880a | [
"MIT"
] | null | null | null | exercicio13.py | LuizHps18/infosatc-lp-avaliativo-01 | 0b891d74a98705182175a53e023b6cbbe8cc880a | [
"MIT"
] | null | null | null | k = float(input("Digite uma distância em quilometros: "))
# 1 mile ~= 1.61 km, so dividing the kilometre value converts it to miles.
m = k / 1.61
print("A distância digitada é de {} quilometros, essa distância convertida é {:.2f} milhas" .format(k,m)) | 35.6 | 105 | 0.696629 |
184a7377a4969ebcc47ccb33cd2b9fb82e77a11d | 660 | py | Python | rcs/wiki/urls.py | ShuffleBox/django-rcsfield | dd8b5b22635bcdae9825e00b65887bb51171e76f | [
"BSD-3-Clause"
] | null | null | null | rcs/wiki/urls.py | ShuffleBox/django-rcsfield | dd8b5b22635bcdae9825e00b65887bb51171e76f | [
"BSD-3-Clause"
] | null | null | null | rcs/wiki/urls.py | ShuffleBox/django-rcsfield | dd8b5b22635bcdae9825e00b65887bb51171e76f | [
"BSD-3-Clause"
] | null | null | null | from django.conf.urls.defaults import *
# Wiki URLconf. NOTE(review): `patterns()` with string view names belongs to
# the long-removed pre-1.8 Django API (`django.conf.urls.defaults`); migrating
# would require a list of `url()`/`path()` objects with callable views.
# Pattern order matters: the CamelCase page routes must stay before the
# literal routes that follow them.
urlpatterns = patterns('rcs.wiki.views',
    # "(?:[A-Z]+[a-z]+){2,}" matches WikiWord names: two or more Capitalised runs.
    url(r'^((?:[A-Z]+[a-z]+){2,})/$', 'page', {}, name="wiki_page"),
    url(r'^((?:[A-Z]+[a-z]+){2,})/edit/$', 'edit', {}, name="wiki_edit"),
    url(r'^((?:[A-Z]+[a-z]+){2,})/attachments/$', 'attachments', {}, name="wiki_attachments"),
    # Revisions are addressed by a hex id; diffs take two revision identifiers.
    url(r'^((?:[A-Z]+[a-z]+){2,})/rev/([a-f0-9]+)/$', 'revision', {}, name="wiki_revision"),
    url(r'^((?:[A-Z]+[a-z]+){2,})/diff/([\w]+)/([\w]+)/$', 'diff', {}, name="wiki_diff"),
    # Literal utility pages.
    url(r'^list/$', 'list', {}, name="wiki_list"),
    url(r'^recent/$', 'recent', {}, name="wiki_recent"),
    url(r'^$', 'index', {}, name="wiki_index"),
) | 55 | 94 | 0.487879 |
184a8a8a53eaf08a2a13054389bb04e1b3d15e28 | 3,359 | py | Python | sample 1/main.py | RezaFirouzii/multi-choice_correction_opencv | 31c777d6714216e0811947a1ceadc893c2c1d7c0 | [
"MIT"
] | 1 | 2022-03-04T15:55:20.000Z | 2022-03-04T15:55:20.000Z | sample 1/main.py | RezaFirouzii/multi-choice_correction_opencv | 31c777d6714216e0811947a1ceadc893c2c1d7c0 | [
"MIT"
] | null | null | null | sample 1/main.py | RezaFirouzii/multi-choice_correction_opencv | 31c777d6714216e0811947a1ceadc893c2c1d7c0 | [
"MIT"
] | null | null | null | import cv2 as cv
import numpy as np
import pandas as pd
import heapq
if __name__ == "__main__":
    # Load the scanned answer sheet. `cop` keeps an untouched colour copy for
    # drawing results; `img` is destructively preprocessed.
    img = cv.imread('sample1_2.jpg')
    cop = img.copy()
    img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
    # Inverted adaptive threshold: pencil marks become white foreground.
    img = cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 10)
    # Close small horizontal gaps so each answer row merges into one blob.
    kernel = cv.getStructuringElement(cv.MORPH_RECT, (4, 1))
    img = cv.morphologyEx(img, cv.MORPH_CLOSE, kernel)
    contours, hierarchy = cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)
    # Keep only contours inside the empirically tuned area band (answer rows).
    contours = list(filter(lambda x: 300 < cv.contourArea(x) < 450, contours))
    # sort_contours() is defined elsewhere in this file (not visible here);
    # presumably it orders the rows top-to-bottom -- TODO confirm.
    contours = sort_contours(contours)
    answers = []
    for i, contour in enumerate(contours):
        # Crop the row from the untouched copy and re-threshold it.
        x, y, w, h = cv.boundingRect(contour)
        roi = cv.cvtColor(cop[y: y+h, x: x + w], cv.COLOR_BGR2GRAY)
        roi_cop = roi.copy()
        roi = cv.adaptiveThreshold(roi, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 10)
        kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (1, 3))
        roi = cv.morphologyEx(roi, cv.MORPH_CLOSE, kernel)
        cnts, hierarchy = cv.findContours(roi, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_NONE)
        cnts = list(filter(cv.contourArea, cnts))
        # sort_contours_horizontally() is also defined elsewhere in this file;
        # presumably it orders the choice boxes left-to-right -- TODO confirm.
        cnts = sort_contours_horizontally(cnts)
        tests = list(map(cv.boundingRect, cnts))
        # coord[0] holds the row origin; subsequent entries are choice rects.
        coord = [(x, y)]
        for j, test in enumerate(tests):  # each entry is a bounding rect (x, y, w, h)
            coord.append(test)
            # NOTE: this rebinds the outer x/y/w/h; the row origin is
            # recovered later from coord[0].
            x, y, w, h = test
            area = w * h
            # Fraction of the choice box that is marked (non-zero pixels).
            filled_area = np.count_nonzero(roi[y: y+h, x: x+w])
            tests[j] = filled_area / area
        if is_valid_test(tests):
            # Pick the most-filled box; choices are reported 1-based.
            choice = tests.index(max(tests)) + 1
            answers.append(choice)
            X, Y = coord[0]
            x, y, w, h = coord[choice]
            pt1 = (X + x, Y + y)
            pt2 = (X + x + w, Y + y + h)
            # Outline the detected choice in green on the output image.
            cv.rectangle(cop, pt1, pt2, (0, 255, 0), 2)
        else:
            # -1 marks an unanswerable/invalid row.
            answers.append(-1)
    for i in range(len(answers)):
        print(i + 1, ":", answers[i])
    # Export the question/answer table to Excel; `data` is rebound from the
    # dict to the DataFrame built from it.
    data = {
        "Q": [i for i in range(1, len(answers) + 1)],
        "A": answers
    }
    data = pd.DataFrame(data)
    data.to_excel('./sample1.xlsx', 'Answer Sheet 1')
    cv.imwrite('output.jpg', cop)
    cv.imshow('Detected Choices', cop)
cv.waitKey() | 28.709402 | 101 | 0.567133 |
184b18ea17717fde23e6a6b62fed9b2b61f17cb3 | 704 | py | Python | a-practical-introduction-to-python-programming-brian-heinold/chapter-08/exercise-07.py | elarabyelaidy19/awesome-reading | 5c01a4272ba58e4f7ea665aab14b4c0aa252ea89 | [
"MIT"
] | 31 | 2021-11-02T19:51:13.000Z | 2022-02-17T10:55:26.000Z | a-practical-introduction-to-python-programming-brian-heinold/chapter-08/exercise-07.py | MosTafaHoSamm/awesome-reading | 469408fefc049d78ed53a2b2331b5d5cecdc6c06 | [
"MIT"
] | 1 | 2022-01-18T12:27:54.000Z | 2022-01-18T12:27:54.000Z | a-practical-introduction-to-python-programming-brian-heinold/chapter-08/exercise-07.py | MosTafaHoSamm/awesome-reading | 469408fefc049d78ed53a2b2331b5d5cecdc6c06 | [
"MIT"
] | 3 | 2022-01-11T05:01:34.000Z | 2022-02-05T14:36:29.000Z | # 7. Write a program that estimates the average number of drawings it takes before the users
# numbers are picked in a lottery that consists of correctly picking six different numbers that
# are between 1 and 10. To do this, run a loop 1000 times that randomly generates a set of
# user numbers and simulates drawings until the users numbers are drawn. Find the average
# number of drawings needed over the 1000 times the loop runs.
import random
def estimate_average_drawings(trials=1000):
    """Estimate how many drawings it takes, on average, before the user's
    number matches the lottery number.

    Each trial draws one user number and one lottery number, both uniform on
    1..10, and counts a win when they coincide. The estimate is
    trials / wins, i.e. the reciprocal of the observed win rate.

    NOTE(review): the exercise statement above asks for six different
    numbers per draw; this implementation (like the original) simplifies
    that to a single number per draw.

    :param trials: number of simulated draws to run (default 1000).
    :return: the rounded estimate, or None when no trial produced a win
        (the original crashed with ZeroDivisionError in that case).
    """
    lottery_numbers = list(range(1, 11))
    wins = 0
    for _ in range(trials):
        user = random.randint(1, 10)
        lott = random.choice(lottery_numbers)
        if lott == user:
            wins += 1
    if wins == 0:
        return None
    return round(trials / wins, 4)


estimate = estimate_average_drawings()
if estimate is None:
    print('Average number of drawings: no matches observed')
else:
    print('Average number of drawings:', estimate)
| 37.052632 | 95 | 0.728693 |
184bf76e800fcea4dae223c4ac96db64613fb1ae | 709 | py | Python | humfrey/update/utils.py | ox-it/humfrey | c92e46a24a9bf28aa9638a612f166d209315e76b | [
"BSD-3-Clause"
] | 6 | 2015-01-09T15:53:07.000Z | 2020-02-13T14:00:53.000Z | humfrey/update/utils.py | ox-it/humfrey | c92e46a24a9bf28aa9638a612f166d209315e76b | [
"BSD-3-Clause"
] | null | null | null | humfrey/update/utils.py | ox-it/humfrey | c92e46a24a9bf28aa9638a612f166d209315e76b | [
"BSD-3-Clause"
] | 1 | 2017-05-12T20:46:15.000Z | 2017-05-12T20:46:15.000Z | from django.conf import settings
from django.utils.importlib import import_module
from humfrey.update.transform.base import Transform
| 29.541667 | 67 | 0.723554 |
184dce967a4de0cb71723aecd6ec63f6783befa6 | 2,448 | py | Python | flask/model/device_model.py | Dev-Jahn/cms | 84ea115bdb865daff83d069502f6f0dd105fc4f0 | [
"RSA-MD"
] | null | null | null | flask/model/device_model.py | Dev-Jahn/cms | 84ea115bdb865daff83d069502f6f0dd105fc4f0 | [
"RSA-MD"
] | 9 | 2021-01-05T07:48:28.000Z | 2021-05-14T06:38:27.000Z | flask/model/device_model.py | Dev-Jahn/cms | 84ea115bdb865daff83d069502f6f0dd105fc4f0 | [
"RSA-MD"
] | 4 | 2021-01-05T06:46:09.000Z | 2021-05-06T01:44:28.000Z | from .db_base import db, env
| 48 | 112 | 0.624183 |
184e8888d3aeff144a6fa7390d4e574c4fcd9c17 | 18,542 | py | Python | pytests/tuqquery/tuq_tokens.py | ramalingam-cb/testrunner | 81cea7a5a493cf0c67fca7f97c667cd3c6ad2142 | [
"Apache-2.0"
] | null | null | null | pytests/tuqquery/tuq_tokens.py | ramalingam-cb/testrunner | 81cea7a5a493cf0c67fca7f97c667cd3c6ad2142 | [
"Apache-2.0"
] | null | null | null | pytests/tuqquery/tuq_tokens.py | ramalingam-cb/testrunner | 81cea7a5a493cf0c67fca7f97c667cd3c6ad2142 | [
"Apache-2.0"
] | null | null | null | from lib.remote.remote_util import RemoteMachineShellConnection
from pytests.tuqquery.tuq import QueryTests
| 65.059649 | 367 | 0.642595 |
184fa55d99eb6ba4a36992ee508941f13328275f | 1,074 | py | Python | src/python/autotransform/input/empty.py | nathro/AutoTransform | 04ef5458bc8401121e33370ceda6ef638e535e9a | [
"MIT"
] | 11 | 2022-01-02T00:50:24.000Z | 2022-02-22T00:30:09.000Z | src/python/autotransform/input/empty.py | nathro/AutoTransform | 04ef5458bc8401121e33370ceda6ef638e535e9a | [
"MIT"
] | 6 | 2022-01-06T01:45:34.000Z | 2022-02-03T21:49:52.000Z | src/python/autotransform/input/empty.py | nathro/AutoTransform | 04ef5458bc8401121e33370ceda6ef638e535e9a | [
"MIT"
] | null | null | null | # AutoTransform
# Large scale, component based code modification library
#
# Licensed under the MIT License <http://opensource.org/licenses/MIT>
# SPDX-License-Identifier: MIT
# Copyright (c) 2022-present Nathan Rockenbach <http://github.com/nathro>
# @black_format
"""The implementation for the DirectoryInput."""
from __future__ import annotations
from typing import ClassVar, Sequence
from autotransform.input.base import Input, InputName
from autotransform.item.base import Item
| 28.263158 | 83 | 0.712291 |
1851692534eb7b89ed5ce5f0fcea30358bb3c381 | 27,790 | py | Python | snowplow_tracker/tracker.py | jackwilliamson/snowplow-python-tracker | b4ee5192bde044f406182bef848b51bd21646f12 | [
"Apache-2.0"
] | null | null | null | snowplow_tracker/tracker.py | jackwilliamson/snowplow-python-tracker | b4ee5192bde044f406182bef848b51bd21646f12 | [
"Apache-2.0"
] | 1 | 2019-01-08T17:09:11.000Z | 2019-01-08T17:09:11.000Z | snowplow_tracker/tracker.py | jackwilliamson/snowplow-python-tracker | b4ee5192bde044f406182bef848b51bd21646f12 | [
"Apache-2.0"
] | 1 | 2017-05-30T20:49:24.000Z | 2017-05-30T20:49:24.000Z | """
tracker.py
Copyright (c) 2013-2014 Snowplow Analytics Ltd. All rights reserved.
This program is licensed to you under the Apache License Version 2.0,
and you may not use this file except in compliance with the Apache License
Version 2.0. You may obtain a copy of the Apache License Version 2.0 at
http://www.apache.org/licenses/LICENSE-2.0.
Unless required by applicable law or agreed to in writing,
software distributed under the Apache License Version 2.0 is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the Apache License Version 2.0 for the specific
language governing permissions and limitations there under.
Authors: Anuj More, Alex Dean, Fred Blundun
Copyright: Copyright (c) 2013-2014 Snowplow Analytics Ltd
License: Apache License Version 2.0
"""
import time
import uuid
import six
from contracts import contract, new_contract
from snowplow_tracker import payload, _version, SelfDescribingJson
from snowplow_tracker import subject as _subject
from snowplow_tracker.timestamp import Timestamp, TrueTimestamp, DeviceTimestamp
"""
Constants & config
"""
# Tracker version string reported with every event payload.
VERSION = "py-%s" % _version.__version__
# Whether self-describing JSON payloads are base64-encoded by default.
DEFAULT_ENCODE_BASE64 = True
# Iglu schema URI components and the derived schema URIs for custom
# contexts and unstructured (self-describing) events.
BASE_SCHEMA_PATH = "iglu:com.snowplowanalytics.snowplow"
SCHEMA_TAG = "jsonschema"
CONTEXT_SCHEMA = "%s/contexts/%s/1-0-1" % (BASE_SCHEMA_PATH, SCHEMA_TAG)
UNSTRUCT_EVENT_SCHEMA = "%s/unstruct_event/%s/1-0-0" % (BASE_SCHEMA_PATH, SCHEMA_TAG)
# HTML form element tag names and <input> type values, presumably used by
# the form-tracking events defined later in this file -- TODO confirm.
FORM_NODE_NAMES = ("INPUT", "TEXTAREA", "SELECT")
FORM_TYPES = (
    "button", "checkbox", "color", "date", "datetime",
    "datetime-local", "email", "file", "hidden", "image", "month",
    "number", "password", "radio", "range", "reset", "search",
    "submit", "tel", "text", "time", "url", "week"
)
"""
Tracker class
"""
| 40.688141 | 147 | 0.556747 |
185308de027ac2681bc3f8d490477023a29fcb44 | 6,597 | py | Python | src/oic/oauth2/util.py | alanbuxey/pyoidc | 5f2d9ac468aaad599260f70481062c9d31273da2 | [
"Apache-2.0"
] | 290 | 2015-01-02T20:14:53.000Z | 2022-01-24T11:39:10.000Z | src/oic/oauth2/util.py | peppelinux/pyoidc | 2e751ed84039259a2b138148eae204c877518950 | [
"Apache-2.0"
] | 103 | 2015-02-03T13:20:59.000Z | 2017-09-19T20:01:08.000Z | src/oic/oauth2/util.py | peppelinux/pyoidc | 2e751ed84039259a2b138148eae204c877518950 | [
"Apache-2.0"
] | 128 | 2015-01-02T20:14:19.000Z | 2021-11-07T14:28:03.000Z | import logging
from http import cookiejar as http_cookiejar
from http.cookiejar import http2time # type: ignore
from typing import Any # noqa
from typing import Dict # noqa
from urllib.parse import parse_qs
from urllib.parse import urlsplit
from urllib.parse import urlunsplit
from oic.exception import UnSupported
from oic.oauth2.exception import TimeFormatError
from oic.utils.sanitize import sanitize
logger = logging.getLogger(__name__)
__author__ = "roland"
# MIME types accepted by get_or_post() for serializing request bodies.
URL_ENCODED = "application/x-www-form-urlencoded"
JSON_ENCODED = "application/json"
DEFAULT_POST_CONTENT_TYPE = URL_ENCODED
# Maps a cookie attribute to the matching "*_specified" flag expected by
# http.cookiejar.Cookie; set_cookie() sets the flag True when the
# corresponding attribute has a value.
PAIRS = {
    "port": "port_specified",
    "domain": "domain_specified",
    "path": "path_specified",
}
# Default keyword-argument template for constructing http.cookiejar.Cookie
# instances; set_cookie() copies this dict and fills it in per received
# cookie morsel.
ATTRS = {
    "version": None,
    "name": "",
    "value": None,
    "port": None,
    "port_specified": False,
    "domain": "",
    "domain_specified": False,
    "domain_initial_dot": False,
    "path": "",
    "path_specified": False,
    "secure": False,
    "expires": None,
    "discard": True,
    "comment": None,
    "comment_url": None,
    "rest": "",
    "rfc2109": True,
}  # type: Dict[str, Any]
def get_or_post(
    uri, method, req, content_type=DEFAULT_POST_CONTENT_TYPE, accept=None, **kwargs
):
    """
    Construct the pieces of an HTTP request for the given message.

    :param uri: Target URI.
    :param method: HTTP method; one of GET, DELETE, POST or PUT.
    :param req: Request message; must offer keys(), copy(), update() and
        to_urlencoded()/to_json() (an oic Message-style instance).
    :param content_type: Serialization used for the body on POST/PUT.
    :param accept: Optional value for the Accept header.
    :param kwargs: Extra keyword arguments passed through; the 'headers'
        dict inside them is created or extended in place.
    :return: Tuple of (path, body, kwargs).
    :raises UnSupported: On an unknown HTTP method or content type.
    """
    if method in ["GET", "DELETE"]:
        # Parameters travel in the query string; these requests have no body.
        if req.keys():
            _req = req.copy()
            comp = urlsplit(str(uri))
            if comp.query:
                # Merge query parameters already present on the URI.
                _req.update(parse_qs(comp.query))
            _query = str(_req.to_urlencoded())
            path = urlunsplit(
                (comp.scheme, comp.netloc, comp.path, _query, comp.fragment)
            )
        else:
            path = uri
        body = None
    elif method in ["POST", "PUT"]:
        path = uri
        if content_type == URL_ENCODED:
            body = req.to_urlencoded()
        elif content_type == JSON_ENCODED:
            body = req.to_json()
        else:
            raise UnSupported("Unsupported content type: '%s'" % content_type)

        header_ext = {"Content-Type": content_type}
        if accept:
            # Bug fix: this used to rebind header_ext to {"Accept": accept},
            # silently dropping the Content-Type header whenever an Accept
            # value was supplied. Both headers must be sent with the body.
            header_ext["Accept"] = accept
        if "headers" in kwargs:
            kwargs["headers"].update(header_ext)
        else:
            kwargs["headers"] = header_ext
    else:
        raise UnSupported("Unsupported HTTP method: '%s'" % method)

    return path, body, kwargs
def set_cookie(cookiejar, kaka):
    """
    Place each cookie from a Set-Cookie header (an http.cookies container of
    morsels) into the cookie jar as an http.cookiejar.Cookie.

    A max-age of 0 deletes the cookie instead of storing it. Cookies whose
    time attributes cannot be parsed are skipped.

    :param cookiejar: An http.cookiejar.CookieJar (or compatible) instance.
    :param kaka: Cookie container mapping cookie names to morsels.
    """
    # default rfc2109=False
    # max-age, httponly
    for cookie_name, morsel in kaka.items():
        # Start from the module-level ATTRS template of Cookie kwargs.
        std_attr = ATTRS.copy()
        std_attr["name"] = cookie_name
        _tmp = morsel.coded_value
        # Strip a surrounding pair of double quotes from the value, if any.
        if _tmp.startswith('"') and _tmp.endswith('"'):
            std_attr["value"] = _tmp[1:-1]
        else:
            std_attr["value"] = _tmp
        std_attr["version"] = 0
        attr = ""
        # copy attributes that have values
        try:
            for attr in morsel.keys():
                if attr in ATTRS:
                    if morsel[attr]:
                        if attr == "expires":
                            # Parse the HTTP date into an epoch timestamp.
                            std_attr[attr] = http2time(morsel[attr])
                        else:
                            std_attr[attr] = morsel[attr]
                elif attr == "max-age":
                    # max-age has no Cookie slot; store it as an expiry time.
                    # NOTE(review): http2time() on a relative max-age value is
                    # questionable (it expects an absolute date) -- confirm.
                    if morsel[attr]:
                        std_attr["expires"] = http2time(morsel[attr])
        except TimeFormatError:
            # Ignore cookie: an unparsable time attribute invalidates it.
            logger.info(
                "Time format error on %s parameter in received cookie"
                % (sanitize(attr),)
            )
            continue
        # Set the *_specified companion flags for attributes that have values.
        for att, spec in PAIRS.items():
            if std_attr[att]:
                std_attr[spec] = True
        if std_attr["domain"] and std_attr["domain"].startswith("."):
            std_attr["domain_initial_dot"] = True
        if morsel["max-age"] == 0:
            # max-age == 0 means "delete this cookie now".
            try:
                cookiejar.clear(
                    domain=std_attr["domain"],
                    path=std_attr["path"],
                    name=std_attr["name"],
                )
            except ValueError:
                pass
        else:
            # Fix for Microsoft cookie error: keep only the first item of a
            # comma-separated version value.
            if "version" in std_attr:
                try:
                    std_attr["version"] = std_attr["version"].split(",")[0]
                except (TypeError, AttributeError):
                    pass
            new_cookie = http_cookiejar.Cookie(**std_attr)  # type: ignore
            cookiejar.set_cookie(new_cookie)
| 30.123288 | 97 | 0.555404 |
1853550d01976a79c3f2f5631cb3c4c7ae9f5fcf | 5,890 | py | Python | main.py | aditya02acharya/TypingAgent | 34c5230be72c3878942457a6e44b7078fbd08ea0 | [
"MIT"
] | 5 | 2020-09-07T16:40:34.000Z | 2022-01-18T15:50:57.000Z | main.py | aditya02acharya/TypingAgent | 34c5230be72c3878942457a6e44b7078fbd08ea0 | [
"MIT"
] | 1 | 2020-10-06T13:14:46.000Z | 2020-10-06T13:14:46.000Z | main.py | aditya02acharya/TypingAgent | 34c5230be72c3878942457a6e44b7078fbd08ea0 | [
"MIT"
] | null | null | null | import sys
import yaml
import numpy
import random
import logging
import argparse
from os import path, makedirs
from datetime import datetime
from src.finger_proxy.proxy_agent import ProxyAgent
from src.utilities.logging_config_manager import setup_logging
from src.display.touchscreendevice import TouchScreenDevice
from src.vision.vision_agent import VisionAgent
from src.finger.finger_agent import FingerAgent
from src.proofread.proofread_agent import ProofreadAgent
from src.supervisor.supervisor_agent import SupervisorAgent
# Build the command-line interface for the typing-agent trainer/evaluator.
parser = argparse.ArgumentParser()
# General parameters
parser.add_argument("--all", action="store_true", default=False,
                    help="train/test all the agents [vision, finger, proofread, supervisor]")
parser.add_argument("--vision", action="store_true", default=False, help="train/test only the vision agent")
parser.add_argument("--finger", action="store_true", default=False, help="train/test only the finger agent")
parser.add_argument("--proofread", action="store_true", default=False, help="train/test only the proofread agent")
parser.add_argument("--supervisor", action="store_true", default=False, help="train/test only the supervisor agent")
parser.add_argument("--train", action="store_true", default=False, help="run model in train mode")
parser.add_argument("--config", required=True, help="name of the configuration file (REQUIRED)")
# NOTE(review): this default is evaluated once at import time, so repeated
# parses within one process would share the same "current time" seed.
parser.add_argument("--seed", type=int, default=datetime.now().microsecond, help="random seed default: current time")
parser.add_argument("--type", default=">", help="sentence to type for the agent.")
parser.add_argument("--batch", action="store_true", default=False, help="evaluate a batch of sentences.")
parser.add_argument("--users", type=int, default=1, help="number of users to simulate")
parser.add_argument("--twofinger", action="store_true", default=False, help="enable typing with two finger.")
parser.add_argument("--verbose", action="store_true", default=False, help="print tqdm step in new line.")
# get user command line arguments.
args = parser.parse_args()
# Initialise random seed.
# Seed both numpy and the stdlib RNG so runs are reproducible for a given --seed.
numpy.random.seed(args.seed)
random.seed(args.seed)
# Setup Logger.
if not path.isdir("logs"):
    # if logs folder doesn't exist create one.
    makedirs("logs")
setup_logging(default_path=path.join("configs", "logging.yml"))
logger = logging.getLogger(__name__)
logger.info("logger is set.")
# load app config.
# The app config names the training/testing config files used below.
if path.exists(path.join("configs", args.config)):
    with open(path.join("configs", args.config), 'r') as file:
        config_file = yaml.load(file, Loader=yaml.FullLoader)
        logger.info("App Configurations loaded.")
else:
    logger.error("File doesn't exist: Failed to load %s file under configs folder." % str(args.config))
    sys.exit(0)
if args.train:
    # Training mode: load the training configuration referenced by the app config.
    if path.exists(path.join("configs", config_file['training_config'])):
        with open(path.join("configs", config_file['training_config']), 'r') as file:
            train_config = yaml.load(file, Loader=yaml.FullLoader)
            logger.info("Training Configurations loaded.")
    else:
        logger.error("File doesn't exist: Failed to load %s file under configs folder." %
                     config_file['training_config'])
        sys.exit(0)

    # Train each requested agent (or all of them with --all).
    if args.vision or args.all:
        logger.info("Initiating Vision Agent Training.")
        vision_agent = VisionAgent(config_file['device_config'], train_config['vision'], args.verbose)
        vision_agent.train(vision_agent.episodes)

    if args.finger or args.all:
        logger.info("Initiating Finger Agent Training.")
        finger_agent = FingerAgent(config_file['device_config'], train_config['finger'], 0, True, args.verbose)
        finger_agent.train(finger_agent.episodes)

    if args.proofread or args.all:
        logger.info("Initiating Proofread Agent Training.")
        proofread_agent = ProofreadAgent(config_file['device_config'], train_config['proofread'], args.verbose)
        proofread_agent.train(proofread_agent.episodes)

    if args.supervisor or args.all:
        logger.info("Initiating Supervisor Agent Training.")
        # The duplicated if/else over --twofinger collapsed: the flag maps
        # directly onto the constructor's two-finger argument.
        supervisor_agent = SupervisorAgent(config_file['device_config'], train_config, True,
                                           args.twofinger, args.verbose)
        # (Removed a leftover debug print of type(supervisor_agent.episodes).)
        supervisor_agent.train(supervisor_agent.episodes)
else:
    # Evaluation mode: load the testing configuration instead.
    if path.exists(path.join("configs", config_file['testing_config'])):
        with open(path.join("configs", config_file['testing_config']), 'r') as file:
            test_config = yaml.load(file, Loader=yaml.FullLoader)
            # Bug fix: this message previously said "Training Configurations
            # loaded." even though the testing config was being loaded.
            logger.info("Testing Configurations loaded.")
    else:
        logger.error("File doesn't exist: Failed to load %s file under configs folder." %
                     config_file['testing_config'])
        sys.exit(0)

    if args.vision or args.all:
        logger.info("Initiating Vision Agent Evaluation.")
        vision_agent = VisionAgent(config_file['device_config'], test_config['vision'])
        vision_agent.evaluate(args.type)

    if args.finger or args.all:
        logger.info("Initiating Finger Agent Evaluation.")
        finger_agent = FingerAgent(config_file['device_config'], test_config['finger'], 0, False)
        finger_agent.evaluate(args.type, sat_desired=test_config['finger']['typing_accuracy'])

    # NOTE(review): there is deliberately no proofread-evaluation branch here;
    # confirm --proofread is meant to be a no-op in evaluation mode.
    if args.supervisor or args.all:
        logger.info("Initiating Supervisor Agent Evaluation.")
        supervisor_agent = SupervisorAgent(config_file['device_config'], test_config, False,
                                           args.twofinger, args.verbose)
        supervisor_agent.evaluate(args.type, args.batch, args.users)
| 47.5 | 117 | 0.720204 |
185491bbcdadc1f460e3cbb3e31ce90f8c3eb65e | 1,854 | py | Python | examples/chain.py | yeeliu01/pyrfa | 536c94f1bcff232415495cbe04b8897ad91e0c76 | [
"MIT"
] | 33 | 2016-11-29T08:18:28.000Z | 2021-11-11T15:40:19.000Z | examples/chain.py | yeeliu01/pyrfa | 536c94f1bcff232415495cbe04b8897ad91e0c76 | [
"MIT"
] | 41 | 2016-09-20T10:15:11.000Z | 2021-10-20T01:14:22.000Z | examples/chain.py | devcartel/thomsonreuters | 536c94f1bcff232415495cbe04b8897ad91e0c76 | [
"MIT"
] | 9 | 2016-10-19T00:09:22.000Z | 2020-08-03T03:02:15.000Z | #!/usr/bin/python
#
# Decoding a legacy chain ric
#
import pyrfa
# Create the RFA session and log in to the market data infrastructure.
p = pyrfa.Pyrfa()
p.createConfigDb("./pyrfa.cfg")
p.acquireSession("Session1")
p.createOMMConsumer()
p.login()
p.directoryRequest()
p.dictionaryRequest()
# Snapshot mode: request data once instead of subscribing to streaming updates.
p.setInteractionType("snapshot")
# Field names that can hold chain links: legacy LINK_1..14, long-form
# LONGLINK1..14 and BR_LINK1..14 variants.
# NOTE(review): `fids` is not used by the code visible here; presumably it is
# read by expandChainRIC(), which is defined elsewhere in this file -- TODO confirm.
fids = ['LINK_1', 'LINK_2', 'LINK_3', 'LINK_4', 'LINK_5', 'LINK_6', 'LINK_7', 'LINK_8',
        'LINK_9', 'LINK_10', 'LINK_11', 'LINK_12', 'LINK_13', 'LINK_14',
        'LONGLINK1', 'LONGLINK2', 'LONGLINK3', 'LONGLINK4', 'LONGLINK5', 'LONGLINK6', 'LONGLINK7',
        'LONGLINK8', 'LONGLINK9', 'LONGLINK10', 'LONGLINK11', 'LONGLINK12', 'LONGLINK13', 'LONGLINK14',
        'BR_LINK1', 'BR_LINK2', 'BR_LINK3', 'BR_LINK4', 'BR_LINK5', 'BR_LINK6', 'BR_LINK7', 'BR_LINK8',
        'BR_LINK9', 'BR_LINK10', 'BR_LINK11', 'BR_LINK12', 'BR_LINK13', 'BR_LINK14']
# Expand the FTSE constituent chain RIC into its member RICs and show them.
rics = expandChainRIC("0#.FTSE")
print(rics)
| 34.981132 | 103 | 0.635922 |
185637d8cc3eb01cc46a55e5e9f5b84f8e7f9e79 | 1,746 | py | Python | hard-gists/749857/snippet.py | jjhenkel/dockerizeme | eaa4fe5366f6b9adf74399eab01c712cacaeb279 | [
"Apache-2.0"
] | 21 | 2019-07-08T08:26:45.000Z | 2022-01-24T23:53:25.000Z | hard-gists/749857/snippet.py | jjhenkel/dockerizeme | eaa4fe5366f6b9adf74399eab01c712cacaeb279 | [
"Apache-2.0"
] | 5 | 2019-06-15T14:47:47.000Z | 2022-02-26T05:02:56.000Z | hard-gists/749857/snippet.py | jjhenkel/dockerizeme | eaa4fe5366f6b9adf74399eab01c712cacaeb279 | [
"Apache-2.0"
] | 17 | 2019-05-16T03:50:34.000Z | 2021-01-14T14:35:12.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
# launchctl unload /System/Library/LaunchDaemons/com.apple.syslogd.plist
# launchctl load /System/Library/LaunchDaemons/com.apple.syslogd.plist
from twisted.internet import reactor, stdio, defer
from twisted.internet.protocol import Protocol, Factory
from twisted.protocols.basic import LineReceiver
import time, re, math, json
# Example syslog line (priority 22 = facility 2 "mail" * 8 + severity 6 "info"):
#<22>Nov 1 00:12:04 gleicon-vm1 postfix/smtpd[4880]: connect from localhost[127.0.0.1]
# Severity names indexed by syslog severity code 0-7.
severity = ['emerg', 'alert', 'crit', 'err', 'warn', 'notice', 'info', 'debug', ]
# Facility names indexed by syslog facility code 0-23.
facility = ['kern', 'user', 'mail', 'daemon', 'auth', 'syslog', 'lpr', 'news',
    'uucp', 'cron', 'authpriv', 'ftp', 'ntp', 'audit', 'alert', 'at', 'local0',
    'local1', 'local2', 'local3', 'local4', 'local5', 'local6', 'local7',]
# Splits "<PRI>message": group 1 is the priority value, group 2 the rest.
fs_match = re.compile("<(.+)>(.*)", re.I)
def main():
    """Listen for syslog lines on TCP port 25000 and run the Twisted reactor."""
    # SyslogdFactory is defined elsewhere in this file (not visible here);
    # presumably it builds protocol instances that parse lines with fs_match.
    factory = SyslogdFactory()
    # Third argument is the listen backlog (10 pending connections).
    reactor.listenTCP(25000, factory, 10)
    reactor.run()

if __name__ == '__main__':
    main()
| 31.178571 | 87 | 0.605956 |
185646f6d47cb9be2bd7e09abafec85a18497f07 | 11,371 | py | Python | research/Issue2/utils.py | johnklee/ff_crawler | 53b056bd94ccf55388d12c7f70460d280964f45f | [
"MIT"
] | null | null | null | research/Issue2/utils.py | johnklee/ff_crawler | 53b056bd94ccf55388d12c7f70460d280964f45f | [
"MIT"
] | 4 | 2021-04-09T02:05:42.000Z | 2021-07-04T07:42:15.000Z | research/Issue2/utils.py | johnklee/ff_crawler | 53b056bd94ccf55388d12c7f70460d280964f45f | [
"MIT"
] | null | null | null | import requests as reqlib
import os
import re
import random
import time
import pickle
import abc
import hashlib
import threading
from urllib.parse import urlparse
from purifier import TEAgent
from purifier.logb import getLogger
from enum import IntEnum
from typing import Tuple, List, Dict, Optional
| 38.157718 | 141 | 0.540322 |
1856d318d47ce3e4786a9a38b7674ba6814094a5 | 1,554 | py | Python | Python-CPU/monitor.py | cwd0204/Python | 35413d0cfab0d659d710fd3f752dacef00f4a713 | [
"MIT"
] | 1 | 2022-01-05T05:49:59.000Z | 2022-01-05T05:49:59.000Z | Python-CPU/monitor.py | cwd0204/Python | 35413d0cfab0d659d710fd3f752dacef00f4a713 | [
"MIT"
] | null | null | null | Python-CPU/monitor.py | cwd0204/Python | 35413d0cfab0d659d710fd3f752dacef00f4a713 | [
"MIT"
] | null | null | null | # CPU
# Charles
# Charles
import matplotlib.pyplot as plt
import matplotlib.font_manager as font_manager
import psutil as p
# Number of samples kept/shown on the rolling x-axis.
POINTS = 300
fig, ax = plt.subplots()
# Percentages (0-100) on the y-axis; sample index on the x-axis.
ax.set_ylim([0, 100])
ax.set_xlim([0, POINTS])
ax.set_autoscale_on(False)
ax.set_xticks([])
ax.set_yticks(range(0, 101, 10))
ax.grid(True)
# Rolling history of user-mode CPU percentage.
user = [None] * POINTS
# Rolling history of system-mode CPU percentage.
# NOTE(review): this assignment shadows the `sys` module name at module level.
sys = [None] * POINTS
# Rolling history of idle CPU percentage.
idle = [None] * POINTS
l_user, = ax.plot(range(POINTS), user, label='User %')
l_sys, = ax.plot(range(POINTS), sys, label='Sys %')
l_idle, = ax.plot(range(POINTS), idle, label='Idle %')
ax.legend(loc='upper center', ncol=4, prop=font_manager.FontProperties(size=10))
# Saved axes background, restored later for fast blitting-style redraws.
bg = fig.canvas.copy_from_bbox(ax.bbox)
# cpu_usage() and start_monitor() are defined elsewhere in this file (not
# visible here); presumably cpu_usage() samples psutil counters -- TODO confirm.
before = cpu_usage()
if __name__ == '__main__':
start_monitor() | 20.72 | 80 | 0.689189 |
185a8ab47b8d277c20020394a96aac3365fae3e8 | 8,128 | py | Python | leaderboards/api_views.py | bfrederix/django-improv | 23ae4b2cc3b7d38aa2a4d6872ea084247a1e34f6 | [
"Apache-2.0"
] | 1 | 2020-08-07T18:46:19.000Z | 2020-08-07T18:46:19.000Z | leaderboards/api_views.py | bfrederix/django-improv | 23ae4b2cc3b7d38aa2a4d6872ea084247a1e34f6 | [
"Apache-2.0"
] | null | null | null | leaderboards/api_views.py | bfrederix/django-improv | 23ae4b2cc3b7d38aa2a4d6872ea084247a1e34f6 | [
"Apache-2.0"
] | null | null | null | import datetime
from rest_framework import viewsets
from rest_framework.response import Response
from leaderboards import LEADERBOARD_MAX_PER_PAGE
from leaderboards.models import LeaderboardEntry, Medal, LeaderboardSpan
from leaderboards.serializers import (LeaderboardEntrySerializer, MedalSerializer,
LeaderboardSerializer, LeaderboardSpanSerializer,
LeaderboardEntrySpanSerializer)
from leaderboards import service as leaderboards_service
from users import service as users_service
from channels import service as channels_service
from shows import service as shows_service
from utilities.api import APIObject
| 42.333333 | 118 | 0.664001 |
185ab66623ac277ebae7a53438dfbee88f107a07 | 4,450 | py | Python | pyaz/sql/instance_pool/__init__.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | null | null | null | pyaz/sql/instance_pool/__init__.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | null | null | null | pyaz/sql/instance_pool/__init__.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | 1 | 2022-02-03T09:12:01.000Z | 2022-02-03T09:12:01.000Z | from ... pyaz_utils import _call_az
def show(name, resource_group):
    '''
    Show the details of a single SQL instance pool.

    Required Parameters:
    - name -- Instance Pool Name
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    command_args = {
        "name": name,
        "resource_group": resource_group,
    }
    return _call_az("az sql instance-pool show", command_args)
def list(resource_group=None):
    '''
    List the visible instance pools, optionally scoped to one resource group.

    Optional Parameters:
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    command_args = {
        "resource_group": resource_group,
    }
    return _call_az("az sql instance-pool list", command_args)
def update(name, resource_group, add=None, force_string=None, remove=None, set=None, tags=None):
    '''
    Update the properties of an existing instance pool.

    Required Parameters:
    - name -- Instance Pool Name
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`

    Optional Parameters:
    - add -- Add an object to a list of objects by specifying a path and key value pairs. Example: --add property.listProperty <key=value, string or JSON string>
    - force_string -- When using 'set' or 'add', preserve string literals instead of attempting to convert to JSON.
    - remove -- Remove a property or an element from a list. Example: --remove property.list <indexToRemove> OR --remove propertyToRemove
    - set -- Update an object by specifying a property path and value to set. Example: --set property1.property2=<value>
    - tags -- space-separated tags: key[=value] [key[=value] ...]. Use '' to clear existing tags.
    '''
    command_args = {
        "name": name,
        "resource_group": resource_group,
        "add": add,
        "force_string": force_string,
        "remove": remove,
        "set": set,
        "tags": tags,
    }
    return _call_az("az sql instance-pool update", command_args)
def delete(name, resource_group, no_wait=None, yes=None):
    '''
    Remove an instance pool.

    Required Parameters:
    - name -- Instance Pool Name
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`

    Optional Parameters:
    - no_wait -- Do not wait for the long-running operation to finish.
    - yes -- Do not prompt for confirmation.
    '''
    command_args = {
        "name": name,
        "resource_group": resource_group,
        "no_wait": no_wait,
        "yes": yes,
    }
    return _call_az("az sql instance-pool delete", command_args)
def create(capacity, family, location, name, resource_group, subnet, tier, license_type=None, no_wait=None, tags=None, vnet_name=None):
    '''
    Provision a new instance pool.

    Required Parameters:
    - capacity -- Capacity of the instance pool in vcores.
    - family -- The compute generation component of the sku. Allowed value: Gen5
    - location -- Location. Values from: `az account list-locations`. You can configure the default location using `az configure --defaults location=<location>`.
    - name -- Instance Pool Name
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - subnet -- Name or ID of the subnet that allows access to an Instance Pool. If subnet name is provided, --vnet-name must be provided.
    - tier -- The edition component of the sku. Allowed value: GeneralPurpose.

    Optional Parameters:
    - license_type -- The license type to apply for this instance pool.
    - no_wait -- Do not wait for the long-running operation to finish.
    - tags -- space-separated tags: key[=value] [key[=value] ...]. Use '' to clear existing tags.
    - vnet_name -- The virtual network name
    '''
    command_args = {
        "capacity": capacity,
        "family": family,
        "location": location,
        "name": name,
        "resource_group": resource_group,
        "subnet": subnet,
        "tier": tier,
        "license_type": license_type,
        "no_wait": no_wait,
        "tags": tags,
        "vnet_name": vnet_name,
    }
    return _call_az("az sql instance-pool create", command_args)
def wait(name, resource_group, created=None, custom=None, deleted=None, exists=None, interval=None, timeout=None, updated=None):
'''
Wait for an instance pool to reach a desired state.
Required Parameters:
- name -- Instance Pool Name
- resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
Optional Parameters:
- created -- wait until created with 'provisioningState' at 'Succeeded'
- custom -- Wait until the condition satisfies a custom JMESPath query. E.g. provisioningState!='InProgress', instanceView.statuses[?code=='PowerState/running']
- deleted -- wait until deleted
- exists -- wait until the resource exists
- interval -- polling interval in seconds
- timeout -- maximum wait in seconds
- updated -- wait until updated with provisioningState at 'Succeeded'
'''
return _call_az("az sql instance-pool wait", locals())
| 45.408163 | 164 | 0.702472 |
185b8c2212dd3b144fbc0efeca4d07970b4b5805 | 316 | py | Python | exercicios/ex090.py | Siqueira-Vinicius/Python | bd1f7e2bcdfd5481724d32db387f51636bb4ad60 | [
"MIT"
] | null | null | null | exercicios/ex090.py | Siqueira-Vinicius/Python | bd1f7e2bcdfd5481724d32db387f51636bb4ad60 | [
"MIT"
] | null | null | null | exercicios/ex090.py | Siqueira-Vinicius/Python | bd1f7e2bcdfd5481724d32db387f51636bb4ad60 | [
"MIT"
] | null | null | null | aluno = {}
aluno['nome'] = str(input('Digite o nome do aluno: '))
aluno['media'] = float(input('Digite a mdia desse aluno: '))
if aluno['media'] >= 5:
aluno['situao'] = '\033[32mAprovado\033[m'
else:
aluno['situao'] = '\033[31mReprovado\033[m'
for k, v in aluno.items():
print(f'{k} do aluno {v}') | 35.111111 | 61 | 0.617089 |
185c355337e2e9938d29808ca0f7b31c79694a3f | 813 | py | Python | cntr_div_train_test_images.py | globalgood-ag/treecover | ecab0ac2cef622b5f72054d5a234237a34c0bd4d | [
"MIT"
] | null | null | null | cntr_div_train_test_images.py | globalgood-ag/treecover | ecab0ac2cef622b5f72054d5a234237a34c0bd4d | [
"MIT"
] | null | null | null | cntr_div_train_test_images.py | globalgood-ag/treecover | ecab0ac2cef622b5f72054d5a234237a34c0bd4d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Aug 6 10:57:41 2019
Creates train and test splits at the IMAGE LEVEL to prep for thumbnail extraction in countr_cnn_1
@author: smcguire
"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
## read dataframe of unique images with annotation info
df_unique = pd.read_pickle('./df_unique.pkl')
# create df_test from every 4th image
df_test = df_unique[df_unique.index % 4 == 0]
# create df_train_val from every image not in df_test
df_train_val = df_unique[df_unique.index % 4 != 0]
# reset indexes
df_test = df_test.reset_index(drop=True)
df_train_val = df_train_val.reset_index(drop=True)
# pickle dataframes
df_test.to_pickle('./df_test.pkl')
df_train_val.to_pickle('./df_train_val.pkl')
| 26.225806 | 98 | 0.710947 |
185c491ee371d020cd3b4bc449367e92f4f7af90 | 1,144 | py | Python | SUAVE/SUAVE-2.5.0/trunk/SUAVE/Attributes/Propellants/Aviation_Gasoline.py | Vinicius-Tanigawa/Undergraduate-Research-Project | e92372f07882484b127d7affe305eeec2238b8a9 | [
"MIT"
] | null | null | null | SUAVE/SUAVE-2.5.0/trunk/SUAVE/Attributes/Propellants/Aviation_Gasoline.py | Vinicius-Tanigawa/Undergraduate-Research-Project | e92372f07882484b127d7affe305eeec2238b8a9 | [
"MIT"
] | null | null | null | SUAVE/SUAVE-2.5.0/trunk/SUAVE/Attributes/Propellants/Aviation_Gasoline.py | Vinicius-Tanigawa/Undergraduate-Research-Project | e92372f07882484b127d7affe305eeec2238b8a9 | [
"MIT"
] | null | null | null | ## @ingroup Attributes-Propellants
# Aviation_Gasoline.py
#
# Created: Unk 2013, SUAVE TEAM
# Modified: Apr 2015, SUAVE TEAM
# ----------------------------------------------------------------------
# Imports
# ----------------------------------------------------------------------
from .Propellant import Propellant
# ----------------------------------------------------------------------
# Aviation_Gasoline Propellant Class
# ----------------------------------------------------------------------
## @ingroup Attributes-Propellants
| 23.346939 | 72 | 0.436189 |
185eea51530d25c06bcb22494c22d6c4640df3ce | 4,108 | py | Python | write_grok/write_grok.py | namedyangfan/Python_practice | 7f7394d82bb5afc13b039eec286b9485a775ae39 | [
"MIT"
] | null | null | null | write_grok/write_grok.py | namedyangfan/Python_practice | 7f7394d82bb5afc13b039eec286b9485a775ae39 | [
"MIT"
] | null | null | null | write_grok/write_grok.py | namedyangfan/Python_practice | 7f7394d82bb5afc13b039eec286b9485a775ae39 | [
"MIT"
] | null | null | null | import os, glob, shutil | 45.644444 | 96 | 0.529211 |
185f0bca3ed3085aa387bfdbe9104d5218249f4a | 5,752 | py | Python | src/tfi/publish.py | ajbouh/tfi | 6e89e8c8f1ca3b285c788cc6b802fc44f9001290 | [
"MIT"
] | 160 | 2017-09-13T00:32:05.000Z | 2018-05-21T18:17:32.000Z | src/tfi/publish.py | tesserai/tfi | 6e89e8c8f1ca3b285c788cc6b802fc44f9001290 | [
"MIT"
] | 6 | 2017-09-14T17:54:21.000Z | 2018-01-27T19:31:18.000Z | src/tfi/publish.py | ajbouh/tfi | 6e89e8c8f1ca3b285c788cc6b802fc44f9001290 | [
"MIT"
] | 11 | 2017-09-13T00:37:08.000Z | 2018-03-05T08:03:34.000Z | import decimal
import hashlib
import json
import requests
import tempfile
import uuid
import os
from tqdm import tqdm
from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor
namespace = "default"
fission_url = os.environ["FISSION_URL"]
| 30.433862 | 96 | 0.577712 |