| max_stars_repo_path (string, 3-269 chars) | max_stars_repo_name (string, 4-119 chars) | max_stars_count (int64, 0-191k) | id (string, 1-7 chars) | content (string, 6-1.05M chars) | score (float64, 0.23-5.13) | int_score (int64, 0-5) |
|---|---|---|---|---|---|---|
ci/delete_old_binaries.py
|
NoahR02/Odin
| 2,690
|
12775751
|
import subprocess
import sys
import json
import datetime
def main():
bucket = sys.argv[1]
days_to_keep = int(sys.argv[2])
print(f"Looking for binaries to delete older than {days_to_keep} days")
files_lines = execute_cli(f"b2 ls --long --versions {bucket} nightly").split("\n")
for x in files_lines:
parts = [y for y in x.split(' ') if y]
        if parts:
date = datetime.datetime.strptime(parts[2], '%Y-%m-%d').replace(hour=0, minute=0, second=0, microsecond=0)
now = datetime.datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)
delta = now - date
if delta.days > days_to_keep:
print(f'Deleting {parts[5]}')
execute_cli(f'b2 delete-file-version {parts[0]}')
def execute_cli(command):
    completed = subprocess.run(command, shell=True, stdout=subprocess.PIPE)
    return completed.stdout.decode("utf-8")
if __name__ == '__main__':
sys.exit(main())
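# Example invocation (the bucket name below is hypothetical; the positional
# arguments are the B2 bucket and the retention window in days):
#   python delete_old_binaries.py my-odin-bucket 30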
| 2.703125
| 3
|
newrelic/core/custom_event.py
|
newrelic/newrelic-python-agen
| 92
|
12775752
|
# Copyright 2010 New Relic, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import re
import time
from newrelic.core.attribute import (check_name_is_string, check_name_length,
process_user_attribute, NameIsNotStringException, NameTooLongException,
MAX_NUM_USER_ATTRIBUTES)
_logger = logging.getLogger(__name__)
EVENT_TYPE_VALID_CHARS_REGEX = re.compile(r'^[a-zA-Z0-9:_ ]+$')
class NameInvalidCharactersException(Exception): pass
def check_event_type_valid_chars(name):
regex = EVENT_TYPE_VALID_CHARS_REGEX
if not regex.match(name):
raise NameInvalidCharactersException()
def process_event_type(name):
"""Perform all necessary validation on a potential event type.
If any of the validation checks fail, they will raise an exception
which we catch, so we can log a message, and return None.
Args:
name (str): The type (name) of the custom event.
Returns:
name, if name is OK.
        None, if name isn't.
"""
FAILED_RESULT = None
try:
check_name_is_string(name)
check_name_length(name)
check_event_type_valid_chars(name)
except NameIsNotStringException:
_logger.debug('Event type must be a string. Dropping '
'event: %r', name)
return FAILED_RESULT
except NameTooLongException:
_logger.debug('Event type exceeds maximum length. Dropping '
'event: %r', name)
return FAILED_RESULT
except NameInvalidCharactersException:
_logger.debug('Event type has invalid characters. Dropping '
'event: %r', name)
return FAILED_RESULT
else:
return name
def create_custom_event(event_type, params):
"""Creates a valid custom event.
Ensures that the custom event has a valid name, and also checks
the format and number of attributes. No event is created, if the
name is invalid. An event is created, if any of the attributes are
invalid, but the invalid attributes are dropped.
Args:
event_type (str): The type (name) of the custom event.
params (dict): Attributes to add to the event.
Returns:
Custom event (list of 2 dicts), if successful.
None, if not successful.
"""
name = process_event_type(event_type)
if name is None:
return None
attributes = {}
try:
for k, v in params.items():
key, value = process_user_attribute(k, v)
if key:
if len(attributes) >= MAX_NUM_USER_ATTRIBUTES:
_logger.debug('Maximum number of attributes already '
'added to event %r. Dropping attribute: %r=%r',
name, key, value)
else:
attributes[key] = value
except Exception:
_logger.debug('Attributes failed to validate for unknown reason. '
'Check traceback for clues. Dropping event: %r.', name,
exc_info=True)
return None
intrinsics = {
'type': name,
'timestamp': int(1000.0 * time.time()),
}
event = [intrinsics, attributes]
return event
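# Minimal usage sketch (the event type and attributes below are illustrative,
# not from the agent itself): create_custom_event returns an [intrinsics,
# attributes] pair on success and None if the event type fails validation.
#
#   event = create_custom_event('OrderPlaced', {'order_id': 123, 'total': 9.99})
#   if event is not None:
#       intrinsics, attributes = event  # intrinsics carries 'type' and 'timestamp'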
| 2.265625
| 2
|
tract_querier/tractography/__init__.py
|
gabknight/tract_querier
| 21
|
12775753
|
from .tractography import Tractography
from .trackvis import tractography_from_trackvis_file, tractography_to_trackvis_file
from warnings import warn
import numpy
__all__ = [
'Tractography',
'tractography_from_trackvis_file', 'tractography_to_trackvis_file',
'tractography_from_files',
'tractography_from_file', 'tractography_to_file',
]
try:
__all__ += [
'tractography_from_vtk_files', 'tractography_to_vtk_file',
'vtkPolyData_to_tracts', 'tracts_to_vtkPolyData'
]
from .vtkInterface import (
tractography_from_vtk_files, tractography_to_vtk_file,
vtkPolyData_to_tracts, tracts_to_vtkPolyData
)
except ImportError:
warn(
'VTK support not installed in this python distribution, '
'VTK files will not be read or written'
)
def tractography_from_files(filenames):
if isinstance(filenames, str):
filenames = [filenames]
tracts = tractography_from_file(filenames[0])
for filename in filenames[1:]:
tracts_ = tractography_from_file(filename)
tracts.append(tracts_.tracts(), tracts_.tracts_data())
return tracts
def tractography_from_file(filename):
if filename.endswith('trk'):
return tractography_from_trackvis_file(filename)
elif filename.endswith('vtk') or filename.endswith('vtp'):
if 'tractography_from_vtk_files' in __all__:
return tractography_from_vtk_files(filename)
else:
raise IOError("No VTK support installed, VTK files could not be read")
else:
raise IOError("File format not supported")
def tractography_to_file(filename, tractography, **kwargs):
if filename.endswith('trk'):
if 'affine' not in kwargs or kwargs['affine'] is None:
if (
hasattr(tractography, 'affine') and
tractography.affine is not None
):
kwargs['affine'] = tractography.affine
else:
warn('Setting affine of trk file to the identity')
kwargs['affine'] = numpy.eye(4)
if (
'image_dimensions' not in kwargs or
kwargs['image_dimensions'] is None
):
if (
hasattr(tractography, 'image_dims') and
tractography.image_dims is not None
):
kwargs['image_dimensions'] = tractography.image_dims
else:
warn('Setting image_dimensions of trk file to: 1 1 1')
kwargs['image_dimensions'] = numpy.ones(3)
return tractography_to_trackvis_file(filename, tractography, **kwargs)
elif filename.endswith('vtk') or filename.endswith('vtp'):
        if 'tractography_to_vtk_file' in __all__:
return tractography_to_vtk_file(filename, tractography, **kwargs)
else:
raise IOError("No VTK support installed, VTK files could not be read")
else:
raise IOError("File format not supported")
| 2.3125
| 2
|
setup.py
|
carletes/mock-ssh-server
| 42
|
12775754
|
import os
from setuptools import find_packages, setup
def read_requirements():
ret = []
fname = os.path.join(os.path.dirname(__file__), "requirements.txt")
with open(fname, "r") as f:
for line in f:
line = line.strip()
if line and not line.startswith("#"):
ret.append(line)
return ret
def read_long_description():
with open("README.rst", "r") as f:
return f.read()
setup(
name="mock-ssh-server",
version="0.9.1",
description="Mock SSH server for testing purposes",
long_description=read_long_description(),
url="https://github.com/carletes/mock-ssh-server",
author="<NAME>",
author_email="<EMAIL>",
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Testing",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
package_dir={
"mockssh": "mockssh",
},
packages=find_packages(),
package_data={
"mockssh": [
"sample-user-key",
"sample-user-key.pub",
"server-key",
"server-key.pub",
]
},
install_requires=read_requirements(),
zip_safe=False,
)
| 1.921875
| 2
|
pybitcoin/transactions/scripts.py
|
sea212/pybitcoin
| 220
|
12775755
|
<filename>pybitcoin/transactions/scripts.py
# -*- coding: utf-8 -*-
"""
pybitcoin
~~~~~
:copyright: (c) 2014 by Halfmoon Labs
:license: MIT, see LICENSE for more details.
"""
from .opcodes import *
from .utils import count_bytes
from ..constants import MAX_BYTES_AFTER_OP_RETURN
from ..b58check import b58check_decode, b58check_encode
from binascii import hexlify, unhexlify
from utilitybelt import is_hex
def script_to_hex(script):
""" Parse the string representation of a script and return the hex version.
Example: "OP_DUP OP_HASH160 c629...a6db OP_EQUALVERIFY OP_CHECKSIG"
"""
hex_script = ''
parts = script.split(' ')
    for part in parts:
        if part[0:3] == 'OP_':
            try:
                # resolve the OP_* constant imported from .opcodes
                hex_script += '%0.2x' % eval(part)
            except (NameError, SyntaxError):
                raise Exception('Invalid opcode: %s' % part)
        elif is_hex(part):
            hex_script += '%0.2x' % count_bytes(part) + part
        else:
            raise Exception('Invalid script - only opcodes and hex characters allowed.')
return hex_script
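# Worked example (assuming the standard Bitcoin opcode values exported by
# .opcodes: OP_DUP=0x76, OP_HASH160=0xa9, OP_EQUALVERIFY=0x88, OP_CHECKSIG=0xac):
# for a 40-char (20-byte) hash160 hex string h,
#   script_to_hex('OP_DUP OP_HASH160 %s OP_EQUALVERIFY OP_CHECKSIG' % h)
# yields '76a914' + h + '88ac', where '14' is the one-byte length prefix (20).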
def make_pay_to_address_script(address):
""" Takes in an address and returns the script
"""
hash160 = hexlify(b58check_decode(address))
script_string = 'OP_DUP OP_HASH160 %s OP_EQUALVERIFY OP_CHECKSIG' % hash160
return script_to_hex(script_string)
def make_op_return_script(data, format='bin'):
""" Takes in raw ascii data to be embedded and returns a script.
"""
if format == 'hex':
assert(is_hex(data))
hex_data = data
elif format == 'bin':
hex_data = hexlify(data)
else:
raise Exception("Format must be either 'hex' or 'bin'")
num_bytes = count_bytes(hex_data)
if num_bytes > MAX_BYTES_AFTER_OP_RETURN:
        raise Exception('Data is %i bytes - must not exceed %i.' % (num_bytes, MAX_BYTES_AFTER_OP_RETURN))
script_string = 'OP_RETURN %s' % hex_data
return script_to_hex(script_string)
| 2.53125
| 3
|
Code/GraphMol/Descriptors/Wrap/test3D.py
|
docking-org/rdk
| 0
|
12775756
|
<filename>Code/GraphMol/Descriptors/Wrap/test3D.py
from rdkit import Chem
from rdkit import rdBase
from rdkit import RDConfig
import os
from rdkit.Chem import rdMolDescriptors as rdMD
from rdkit.Chem import AllChem
haveDescrs3D = hasattr(rdMD,'CalcAUTOCORR3D')
import time,unittest
def _gen3D(m,is3d,calculator):
if not is3d:
m = Chem.AddHs(m)
ps = AllChem.ETKDG()
ps.randomSeed = 0xf00d
AllChem.EmbedMolecule(m,ps)
return calculator(m)
class TestCase(unittest.TestCase):
def setUp(self):
self.dataDir = os.path.join(RDConfig.RDBaseDir,'Code','GraphMol',
'Descriptors','test_data')
self.suppl = Chem.SDMolSupplier(os.path.join(self.dataDir,'PBF_egfr.sdf'),removeHs=False)
@unittest.skipIf(not haveDescrs3D,"3d descriptors not present")
def test1AUTOCORR2D(self):
# not really a 3D descriptor, but this was added at the same time
with open(os.path.join(self.dataDir,'auto2D.out')) as refFile:
for i,m in enumerate(self.suppl):
if i>10: break
nm = m.GetProp('_Name')
inl = refFile.readline()
split = inl.split('\t')
self.assertEqual(split[0],nm)
split.pop(0)
vs = rdMD.CalcAUTOCORR2D(m)
for rv,nv in zip(split,vs):
self.assertAlmostEqual(float(rv),nv,delta=0.05)
@unittest.skipIf(not haveDescrs3D,"3d descriptors not present")
def test2AUTOCORR3D(self):
with open(os.path.join(self.dataDir,'auto3D_dragon.out')) as refFile:
for i,m in enumerate(self.suppl):
if i>10: break
nm = m.GetProp('_Name')
inl = refFile.readline()
split = inl.split('\t')
self.assertEqual(split[0],nm)
split.pop(0)
vs = _gen3D(m,True,rdMD.CalcAUTOCORR3D)
for rv,nv in zip(split,vs):
self.assertAlmostEqual(float(rv),nv,delta=0.05)
@unittest.skipIf(not haveDescrs3D,"3d descriptors not present")
def test3GETAWAY(self):
with open(os.path.join(self.dataDir,'GETAWAY.new.out')) as refFile:
for i,m in enumerate(self.suppl):
if i>10: break
nm = m.GetProp('_Name')
inl = refFile.readline()
split = inl.split('\t')
self.assertEqual(split[0],nm)
split.pop(0)
vs = _gen3D(m,True,rdMD.CalcGETAWAY)
for rv,nv in zip(split,vs):
self.assertAlmostEqual(float(rv),nv,delta=0.05)
@unittest.skipIf(not haveDescrs3D,"3d descriptors not present")
def test4MORSE(self):
with open(os.path.join(self.dataDir,'MORSE.out')) as refFile:
for i,m in enumerate(self.suppl):
if i>10: break
nm = m.GetProp('_Name')
inl = refFile.readline()
split = inl.split('\t')
self.assertEqual(split[0],nm)
split.pop(0)
vs = _gen3D(m,True,rdMD.CalcMORSE)
for rv,nv in zip(split,vs):
ref = float(rv)
self.assertTrue(ref < 1 or abs(ref - nv) / ref < 0.02)
@unittest.skipIf(not haveDescrs3D,"3d descriptors not present")
def test5RDF(self):
with open(os.path.join(self.dataDir,'RDF.out')) as refFile:
for i,m in enumerate(self.suppl):
if i>10: break
nm = m.GetProp('_Name')
inl = refFile.readline()
split = inl.split('\t')
self.assertEqual(split[0],nm)
split.pop(0)
vs = _gen3D(m,True,rdMD.CalcRDF)
for rv,nv in zip(split,vs):
ref = float(rv)
self.assertTrue(ref < 0.5 or abs(ref - nv) / ref < 0.02)
@unittest.skipIf(not haveDescrs3D,"3d descriptors not present")
def test6WHIM(self):
with open(os.path.join(self.dataDir,'whim.new.out')) as refFile:
for i,m in enumerate(self.suppl):
if i>10: break
nm = m.GetProp('_Name')
inl = refFile.readline()
split = inl.split('\t')
self.assertEqual(split[0],nm)
split.pop(0)
vs = _gen3D(m,True,lambda x:rdMD.CalcWHIM(x,thresh=0.01))
for rv,nv in zip(split,vs):
self.assertAlmostEqual(float(rv),nv,delta=0.01)
if __name__ == '__main__':
unittest.main()
| 1.960938
| 2
|
Bin/model_vdsr.py
|
MingSun-Tse/pytorch-vdsr
| 1
|
12775757
|
<filename>Bin/model_vdsr.py<gh_stars>1-10
import numpy as np
import os
import torch.nn as nn
import torch
# Load param from model1 to model2
# For each layer of model2, if model1 has the same layer, then copy the params.
def load_param(model1_path, model2):
dict_param1 = torch.load(model1_path) # model1_path: .pth model path
dict_param2 = dict(model2.named_parameters())
for name2 in dict_param2:
if name2 in dict_param1:
# print("tensor '%s' found in both models, so copy it from model 1 to model 2" % name2)
dict_param2[name2].data.copy_(dict_param1[name2].data)
model2.load_state_dict(dict_param2)
return model2
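# Minimal usage sketch (the checkpoint path is hypothetical): build a model,
# then copy over every parameter whose name also appears in the checkpoint.
#
#   model = VDSR()
#   model = load_param('checkpoints/vdsr_best.pth', model)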
# Original VDSR model
class VDSR(nn.Module):
def __init__(self, model=False, fixed=False):
super(VDSR, self).__init__()
self.fixed = fixed
self.conv1 = nn.Conv2d( 1,64,3,1,1,bias=False)
self.conv2 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv3 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv4 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv5 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv6 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv7 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv8 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv9 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv10 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv11 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv12 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv13 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv14 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv15 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv16 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv17 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv18 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv19 = nn.Conv2d(64,64,3,1,1,bias=False)
self.conv20 = nn.Conv2d(64, 1,3,1,1,bias=False)
self.relu = nn.ReLU(inplace=True)
if model:
load_param(model, self)
# self.load_state_dict(torch.load(model, map_location=lambda storage, location: storage))
if fixed:
for param in self.parameters():
param.requires_grad = False
def forward(self, y):
y = self.relu(self.conv1(y))
y = self.relu(self.conv2(y))
y = self.relu(self.conv3(y))
y = self.relu(self.conv4(y))
y = self.relu(self.conv5(y))
y = self.relu(self.conv6(y))
y = self.relu(self.conv7(y))
y = self.relu(self.conv8(y))
y = self.relu(self.conv9(y))
y = self.relu(self.conv10(y))
y = self.relu(self.conv11(y))
y = self.relu(self.conv12(y))
y = self.relu(self.conv13(y))
y = self.relu(self.conv14(y))
y = self.relu(self.conv15(y))
y = self.relu(self.conv16(y))
y = self.relu(self.conv17(y))
y = self.relu(self.conv18(y))
y = self.relu(self.conv19(y))
y = self.conv20(y) # note there is no relu in the output layer
return y
def forward_stem(self, y):
y = self.relu(self.conv1(y)); out1 = y
y = self.relu(self.conv2(y))
y = self.relu(self.conv3(y)); out3 = y
y = self.relu(self.conv4(y))
y = self.relu(self.conv5(y)); out5 = y
y = self.relu(self.conv6(y))
y = self.relu(self.conv7(y)); out7 = y
y = self.relu(self.conv8(y))
y = self.relu(self.conv9(y)); out9 = y
y = self.relu(self.conv10(y))
y = self.relu(self.conv11(y)); out11 = y
y = self.relu(self.conv12(y))
y = self.relu(self.conv13(y)); out13 = y
y = self.relu(self.conv14(y))
y = self.relu(self.conv15(y)); out15 = y
y = self.relu(self.conv16(y))
y = self.relu(self.conv17(y)); out17 = y
y = self.relu(self.conv18(y))
y = self.relu(self.conv19(y)); out19 = y
y = self.conv20(y)
# return out1, out3, out5, out7, out9, \
# out11, out13, out15, out17, out19, y
return out1, out5, out9, out13, out17, y # the last element of return is the residual
def forward_dense(self, y):
y = self.relu(self.conv1(y)); out1 = y
y = self.relu(self.conv2(y)); out2 = y
y = self.relu(self.conv3(y)); out3 = y
y = self.relu(self.conv4(y)); out4 = y
y = self.relu(self.conv5(y)); out5 = y
y = self.relu(self.conv6(y)); out6 = y
y = self.relu(self.conv7(y)); out7 = y
y = self.relu(self.conv8(y)); out8 = y
y = self.relu(self.conv9(y)); out9 = y
y = self.relu(self.conv10(y)); out10 = y
y = self.relu(self.conv11(y)); out11 = y
y = self.relu(self.conv12(y)); out12 = y
y = self.relu(self.conv13(y)); out13 = y
y = self.relu(self.conv14(y)); out14 = y
y = self.relu(self.conv15(y)); out15 = y
y = self.relu(self.conv16(y)); out16 = y
y = self.relu(self.conv17(y)); out17 = y
y = self.relu(self.conv18(y)); out18 = y
y = self.relu(self.conv19(y)); out19 = y
y = self.conv20(y); out20 = y
return out1, out2, out3, out4, out5, out6, out7, out8, out9, out10, \
out11, out12, out13, out14, out15, out16, out17, out18, out19, out20
class SmallVDSR_16x(nn.Module):
def __init__(self, model=False, fixed=False):
super(SmallVDSR_16x, self).__init__()
self.fixed = fixed
self.conv1 = nn.Conv2d( 1,16,3,1,1,bias=False)
self.conv2 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv3 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv4 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv5 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv6 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv7 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv8 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv9 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv10 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv11 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv12 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv13 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv14 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv15 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv16 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv17 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv18 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv19 = nn.Conv2d(16,16,3,1,1,bias=False)
self.conv20 = nn.Conv2d(16, 1,3,1,1,bias=False)
self.prelu = nn.PReLU()
self.relu = nn.ReLU()
self.conv1_aux = nn.Conv2d(16,64,1,1,0,bias=False)
self.conv3_aux = nn.Conv2d(16,64,1,1,0,bias=False)
self.conv5_aux = nn.Conv2d(16,64,1,1,0,bias=False)
self.conv7_aux = nn.Conv2d(16,64,1,1,0,bias=False)
self.conv9_aux = nn.Conv2d(16,64,1,1,0,bias=False)
self.conv11_aux = nn.Conv2d(16,64,1,1,0,bias=False)
self.conv13_aux = nn.Conv2d(16,64,1,1,0,bias=False)
self.conv15_aux = nn.Conv2d(16,64,1,1,0,bias=False)
self.conv17_aux = nn.Conv2d(16,64,1,1,0,bias=False)
self.conv19_aux = nn.Conv2d(16,64,1,1,0,bias=False)
if model:
load_param(model, self)
if fixed:
for param in self.parameters():
param.requires_grad = False
    def forward_aux(self, y):
        # aux 1x1 convs are only defined for the odd layers
        # (conv1_aux, conv3_aux, ..., conv19_aux), so only tap those
        y = self.relu(self.conv1(y)); out1_aux = self.prelu(self.conv1_aux(y))
        y = self.relu(self.conv2(y))
        y = self.relu(self.conv3(y)); out3_aux = self.prelu(self.conv3_aux(y))
        y = self.relu(self.conv4(y))
        y = self.relu(self.conv5(y)); out5_aux = self.prelu(self.conv5_aux(y))
        y = self.relu(self.conv6(y))
        y = self.relu(self.conv7(y)); out7_aux = self.prelu(self.conv7_aux(y))
        y = self.relu(self.conv8(y))
        y = self.relu(self.conv9(y)); out9_aux = self.prelu(self.conv9_aux(y))
        y = self.relu(self.conv10(y))
        y = self.relu(self.conv11(y)); out11_aux = self.prelu(self.conv11_aux(y))
        y = self.relu(self.conv12(y))
        y = self.relu(self.conv13(y)); out13_aux = self.prelu(self.conv13_aux(y))
        y = self.relu(self.conv14(y))
        y = self.relu(self.conv15(y)); out15_aux = self.prelu(self.conv15_aux(y))
        y = self.relu(self.conv16(y))
        y = self.relu(self.conv17(y)); out17_aux = self.prelu(self.conv17_aux(y))
        y = self.relu(self.conv18(y))
        y = self.relu(self.conv19(y)); out19_aux = self.prelu(self.conv19_aux(y))
        y = self.conv20(y)
# return out1_aux, out3_aux, out5_aux, out7_aux, out9_aux, \
# out11_aux, out13_aux, out15_aux, out17_aux, out19_aux, y
return out1_aux, out5_aux, out9_aux, out13_aux, out17_aux, y # the last element of return is the residual
def forward_dense(self, y):
y = self.relu(self.conv1(y)); out1 = y
y = self.relu(self.conv2(y)); out2 = y
y = self.relu(self.conv3(y)); out3 = y
y = self.relu(self.conv4(y)); out4 = y
y = self.relu(self.conv5(y)); out5 = y
y = self.relu(self.conv6(y)); out6 = y
y = self.relu(self.conv7(y)); out7 = y
y = self.relu(self.conv8(y)); out8 = y
y = self.relu(self.conv9(y)); out9 = y
y = self.relu(self.conv10(y)); out10 = y
y = self.relu(self.conv11(y)); out11 = y
y = self.relu(self.conv12(y)); out12 = y
y = self.relu(self.conv13(y)); out13 = y
y = self.relu(self.conv14(y)); out14 = y
y = self.relu(self.conv15(y)); out15 = y
y = self.relu(self.conv16(y)); out16 = y
y = self.relu(self.conv17(y)); out17 = y
y = self.relu(self.conv18(y)); out18 = y
y = self.relu(self.conv19(y)); out19 = y
y = self.conv20(y); out20 = y
return out1, out2, out3, out4, out5, out6, out7, out8, out9, out10, \
out11, out12, out13, out14, out15, out16, out17, out18, out19, out20
def forward(self, y):
y = self.relu(self.conv1(y))
y = self.relu(self.conv2(y))
y = self.relu(self.conv3(y))
y = self.relu(self.conv4(y))
y = self.relu(self.conv5(y))
y = self.relu(self.conv6(y))
y = self.relu(self.conv7(y))
y = self.relu(self.conv8(y))
y = self.relu(self.conv9(y))
y = self.relu(self.conv10(y))
y = self.relu(self.conv11(y))
y = self.relu(self.conv12(y))
y = self.relu(self.conv13(y))
y = self.relu(self.conv14(y))
y = self.relu(self.conv15(y))
y = self.relu(self.conv16(y))
y = self.relu(self.conv17(y))
y = self.relu(self.conv18(y))
y = self.relu(self.conv19(y))
y = self.conv20(y)
return y
class KTSmallVDSR_16x(nn.Module):
def __init__(self, e1, e2):
super(KTSmallVDSR_16x, self).__init__()
self.e1 = VDSR(e1, fixed=True)
self.e2 = SmallVDSR_16x(e2)
def forward(self, LR):
feats_1 = self.e1.forward_stem(LR); predictedHR_1 = torch.add(feats_1[-1], LR)
feats_2 = self.e1.forward_stem(predictedHR_1); predictedHR_2 = torch.add(feats_2[-1], predictedHR_1)
feats_3 = self.e1.forward_stem(predictedHR_2); predictedHR_3 = torch.add(feats_3[-1], predictedHR_2)
feats2_1 = self.e2.forward_aux(LR); predictedHR2_1 = torch.add(feats2_1[-1], LR)
feats2_2 = self.e2.forward_aux(predictedHR2_1); predictedHR2_2 = torch.add(feats2_2[-1], predictedHR2_1)
feats2_3 = self.e2.forward_aux(predictedHR2_2); predictedHR2_3 = torch.add(feats2_3[-1], predictedHR2_2)
return feats_1, feats2_1, predictedHR_1, predictedHR2_1, \
feats_2, feats2_2, predictedHR_2, predictedHR2_2, \
feats_3, feats2_3, predictedHR_3, predictedHR2_3
Autoencoders = {
"16x": KTSmallVDSR_16x,
}
| 2.28125
| 2
|
pinax/projects/temp_group_project/apps/temp_tribes/admin.py
|
skabber/pinax
| 2
|
12775758
|
from django.contrib import admin
from temp_tribes.models import Tribe
class TribeAdmin(admin.ModelAdmin):
list_display = ('name', 'slug', 'creator', 'created', 'deleted')
admin.site.register(Tribe, TribeAdmin)
| 1.773438
| 2
|
stone/common/errors.py
|
Coderhypo/booklib
| 0
|
12775759
|
class BaseError(Exception):
error_id = ""
error_msg = ""
def __repr__(self):
return "<{err_id}>: {err_msg}".format(
err_id=self.error_id,
err_msg=self.error_msg,
)
def render(self):
return dict(
error_id=self.error_id,
error_msg=self.error_msg,
)
class ClientError(BaseError):
error_id = "Third_Party_Dependent_Error"
def __init__(self, error_msg):
self.error_msg = error_msg
class BookNotFound(BaseError):
error_id = "Book_Not_Found"
def __init__(self, error_msg):
self.error_msg = error_msg
class UserNotFound(BaseError):
error_id = "User_Not_Found"
def __init__(self, error_msg):
self.error_msg = error_msg
class RecommendedNotFound(BaseError):
error_id = "Recommended_Not_Found"
def __init__(self, error_msg):
self.error_msg = error_msg
| 2.75
| 3
|
src/euler_python_package/euler_python/medium/p374.py
|
wilsonify/euler
| 0
|
12775760
|
<reponame>wilsonify/euler
def problem374():
pass
| 0.84375
| 1
|
links/views.py
|
RuijiaX/w3hacks
| 1
|
12775761
|
from django.shortcuts import render
from django.http import HttpResponseRedirect, HttpResponse
from app.models import ResourceLink
def index(request):
links = ResourceLink.objects.all()
return render(request, "links/index.html", context={
"links": links
})
def link(request, url_extension):
# Link exists
if ResourceLink.objects.filter(url_extension=url_extension).exists():
resource_link = ResourceLink.objects.get(url_extension=url_extension)
return HttpResponseRedirect(resource_link.link)
# Link doesn't exist
else:
return HttpResponse("That link doesn't exist.")
| 2.15625
| 2
|
app/main.py
|
lauralex/DSBD_csv_gen
| 0
|
12775762
|
<gh_stars>0
from fastapi import FastAPI
from app.kafka import consumers, producers
from app.utils.advanced_scheduler import init_scheduler
app = FastAPI()
@app.on_event("startup")
def run_consumers_producers():
init_scheduler()
consumers.init_consumers()
producers.init_producers()
@app.on_event("shutdown")
def close_consumers():
consumers.close_consumers()
producers.close_producers()
| 2.171875
| 2
|
test/test_model.py
|
vkazei/deepwave
| 73
|
12775763
|
<reponame>vkazei/deepwave<filename>test/test_model.py<gh_stars>10-100
import torch
import pytest
import deepwave.base.model
def test_init_scalar():
"""Init model with scalars"""
properties = {'a': torch.ones(3, 4),
'b': torch.zeros(3, 4)}
dx = 5.0
model = deepwave.base.model.Model(properties, dx, pad_width=1, origin=2.0)
assert model.properties == properties
assert model.device == properties['a'].device
assert model.ndim == 2
assert (model.shape == torch.Tensor([3, 4, 1]).long()).all()
assert (model.dx == dx * torch.ones(2)).all()
assert (model.pad_width == torch.Tensor([1, 1, 1, 1, 0, 0]).long()).all()
assert (model.origin == torch.Tensor([2.0, 2.0])).all()
assert model.interior == [slice(1, 2), slice(1, 3)]
def test_init_list():
"""Init model with lists"""
properties = {'a': torch.ones(3, 4),
'b': torch.zeros(3, 4)}
dx = [5.0, 5.0]
pad_width = [1, 1, 1, 1, 0, 0]
origin = [2.0, 2.0]
model = deepwave.base.model.Model(properties, dx, pad_width=pad_width,
origin=origin)
assert model.properties == properties
assert model.device == properties['a'].device
assert model.ndim == 2
assert (model.shape == torch.Tensor([3, 4, 1]).long()).all()
assert (model.dx == torch.Tensor(dx)).all()
assert (model.pad_width == torch.Tensor([1, 1, 1, 1, 0, 0]).long()).all()
assert (model.origin == torch.Tensor([2.0, 2.0])).all()
assert model.interior == [slice(1, 2), slice(1, 3)]
def test_not_tensor():
"""One of the properties is not a Tensor"""
properties = {'a': torch.ones(3, 4),
'b': [0, 1]}
with pytest.raises(TypeError):
deepwave.base.model.Model(properties, 5.0, pad_width=1,
origin=2.0)
def test_different_types():
"""Properties have different types"""
properties = {'a': torch.ones(3, 4),
'b': torch.zeros(3, 4, dtype=torch.double)}
with pytest.raises(RuntimeError):
deepwave.base.model.Model(properties, 5.0, pad_width=1,
origin=2.0)
def test_different_sizes1():
"""Properties have different sizes (same ndim)"""
properties = {'a': torch.ones(3, 4),
'b': torch.zeros(3, 5)}
with pytest.raises(RuntimeError):
deepwave.base.model.Model(properties, 5.0, pad_width=1,
origin=2.0)
def test_different_sizes2():
"""Properties have different sizes (different ndim)"""
properties = {'a': torch.ones(3, 4),
'b': torch.zeros(3, 4, 1)}
with pytest.raises(RuntimeError):
deepwave.base.model.Model(properties, 5.0, pad_width=1,
origin=2.0)
def test_nonpositive_dx1():
"""Nonpositive dx (scalar)"""
properties = {'a': torch.ones(3, 4),
'b': torch.zeros(3, 4)}
with pytest.raises(RuntimeError):
deepwave.base.model.Model(properties, -5.0, pad_width=1,
origin=2.0)
def test_nonpositive_dx2():
"""Nonpositive dx (list)"""
properties = {'a': torch.ones(3, 4),
'b': torch.zeros(3, 4)}
dx = [5.0, 0.0]
with pytest.raises(RuntimeError):
deepwave.base.model.Model(properties, dx, pad_width=1,
origin=2.0)
def test_negative_pad1():
"""Negative pad (scalar)"""
properties = {'a': torch.ones(3, 4),
'b': torch.zeros(3, 4)}
with pytest.raises(RuntimeError):
deepwave.base.model.Model(properties, 5.0, pad_width=-1,
origin=2.0)
def test_negative_pad2():
"""Negative pad (list)"""
properties = {'a': torch.ones(3, 4),
'b': torch.zeros(3, 4)}
pad_width = [1, 1, -1, 1, 0, 0]
with pytest.raises(RuntimeError):
deepwave.base.model.Model(properties, 5.0, pad_width=pad_width,
origin=2.0)
def test_integer_origin():
"""Origin is int instead of float"""
properties = {'a': torch.ones(3, 4),
'b': torch.zeros(3, 4)}
with pytest.raises(TypeError):
deepwave.base.model.Model(properties, 5.0, pad_width=1,
origin=2)
def test_extract():
"""Extract portion of model"""
properties = {'a': torch.ones(3, 4),
'b': torch.zeros(3, 4)}
model = deepwave.base.model.Model(properties, 5.0, pad_width=1, origin=2.0)
model_extract = model[:, 1:2]
assert (model_extract.shape == torch.Tensor([3, 3, 1]).long()).all()
assert model_extract.properties['a'].shape == torch.Size([3, 3])
assert model_extract.properties['b'].shape == torch.Size([3, 3])
assert model_extract.ndim == 2
assert (model_extract.pad_width ==
torch.Tensor([1, 1, 1, 1, 0, 0]).long()).all()
assert (model_extract.origin == torch.Tensor([2.0, 7.0])).all()
assert model_extract.interior == [slice(1, 2), slice(1, 2)]
def test_pad1():
"""Change pad_width from 1 to 2"""
properties = {'a': torch.ones(3, 4),
'b': torch.zeros(3, 4)}
model = deepwave.base.model.Model(properties, 5.0, pad_width=1, origin=2.0)
model_pad = model.pad(2)
assert (model_pad.shape == torch.Tensor([5, 6, 1]).long()).all()
assert model_pad.properties['a'].shape == torch.Size([5, 6])
assert model_pad.properties['b'].shape == torch.Size([5, 6])
assert model_pad.ndim == 2
assert (model_pad.pad_width ==
torch.Tensor([2, 2, 2, 2, 0, 0]).long()).all()
assert (model_pad.origin == torch.Tensor([2.0, 2.0])).all()
assert model_pad.interior == [slice(2, 3), slice(2, 4)]
def test_pad2():
"""Add two pad_widths"""
properties = {'a': torch.ones(3, 4),
'b': torch.zeros(3, 4)}
model = deepwave.base.model.Model(properties, 5.0, pad_width=1, origin=2.0)
model_pad = model.pad(1, 1)
assert (model_pad.shape == torch.Tensor([5, 6, 1]).long()).all()
assert model_pad.properties['a'].shape == torch.Size([5, 6])
assert model_pad.properties['b'].shape == torch.Size([5, 6])
assert model_pad.ndim == 2
assert (model_pad.pad_width ==
torch.Tensor([2, 2, 2, 2, 0, 0]).long()).all()
assert (model_pad.origin == torch.Tensor([2.0, 2.0])).all()
assert model_pad.interior == [slice(2, 3), slice(2, 4)]
def test_pad3():
"""Verify that padded model has correct values"""
properties = {'a': torch.arange(6).float().reshape(2, 3)}
model = deepwave.base.model.Model(properties, 5.0)
model_pad = model.pad([1,0,0,0,0,0])
assert (model_pad.properties['a'] == torch.tensor([[0.0, 1.0, 2.0],
[0.0, 1.0, 2.0],
[3.0, 4.0, 5.0]])).all()
| 2.359375
| 2
|
fitseq/fitseq.py
|
darachm/PyFitSeq
| 1
|
12775764
|
<reponame>darachm/PyFitSeq
#!/usr/bin/env python3
import numpy as np
import pandas as pd
import math
import argparse
import itertools
import sys
from scipy.stats import linregress
from scipy.optimize import minimize
from scipy.optimize import Bounds
from tqdm import tqdm
from scipy.misc import derivative
from multiprocessing import Pool
x0_global = None
read_num_measure_global = None
kappa_global = None
read_depth_seq_global = None
t_seq_global = None
seq_num_global = None
sum_term_global = None
fitness_type_global = None
def estimate_parameters(x,processes,total_reads,max_chunk_size):
"""Estimate parameters?
This copied over from the old old old PyFitSeq - dunno if still relevant
but it's missing in this version !!!
A SUB-FUNCTION CALLED BY MAIN FUNCTION main() TO CALCULATE THE LOG
LIKELIHOOD VALUE OF EACH GENOTYPE GIVEN ITS FITNESS, THE ESTIMATED READ
NUMBER PER GENOTYPE PER SEQUENCING TIME-POINT, AND THE ESTIMATED MEAN
FITNESS PER SEQUENCING TIME-POINT
INPUTS ( NOT ANY more apparently....)
* x: fitness of each genotype, [x1, x2, ...]
* read_num_seq: read number per genotype at each sequencing time-point
* t_seq: sequenced time-points in number of generations,
[0, t1, t2, ...]
* kappa: a noise parameter that characterizes the total noise introduced
by growth, cell transfer, DNA extraction, PCR, and sequencing
(To measure kappa empirically, see the reference:
[<NAME>, et al. Quantitative Evolutionary Dynamics Using
        High-resolution Lineage Tracking. Nature, 519: 181–186 (2015)].
) . (default: 2.5)
* fitness_type: type of fitness: Wrightian fitness (w), or
Malthusian fitness (m)' (default: m)
OUTPUTS
* estimate_parameters_output: log likelihood value of each genotype,
estimated reads number per genotype per sequencing time-point,
estimated mean fitness per sequencing time-point,
[x_mean(t0),x_mean(t1),...]
"""
    global read_num_measure_global
    global read_num_measure_original
    global read_depth_seq_global
    global t_seq_global
    global kappa_global
    global seq_num_global
    global fitness_type_global
read_num_theory = 1e-1*np.ones(read_num_measure_global.shape, dtype=float)
read_num_theory[:,0] = read_num_measure_global[:,0]
x_mean = np.zeros(seq_num_global, dtype=float)
sum_term = np.zeros(seq_num_global, dtype=float)
if fitness_type_global == 'm':
for k in range(1, seq_num_global):
freq_of_lineage = (
read_num_measure_original[:, k] /
np.sum(read_num_measure_original[:, k])
)
x_mean[k] = np.average(x, weights=freq_of_lineage)
sum_term[k] = (
(t_seq_global[k]-t_seq_global[k-1]) *
(x_mean[k]+x_mean[k-1]) / 2
)
tempt = (
read_num_measure_original[:, k-1] *
np.exp(
(t_seq_global[k]-t_seq_global[k-1]) *
x - sum_term[k]
)
)
read_num_theory[:,k] = ( tempt /
read_depth_seq_global[k-1]*read_depth_seq_global[k]
)
elif fitness_type_global == 'w':
for k in range(1, seq_num_global):
freq_of_lineage = (
read_num_measure_global[:, k] /
np.sum(read_num_measure_global[:, k])
)
x_mean[k] = np.maximum( np.average(x, weights=freq_of_lineage) , 0)
if x_mean[k] != x_mean[k-1]:
sum_term[k] = ((x_mean[k]+1)*np.log(x_mean[k]+1) - (x_mean[k-1]+1)*np.log(x_mean[k-1]+1)
- (x_mean[k]-x_mean[k-1])) * (t_seq_global[k]-t_seq_global[k-1])/(x_mean[k]-x_mean[k-1])
else:
sum_term[k] = (t_seq_global[k] - t_seq_global[k-1]) * np.log(1 + x_mean[k-1])
tempt = (
read_num_measure_global[:,k-1] *
np.exp( (t_seq_global[k]-t_seq_global[k-1]) *
np.log(1+x) - sum_term[k]
)
)
read_num_theory[:,k] = tempt/read_depth_seq_global[k-1]*read_depth_seq_global[k]
#x_mean[k] = np.maximum(np.dot(x, read_num_theory[:, k]) / np.sum(read_num_theory[:, k]),0)
if x_mean[k] != x_mean[k-1]:
sum_term[k] = ((x_mean[k]+1)*np.log(x_mean[k]+1) - (x_mean[k-1]+1)*np.log(x_mean[k-1]+1)
- (x_mean[k]-x_mean[k-1])) * (t_seq_global[k]-t_seq_global[k-1])/(x_mean[k]-x_mean[k-1])
else:
sum_term[k] = (t_seq_global[k] - t_seq_global[k-1]) * np.log(1 + x_mean[k-1])
if processes > 1:
pool_obj = Pool(processes)
other_result = pool_obj.starmap(
calculate_likelihood_of_fitness_vector,
tqdm(
[ (x0_global[i],read_num_measure_global[i,:],kappa_global,total_reads,sum_term)
for i in range(read_num_measure_global.shape[0]) ]
) ,
chunksize=np.minimum(
max_chunk_size,
int(len(x)/processes)+1
)
)
else:
other_result = list(itertools.starmap(
calculate_likelihood_of_fitness_vector,
tqdm(
[ (x0_global[i],read_num_measure_global[i,:],kappa_global,total_reads,sum_term)
for i in range(read_num_measure_global.shape[0]) ]
) ))
parameter_output = {'Likelihood_Log': other_result,
'Estimated_Read_Number': read_num_theory,
'Estimated_Mean_Fitness': x_mean,
'Sum_Term': sum_term}
return parameter_output
##################################################
def predict_counts(fitness,observations,total_reads,sum_term):
"""predict expected counts?
"""
global t_seq_global
global seq_num_global
global fitness_type_global
number_of_timepoints = len(observations)
read_num_lineage_theory = 1e-1 * np.ones(number_of_timepoints, dtype=float)
read_num_lineage_theory[0] = observations[0]
if fitness_type_global == 'm':
for k in range(1, number_of_timepoints):
tempt = (
observations[k-1] *
np.exp(
(t_seq_global[k]-t_seq_global[k-1]) *
fitness - sum_term[k]
)
)
            # NOTE: this predicts each timepoint from the *observed* previous
            # timepoint rather than from the running prediction, which may be wrong
read_num_lineage_theory[k] = (
tempt / total_reads[k-1] *
total_reads[k]
)
elif fitness_type_global == 'w':
for k in range(1, number_of_timepoints):
tempt = observations[k-1] * np.exp((t_seq_global[k]-t_seq_global[k-1])*np.log(1+fitness)
- sum_term[k])
read_num_lineage_theory[k] = tempt/total_reads[k-1]*total_reads[k]
return read_num_lineage_theory
def calculate_likelihood_of_fitness_vector(fitness,observations,kappa,
total_reads,sum_term):
"""given a fitness value, calculate the likelihood of that
Arguments:
fitness -- fitness to calc likelihood for
observations -- the counts to calc likelihood for
kappa -- that kappa parameter for noise
"""
# generate expected counts
expected_counts = predict_counts(fitness,observations,
total_reads,sum_term)
number_of_timepoints = len(observations)
likelihood_log_seq_lineage = np.zeros(number_of_timepoints, dtype=float)
read_threshold = 20
read_threshold_2 = 10
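    # Three likelihood regimes: when the previous-timepoint count is >= 20
    # reads, use the kappa-parameterized noise density; otherwise fall back
    # to a Poisson log-likelihood, using Stirling's approximation of log(n!)
    # when the current count is >= 10 and an exact factorial when it is smaller.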
positions_to_consider = np.where(observations[:-1] >= read_threshold)[0]
likelihood_log_seq_lineage[positions_to_consider + 1] = (
0.25 * np.log(expected_counts[positions_to_consider + 1])
- 0.5 * np.log(4 * np.pi * kappa)
- 0.75 * np.log(observations[positions_to_consider + 1])
- ( np.sqrt(observations[positions_to_consider + 1]) -
np.sqrt(expected_counts[positions_to_consider + 1])
) ** 2 / kappa
)
pos = np.where(observations[:-1] < read_threshold)[0]
pos_p1 = np.where(
observations[pos + 1] >= read_threshold_2
)[0]
pos_p2 = np.where(
observations[pos + 1] < read_threshold_2
)[0]
pos2 = pos[pos_p1]
pos3 = pos[pos_p2]
likelihood_log_seq_lineage[pos2 + 1] = (
np.multiply(
observations[pos2 + 1],
np.log(expected_counts[pos2 + 1])
) -
expected_counts[pos2 + 1] -
np.multiply(
observations[pos2 + 1],
np.log(observations[pos2 + 1])
) +
observations[pos2 + 1] -
0.5 * np.log(2 * np.pi *
observations[pos2 + 1])
)
factorial_tempt = [
float(math.factorial(i)) for i in
observations[pos3 + 1].astype(int)
]
likelihood_log_seq_lineage[pos3 + 1] = (
np.multiply(
observations[pos3 + 1],
np.log(expected_counts[pos3 + 1])
) -
expected_counts[pos3 + 1] -
np.log(factorial_tempt)
)
likelihood_log_lineage = np.sum(likelihood_log_seq_lineage)
return -likelihood_log_lineage
##################################################
def fun_x_est_lineage(i,tolerance):
global x0_global
global read_num_measure_global
global kappa_global
global read_depth_seq_global
global t_seq_global
global seq_num_global
global sum_term_global
global fitness_type_global
# x0_global is the currently worked on fitnesses
optimization_result = minimize(
fun=calculate_likelihood_of_fitness_vector,
x0=x0_global[i],
args=(
read_num_measure_global[i,:] ,
kappa_global,
read_depth_seq_global,
sum_term_global
),
method='BFGS',
options={'gtol':tolerance}
)
return optimization_result['x'][0]
##################################################
def main():
"""
"""
global x0_global
global read_num_measure_global
global read_num_measure_original
global kappa_global
global read_depth_seq_global
global t_seq_global
global seq_num_global
global sum_term_global
global fitness_type_global
parser = argparse.ArgumentParser(description='Estimate fitness of each genotype in a competitive pooled growth experiment',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-p', '--processes', type=int, default=1,
help='Number of processes to launch with multiprocessing')
parser.add_argument('--max-chunk-size', type=int, default=None,
help=('The max chunksize for parallelism, automatically set to '
'a roughly even split of lineages per chunk. Tune if you want to.')
)
parser.add_argument('-i', '--input', type=str, required=True,
help=('The path to a header-less CSV file, where each column '
'contains the count of each lineage (each row is a lineage) '
'at that sample/timepoint.')
)
parser.add_argument('--t-seq', '-t', nargs='*', required=True, type=float,
        help=('The estimated "generations" of growth elapsed at each sampled '
        'timepoint. This is useful for scaling the fitness or using '
        'unevenly spaced timepoints')
)
    parser.add_argument('-o', '--output', type=str, default=sys.stdout,
        help='The path (default STDOUT) to which to write the fitnesses, '
        'errors, likelihoods, and estimated reads. CSV format.')
parser.add_argument('--output-mean-fitness','-om', type=str,
default=None,
        help='The path (default None) to which to write the mean fitnesses '
        'calculated per sample.')
parser.add_argument('--min-iter', type=int, default=10,
help='Force FitSeq to run at least this many iterations in the '
'optimization')
parser.add_argument('--max-iter-num', '-m', type=int, default=100,
help=('Maximum number of iterations in the optimization '
'(of optimizing population average fitness)')
)
parser.add_argument('--minimum-step-size', '--min-step',
type=float, default=0.0001,
        help=('Set a minimum fractional step size for improvement; if below '
        'this, the optimization iterations terminate.')
)
parser.add_argument('--fitness-type', '-f', type=str, default='m',
choices = ['m', 'w'],
help=('SORRY no choice, only Malthusian fitness (m) works. '
        'But in later versions, '
'maybe we\'ll re-implement Wrightian fitness (w).')
)
parser.add_argument('-k', '--kappa', type=float, default=2.5,
help=('a noise parameter that characterizes the total '
        'noise introduced. For estimation, see doi:10.1038/nature14279')
)
parser.add_argument('--gtol', type=float, default=1e-5,
        help='The gradient tolerance parameter for the BFGS optimization, '
        'default (from SciPy) is 1e-5')
parser.add_argument('-g', '--regression-num', type=int, default=2,
help='number of points used in the initial '
'linear-regression-based fitness estimate')
args = parser.parse_args()
read_num_measure_global = np.array(pd.read_csv(args.input, header=None), dtype=float)
t_seq_global = np.array(args.t_seq, dtype=float)
max_iter_num = args.max_iter_num
min_iter = args.min_iter
kappa_global = args.kappa
regression_num = args.regression_num
fitness_type_global = args.fitness_type
minimum_step_size = args.minimum_step_size
lineages_num, seq_num_global = read_num_measure_global.shape
max_chunk_size = args.max_chunk_size
if max_chunk_size is None:
max_chunk_size = int(lineages_num/args.processes)+1
else:
max_chunk_size = int(np.minimum(max_chunk_size,lineages_num))
    if fitness_type_global == 'w':
        sys.exit("Wrightian fitness does not yet work in this version")
elif fitness_type_global == 'm':
print('Estimating Malthusian fitness for %d lineages...' %lineages_num,file=sys.stderr)
read_num_measure_original = read_num_measure_global
read_num_measure_global[read_num_measure_global < 1] = 0.1
# This is where the minimum read is set to 0.1, so that later
# log values do not error out
read_depth_seq_global = np.sum(read_num_measure_original, axis=0)
read_freq_seq = read_num_measure_global / read_depth_seq_global
if fitness_type_global == 'm':
if regression_num == 2:
x0_tempt = np.true_divide(read_freq_seq[:, 1] - read_freq_seq[:, 0], t_seq_global[1] - t_seq_global[0])
else:
            x0_tempt = [regression_output.slope for i in range(lineages_num) for regression_output in
                [linregress(t_seq_global[0:regression_num], np.log(read_freq_seq[i, 0:regression_num]))]]
x0 = x0_tempt #- np.dot(read_freq_seq[:, 0], x0_tempt) # normalization
elif fitness_type_global == 'w':
if regression_num == 2:
x0_tempt = np.power(np.true_divide(read_freq_seq[:, 1], read_freq_seq[:, 0]), 1
/ (t_seq_global[1] - t_seq_global[0])) - 1
else:
x0_tempt = np.exp([regression_output.slope for i in range(lineages_num) for regression_output in
[linregress(t_seq_global[0:regression_num], np.log(read_freq_seq[i, 0:regression_num]))]]) - 1
x0 = (1 + x0_tempt) / (1 + np.dot(read_freq_seq[:, 0], x0_tempt)) - 1 # normalization
x0_global = x0
print(r'-- Estimating initial guesses of global parameters ',file=sys.stderr)
parameter_output = estimate_parameters(x0_global,args.processes,
read_depth_seq_global,
max_chunk_size
)
x_mean_global = parameter_output['Estimated_Mean_Fitness']
sum_term_global = parameter_output['Sum_Term']
likelihood_log = parameter_output['Likelihood_Log']
likelihood_log_sum_iter = [np.sum(likelihood_log)]
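    # Alternating optimization: with the current mean-fitness sum terms held
    # fixed, re-fit each lineage's fitness by BFGS, then re-estimate the
    # global mean fitness and sum terms from the new per-lineage fitnesses;
    # stop once the relative log-likelihood improvement drops below
    # minimum_step_size (after at least min_iter iterations).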
for k_iter in range(max_iter_num):
if fitness_type_global == 'w':
x0_global[x0_global <= -1] = -1 + 1e-7
print(r'-- Optimizing fitness for every lineage with global parms',file=sys.stderr)
if args.processes > 1:
with Pool(args.processes) as pool_obj:
x0_global = np.array(
pool_obj.starmap(
fun_x_est_lineage,
tqdm([ (i,args.gtol)
for i in range(lineages_num) ]),
chunksize=np.minimum(
max_chunk_size,
int(len(x0_global)/args.processes)+1
)
)
)
else:
x0_global = np.array(
list(
itertools.starmap(fun_x_est_lineage,
tqdm([ (i,args.gtol)
for i in range(lineages_num) ])
)
)
)
print(r'-- Re-estimating global parms',file=sys.stderr)
parameter_output = estimate_parameters(x0_global,args.processes,
read_depth_seq_global,
max_chunk_size
)
x_mean_global = parameter_output['Estimated_Mean_Fitness']
sum_term_global = parameter_output['Sum_Term']
likelihood_log = parameter_output['Likelihood_Log']
print(r'-- Average fitnesses ', x_mean_global,file=sys.stderr)
likelihood_log_sum_iter.append(np.sum(likelihood_log))
print(r'-- log likelihood after iteration %i: %.4f'
%(k_iter+1, likelihood_log_sum_iter[-1]) ,
file=sys.stderr)
if ( k_iter >= min_iter and
(likelihood_log_sum_iter[-2] / likelihood_log_sum_iter[-1]) - 1 <= minimum_step_size
):
break
print(r'-- Calculating second derivatives around final fitness estimates',file=sys.stderr)
# estimation error
if args.processes > 1:
with Pool(args.processes) as pool_obj:
second_derivative = pool_obj.starmap(
derivative,
tqdm( [ ( calculate_likelihood_of_fitness_vector,
x0_global[i], 1e-6, 2,
( read_num_measure_global[i,:],
kappa_global,
read_depth_seq_global,
sum_term_global )
) for i in range(lineages_num) ] ) ,
chunksize=np.minimum(
max_chunk_size,
int(lineages_num/args.processes)+1
)
)
else:
second_derivative = list(itertools.starmap(
derivative,
tqdm( [ ( calculate_likelihood_of_fitness_vector,
x0_global[i], 1e-6, 2,
( read_num_measure_global[i,:],
kappa_global,
read_depth_seq_global,
sum_term_global )
) for i in range(lineages_num)
]
)
)
)
    estimation_error = np.array(
        [ 1/np.sqrt(i)
            if isinstance(i, np.double) and i > 0
            else np.nan
            for i in second_derivative
        ]
    )
print(r'-- Writing outputs',file=sys.stderr)
read_num_theory = parameter_output['Estimated_Read_Number']
if fitness_type_global == 'm':
x_opt = x0_global #- np.dot(read_num_theory[:, 0], x0_global) / np.sum(read_num_theory[:, 0]) # normalization
elif fitness_type_global == 'w':
x_opt = (1 + x0_global) / (1 + np.dot(read_num_theory[:, 0], x0_global)) - 1 # normalization
fitseq_output = {'Estimated_Fitness': x_opt,
'Estimation_Error': estimation_error,
'Likelihood_Log': likelihood_log}
for k in range(seq_num_global):
fitseq_output['Estimated_Read_Number_t%d' % k] = read_num_theory[:, k].astype(float)
pd.DataFrame(fitseq_output).to_csv(args.output,index=False)
pd.DataFrame(
{'Samples':list(range(seq_num_global)),
'Estimate_Mean_Fitness':x_mean_global}
).to_csv(args.output_mean_fitness,index=False)
print('Finished!',file=sys.stderr)
if __name__ == "__main__":
main()
| 2.515625
| 3
|
data processing/deleteJudge.py
|
xiameng552180/SeqDynamics_V0
| 0
|
12775765
|
<filename>data processing/deleteJudge.py<gh_stars>0
import MongoProcessor
connection = MongoProcessor.Processor()
collection = connection.loadCollections('mega_authors')
collection2 = connection.loadCollections('sequences')
"""
collection.delete_many({"author":{"$regex": "judge"}})
collection.delete_many({"author":{"$regex": "nlgx"}})
"""
"""
collection2.delete_many({"author":{"$regex": "judge"}})
collection2.delete_many({"author":{"$regex": "nlgx"}})
"""
delete = []
with open("peoplelist.txt", "r") as file:
for line in file:
#print(line)
delete.append(line)
result = [x.strip() for x in delete[0].split(',')]
for name in result:
collection.delete_one({"author":{"$regex": name}})
collection2.delete_one({"author":{"$regex": name}})
| 2.953125
| 3
|
python-package/arboretum/core.py
|
sh1ng/arboretum
| 67
|
12775766
|
<filename>python-package/arboretum/core.py
# coding: utf-8
# pylint: disable=too-many-arguments, too-many-branches
"""Core Arboretum Library."""
from __future__ import absolute_import
import os
import ctypes
from ctypes import *
import numpy as np
import scipy.sparse
import json
from sklearn.metrics import mean_squared_error
from sklearn.metrics import roc_auc_score
class ArboretumError(Exception):
pass
def _load_lib():
curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
lib_path = os.path.join(curr_path, 'libarboretum.so')
lib = ctypes.cdll.LoadLibrary(lib_path)
lib.ACreateFromDenseMatrix.restype = ctypes.c_char_p
lib.ASetY.restype = ctypes.c_char_p
lib.AInitGarden.restype = ctypes.c_char_p
lib.AGrowTree.restype = ctypes.c_char_p
lib.APredict.restype = ctypes.c_char_p
lib.AFreeDMatrix.restype = ctypes.c_char_p
lib.AFreeGarden.restype = ctypes.c_char_p
lib.AAppendLastTree.restype = ctypes.c_char_p
lib.AGetY.restype = ctypes.c_char_p
lib.ADeleteArray.restype = ctypes.c_char_p
lib.ASetLabel.restype = ctypes.c_char_p
lib.ASetWeights.restype = ctypes.c_char_p
lib.ADumpModel.restype = ctypes.c_char_p
lib.ADumpModel.argtypes = [POINTER(c_char_p), c_void_p]
lib.ALoadModel.restype = ctypes.c_char_p
return lib
_LIB = _load_lib()
def _call_and_throw_if_error(ret):
if ret is not None:
raise ArboretumError(ValueError(ret))
class DMatrix(object):
def __init__(self, data, data_category=None, y=None, labels=None, weights=None, missing=0.0):
self.labels_count = 1
self.rows = data.shape[0]
self.columns = data.shape[1]
self._init_from_npy2d(data, missing, category=data_category)
if y is not None and labels is not None:
raise ValueError(
'y and labels both are not None. Specify labels only for multi label classification')
if y is not None:
assert data.shape[0] == len(y)
self._init_y(y)
elif labels is not None:
self.labels_count = np.max(labels) + 1
assert data.shape[0] == len(labels)
self._init_labels(labels)
if weights is not None:
assert weights.shape[0] == self.rows
assert weights.size == self.rows
self._set_weight(weights)
def __del__(self):
_call_and_throw_if_error(_LIB.AFreeDMatrix(self.handle))
def _set_weight(self, weights):
data = np.array(weights.reshape(self.rows), dtype=np.float32)
_call_and_throw_if_error(_LIB.ASetWeights(self.handle,
data.ctypes.data_as(ctypes.POINTER(ctypes.c_float))))
def _init_from_npy2d(self, mat, missing, category=None):
if len(mat.shape) != 2:
raise ValueError('Input numpy.ndarray must be 2 dimensional')
if category is not None and category.dtype not in [np.uint8, np.uint16, np.uint32, np.int8, np.int16, np.int32, np.int]:
            raise ValueError("Category's type must be int like")
data = np.array(mat.reshape(mat.size), dtype=np.float32)
self.handle = ctypes.c_void_p()
if category is None:
data_category = None
columns = 0
else:
columns = category.shape[1]
data_category = np.array(category.reshape(
category.size), dtype=np.uint32)
_call_and_throw_if_error(_LIB.ACreateFromDenseMatrix(data.ctypes.data_as(ctypes.POINTER(ctypes.c_float)),
None if data_category is None else data_category.ctypes.data_as(
ctypes.POINTER(ctypes.c_uint)),
ctypes.c_int(
mat.shape[0]),
ctypes.c_int(
mat.shape[1]),
ctypes.c_int(
columns),
ctypes.c_float(
missing),
ctypes.byref(self.handle)))
def _init_y(self, y):
data = np.array(y.reshape(self.rows), dtype=np.float32)
_call_and_throw_if_error(_LIB.ASetY(self.handle,
data.ctypes.data_as(ctypes.POINTER(ctypes.c_float))))
def _init_labels(self, labels):
data = np.array(labels.reshape(self.rows), dtype=np.uint8)
_call_and_throw_if_error(_LIB.ASetLabel(self.handle,
data.ctypes.data_as(ctypes.POINTER(ctypes.c_ubyte))))
class Garden(object):
"""Low level object to work with arboretum
"""
def __init__(self, config, data=None):
"""Initialize arboretum
Parameters
----------
config : str
Configuration as a json.
data : DMatrix, optional
Data used for training, by default None
"""
self.config = config
self.data = data
self._init = False
if 'labels_count' in config['tree']:
self.labels_count = config['tree']['labels_count']
else:
            self.labels_count = 1
self.config_str = json.dumps(config)
def __del__(self):
_call_and_throw_if_error(_LIB.AFreeGarden(self.handle))
if hasattr(self, 'data'):
del self.data
def load(self, json_model_str):
"""Load model from json
Parameters
----------
json_model_str : str
Json representation
"""
json_model = json.loads(json_model_str)
self.handle = ctypes.c_void_p()
_call_and_throw_if_error(_LIB.AInitGarden(ctypes.c_char_p(self.config_str.encode('UTF-8')),
ctypes.byref(self.handle)))
self._init = True
_call_and_throw_if_error(_LIB.ALoadModel(
c_char_p(json_model_str.encode('UTF-8')), self.handle))
def grow_tree(self, grad=None):
"""Grows single tree
Parameters
----------
grad : numpy array, optional
Gradient(not supported yet), by default None
"""
if not self._init:
self.handle = ctypes.c_void_p()
_call_and_throw_if_error(_LIB.AInitGarden(ctypes.c_char_p(self.config_str.encode('UTF-8')),
ctypes.byref(self.handle)))
self._init = True
        if grad is not None:
assert len(grad) == self.data.rows
data = np.array(grad.reshape(self.data.rows), dtype=np.float32)
_call_and_throw_if_error(_LIB.AGrowTree(self.handle,
self.data.handle,
data.ctypes.data_as(ctypes.POINTER(ctypes.c_float))))
else:
_call_and_throw_if_error(_LIB.AGrowTree(self.handle,
self.data.handle,
ctypes.c_void_p(grad)))
def append_last_tree(self, data):
"""Appends last tree for ``data`` and updated prediction stored in Y.
Parameters
----------
data : DMatrix
Data to be used to propagate through the last tree.
"""
_call_and_throw_if_error(_LIB.AAppendLastTree(self.handle,
data.handle))
def get_y(self, data):
"""Return prediction Y previously computed with calling ``append_last_tree`` multiple times.
Parameters
----------
data : DMatrix
data input
Returns
-------
numpy array
y
Raises
------
RuntimeError
            If the native library does not return a float pointer.
"""
length = int(data.rows)
preds = ctypes.POINTER(ctypes.c_float)()
_call_and_throw_if_error(_LIB.AGetY(self.handle,
data.handle,
ctypes.byref(preds)))
if not isinstance(preds, ctypes.POINTER(ctypes.c_float)):
raise RuntimeError('expected float pointer')
if self.labels_count == 1:
res = np.copy(np.ctypeslib.as_array(preds, shape=(length,)))
else:
res = np.copy(np.ctypeslib.as_array(
preds, shape=(length, self.labels_count)))
_call_and_throw_if_error(_LIB.ADeleteArray(preds))
return res
def predict(self, data, n_rounds=-1):
"""Predict
Parameters
----------
data : DMatrix
Data input
n_rounds : int, optional
            Number of boosting rounds (trees) to use for prediction; -1 uses all, by default -1
Returns
-------
numpy array
prediction
Raises
------
RuntimeError
            If the native library does not return a float pointer.
"""
length = int(data.rows)
preds = ctypes.POINTER(ctypes.c_float)()
_call_and_throw_if_error(_LIB.APredict(self.handle,
data.handle,
ctypes.byref(preds), n_rounds))
if not isinstance(preds, ctypes.POINTER(ctypes.c_float)):
raise RuntimeError('expected float pointer')
if self.labels_count == 1:
res = np.copy(np.ctypeslib.as_array(preds, shape=(length,)))
else:
res = np.copy(np.ctypeslib.as_array(
preds, shape=(length, self.labels_count)))
_call_and_throw_if_error(_LIB.ADeleteArray(preds))
return res
def dump(self):
"""Dumps the model as a json
Returns
-------
str
json
"""
json_p = c_char_p()
_call_and_throw_if_error(_LIB.ADumpModel(
ctypes.byref(json_p), self.handle))
return json_p.value.decode('utf-8')
def train(config, data, num_round):
"""Train model according to the parameters
Parameters
----------
    config : dict
        Configuration as a dict (see ``Garden``).
data : DMatrix
Data to be trained on.
num_round : int
Number of boosting rounds
Returns
-------
Garden
The trained model.
"""
model = Garden(config)
model.data = data
model.labels_count = data.labels_count
for _ in range(num_round):
model.grow_tree(None)
return model
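# Illustrative sketch (the config below is a minimal placeholder; a full config
# mirrors the one built by ArboretumRegression further down in this module):
#
#     import numpy as np
#     X = np.random.rand(100, 5).astype(np.float32)
#     y = np.random.rand(100).astype(np.float32)
#     data = DMatrix(X, y=y)
#     config = {'objective': 0, 'method': 1,
#               'internals': {'double_precision': False, 'seed': 0},
#               'verbose': {'gpu': False, 'booster': False, 'data': False},
#               'tree': {'eta': 0.1, 'max_depth': 6}}
#     model = train(config, data, num_round=10)
#     json_str = model.dump()      # serialize the trained model
#     restored = load(json_str)    # round-trip it back into a Garden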
def load(json_model_str):
"""load model from json
Parameters
----------
json_model_str : str
json model representation
Returns
-------
    Garden
        The loaded model.
"""
json_model = json.loads(json_model_str)
config = json_model['configuration']
model = Garden(config)
model.load(json_model_str)
return model
class ArboretumRegression(object):
"""Scikit-learn API like implementation for regression.
"""
def __init__(self, max_depth=6, learning_rate=0.1, n_estimators=100,
verbosity=1,
gamma_absolute=0.0, gamma_relative=0.0,
min_child_weight=1.0, min_leaf_size=0, max_leaf_weight=0.0, colsample_bytree=0.8,
colsample_bylevel=1.0, l1=1.0, l2=1.0,
scale_pos_weight=1.0, initial_y=0.5, seed=0,
double_precision=False, method='hist', hist_size=255, **kwargs):
"""[summary]
Parameters
----------
max_depth : int, optional
Maximum tree depth, by default 6
learning_rate : float, optional
Learning rate, by default 0.1
n_estimators : int, optional
Number of boosted trees to fit, by default 100
verbosity : int, optional
verbosity, by default 1
        gamma_absolute : float, optional
            Minimum absolute gain required to make a further partition on a leaf, by default 0.0
        gamma_relative : float, optional
            Minimum relative (split vs. constant) gain required to make a further partition on a leaf, by default 0.0
        min_child_weight : float, optional
            Minimum sum of hessian to allow a split, by default 1.0
        min_leaf_size : int, optional
            Minimum number of samples in a leaf, by default 0
        max_leaf_weight : float, optional
            Maximum weight of a leaf (values less than ``-max_leaf_weight`` and greater than ``max_leaf_weight``
            will be truncated to ``-max_leaf_weight`` and ``max_leaf_weight`` respectively). A zero value is ignored, by default 0.0
        colsample_bytree : float, optional
            Subsample ratio of columns when constructing each tree, by default 0.8
        colsample_bylevel : float, optional
            Subsample ratio of columns when constructing each tree's level, by default 1.0
        l1 : float, optional
            L1 (alpha) regularization, by default 1.0
        l2 : float, optional
            L2 (lambda) regularization, by default 1.0
        scale_pos_weight : float, optional
            Scaling ratio for the positive class, by default 1.0
        initial_y : float, optional
            Initial value to start from, by default 0.5
        seed : int, optional
            Seed for the random number generator, by default 0
        double_precision : bool, optional
            Use double precision for summation. Makes results run-to-run reproducible,
            but reduces performance a bit (~10%), by default False
        method : str, optional
            Algorithm to grow trees: 'exact' or 'hist', by default 'hist'
        hist_size : int, optional
            Histogram size, only used when ``method`` is 'hist', by default 255
"""
config = {'objective': 0,
'method': 1 if method == 'hist' else 0,
'internals':
{
'double_precision': double_precision,
'compute_overlap': 2,
'use_hist_subtraction_trick': True,
'dynamic_parallelism': True,
'upload_features': True,
'hist_size': hist_size,
'seed': seed,
},
'verbose':
{
                      'gpu': verbosity > 0,
                      'booster': verbosity > 0,
                      'data': verbosity > 0,
},
'tree':
{
'eta': learning_rate,
'max_depth': max_depth,
'gamma_absolute': gamma_absolute,
'gamma_relative': gamma_relative,
'min_child_weight': min_child_weight,
'min_leaf_size': min_leaf_size,
'colsample_bytree': colsample_bytree,
'colsample_bylevel': colsample_bylevel,
'max_leaf_weight': max_leaf_weight,
'lambda': l2,
'alpha': l1
}}
self._config = config
self.n_estimators = n_estimators
self._garden = Garden(self._config)
self.verbosity = verbosity
def fit(self, X, y=None, eval_set=None, eval_labels=None, early_stopping_rounds=5,
eval_metric=mean_squared_error):
"""Fit gradient boosting model.
Parameters
----------
X : DMatrix or numpy array
Data to fit
y : numpy array, optional
labels, by default None
eval_set : DMatrix or numpy_array, optional
Evaluation set data used for early stopping., by default None
eval_labels : numpy array, optional
Evaluation set labels, by default None
early_stopping_rounds : int, optional
Stop fitting process if there's no improvement for ``eval_set`` during
``early_stopping_rounds`` rounds., by default 5
Returns
-------
        self
            The fitted model.
        Raises
        ------
        ArgumentError
            If ``X`` (or ``eval_set``) is neither a DMatrix nor a numpy array.
"""
data = None
if isinstance(X, DMatrix):
data = X
elif isinstance(X, np.ndarray) and isinstance(y, np.ndarray):
data = DMatrix(X, y=y)
else:
raise ArgumentError("Only DMatrix and numpy array are supported")
self._garden.data = data
eval_data = None
if eval_set is not None:
if isinstance(eval_set, DMatrix):
                eval_data = eval_set
elif isinstance(eval_set, np.ndarray):
eval_data = DMatrix(eval_set)
else:
raise ArgumentError(
"Only DMatrix and numpy array are supported")
self.best_round = -1
self.best_score = np.inf
for i in range(self.n_estimators):
self._garden.grow_tree()
if eval_data is not None:
self._garden.append_last_tree(eval_data)
pred = self._garden.get_y(eval_data)
score = eval_metric(eval_labels, pred)
if score < self.best_score:
print(
"improved score {0} {1}->{2}".format(i, self.best_score, score))
self.best_score = score
self.best_round = i
if early_stopping_rounds + self.best_round < i:
print("early stopping at {0} score {1}, use 'best_round' and 'best_score' to get it".format(
self.best_round, self.best_score))
break
return self
def predict(self, X, n_rounds=-1):
"""Predict with ``X``.
Parameters
----------
X : DMatrix or numpy array
Data
n_rounds : int, optional
Number of trees to use, -1 - use all, by default -1
Returns
-------
numpy array
Prediction
Raises
------
ArgumentError
            If ``X`` is neither a DMatrix nor a numpy array.
"""
data = None
if isinstance(X, DMatrix):
data = X
elif isinstance(X, np.ndarray):
data = DMatrix(X)
else:
raise ArgumentError("Only DMatrix and numpy array are supported")
return self._garden.predict(data, n_rounds)
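# Illustrative sketch: fit on numpy arrays with a held-out evaluation set for
# early stopping (shapes and values below are placeholders):
#
#     import numpy as np
#     X_train = np.random.rand(1000, 10).astype(np.float32)
#     y_train = np.random.rand(1000).astype(np.float32)
#     X_val = np.random.rand(200, 10).astype(np.float32)
#     y_val = np.random.rand(200).astype(np.float32)
#     reg = ArboretumRegression(n_estimators=200)
#     reg.fit(X_train, y_train, eval_set=X_val, eval_labels=y_val,
#             early_stopping_rounds=10)
#     print(reg.best_round, reg.best_score)  # set when eval_set is provided
#     preds = reg.predict(X_val)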
class ArboretumClassifier(object):
"""Scikit-learn API like implementation for regression.
"""
def __init__(self, max_depth=6, learning_rate=0.1, n_estimators=100,
verbosity=1,
gamma_absolute=0.0, gamma_relative=0.0,
min_child_weight=1.0, min_leaf_size=0, max_leaf_weight=0.0, colsample_bytree=0.8,
colsample_bylevel=1.0, l1=1.0, l2=1.0,
scale_pos_weight=1.0, initial_y=0.5, seed=0,
double_precision=False, method='hist', hist_size=255, **kwargs):
"""[summary]
Parameters
----------
max_depth : int, optional
Maximum tree depth, by default 6
learning_rate : float, optional
Learning rate, by default 0.1
n_estimators : int, optional
Number of boosted trees to fit, by default 100
verbosity : int, optional
verbosity, by default 1
        gamma_absolute : float, optional
            Minimum absolute gain required to make a further partition on a leaf, by default 0.0
        gamma_relative : float, optional
            Minimum relative (split vs. constant) gain required to make a further partition on a leaf, by default 0.0
        min_child_weight : float, optional
            Minimum sum of hessian to allow a split, by default 1.0
        min_leaf_size : int, optional
            Minimum number of samples in a leaf, by default 0
        max_leaf_weight : float, optional
            Maximum weight of a leaf (values less than ``-max_leaf_weight`` and greater than ``max_leaf_weight``
            will be truncated to ``-max_leaf_weight`` and ``max_leaf_weight`` respectively). A zero value is ignored, by default 0.0
        colsample_bytree : float, optional
            Subsample ratio of columns when constructing each tree, by default 0.8
        colsample_bylevel : float, optional
            Subsample ratio of columns when constructing each tree's level, by default 1.0
        l1 : float, optional
            L1 (alpha) regularization, by default 1.0
        l2 : float, optional
            L2 (lambda) regularization, by default 1.0
        scale_pos_weight : float, optional
            Scaling ratio for the positive class, by default 1.0
        initial_y : float, optional
            Initial value to start from, by default 0.5
        seed : int, optional
            Seed for the random number generator, by default 0
        double_precision : bool, optional
            Use double precision for summation. Makes results run-to-run reproducible,
            but reduces performance a bit (~10%), by default False
        method : str, optional
            Algorithm to grow trees: 'exact' or 'hist', by default 'hist'
        hist_size : int, optional
            Histogram size, only used when ``method`` is 'hist', by default 255
"""
config = {'objective': 1,
'method': 1 if method == 'hist' else 0,
'internals':
{
'double_precision': double_precision,
'compute_overlap': 2,
'use_hist_subtraction_trick': True,
'dynamic_parallelism': True,
'upload_features': True,
'hist_size': hist_size,
'seed': seed,
},
'verbose':
{
                      'gpu': verbosity > 0,
                      'booster': verbosity > 0,
                      'data': verbosity > 0,
},
'tree':
{
'eta': learning_rate,
'max_depth': max_depth,
'gamma_absolute': gamma_absolute,
'gamma_relative': gamma_relative,
'min_child_weight': min_child_weight,
'min_leaf_size': min_leaf_size,
'colsample_bytree': colsample_bytree,
'colsample_bylevel': colsample_bylevel,
'max_leaf_weight': max_leaf_weight,
'lambda': l2,
'alpha': l1
}}
self._config = config
self.n_estimators = n_estimators
self._garden = Garden(self._config)
self.verbosity = verbosity
def fit(self, X, y=None, eval_set=None, eval_labels=None, early_stopping_rounds=5, eval_metric=lambda a, b: -roc_auc_score(a, b)):
"""Fit gradient boosting model.
Parameters
----------
X : DMatrix or numpy array
Data to fit
y : numpy array, optional
labels, by default None
eval_set : DMatrix or numpy_array, optional
Evaluation set data used for early stopping., by default None
eval_labels : numpy array, optional
Evaluation set labels, by default None
early_stopping_rounds : int, optional
Stop fitting process if there's no improvement for ``eval_set`` during
``early_stopping_rounds`` rounds., by default 5
Returns
-------
        self
            The fitted model.
        Raises
        ------
        ArgumentError
            If ``X`` (or ``eval_set``) is neither a DMatrix nor a numpy array.
"""
data = None
if isinstance(X, DMatrix):
data = X
elif isinstance(X, np.ndarray) and isinstance(y, np.ndarray):
data = DMatrix(X, y=y)
else:
raise ArgumentError("Only DMatrix and numpy array are supported")
self._garden.data = data
eval_data = None
if eval_set is not None:
if isinstance(eval_set, DMatrix):
                eval_data = eval_set
elif isinstance(eval_set, np.ndarray):
eval_data = DMatrix(eval_set)
else:
raise ArgumentError(
"Only DMatrix and numpy array are supported")
self.best_round = -1
self.best_score = np.inf
for i in range(self.n_estimators):
self._garden.grow_tree()
if eval_data is not None:
self._garden.append_last_tree(eval_data)
pred = self._garden.get_y(eval_data)
score = eval_metric(eval_labels, pred)
if score < self.best_score:
print(
"improved score {0} {1}->{2}".format(i, self.best_score, score))
self.best_score = score
self.best_round = i
if early_stopping_rounds + self.best_round < i:
print("early stopping at {0} score {1}, use 'best_round' and 'best_score' to get it".format(
self.best_round, self.best_score))
break
return self
def predict(self, X, n_rounds=-1):
"""Predict with ``X``.
Parameters
----------
X : DMatrix or numpy array
Data
n_rounds : int, optional
Number of trees to use, -1 - use all, by default -1
Returns
-------
numpy array
Positive class probability
Raises
------
ArgumentError
            If ``X`` is neither a DMatrix nor a numpy array.
"""
data = None
if isinstance(X, DMatrix):
data = X
elif isinstance(X, np.ndarray):
data = DMatrix(X)
else:
raise ArgumentError("Only DMatrix and numpy array are supported")
return self._garden.predict(data, n_rounds)
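# Illustrative sketch (reusing the placeholder arrays from the regression sketch
# above): with an eval_set the classifier stops early on negated ROC AUC, and
# predict() returns positive-class probabilities.
#
#     clf = ArboretumClassifier(n_estimators=300, learning_rate=0.05)
#     clf.fit(X_train, (y_train > 0.5).astype(np.float32),
#             eval_set=X_val, eval_labels=(y_val > 0.5).astype(np.float32),
#             early_stopping_rounds=10)
#     proba = clf.predict(X_val)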
| 1.90625
| 2
|
profiles/migrations/0031_populate_mail_id.py
|
Wassaf-Shahzad/micromasters
| 32
|
12775767
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-06 20:14
from __future__ import unicode_literals
import uuid
from django.db import migrations, models
def gen_uuid(apps, schema_editor):
"""Generate unique UUID values"""
Profile = apps.get_model('profiles', 'Profile')
for profile in Profile.objects.all():
profile.mail_id = uuid.uuid4()
profile.save()
class Migration(migrations.Migration):
dependencies = [
('profiles', '0030_create_mail_id'),
]
operations = [
migrations.RunPython(gen_uuid, reverse_code=migrations.RunPython.noop),
]
| 2.25
| 2
|
gesture_recognition/models.py
|
JoshBClemons/gesture_recognition
| 0
|
12775768
|
import binascii
import os
from flask import g
from werkzeug.security import generate_password_hash, check_password_hash
from . import db
from .utils import timestamp
class User(db.Model):
"""The User model
Attributes:
__tablename__ (str): Table name for user model in database
id (SQLAlchemy table column, int): User ID
created_at (SQLAlchemy table column, int): Timestamp at which user was first created
updated_at (SQLAlchemy table column, int): Timestamp of last time user profile was updated
last_seen_at (SQLAlchemy table column, int): Timestamp of last time user was active
username (SQLAlchemy table column, str): User username
password_hash (SQLAlchemy table column, str): User password hash string
token (SQLAlchemy table column, str): User authentication token
online (SQLAlchemy table column, bool): Boolean that captures whether user is online
num_logins (SQLAlchemy table column, int): Number of user logins to page
        frames (SQLAlchemy table relationship): Relationship property linking this user to its records in the "frames" table
"""
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.Integer, default=timestamp)
updated_at = db.Column(db.Integer, default=timestamp, onupdate=timestamp)
last_seen_at = db.Column(db.Integer, default=timestamp)
username = db.Column(db.String(32), nullable=False, unique=True)
password_hash = db.Column(db.String(256), nullable=False)
token = db.Column(db.String(64), nullable=True, unique=True)
online = db.Column(db.Boolean, default=False)
num_logins = db.Column(db.Integer, default=1)
frames = db.relationship('Frame', lazy='dynamic', backref='user')
@property
def password(self):
"""Returns attribute error if user password is not readable"""
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
"""Generates password hash string and authentication token from user password
Args:
password (str): User password
"""
self.password_hash = generate_password_hash(password)
self.token = None # if user is changing passwords, also revoke token
def verify_password(self, password):
"""Verify password matches stored password hash string
Args:
password (str): Inputted user password
Returns:
(bool): True if password matches password hash string
"""
return check_password_hash(self.password_hash, password)
def generate_token(self):
"""Creates a 64 character long randomly generated token
Returns:
self.token (str): Generated token
"""
self.token = binascii.hexlify(os.urandom(32)).decode('utf-8')
return self.token
def ping(self):
"""Marks the user as recently seen and online"""
self.last_seen_at = timestamp()
self.online = True
def new_login(self):
"""Increments number of times user has logged in."""
self.num_logins += 1
@staticmethod
def create(data):
"""Create a new user
Args:
data (dict): Dictionary containing user's username and password
Returns:
user (object): Newly created user
"""
user = User()
user.from_dict(data)
return user
def from_dict(self, data):
"""Import user data from a dictionary
Args:
data (dict): Dictionary containing user's username and password
"""
for field in ['username', 'password']:
try:
setattr(self, field, data[field])
except KeyError:
                print(f'Key {field} not valid.')
def to_dict(self):
"""Export user to a dictionary"""
return {
'username': self.username,
'online': self.online,
}
@staticmethod
def find_offline_users():
"""Find users that haven't been active and mark them as offline
Returns:
users (list): List of offline users
"""
users = User.query.filter(User.last_seen_at < timestamp() - 60, User.online == True).all()
for user in users:
user.online = False
db.session.add(user)
db.session.commit()
return users
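# Illustrative usage sketch (field values below are placeholders):
#
#     user = User.create({'username': 'alice', 'password': 'hunter2'})
#     assert user.verify_password('hunter2')
#     token = user.generate_token()  # 64-character hex token
#     user.ping()                    # mark as online / recently seen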
class Frame(db.Model):
"""The Frames model
Attributes:
        __tablename__ (str): Table name for frame model in database
instance (SQLAlchemy table column, str): Unique ID for processed frame
date (SQLAlchemy table column, datetime): Date that frame is processed
session_id (SQLAlchemy table column, int): User's login count
frame_count (SQLAlchemy table column, int): Frame number for user's current session
ip_address (SQLAlchemy table column, str): User's IP address
root_dir (SQLAlchemy table column, str): Root directory of user's image folder
raw_path (SQLAlchemy table column, str): Path for original image
processed_path (SQLAlchemy table column, str): Path for processed image
true_gest (SQLAlchemy table column, str): Ground-truth gesture inputted by user
pred_gest (SQLAlchemy table column, str): Predicted gesture
pred_conf (SQLAlchemy table column, float): Prediction confidence, percent
pred_time (SQLAlchemy table column, float): Prediction time, seconds
user_id (SQLAlchemy table column, int): User ID
"""
__tablename__ = 'frames'
instance = db.Column(db.String(), primary_key=True, nullable=False)
date = db.Column(db.DateTime(), nullable=False)
session_id = db.Column(db.Integer(), nullable=False)
frame_count = db.Column(db.Integer(), nullable=False)
ip_address = db.Column(db.String())
root_dir = db.Column(db.String(), nullable=False)
raw_path = db.Column(db.String(), nullable=False)
processed_path = db.Column(db.String())
true_gest = db.Column(db.String(), nullable=False)
pred_gest = db.Column(db.String(), nullable=False)
pred_conf = db.Column(db.Numeric(), nullable=False)
pred_time = db.Column(db.Numeric(), nullable=False)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
@staticmethod
def create(data, user=None):
"""Create a new frame. The user is obtained from the context unless provided explicitly.
Args:
data (dict): Dictionary containing values for some or all class attributes listed above
Returns:
frame (object): Newly generated frame
"""
frame = Frame(user=user or g.current_user)
frame.from_dict(data)
return frame
def from_dict(self, data):
"""Import frame data from a dictionary
Args:
data (dict): Dictionary containing values for some or all class attributes listed above
"""
for key in list(data.keys()):
try:
setattr(self, key, data[key])
except KeyError:
print(f'Key {key} not valid.')
| 2.9375
| 3
|
hmm_event_detection.py
|
Lab-Work/gpsresilience
| 21
|
12775769
|
# -*- coding: utf-8 -*-
"""
Created on Tue May 5 12:31:30 2015
@author: <NAME> (<EMAIL>)
"""
from hmmlearn.hmm import MultinomialHMM
from numpy import array
from tools import *
from measureOutliers import readGlobalPace, getExpectedPace
import csv
#Read the time-series outlier scores from file. Note that this file should be generated by measureOutliers.py
#Arguments:
#filename - the name of the file where outlier scores are saved
#Returns:
#a dictionary which maps (date, hour, weekday) to the calculated mahalanobis distance
def readOutlierScores(filename):
r = csv.reader(open(filename, "r"))
r.next()
mahal_timeseries={}
c_timeseries = {}
for (date,hour,weekday,mahal5,mahal10,mahal20,mahal50,c_val,gamma,tol,pca_dim,
num_guess,hi_pcs,global_pace,expected_pace,sd_pace) in r:
hour = int(hour)
mahal_timeseries[(date,hour,weekday)] = float(mahal10)
c_timeseries[(date,hour,weekday)] = int(c_val)
return mahal_timeseries, c_timeseries
def get_event_properties(start_id, end_id, dates_list, mahal_list,
global_pace_list, expected_pace_list):
duration = end_id - start_id
pace_devs = [global_pace_list[i] - expected_pace_list[i] for i in xrange(start_id, end_id)]
min_pace_dev = min(pace_devs) / 60
max_pace_dev = max(pace_devs) / 60
max_mahal = max(mahal_list[start_id:end_id])
(date, hour, weekday) = dates_list[start_id]
start_date = datetime.strptime(date, "%Y-%m-%d") + timedelta(hours = int(hour))
(date, hour, weekday) = dates_list[end_id - 1]
end_date = datetime.strptime(date, "%Y-%m-%d") + timedelta(hours = int(hour))
return [start_date, end_date, duration, max_mahal, max_pace_dev, min_pace_dev]
def get_all_events(states, dates_list, mahal_list, global_pace_list, expected_pace_list):
currently_in_event = False
events = []
for i in xrange(len(states)):
if(not currently_in_event and states[i]==1):
event_start_id = i
currently_in_event = True
if(currently_in_event and states[i] == 0):
event_end_id = i
currently_in_event = False
event_properties = get_event_properties(event_start_id, event_end_id,
dates_list, mahal_list, global_pace_list,
expected_pace_list)
events.append(event_properties)
return events
def augment_outlier_scores(in_file, out_file, predictions):
with open(in_file, 'r') as in_f:
with open(out_file, 'w') as out_f:
r = csv.reader(in_f)
w = csv.writer(out_f)
header = r.next() + ['state']
w.writerow(header)
i = 0
for line in r:
new_line = line + [predictions[i]]
w.writerow(new_line)
i += 1
# Set up the hidden markov model. We are modeling the non-event states as "0"
# and event states as "1"
# Transition matrix with heavy weight on the diagonals ensures that the model
# is likely to stick in the same state rather than rapidly switching. In other
# words, the predictions will be relatively "smooth"
DEFAULT_TRANS_MATRIX = array([[.999, .001],
[.001,.999]])
# Emission matrix - state 0 is likely to emit symbol 0, and vice versa
# In other words, events are likely to be outliers
DEFAULT_EMISSION_MATRIX = array([[.95, .05],
[.4, .6]])
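# Illustrative sketch of what these defaults imply: from the non-event state,
# staying put and emitting an outlier symbol has probability
#
#     .999 * .05 ~ .050
#
# while switching into the event state and emitting the same outlier has
#
#     .001 * .60 ~ .0006
#
# so isolated outliers are absorbed by state 0, and only a sustained run of
# outliers makes the decoder flip into the event state - which is what keeps
# the predictions "smooth".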
def detect_events_hmm(mahal_timeseries, c_timeseries, global_pace_timeseries,
threshold_quant=.95, trans_matrix = DEFAULT_TRANS_MATRIX,
emission_matrix=DEFAULT_EMISSION_MATRIX, initial_state=None):
#Sort the keys of the timeseries chronologically
sorted_dates = sorted(mahal_timeseries)
(expected_pace_timeseries, sd_pace_timeseries) = getExpectedPace(global_pace_timeseries)
#Generate the list of values of R(t)
mahal_list = [mahal_timeseries[d] for d in sorted_dates]
c_list = [c_timeseries[d] for d in sorted_dates]
global_pace_list = [global_pace_timeseries[d] for d in sorted_dates]
expected_pace_list = [expected_pace_timeseries[d] for d in sorted_dates]
#Use the quantile to determine the threshold
sorted_mahal = sorted(mahal_list)
threshold = getQuantile(sorted_mahal, threshold_quant)
# The symbols array contains "1" if there is an outlier, "0" if there is not
symbols = []
for i in range(len(mahal_list)):
if(mahal_list[i] > threshold or c_list[i]==1):
symbols.append(1)
else:
symbols.append(0)
# Actually set up the hmm
model = MultinomialHMM(n_components=2, transmat=trans_matrix, startprob=initial_state)
model.emissionprob_ = emission_matrix
# Make the predictions
lnl, predictions = model.decode(symbols)
events = get_all_events(predictions, sorted_dates, mahal_list, global_pace_list,
expected_pace_list)
# Sort events by duration, starting with the long events
events.sort(key = lambda x: x[2], reverse=True)
return events, predictions
def process_events(outlier_score_file, feature_dir, output_file):
mahal_timeseries, c_timeseries = readOutlierScores(outlier_score_file)
global_pace_timeseries = readGlobalPace(feature_dir)
events, predictions = detect_events_hmm(mahal_timeseries, c_timeseries, global_pace_timeseries)
new_scores_file = output_file.split(".")[0] + "_scores.csv"
augment_outlier_scores(outlier_score_file, new_scores_file, predictions)
with open(output_file, 'w') as f:
w = csv.writer(f)
w.writerow(['event', 'start_date', 'end_date', 'duration', 'max_mahal', 'max_pace_dev', 'min_pace_dev'])
for line in events:
w.writerow(['?'] + line)
def process_events_multiple_regions():
#k_vals = [7,8,9,10,15,20,25,30,35,40,45,50]
k_vals = range(7,51)
for k in k_vals:
score_file = 'results/coarse_features_imb20_k%d_RPCAtune_10000000pcs_5percmiss_robust_outlier_scores.csv' % k
#feature_dir = 'featuers_imb20_k%d' % k
feature_dir = '4year_features'
out_file = 'results/coarse_events_k%d.csv' % k
logMsg('Generating %s' % out_file)
process_events(score_file, feature_dir, out_file)
if __name__ == "__main__":
process_events_multiple_regions()
"""
process_events('results/coarse_features_imb20_k10_RPCAtune_10000000pcs_5percmiss_robust_outlier_scores.csv',
'4year_features', 'results/coarse_events.csv')
process_events('results/link_features_imb20_k10_RPCAtune_10000000pcs_5percmiss_robust_outlier_scores.csv',
'4year_features', 'results/fine_events.csv')
process_events('results/link_features_imb20_k10_PCA_10000000pcs_5percmiss_robust_outlier_scores.csv',
'4year_features', 'results/pca_fine_events.csv')
"""
| 2.78125
| 3
|
python/testData/paramInfo/InitializingDataclassHierarchy/a.py
|
Sajaki/intellij-community
| 2
|
12775770
|
from dataclasses import dataclass
@dataclass
class A1:
a: int
@dataclass
class B1(A1):
b: str
B1(<arg1>)
@dataclass(init=False)
class A2:
a: int
@dataclass
class B2(A2):
b: str
B2(<arg2>)
@dataclass
class A3:
a: int
@dataclass(init=False)
class B3(A3):
b: str
B3(<arg3>)
@dataclass(init=False)
class A4:
a: int
@dataclass(init=False)
class B4(A4):
b: str
B4(<arg4>)
| 3
| 3
|
trodesnetwork-0.0.9/trodesnetwork-0.0.9/trodesnetwork/trodes/trodes.py
|
JohnLauFoo/clc_packages_Yu
| 1
|
12775771
|
from trodesnetwork import socket
from enum import Enum, auto
__all__ = ['CurrentScaling', 'GlobalStimulationSettings', 'StimulationCommand',
'TrodesHardware', 'TrodesInfoRequester', 'TrodesAnnotationRequester',
'TrodesAcquisitionRequester', 'TrodesEventSubscriber',
'TrodesAcquisitionSubscriber', 'TrodesSourceStatusSubscriber']
class CurrentScaling(Enum):
max10nA = auto()
max20nA = auto()
max50nA = auto()
max100nA = auto()
max200nA = auto()
max500nA = auto()
max1uA = auto()
max2uA = auto()
max5uA = auto()
max10uA = auto()
class GlobalStimulationSettings:
def setVoltageScale(self, scaleValue):
self.scaleValue = scaleValue
class StimulationCommand:
def setBiphasicPulseShape(self, leadingPulseWidth_Samples,
leadingPulseAmplitude, secondPulseWidth_Samples,
secondPulseAmplitude, interPhaseDwell_Samples, pulsePeriod_Samples,
startDelay_Samples):
self.leadingPulseWidth_Samples = leadingPulseWidth_Samples
self.leadingPulseAmplitude = leadingPulseAmplitude
self.secondPulseWidth_Samples = secondPulseWidth_Samples
self.secondPulseAmplitude = secondPulseAmplitude
self.interPhaseDwell_Samples = interPhaseDwell_Samples
self.pulsePeriod_Samples = pulsePeriod_Samples
self.startDelay_Samples = startDelay_Samples
def setNumPulsesInTrain(self, numPulsesInTrain):
self.numPulsesInTrain = numPulsesInTrain
def setChannels(self, cathodeID, cathodeChannel, anodeID, anodeChannel):
self.cathodeChannel = cathodeChannel
self.anodeChannel = anodeChannel
self.cathodeNtrodeID = cathodeID
self.anodeNtrodeID = anodeID
def setGroup(self, group):
self.group = group
def setSlot(self, slot):
self.slot = slot
class TrodesHardware:
def __init__(self, *, server_address="tcp://127.0.0.1:49152"):
self.service = socket.ServiceConsumer(
'trodes.hardware', server_address=server_address)
def settle_command_triggered(self):
data = ['tag', 'HRSettle']
return self.service.request(data)
def __startstop(self, startstop, slotgroup, number):
data = [
'tag',
'HRStartStopCommand',
{'startstop': startstop, 'slotgroup': slotgroup, 'number': number}
]
return self.service.request(data)
def sendStimulationStartSlot(self, slot):
return self.__startstop('START', 'SLOT', slot)
def sendStimulationStartGroup(self, group):
return self.__startstop('START', 'GROUP', group)
def sendStimulationStopSlot(self, slot):
return self.__startstop('STOP', 'SLOT', slot)
def sendStimulationStopGroup(self, group):
return self.__startstop('STOP', 'GROUP', group)
def sendStimulationParams(self, params):
'''
Takes StimulationCommand params
'''
data = [
'tag',
'HRSet',
{
'_group': params.group,
'slot': params.slot,
'cathodeChannel': params.cathodeChannel,
'anodeChannel': params.anodeChannel,
'cathodeNtrodeID': params.cathodeNtrodeID,
'anodeNtrodeID': params.anodeNtrodeID,
'leadingPulseWidth_Samples': params.leadingPulseWidth_Samples,
'leadingPulseAmplitude': params.leadingPulseAmplitude,
'secondPulseWidth_Samples': params.secondPulseWidth_Samples,
'secondPulseAmplitude': params.secondPulseAmplitude,
'interPhaseDwell_Samples': params.interPhaseDwell_Samples,
'pulsePeriod_Samples': params.pulsePeriod_Samples,
'startDelay_Samples': params.startDelay_Samples,
'numPulsesInTrain': params.numPulsesInTrain
}
]
return self.service.request(data)
def sendClearStimulationParams(self, slot):
'''
clear any existing commands in the given slot
'''
data = [
'tag',
'HRClear',
{ 'number': slot }
]
return self.service.request(data)
def sendGlobalStimulationSettings(self, settings):
        def getScaleValue(scaleValue):
            # CurrentScaling member names match the wire strings exactly
            # ('max10nA' ... 'max10uA'), so the enum name is returned directly.
            if not isinstance(scaleValue, CurrentScaling):
                raise ValueError("unknown scaleValue enum")
            return scaleValue.name
data = [
'tag',
'HRSetGS',
{ 'scaleValue': getScaleValue(settings.scaleValue) }
]
return self.service.request(data)
def global_stimulation_command(self, resetSequencerCmd,
killStimulationCmd, clearDSPOffsetRemovalCmd,
enableStimulation):
data = [
'tag',
'HRSetGC',
{
'resetSequencerCmd': resetSequencerCmd,
'killStimulationCmd': killStimulationCmd,
'clearDSPOffsetRemovalCmd': clearDSPOffsetRemovalCmd,
'enableStimulation': enableStimulation,
}
]
return self.service.request(data)
def ecu_shortcut_message(self, fn):
data = [
'tag',
'HRSCTrig',
{ 'fn': fn }
]
return self.service.request(data)
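# Illustrative usage sketch (parameter values are placeholders, not hardware
# recommendations): configure a biphasic pulse train and trigger it on slot 1.
#
#     hardware = TrodesHardware()
#     settings = GlobalStimulationSettings()
#     settings.setVoltageScale(CurrentScaling.max1uA)
#     hardware.sendGlobalStimulationSettings(settings)
#
#     cmd = StimulationCommand()
#     cmd.setBiphasicPulseShape(200, 10, 200, -10, 10, 3000, 0)
#     cmd.setNumPulsesInTrain(10)
#     cmd.setChannels(1, 0, 1, 1)
#     cmd.setGroup(0)
#     cmd.setSlot(1)
#     hardware.sendStimulationParams(cmd)
#     hardware.sendStimulationStartSlot(1)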
class TrodesInfoRequester:
def __init__(self, *, server_address="tcp://127.0.0.1:49152"):
self.service = socket.ServiceConsumer(
'trodes.info', server_address=server_address)
def __request(self, item):
data = { 'request': item }
return self.service.request(data)
def request_time(self):
return self.__request('time')[2]['time']
def request_timerate(self):
return self.__request('timerate')[2]['timerate']
def request_config(self):
return self.__request('config')
class TrodesAnnotationRequester:
def __init__(self, *, server_address="tcp://127.0.0.1:49152"):
self.service = socket.ServiceConsumer(
'trodes.annotation', server_address=server_address)
def request_annotation(self, timestamp, sender, event):
data = {
'timestamp': timestamp,
'sender': sender,
'event': event
}
return self.service.request(data)
class TrodesAcquisitionRequester:
def __init__(self, *, server_address="tcp://127.0.0.1:49152"):
self.service = socket.ServiceConsumer(
'trodes.acquisition.service', server_address=server_address)
def __request(self, command, timestamp):
data = { 'command': command, 'timestamp': timestamp }
return self.service.request(data)
def request_play(self):
return self.__request('play', 0)
def request_pause(self):
return self.__request('pause', 0)
def request_stop(self):
return self.__request('stop', 0)
def request_seek(self, timestamp):
return self.__request('seek', timestamp)
class TrodesEventSubscriber:
def __init__(self, *, server_address="tcp://127.0.0.1:49152"):
self.subscriber = socket.SourceSubscriber(
'trodes.events', server_address=server_address)
def receive(self, *, noblock=False):
return self.subscriber.receive(noblock=noblock)
class TrodesAcquisitionSubscriber:
def __init__(self, *, server_address="tcp://127.0.0.1:49152"):
self.subscriber = socket.SourceSubscriber(
'trodes.acquisition', server_address=server_address)
def receive(self, *, noblock=False):
return self.subscriber.receive(noblock=noblock)
class TrodesSourceStatusSubscriber:
def __init__(self, *, server_address="tcp://127.0.0.1:49152"):
self.subscriber = socket.SourceSubscriber(
'trodes.source.pub', server_address=server_address)
def receive(self, *, noblock=False):
return self.subscriber.receive(noblock=noblock)
| 2.359375
| 2
|
working/roberta_pretrain.py
|
upura/commonlitreadabilityprize
| 9
|
12775772
|
import warnings
import pandas as pd
from transformers import (
AutoModelForMaskedLM,
AutoTokenizer,
DataCollatorForLanguageModeling,
LineByLineTextDataset,
Trainer,
TrainingArguments,
)
warnings.filterwarnings("ignore")
if __name__ == "__main__":
train_data = pd.read_csv("../input/commonlitreadabilityprize/train.csv")
test_data = pd.read_csv("../input/commonlitreadabilityprize/test.csv")
ext_data = pd.read_csv(
"../input/commonlit-external/dump_of_simple_english_wiki.csv"
)
data = pd.concat(
[train_data[["excerpt"]], test_data[["excerpt"]], ext_data[["excerpt"]]]
)
data["excerpt"] = data["excerpt"].apply(lambda x: x.replace("\n", ""))
text = "\n".join(data.excerpt.tolist())
with open("text.txt", "w") as f:
f.write(text)
model_name = "roberta-base"
model = AutoModelForMaskedLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
tokenizer.save_pretrained("./clrp_roberta_base")
train_dataset = LineByLineTextDataset(
tokenizer=tokenizer,
file_path="text.txt", # mention train text file here
block_size=256,
)
valid_dataset = LineByLineTextDataset(
tokenizer=tokenizer,
file_path="text.txt", # mention valid text file here
block_size=256,
)
data_collator = DataCollatorForLanguageModeling(
tokenizer=tokenizer, mlm=True, mlm_probability=0.15
)
training_args = TrainingArguments(
output_dir="./clrp_roberta_base_chk", # select model path for checkpoint
overwrite_output_dir=True,
num_train_epochs=5,
per_device_train_batch_size=16,
per_device_eval_batch_size=16,
evaluation_strategy="steps",
save_total_limit=2,
eval_steps=200,
metric_for_best_model="eval_loss",
greater_is_better=False,
load_best_model_at_end=True,
prediction_loss_only=True,
report_to="none",
)
trainer = Trainer(
model=model,
args=training_args,
data_collator=data_collator,
train_dataset=train_dataset,
eval_dataset=valid_dataset,
)
trainer.train()
trainer.save_model("./clrp_roberta_base")
| 2.53125
| 3
|
code/message/image_to_text_message.py
|
ITE-5th/skill-socket
| 1
|
12775773
|
from .image_message import ImageMessage
class ImageToTextMessage(ImageMessage):
pass
| 1.203125
| 1
|
python/trezorlib/transport/bridge.py
|
Kayuii/trezor-crypto
| 0
|
12775774
|
# This file is part of the Trezor project.
#
# Copyright (C) 2012-2018 SatoshiLabs and contributors
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3
# as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the License along with this library.
# If not, see <https://www.gnu.org/licenses/lgpl-3.0.html>.
import logging
import struct
from io import BytesIO
from typing import Any, Dict, Iterable, Optional
import requests
from .. import mapping, protobuf
from . import Transport, TransportException
LOG = logging.getLogger(__name__)
TREZORD_HOST = "http://127.0.0.1:21325"
TREZORD_ORIGIN_HEADER = {"Origin": "https://python.trezor.io"}
TREZORD_VERSION_MODERN = (2, 0, 25)
CONNECTION = requests.Session()
CONNECTION.headers.update(TREZORD_ORIGIN_HEADER)
def call_bridge(uri: str, data=None) -> requests.Response:
url = TREZORD_HOST + "/" + uri
r = CONNECTION.post(url, data=data)
if r.status_code != 200:
error_str = "trezord: {} failed with code {}: {}".format(
uri, r.status_code, r.json()["error"]
)
raise TransportException(error_str)
return r
def is_legacy_bridge() -> bool:
config = call_bridge("configure").json()
version_tuple = tuple(map(int, config["version"].split(".")))
return version_tuple < TREZORD_VERSION_MODERN
class BridgeHandle:
def __init__(self, transport: "BridgeTransport") -> None:
self.transport = transport
def read_buf(self) -> bytes:
raise NotImplementedError
def write_buf(self, buf: bytes) -> None:
raise NotImplementedError
class BridgeHandleModern(BridgeHandle):
def write_buf(self, buf: bytes) -> None:
self.transport._call("post", data=buf.hex())
def read_buf(self) -> bytes:
data = self.transport._call("read")
return bytes.fromhex(data.text)
class BridgeHandleLegacy(BridgeHandle):
def __init__(self, transport: "BridgeTransport") -> None:
super().__init__(transport)
self.request = None # type: Optional[str]
def write_buf(self, buf: bytes) -> None:
if self.request is not None:
raise TransportException("Can't write twice on legacy Bridge")
self.request = buf.hex()
def read_buf(self) -> bytes:
if self.request is None:
raise TransportException("Can't read without write on legacy Bridge")
try:
data = self.transport._call("call", data=self.request)
return bytes.fromhex(data.text)
finally:
self.request = None
class BridgeTransport(Transport):
"""
BridgeTransport implements transport through TREZOR Bridge (aka trezord).
"""
PATH_PREFIX = "bridge"
ENABLED = True
def __init__(
self, device: Dict[str, Any], legacy: bool, debug: bool = False
) -> None:
if legacy and debug:
raise TransportException("Debugging not supported on legacy Bridge")
self.device = device
self.session = None # type: Optional[str]
self.debug = debug
self.legacy = legacy
if legacy:
self.handle = BridgeHandleLegacy(self) # type: BridgeHandle
else:
self.handle = BridgeHandleModern(self)
def get_path(self) -> str:
return "%s:%s" % (self.PATH_PREFIX, self.device["path"])
def find_debug(self) -> "BridgeTransport":
if not self.device.get("debug"):
raise TransportException("Debug device not available")
return BridgeTransport(self.device, self.legacy, debug=True)
    def _call(self, action: str, data: Optional[str] = None) -> requests.Response:
session = self.session or "null"
uri = action + "/" + str(session)
if self.debug:
uri = "debug/" + uri
return call_bridge(uri, data=data)
@classmethod
def enumerate(cls) -> Iterable["BridgeTransport"]:
try:
legacy = is_legacy_bridge()
return [
BridgeTransport(dev, legacy) for dev in call_bridge("enumerate").json()
]
except Exception:
return []
def begin_session(self) -> None:
data = self._call("acquire/" + self.device["path"])
self.session = data.json()["session"]
def end_session(self) -> None:
if not self.session:
return
self._call("release")
self.session = None
def write(self, msg: protobuf.MessageType) -> None:
LOG.debug(
"sending message: {}".format(msg.__class__.__name__),
extra={"protobuf": msg},
)
buffer = BytesIO()
protobuf.dump_message(buffer, msg)
ser = buffer.getvalue()
header = struct.pack(">HL", mapping.get_type(msg), len(ser))
self.handle.write_buf(header + ser)
def read(self) -> protobuf.MessageType:
data = self.handle.read_buf()
headerlen = struct.calcsize(">HL")
msg_type, datalen = struct.unpack(">HL", data[:headerlen])
buffer = BytesIO(data[headerlen : headerlen + datalen])
msg = protobuf.load_message(buffer, mapping.get_class(msg_type))
LOG.debug(
"received message: {}".format(msg.__class__.__name__),
extra={"protobuf": msg},
)
return msg
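# Illustrative sketch: enumerate devices exposed by a running Bridge and open a
# session on the first one. Assumes trezord is listening on TREZORD_HOST.
#
#     devices = BridgeTransport.enumerate()
#     for transport in devices:
#         print(transport.get_path())
#     if devices:
#         transport = devices[0]
#         transport.begin_session()
#         ...  # exchange protobuf messages via transport.write() / transport.read()
#         transport.end_session()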
| 1.960938
| 2
|
goalboost/blueprints/auth/__init__.py
|
JohnLockwood/Goalboost
| 0
|
12775775
|
from flask.ext.login import LoginManager
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer, SignatureExpired, BadSignature
from goalboost.model.auth_models import Role, User
from flask.ext.security import Security, MongoEngineUserDatastore
from flask.ext.principal import Principal, Permission, RoleNeed
from goalboost.model import db
login_manager = LoginManager()
# Create a permission with a single Need, in this case a RoleNeed.
# See
#root_permission = Permission(RoleNeed('Root'))
#account_admin_permission = Permission(RoleNeed('Account Admin'))
#account_user_permission = Permission(RoleNeed('Account User'))
"""can_access_user_owned_resource
Given a principal such as the current user and a resource which must have a user field (such as a timer).
Return true if the user can access the resource, else false.
"""
def can_access_user_owned_resource(principal, resource):
role = principal.get_role()
if role == Role.ROOT:
return True
    elif role == Role.ACCOUNT_USER:
return principal.id == resource.user.id
elif role == Role.ACCOUNT_ADMIN:
return principal.account == resource.user.account
else:
return False
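# Illustrative usage sketch (assumes `current_user` and `timer` objects exist
# with the fields described above):
#
#     if can_access_user_owned_resource(current_user, timer):
#         ...  # serve the timer
#     else:
#         ...  # return 403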
def init_flask_security(app):
user_datastore = MongoEngineUserDatastore(db, User, Role)
security = Security(app, user_datastore)
# This step may not be necessary
app.security = security
@login_manager.user_loader
def load_user_by_id(id):
try:
return User.get(id)
except:
return None
# Work in progress, cf.
# http://blog.miguelgrinberg.com/post/restful-authentication-with-flask
# http://thecircuitnerd.com/flask-login-tokens/
# See also mongo_models.User.get_auth_token
# TODO Duplicate code of user.verify_auth_token. Consolidate!
@login_manager.token_loader
def verify_auth_token(token):
s = Serializer(app.config['SECRET_KEY'])
try:
data = s.loads(token)
except SignatureExpired:
return None # valid token, but expired
except BadSignature:
return None # invalid token
user = User.objects(id=data['id']).first() #.query.get(data['id'])
return user
| 2.25
| 2
|
dataset/convert_repository_json.py
|
DevashishX/AbstractClustering
| 0
|
12775776
|
import json
import numpy as np
import pandas as pd
from pprint import pprint
from preprocessing import AbstractPreprocessor, preproc
def simpleSplit(text):
return text.split()
# Reads json from the repo files. Every line is a valid json object, but the whole file is not.
def repo_read_json(filename, lemma = True):
with open(filename, "r") as fd:
line = fd.readline()
while line != "":
if line.find("bibo:abstract") != -1:
jsonobj = json.loads(line)
# print(jsonobj)
id = jsonobj["identifier"]
pretitle = jsonobj["bibo:shortTitle"]
title = preproc(pretitle, to_lemmatize=lemma)
abstract = jsonobj["bibo:abstract"]
# print(str(id) + "Type of id: " + str(type(id)))
# print(title + "Type of title: " + str(type(title)))
# print(abstract + "Type of abstract: " + str(type(abstract)))
print(id, pretitle, title, abstract, sep="\n")
line = fd.readline()
class AbstractExtracter():
    def __init__(self, filenames=None, preprocessor=simpleSplit):
        # NOTE: JsonCleaner calls preprocessor.preprocess(...), so the default
        # simpleSplit function only works if wrapped in an object exposing that
        # method; in practice an AbstractPreprocessor instance is passed in.
        self.filenames = filenames
        self.preprocessor = preprocessor
self.columns = ["id", "title", "abstract"]
self.df = pd.DataFrame(columns=self.columns)
self.tempdf = pd.DataFrame(columns=self.columns)
def JsonCleaner(self, filename, lemma = True):
dataarray = []
idarray = []
with open(filename, "r") as fd:
line = fd.readline()
while line != "":
if line.find("bibo:abstract") != -1:
jsonobj = json.loads(line)
id = jsonobj["identifier"]
if id not in idarray:
idarray.append(id)
# print(id)
title = jsonobj["bibo:shortTitle"]
preabstract = jsonobj["bibo:abstract"]
abstract = self.preprocessor.preprocess(preabstract, to_lemmatize=lemma)
data = {"id":id, "title":title, "abstract":abstract}
dataarray.append(data)
# print(str(id) + "Type of id: " + str(type(id)))
# print(title + "Type of title: " + str(type(title)))
# print(abstract + "Type of abstract: " + str(type(abstract)))
# print(id, pretitle, title, abstract, sep="\n")
line = fd.readline()
self.df = self.df.append(dataarray, ignore_index=True)
# print(type(self.df.iloc[0, 0]))
# print(type(self.df.iloc[0, 1]))
# print(type(self.df.iloc[0, 2]))
def FilesCleaner(self):
for filename in self.filenames:
self.JsonCleaner(filename)
# csvname = filename.rsplit(".")[0] + ".csv"
# self.df.to_csv(csvname, index=False)
csvname = "new_" + filename.rsplit(".")[0] + ".pkl"
# self.df.to_json(csvname, orient="split")
self.df.to_pickle(csvname)
# csvname = filename.rsplit(".")[0] + ".pkl"
# self.df.to_pickle(csvname)
pass
if __name__ == "__main__":
filenames = ["repository_metadata_9_2013-03-18.json"]
op = "repository_metadata_9_2013-03-18.csv"
pre = AbstractPreprocessor()
AbsExt = AbstractExtracter(filenames,pre)
AbsExt.FilesCleaner()
csvname = "new_" + filenames[0].rsplit(".")[0] + ".pkl"
# df = pd.read_json(csvname, orient="split")
df = pd.read_pickle(csvname)
print(type(df.iloc[0, 0]))
print(type(df.iloc[0, 1]))
print(type(df.iloc[0, 2]))
print(df.head())
pass
| 2.53125
| 3
|
venv/Lib/site-packages/eyed3/id3/apple.py
|
shadowstriker15/Online_Ripper
| 0
|
12775777
|
"""
Here lies Apple frames, all of which are non-standard. All of these would have
been standard user text frames by anyone not being a bastard, on purpose.
"""
from .frames import Frame, TextFrame
PCST_FID = b"PCST"
WFED_FID = b"WFED"
TKWD_FID = b"TKWD"
TDES_FID = b"TDES"
TGID_FID = b"TGID"
class PCST(Frame):
"""Indicates a podcast. The 4 bytes of data is undefined, and is typically all 0."""
def __init__(self, id=PCST_FID):
super(PCST, self).__init__(PCST_FID)
def render(self):
self.data = b"\x00" * 4
return super(PCST, self).render()
class TKWD(TextFrame):
"""Podcast keywords."""
def __init__(self, id=TKWD_FID):
super(TKWD, self).__init__(TKWD_FID)
class TDES(TextFrame):
"""Podcast description. One encoding byte followed by text per encoding."""
def __init__(self, id=TDES_FID):
super(TDES, self).__init__(TDES_FID)
class TGID(TextFrame):
"""Podcast URL of the audio file. This should be a W frame!"""
def __init__(self, id=TGID_FID):
super(TGID, self).__init__(TGID_FID)
class WFED(TextFrame):
"""Another podcast URL, the feed URL it is said."""
def __init__(self, id=WFED_FID, url=""):
super(WFED, self).__init__(WFED_FID, url)
| 2.75
| 3
|
greaze/forms.py
|
Cian747/awaards
| 0
|
12775778
|
from django import forms
from .models import Project,Rate,Profile,DESIGN_CHOICES,USABILITY_CHOICES,CONTENT_CHOICES
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class GreazeRegistrationForm(UserCreationForm):
class Meta:
model = User
        fields = ['first_name', 'last_name', 'email', 'username', 'password1', 'password2']
widgets = {
'first_name':forms.TextInput(attrs = {'class':'form-control names', 'placeholder':"First Name", 'label': 'First Name'}),
'last_name':forms.TextInput(attrs = {'class':'form-control names', 'placeholder':"Second Name", 'label': 'Second Name'}),
'email':forms.TextInput(attrs = {'class':'form-control names', 'placeholder':"Email Address", 'label': 'Email Address'}),
'username':forms.TextInput(attrs = {'class':'form-control names', 'placeholder':"Username", 'label': 'Username'}),
'password1':forms.TextInput(attrs = {'class':'form-control ','type':'password', 'placeholder':"Password", 'label': 'Password'}),
'password2':forms.TextInput(attrs = {'class':'form-control', 'placeholder':"Confirm Password", 'label': 'Confirm Password'}),
}
class PostProjectForm(forms.ModelForm):
class Meta:
model = Project
fields = ['title','image','description','link']
widgets = {
'title':forms.TextInput(attrs={'class':'form-control','placeholder':'Project Title...'}),
'image':forms.TextInput(attrs= {'class':'form-control ','placeholder':'In a word...','label':'Put a name'}),
'description':forms.Textarea(attrs = {'class':'form-control','placeholder':"Write here..",'label':"Caption"}),
'link':forms.URLInput(attrs={'class':'form-control'}),
}
class RateForm(forms.ModelForm):
design = forms.ChoiceField(choices=DESIGN_CHOICES,widget=forms.Select(),required=True)
usability = forms.ChoiceField(choices=USABILITY_CHOICES,widget=forms.Select(),required=True)
content = forms.ChoiceField(choices=CONTENT_CHOICES,widget=forms.Select(),required=True)
class Meta:
model = Rate
fields = ['design','usability','content']
# widgets = {
# 'design': forms.SelectMultiple(attrs={'class':'form-control','name':'design'}),
# 'usability': forms.SelectMultiple(attrs={'class':'form-control','placeholder':'Input value','name':'usability'}),
# 'content': forms.SelectMultiple(attrs={'class':'form-control','name':'content'}),
# }
class EditProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['profile_photo','bio','gender','contact']
widgets = {
'profile_photo':forms.FileInput(attrs={'class':'form-control'}),
'bio':forms.Textarea(attrs={'class':'form-control ','placeholder':'Write here...','label':'Put a name'}),
}
class UpdateProjectForm(forms.ModelForm):
class Meta:
model = Project
fields = ['title','image','description','link']
widgets = {
'title':forms.TextInput(attrs={'class':'form-control','placeholder':'Project Title...'}),
'image':forms.TextInput(attrs= {'class':'form-control ','placeholder':'In a word...','label':'Put a name'}),
'description':forms.Textarea(attrs = {'class':'form-control','placeholder':"Caption",'label':"Caption"}),
'link':forms.URLInput(attrs={'class':'form-control'}),
}
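# Illustrative usage sketch (choice values are hypothetical; valid ones come
# from DESIGN_CHOICES / USABILITY_CHOICES / CONTENT_CHOICES):
#
#     form = RateForm(data={'design': '5', 'usability': '4', 'content': '3'})
#     if form.is_valid():
#         rate = form.save(commit=False)
#         ...  # attach the rated project and user before rate.save()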
| 2.328125
| 2
|
tests/core/test_optimizers_schedulers.py
|
jerke123/mridc
| 0
|
12775779
|
# encoding: utf-8
__author__ = "<NAME>"
# Taken and adapted from: https://github.com/wdika/NeMo/blob/main/tests/core/test_optimizers_schedulers.py
import math
import os
import random
import shutil
from abc import ABC
import numpy as np
import omegaconf
import pytest
import pytorch_lightning as pl
import torch
import torch.optim
from mridc.core import optim
from mridc.core.conf import optimizers
from mridc.core.conf.optimizers import NovogradParams, SGDParams
from mridc.core.conf.schedulers import CosineAnnealingParams
from mridc.core.optim.lr_scheduler import AVAILABLE_SCHEDULERS, SquareRootAnnealing
from mridc.core.optim.novograd import Novograd
from mridc.core.optim.optimizers import AVAILABLE_OPTIMIZERS, get_optimizer, parse_optimizer_args, register_optimizer
from mridc.utils import logging
class TempModel(torch.nn.Module):
"""Create a dummy model for testing."""
def __init__(self):
super(TempModel, self).__init__()
self.layer = torch.nn.Linear(5, 1)
def forward(self, x):
"""Forward pass."""
x = self.layer(x)
return x
class OptCounter(torch.optim.SGD):
"""A simple optimizer that counts the number of calls to step()."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for group in self.param_groups:
group.setdefault("count", 0)
def step(self, closure=None):
"""Performs a single optimization step."""
for group in self.param_groups:
group["count"] += 1
super().step(closure)
class RandomDataset(torch.utils.data.Dataset):
"""A dataset that returns random tensors."""
def __init__(self, dataset_len):
super().__init__()
self.__dataset_len = dataset_len
def __getitem__(self, *args):
return torch.randn(2)
def __len__(self):
return self.__dataset_len
class ExampleModel(pl.LightningModule, ABC):
"""A dummy model for testing."""
def __init__(self, batch_size, dataset_len, drop_last, max_steps):
super().__init__()
self.l1 = torch.nn.modules.Linear(in_features=2, out_features=1)
self.batch_size = batch_size
self.dataset_len = dataset_len
self.drop_last = drop_last
self.max_steps = max_steps
self.my_opt = None
def train_dataloader(self):
"""Return a training data loader."""
dataset = RandomDataset(self.dataset_len)
return torch.utils.data.DataLoader(dataset, batch_size=self.batch_size, drop_last=self.drop_last)
def training_step(self, batch, batch_idx):
"""Set training step."""
output = self.l1(batch)
output = torch.nn.functional.l1_loss(output, torch.ones(output.size()).to(output.device))
return {"loss": output}
def configure_optimizers(self):
"""Configure optimizers for the model."""
self.my_opt = OptCounter(self.parameters(), lr=0.02)
return self.my_opt
class Callback(pl.callbacks.Callback):
"""A dummy callback for testing."""
@pl.utilities.distributed.rank_zero_only
def on_train_end(self, trainer, module):
"""On train end, check that the number of steps is correct"""
count = module.my_opt.param_groups[0]["count"]
if trainer.global_step != count or trainer.global_step != module.max_steps:
logging.debug(f"max_epochs: {trainer.max_epochs}")
logging.debug(f"accumulate_grad_batches: {trainer.accumulate_grad_batches}")
logging.debug(f"limit_train_batches: {trainer.limit_train_batches}")
logging.debug(f"num_processes: {trainer.num_processes}")
logging.debug(f"batch_size: {module.batch_size}")
logging.debug(f"dataset_len: {module.dataset_len}")
logging.debug(f"drop_last: {module.drop_last}")
logging.debug(f"{len(trainer.train_dataloader)}")
logging.debug(f"{trainer.num_training_batches}")
self.assert_counts(trainer, module, count)
@staticmethod
def assert_counts(trainer, module, count):
"""Assert that the number of steps is correct"""
if trainer.global_step != count:
raise AssertionError(f"{trainer.global_step} != {count} != {module.max_steps}")
if trainer.global_step != module.max_steps:
raise AssertionError(f"{trainer.global_step} != {count} != {module.max_steps}")
class SchedulerNoOpCallback(Callback):
"""A dummy callback for testing."""
@staticmethod
def on_train_batch_end(trainer: pl.Trainer, pl_module, outputs, batch, batch_idx):
"""On each training batch end"""
# pl_module.max_steps is "original" max steps without trainer extra steps.
if (trainer.global_step + 1) % 3 == 0 and (trainer.global_step + 1) < pl_module.max_steps:
schedulers = trainer.lr_schedulers
for scheduler in schedulers:
                # Decrement the counter by 2, then call scheduler.step() to perform a no-op
# as well as update the optimizer lr in all param groups
scheduler["scheduler"].last_epoch -= 2
scheduler["scheduler"].step()
# Increase the max step count by 1
trainer.fit_loop.max_steps = trainer.fit_loop.max_steps + 1
def assert_counts(self, trainer, module, count):
"""This is a no-op callback, so the counts should not change"""
num_skips = torch.div(module.max_steps, 3, rounding_mode="trunc")
extra_steps = module.max_steps + num_skips
if trainer.global_step != count:
raise AssertionError(f"{trainer.global_step} != {count} != {extra_steps}")
if trainer.global_step != extra_steps:
raise AssertionError(f"{trainer.global_step} != {count} != {extra_steps}")
class TestOptimizersSchedulers:
"""Test the optimizers and schedulers."""
INITIAL_LR = 0.1
MIN_LR = 1e-3
MAX_STEPS = 10
    # fused_adam requires CUDA, and these tests may run on CPU-only machines
@pytest.mark.unit
def test_get_optimizer(self):
"""Test that the optimizer is correctly created"""
model = TempModel()
for opt_name in AVAILABLE_OPTIMIZERS:
if opt_name == "fused_adam" and not torch.cuda.is_available():
continue
opt_cls = get_optimizer(opt_name)
if opt_name == "adafactor":
# Adafactor's default mode uses relative_step without any lr.
opt = opt_cls(model.parameters())
else:
opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)
if not isinstance(opt, AVAILABLE_OPTIMIZERS[opt_name]):
raise AssertionError
@pytest.mark.unit
def test_register_optimizer(self):
"""Test that we can register a new optimizer"""
class TempOpt(torch.optim.SGD):
"""A dummy optimizer"""
class TempOptParams(optimizers.SGDParams):
"""A dummy optimizer params"""
register_optimizer("TempOpt", TempOpt, TempOptParams)
model = TempModel()
opt_cls = get_optimizer("TempOpt")
opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)
if not isinstance(opt, TempOpt):
raise AssertionError
@pytest.mark.unit
def test_optim_config_parse_bypass(self):
"""Test that the optimizer config is parsed correctly when the optimizer is not registered."""
basic_optim_config = {"weight_decay": 0.001, "betas": [0.8, 0.5]}
parsed_params = parse_optimizer_args("novograd", basic_optim_config)
if parsed_params["weight_decay"] != basic_optim_config["weight_decay"]:
raise AssertionError
if parsed_params["betas"][0] != basic_optim_config["betas"][0]:
raise AssertionError
if parsed_params["betas"][1] != basic_optim_config["betas"][1]:
raise AssertionError
dict_config = omegaconf.OmegaConf.create(basic_optim_config)
parsed_params = parse_optimizer_args("novograd", dict_config)
if parsed_params["weight_decay"] != dict_config["weight_decay"]:
raise AssertionError
if parsed_params["betas"][0] != dict_config["betas"][0]:
raise AssertionError
if parsed_params["betas"][1] != dict_config["betas"][1]:
raise AssertionError
@pytest.mark.unit
def test_optim_config_parse_arg_by_target(self):
"""Test that the optimizer config is parsed correctly by target."""
basic_optim_config = {
"_target_": "mridc.core.conf.optimizers.NovogradParams",
"params": {"weight_decay": 0.001, "betas": [0.8, 0.5]},
}
basic_optim_config = omegaconf.OmegaConf.create(basic_optim_config)
parsed_params = parse_optimizer_args("novograd", basic_optim_config)
if parsed_params["weight_decay"] != basic_optim_config["params"]["weight_decay"]:
raise AssertionError
if parsed_params["betas"][0] != basic_optim_config["params"]["betas"][0]:
raise AssertionError
if parsed_params["betas"][1] != basic_optim_config["params"]["betas"][1]:
raise AssertionError
dict_config = omegaconf.OmegaConf.create(basic_optim_config)
parsed_params = parse_optimizer_args("novograd", dict_config)
if parsed_params["weight_decay"] != dict_config["params"]["weight_decay"]:
raise AssertionError
if parsed_params["betas"][0] != dict_config["params"]["betas"][0]:
raise AssertionError
if parsed_params["betas"][1] != dict_config["params"]["betas"][1]:
raise AssertionError
# Names are ignored when passing class path
# This will be captured during optimizer instantiation
output_config = parse_optimizer_args("sgd", dict_config)
sgd_config = vars(SGDParams())
novograd_config = vars(NovogradParams())
if set(output_config.keys()) == set(sgd_config.keys()):
raise AssertionError
if set(output_config.keys()) != set(novograd_config):
raise AssertionError
@pytest.mark.unit
def test_get_scheduler(self):
"""Test that get_scheduler returns the correct scheduler class."""
model = TempModel()
optimizer = Novograd(model.parameters(), lr=self.INITIAL_LR)
for sched_name in AVAILABLE_SCHEDULERS:
sched_cls = optim.lr_scheduler.get_scheduler(sched_name)
            # Some schedulers require max_steps at construction; fall back to
            # passing it only if the bare construction fails, so that isinstance
            # failures are not silently swallowed by a broad except clause.
            try:
                sched = sched_cls(optimizer)
            except Exception:
                sched = sched_cls(optimizer, max_steps=self.MAX_STEPS)
            if not isinstance(sched, AVAILABLE_SCHEDULERS[sched_name]):
                raise AssertionError
@pytest.mark.unit
def test_register_scheduler(self):
"""Test registering a new scheduler"""
class TempSched(optim.lr_scheduler.CosineAnnealing):
"""Temporary scheduler class."""
class TempSchedParams(CosineAnnealingParams):
"""Temporary scheduler class."""
optim.lr_scheduler.register_scheduler("TempSched", TempSched, TempSchedParams)
model = TempModel()
opt_cls = get_optimizer("novograd")
opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)
sched_cls = optim.lr_scheduler.get_scheduler("TempSched")
sched = sched_cls(opt, max_steps=self.MAX_STEPS)
if not isinstance(sched, TempSched):
raise AssertionError
@pytest.mark.unit
def test_sched_config_parse_simple(self):
"""Test that scheduler config is parsed correctly"""
model = TempModel()
opt_cls = get_optimizer("novograd")
opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)
basic_sched_config = {"name": "CosineAnnealing", "max_steps": 10}
scheduler_setup = optim.lr_scheduler.prepare_lr_scheduler(opt, basic_sched_config)
if not isinstance(scheduler_setup["scheduler"], optim.lr_scheduler.CosineAnnealing):
raise AssertionError
dict_config = omegaconf.OmegaConf.create(basic_sched_config)
scheduler_setup = optim.lr_scheduler.prepare_lr_scheduler(opt, dict_config)
if not isinstance(scheduler_setup["scheduler"], optim.lr_scheduler.CosineAnnealing):
raise AssertionError
@pytest.mark.unit
def test_sched_config_parse_from_cls(self):
"""Test that we can parse a scheduler from a class"""
model = TempModel()
opt_cls = get_optimizer("novograd")
opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)
basic_sched_config = {
"_target_": "mridc.core.conf.schedulers.CosineAnnealingParams",
"params": {"min_lr": 0.1},
"max_steps": self.MAX_STEPS,
}
scheduler_setup = optim.lr_scheduler.prepare_lr_scheduler(opt, basic_sched_config)
if not isinstance(scheduler_setup["scheduler"], optim.lr_scheduler.CosineAnnealing):
raise AssertionError
dict_config = omegaconf.OmegaConf.create(basic_sched_config)
scheduler_setup = optim.lr_scheduler.prepare_lr_scheduler(opt, dict_config)
if not isinstance(scheduler_setup["scheduler"], optim.lr_scheduler.CosineAnnealing):
raise AssertionError
@pytest.mark.unit
def test_WarmupPolicy(self):
"""Test WarmupPolicy"""
model = TempModel()
opt_cls = get_optimizer("novograd")
opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)
# No warmup case
policy = optim.lr_scheduler.WarmupPolicy(opt, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR)
initial_lr = policy.get_last_lr()[0]
if initial_lr != self.INITIAL_LR:
raise AssertionError
for _ in range(self.MAX_STEPS):
if policy.get_last_lr()[0] != self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr != self.MIN_LR:
raise AssertionError
# Warmup steps available
policy = optim.lr_scheduler.WarmupPolicy(opt, warmup_steps=5, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR)
initial_lr = policy.get_last_lr()[0]
if initial_lr >= self.INITIAL_LR:
raise AssertionError
for i in range(self.MAX_STEPS):
if i <= 4:
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
elif policy.get_last_lr()[0] != self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr != self.MIN_LR:
raise AssertionError
@pytest.mark.unit
def test_WarmupHoldPolicy(self):
"""Test WarmupHoldPolicy"""
model = TempModel()
opt_cls = get_optimizer("novograd")
opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)
# No warmup case
policy = optim.lr_scheduler.WarmupHoldPolicy(opt, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR)
initial_lr = policy.get_last_lr()[0]
if initial_lr != self.INITIAL_LR:
raise AssertionError
for _ in range(self.MAX_STEPS):
if policy.get_last_lr()[0] != self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr <= self.MIN_LR:
raise AssertionError
# Warmup steps available
policy = optim.lr_scheduler.WarmupHoldPolicy(opt, warmup_steps=5, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR)
initial_lr = policy.get_last_lr()[0]
if initial_lr >= self.INITIAL_LR:
raise AssertionError
for i in range(self.MAX_STEPS):
if i <= 4:
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
elif policy.get_last_lr()[0] != self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr <= self.MIN_LR:
raise AssertionError
# Warmup + Hold steps available
policy = optim.lr_scheduler.WarmupHoldPolicy(
opt, warmup_steps=5, hold_steps=3, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR
)
initial_lr = policy.get_last_lr()[0]
if initial_lr >= self.INITIAL_LR:
raise AssertionError
for i in range(self.MAX_STEPS):
if i <= 4:
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
elif policy.get_last_lr()[0] != self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr < self.MIN_LR:
raise AssertionError
@pytest.mark.unit
def test_WarmupAnnealing(self):
"""Test that the warmup annealing policy works as expected."""
model = TempModel()
opt_cls = get_optimizer("novograd")
opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)
# No warmup case
policy = optim.lr_scheduler.WarmupAnnealing(opt, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR)
initial_lr = policy.get_last_lr()[0]
if initial_lr != self.INITIAL_LR:
raise AssertionError
for _ in range(self.MAX_STEPS):
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr < self.MIN_LR:
raise AssertionError
# Warmup steps available
policy = optim.lr_scheduler.WarmupAnnealing(opt, warmup_steps=5, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR)
initial_lr = policy.get_last_lr()[0]
if initial_lr >= self.INITIAL_LR:
raise AssertionError
for i in range(self.MAX_STEPS):
if i <= 5:
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
elif policy.get_last_lr()[0] >= self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr != self.MIN_LR:
raise AssertionError
# Warmup + Hold steps available
policy = optim.lr_scheduler.WarmupHoldPolicy(
opt, warmup_steps=5, hold_steps=3, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR
)
initial_lr = policy.get_last_lr()[0]
if initial_lr >= self.INITIAL_LR:
raise AssertionError
for i in range(self.MAX_STEPS):
if i <= 4:
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
elif policy.get_last_lr()[0] != self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr < self.MIN_LR:
raise AssertionError
@pytest.mark.unit
def test_SquareAnnealing(self):
"""Test SquareAnnealing"""
model = TempModel()
opt_cls = get_optimizer("novograd")
opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)
# No warmup case
policy = optim.lr_scheduler.SquareAnnealing(opt, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR)
initial_lr = policy.get_last_lr()[0]
if initial_lr != self.INITIAL_LR:
raise AssertionError
for _ in range(self.MAX_STEPS):
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr != self.MIN_LR:
raise AssertionError
# Warmup steps available
policy = optim.lr_scheduler.SquareAnnealing(opt, warmup_steps=5, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR)
initial_lr = policy.get_last_lr()[0]
if initial_lr >= self.INITIAL_LR:
raise AssertionError
for i in range(self.MAX_STEPS):
if i <= 5:
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
elif policy.get_last_lr()[0] >= self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr != self.MIN_LR:
raise AssertionError
@pytest.mark.unit
def test_SquareRootAnnealing(self):
"""Test SquareRootAnnealing"""
model = TempModel()
opt_cls = get_optimizer("novograd")
opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)
# No warmup case
policy = SquareRootAnnealing(opt, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR)
initial_lr = policy.get_last_lr()[0]
if initial_lr != self.INITIAL_LR:
raise AssertionError
for _ in range(self.MAX_STEPS):
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr != self.MIN_LR:
raise AssertionError
# Warmup steps available
policy = optim.lr_scheduler.SquareRootAnnealing(
opt, warmup_steps=5, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR
)
initial_lr = policy.get_last_lr()[0]
if initial_lr >= self.INITIAL_LR:
raise AssertionError
for i in range(self.MAX_STEPS):
if i <= 5:
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
elif policy.get_last_lr()[0] >= self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr != self.MIN_LR:
raise AssertionError
@pytest.mark.unit
def test_CosineAnnealing(self):
"""Test CosineAnnealing"""
model = TempModel()
opt_cls = get_optimizer("novograd")
opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)
# No warmup case
policy = optim.lr_scheduler.CosineAnnealing(opt, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR)
initial_lr = policy.get_last_lr()[0]
if initial_lr != self.INITIAL_LR:
raise AssertionError
for _ in range(self.MAX_STEPS):
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr != self.MIN_LR:
raise AssertionError
# Warmup steps available
policy = optim.lr_scheduler.CosineAnnealing(opt, warmup_steps=5, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR)
initial_lr = policy.get_last_lr()[0]
if initial_lr >= self.INITIAL_LR:
raise AssertionError
for i in range(self.MAX_STEPS):
if i <= 5:
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
elif policy.get_last_lr()[0] >= self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr != self.MIN_LR:
raise AssertionError
# Warmup + Constant steps available
policy = optim.lr_scheduler.CosineAnnealing(
opt, warmup_steps=3, constant_steps=2, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR
)
initial_lr = policy.get_last_lr()[0]
if initial_lr >= self.INITIAL_LR:
raise AssertionError
for i in range(self.MAX_STEPS):
if i <= 3:
if policy.get_last_lr()[0] > self.INITIAL_LR + 1e-5:
raise AssertionError
elif 3 < i <= 8:
if policy.get_last_lr()[0] != policy._get_lr(i)[0]:
raise AssertionError
elif policy.get_last_lr()[0] != self.MIN_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr != self.MIN_LR:
raise AssertionError
@pytest.mark.unit
def test_PolynomialDecayAnnealing(self):
"""Test PolynomialDecayAnnealing"""
model = TempModel()
opt_cls = get_optimizer("novograd")
opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)
# No warmup case
policy = optim.lr_scheduler.PolynomialDecayAnnealing(
opt, power=2, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR
)
initial_lr = policy.get_last_lr()[0]
if initial_lr != self.INITIAL_LR:
raise AssertionError
for _ in range(self.MAX_STEPS):
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr != self.MIN_LR:
raise AssertionError
# Warmup steps available
policy = optim.lr_scheduler.PolynomialDecayAnnealing(
opt, warmup_steps=5, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR
)
initial_lr = policy.get_last_lr()[0]
if initial_lr >= self.INITIAL_LR:
raise AssertionError
for i in range(self.MAX_STEPS):
if i <= 5:
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
elif policy.get_last_lr()[0] >= self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr != self.MIN_LR:
raise AssertionError
@pytest.mark.unit
def test_PolynomialHoldDecayAnnealing(self):
"""Test PolynomialHoldDecayAnnealing"""
model = TempModel()
opt_cls = get_optimizer("novograd")
opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)
# No warmup case
policy = optim.lr_scheduler.PolynomialHoldDecayAnnealing(
opt, power=2, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR
)
initial_lr = policy.get_last_lr()[0]
if initial_lr != self.INITIAL_LR:
raise AssertionError
for _ in range(self.MAX_STEPS):
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr <= self.MIN_LR:
raise AssertionError
# Warmup steps available
policy = optim.lr_scheduler.PolynomialHoldDecayAnnealing(
opt, power=2, warmup_steps=5, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR
)
initial_lr = policy.get_last_lr()[0]
if initial_lr >= self.INITIAL_LR:
raise AssertionError
for _ in range(self.MAX_STEPS):
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr < self.MIN_LR:
raise AssertionError
# Warmup + Hold steps available
policy = optim.lr_scheduler.PolynomialHoldDecayAnnealing(
opt, warmup_steps=5, hold_steps=3, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR, power=2
)
initial_lr = policy.get_last_lr()[0]
if initial_lr >= self.INITIAL_LR:
raise AssertionError
for i in range(self.MAX_STEPS):
if i <= 4:
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
elif i <= 8:
if policy.get_last_lr()[0] < self.INITIAL_LR:
raise AssertionError
elif policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr < self.MIN_LR:
raise AssertionError
@pytest.mark.unit
def test_InverseSquareRootAnnealing(self):
"""Test InverseSquareRootAnnealing"""
model = TempModel()
opt_cls = get_optimizer("novograd")
opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)
# No warmup case
policy = optim.lr_scheduler.InverseSquareRootAnnealing(opt, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR)
initial_lr = policy.get_last_lr()[0]
if initial_lr != self.INITIAL_LR:
raise AssertionError
for _ in range(self.MAX_STEPS):
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr != self.MIN_LR:
raise AssertionError
# Warmup steps available
policy = optim.lr_scheduler.InverseSquareRootAnnealing(
opt, warmup_steps=5, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR
)
initial_lr = policy.get_last_lr()[0]
if initial_lr >= self.INITIAL_LR:
raise AssertionError
for i in range(self.MAX_STEPS):
if i <= 5:
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
elif policy.get_last_lr()[0] >= self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
policy.step()
final_lr = policy.get_last_lr()[0]
if final_lr != self.MIN_LR:
raise AssertionError
@pytest.mark.unit
def test_CosineAnnealing_with_noop_steps(self):
"""Test CosineAnnealing with noop steps."""
model = TempModel()
opt_cls = get_optimizer("novograd")
opt = opt_cls(model.parameters(), lr=self.INITIAL_LR)
# No warmup case
policy = optim.lr_scheduler.CosineAnnealing(opt, max_steps=self.MAX_STEPS, min_lr=self.MIN_LR)
initial_lr = policy.get_last_lr()[0]
if initial_lr != self.INITIAL_LR:
raise AssertionError
update_steps = 0
for i in range(self.MAX_STEPS):
if policy.get_last_lr()[0] > self.INITIAL_LR:
raise AssertionError
opt.step()
policy.step()
# Perform a No-Op for scheduler every 2 steps
if i % 2 == 0:
policy.last_epoch -= 1
else:
update_steps += 1
policy.step()
update_steps += 1
if update_steps >= self.MAX_STEPS:
raise AssertionError
final_lr = policy.get_last_lr()[0]
if final_lr <= self.MIN_LR:
raise AssertionError
# update step = true number of updates performed after some number of skipped steps
true_end_lr = policy._get_lr(step=update_steps)[0]
if final_lr != true_end_lr:
raise AssertionError
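    # Sketch of the no-op mechanism exercised above (relies on the standard
    # torch.optim _LRScheduler behaviour, where step() increments last_epoch
    # and recomputes the LR from it):
    #   policy.last_epoch -= 1
    #   policy.step()   # last_epoch is back where it was, so the LR does not advance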
@pytest.mark.unit
@pytest.mark.run_only_on("CPU")
def test_max_step_computation(self):
"""Test that the max_step computation is correct."""
def train(
max_epochs, accumulate_grad_batches, limit_train_batches, num_processes, batch_size, dataset_len, drop_last
):
"""Set up the training environment"""
trainer = pl.Trainer(
max_epochs=max_epochs,
strategy="ddp_spawn",
accelerator="cpu",
num_processes=num_processes,
accumulate_grad_batches=accumulate_grad_batches,
limit_train_batches=limit_train_batches,
enable_checkpointing=False,
progress_bar_refresh_rate=0,
weights_summary=None,
)
max_steps = optim.lr_scheduler.compute_max_steps(
max_epochs,
accumulate_grad_batches,
limit_train_batches,
num_processes,
dataset_len,
batch_size,
drop_last,
)
model = ExampleModel(batch_size, dataset_len, drop_last, max_steps)
trainer.callbacks.append(Callback())
trainer.fit(model)
        # This test will break once we and Lightning upgrade to PyTorch 1.7.0, due to a bug fix in PyTorch 1.7.0
train(
31,
accumulate_grad_batches=1,
limit_train_batches=1.0,
num_processes=9,
batch_size=60,
dataset_len=1613,
drop_last=True,
)
train(
5,
accumulate_grad_batches=1,
limit_train_batches=0.17382691901706027,
num_processes=4,
batch_size=97,
dataset_len=498,
drop_last=False,
)
train(
5,
accumulate_grad_batches=8,
limit_train_batches=0.1663306588594945,
num_processes=4,
batch_size=54,
dataset_len=629,
drop_last=True,
)
train(
5,
accumulate_grad_batches=1,
limit_train_batches=0.2121376533631948,
num_processes=1,
batch_size=68,
dataset_len=488,
drop_last=False,
)
for _ in range(5):
drop_last = bool(random.randint(0, 1))
accumulate_grad_batches = random.randint(1, 10)
limit_train_batches_int = random.randint(1, 10)
limit_train_batches_float = random.uniform(0, 1)
limit_train_batches = random.choice([limit_train_batches_int, limit_train_batches_float])
max_epochs = random.randint(4, 20)
num_processes = random.randint(1, 5)
dataset_len = random.randint(20, num_processes * 500)
batch_size = random.randint(
math.ceil(5.0 / num_processes), min(np.floor_divide(dataset_len, num_processes), 128)
)
train(
max_epochs,
accumulate_grad_batches,
limit_train_batches,
num_processes,
batch_size,
dataset_len,
drop_last,
)
@pytest.mark.unit
@pytest.mark.run_only_on("CPU")
def test_max_step_computation_with_sched_no_ops(self):
"""Test that max_step is computed correctly when scheduler has no_ops"""
def train(
max_steps, accumulate_grad_batches, limit_train_batches, num_processes, batch_size, dataset_len, drop_last
):
"""Set up trainer and model"""
trainer = pl.Trainer(
max_steps=max_steps,
strategy="ddp_spawn",
accelerator="cpu",
num_processes=num_processes,
accumulate_grad_batches=accumulate_grad_batches,
limit_train_batches=limit_train_batches,
enable_checkpointing=False,
progress_bar_refresh_rate=0,
weights_summary=None,
)
model = ExampleModel(batch_size, dataset_len, drop_last, max_steps)
trainer.callbacks.append(SchedulerNoOpCallback())
trainer.fit(model)
        # This test will break once we and Lightning upgrade to PyTorch 1.7.0, due to a bug fix in PyTorch 1.7.0
train(
max_steps=20,
accumulate_grad_batches=1,
limit_train_batches=1.0,
num_processes=4,
batch_size=60,
dataset_len=2000,
drop_last=True,
)
@staticmethod
def test_remove_logs_left():
"""Remove logs left by the trainer."""
if os.path.exists(os.path.join(os.getcwd(), "lightning_logs")):
shutil.rmtree(os.path.join(os.getcwd(), "lightning_logs"))
| 2.3125
| 2
|
DatabaseServer/procserv_utils.py
|
GustavLero/EPICS-inst_servers
| 1
|
12775780
|
from __future__ import print_function, absolute_import, division, unicode_literals
# This file is part of the ISIS IBEX application.
# Copyright (C) 2012-2016 Science & Technology Facilities Council.
# All rights reserved.
#
# This program is distributed in the hope that it will be useful.
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License v1.0 which accompanies this distribution.
# EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
# AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
# You should have received a copy of the Eclipse Public License v1.0
# along with this program; if not, you can obtain a copy from
# https://www.eclipse.org/org/documents/epl-v10.php or
# http://opensource.org/licenses/eclipse-1.0.php
from server_common.channel_access import ChannelAccess
from server_common.utilities import print_and_log
class ProcServWrapper(object):
"""A wrapper for ProcSev to allow for control of IOCs"""
@staticmethod
def generate_prefix(prefix: str, ioc: str) -> str:
"""Creates a PV based on the given prefix and IOC name
Args:
prefix: The prefix of the instrument the IOC is being run on
ioc: The name of the requested IOC
"""
return "{}CS:PS:{}".format(prefix, ioc)
def start_ioc(self, prefix: str, ioc: str) -> None:
"""Starts the specified IOC
Args:
prefix: The prefix of the instrument the IOC is being run on
ioc: The name of the IOC to start
"""
print_and_log("Starting IOC {}".format(ioc))
ChannelAccess.caput(self.generate_prefix(prefix, ioc) + ":START", 1)
def stop_ioc(self, prefix: str, ioc: str) -> None:
"""Stops the specified IOC
Args:
prefix: The prefix of the instrument the IOC is being run on
ioc: The name of the IOC to stop
"""
print_and_log("Stopping IOC {}".format(ioc))
ChannelAccess.caput(self.generate_prefix(prefix, ioc) + ":STOP", 1)
def restart_ioc(self, prefix: str, ioc: str) -> None:
"""Restarts the specified IOC
Args:
prefix: The prefix of the instrument the IOC is being run on
ioc: The name of the IOC to restart
"""
print_and_log("Restarting IOC {}".format(ioc))
ChannelAccess.caput(self.generate_prefix(prefix, ioc) + ":RESTART", 1)
def get_ioc_status(self, prefix: str, ioc: str) -> str:
"""Gets the status of the specified IOC
Args:
prefix: The prefix of the instrument the IOC is being run on
ioc: The name of the IOC
Returns:
The status of the requested IOC
"""
pv = self.generate_prefix(prefix, ioc) + ":STATUS"
ans = ChannelAccess.caget(pv, as_string=True)
if ans is None:
raise IOError("Could not find IOC (%s)" % pv)
return ans.upper()
def ioc_exists(self, prefix: str, ioc: str) -> bool:
"""Checks if the IOC exists on ProcServ
Args:
prefix: The prefix of the instrument the IOC is being run on
ioc: The name of the IOC
Returns:
True if IOC exists, False otherwise
"""
try:
self.get_ioc_status(prefix, ioc)
return True
        except Exception:
return False
| 1.96875
| 2
|
00_STARTUP/09_UPDATE_USER_PROFILE/orm_databases/blog/models.py
|
CrispenGari/python-and-django
| 0
|
12775781
|
<gh_stars>0
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
# Create your models here.
class Post(models.Model):
title = models.CharField(max_length=50, null=False)
content = models.TextField(null=False)
created_at = models.DateTimeField(default=timezone.now)
user = models.ForeignKey(User, on_delete=models.CASCADE)
def __str__(self) -> str:
return self.title
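# A minimal usage sketch (run from the Django shell; `author` is an assumed
# pre-existing User instance, not defined in this module):
#   post = Post.objects.create(title="Hello", content="First post", user=author)
#   recent = Post.objects.filter(user=author).order_by("-created_at")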
| 2.359375
| 2
|
trace_data/filter.py
|
kanishkarj/Distributed-Systems
| 0
|
12775782
|
#!/usr/bin/python
import csv
import os
def find_headers(lines) :
host_headers = ['host']
for line in lines :
if(line.count("State") > 0 and line.count("rank-1") > 0) :
t = line.split(",")
host_headers.append(t[7])
return host_headers
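# Layout assumed for the .dump lines (inferred from the parsing below): fields
# are comma-separated, "State" records carry the host name in field 7, and
# "MPI_LINK" records encode the link as "src_dst_?_hopcount" in field 9.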
def print_data(file_name) :
f = open("./dump_files/" + file_name + ".dump", "r")
lines = f.readlines()
links_data = [['source', 'dest', 'hopcount']]
hosts_map = {}
hosts_data = []
for line in lines :
if(line.count("MPI_LINK") > 0) :
t = line.split(",")[9].split("_")
src = (int(t[0]))
dst = (int(t[1]))
count = (int(t[3]))
links_data.append([src,dst,count])
if(line.count("State") > 0) :
t = line.split(",")
t[1] = t[1].strip()
t[7] = t[7].strip()
if( t[1] not in hosts_map.keys()) :
hosts_map[t[1]] = {}
hosts_map[t[1]][t[7]] = max(t[3:7])
# hosts_data.append([t[1] ])
# host_map to host_data
x = list(hosts_map['rank-1'].keys())
x.insert(0,"host")
hosts_data.append(x)
for x in hosts_map.keys() :
t = []
t.append(x)
for y in hosts_map[x].keys() :
t.append(hosts_map[x][y])
hosts_data.append(t)
with open('./csv/link_data/' + file_name + '.csv', 'w') as csvFile:
writer = csv.writer(csvFile)
writer.writerows(links_data)
with open('./csv/host_data/' + file_name + '.csv', 'w') as csvFile:
writer = csv.writer(csvFile)
writer.writerows(hosts_data)
f.close()
for filename in os.listdir("./dump_files/"):
filename = (filename[0:-5])
print_data(filename)
| 2.6875
| 3
|
MPLearn/experimental_design/hill_model.py
|
momeara/MPLearn
| 5
|
12775783
|
# -*- tab-width:4;indent-tabs-mode:nil;show-trailing-whitespace:t;rm-trailing-spaces:t -*-
# vi: set ts=4 noet:
import math
from contextlib import ExitStack
# pytorch libraries
import torch
from torch.distributions import constraints
from torch import nn
# pyro libraries
import pyro
import pyro.distributions as dist
from pyro.contrib.util import iter_plates_to_shape
from pyro.contrib.util import lexpand, rmv
from . import dose_response_model
from . import methods
class PosteriorGuide(nn.Module):
def __init__(
self,
observation_dim,
batching):
super(PosteriorGuide, self).__init__()
n_hidden = 64
self.linear1 = methods.TensorLinear(*batching, observation_dim, n_hidden)
self.linear2 = methods.TensorLinear(*batching, n_hidden, n_hidden)
self.output_layer = methods.TensorLinear(*batching, n_hidden, 2 + 2 + 2 + 2 + 1)
self.softplus = nn.Softplus()
self.relu = nn.ReLU()
def forward(
self,
observation_dict,
design_prototype,
observation_labels,
target_labels):
y = observation_dict[observation_labels[0]] - .5
x = self.relu(self.linear1(y))
x = self.relu(self.linear2(x))
final = self.output_layer(x)
top_mu = final[..., 0]
top_sigma = self.softplus(final[..., 1])
bottom_mu = final[..., 2]
bottom_sigma = self.softplus(final[..., 3])
mid_mu = final[..., 4]
mid_sigma = self.softplus(final[..., 5])
slope_mu = final[..., 6]
slope_sigma = self.softplus(final[..., 7])
response_sigma = self.softplus(final[..., 8])
pyro.module("posterior_guide", self)
batch_shape = design_prototype.shape[:-1]
with ExitStack() as stack:
for plate in iter_plates_to_shape(batch_shape):
stack.enter_context(plate)
pyro.sample("top", dist.Normal(top_mu, top_sigma))
pyro.sample("bottom", dist.Normal(bottom_mu, bottom_sigma))
pyro.sample("mid", dist.Normal(mid_mu, mid_sigma))
pyro.sample("slope", dist.Normal(slope_mu, slope_sigma))
pyro.sample("response", dist.Normal(0, response_sigma))
class HillModel(dose_response_model.DoseResponseExperimentalDesignModel):
def __init__(self, hparams):
super(HillModel, self).__init__(hparams)
@staticmethod
def add_model_specific_args(parent_parser, root_dir):
parser = dose_response_model.DoseResponseExperimentalDesignModel.add_model_specific_args(
parent_parser, root_dir)
parser.add_argument('--design_size', default=10, type=int)
parser.add_argument('--design_range', default=[-9, -4], type=float, nargs=2)
parser.add_argument('--init_range', default=[-9, -4], type=float, nargs=2)
parser.add_argument('--top_prior_mu', default=100., type=float)
parser.add_argument('--top_prior_sd', default=100., type=float)
parser.add_argument('--bottom_prior_mu', default=100., type=float)
parser.add_argument('--bottom_prior_sd', default=100., type=float)
parser.add_argument('--mid_prior_mu', default=50., type=float)
parser.add_argument('--mid_prior_sd', default=15., type=float)
parser.add_argument('--slope_prior_mu', default=-.15, type=float)
parser.add_argument('--slope_prior_sd', default=0.1, type=float)
parser.add_argument('--response_prior_sd', default=5., type=float)
parser.add_argument('--observation_label', default="observation", type=str)
parser.add_argument('--target_labels',
default=["top", "bottom", "mid", "slope", "response"],
nargs=5)
return parser
def sigmoid(self, x, top, bottom, mid, slope):
return (top - bottom) * torch.sigmoid((x - mid) * slope) + bottom
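    # sigmoid() above is the four-parameter logistic (Hill-type) curve
    #   f(x) = (top - bottom) * sigmoid((x - mid) * slope) + bottom,
    # so f(mid) lies halfway between bottom and top, and the sign of `slope`
    # controls whether the response rises or falls with the dose x.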
def model(self, design_prototype):
design_init = lexpand(
torch.linspace(
*self.hparams.init_range,
self.hparams.design_size,
device=self.hparams.device),
self.hparams.num_parallel)
design_constraint = constraints.interval(*self.hparams.design_range)
design = pyro.param("design", design_init, constraint=design_constraint)
design = design.expand(design_prototype.shape)
        with pyro.plate_stack("plate_stack", design_prototype.shape[:-1]):
            # define the prior distributions for the model parameters
            top_distribution = dist.Normal(
                torch.tensor(self.hparams.top_prior_mu, device=self.hparams.device),
                torch.tensor(self.hparams.top_prior_sd, device=self.hparams.device))
            bottom_distribution = dist.Normal(
                torch.tensor(self.hparams.bottom_prior_mu, device=self.hparams.device),
                torch.tensor(self.hparams.bottom_prior_sd, device=self.hparams.device))
            mid_distribution = dist.Normal(
                torch.tensor(self.hparams.mid_prior_mu, device=self.hparams.device),
                torch.tensor(self.hparams.mid_prior_sd, device=self.hparams.device))
            slope_distribution = dist.Normal(
                torch.tensor(self.hparams.slope_prior_mu, device=self.hparams.device),
                torch.tensor(self.hparams.slope_prior_sd, device=self.hparams.device))
            # define the per-design-point response (noise) distribution before
            # sampling from it; the .to_event(1) treats the design dimension as
            # a single event within the plate stack
            design_size = self.hparams.design_size
            response_distribution = dist.Normal(
                torch.zeros(design_size, device=self.hparams.device),
                torch.tensor(
                    self.hparams.response_prior_sd,
                    device=self.hparams.device).expand(design_size)).to_event(1)
            # sample
            top = pyro.sample("top", top_distribution).unsqueeze(-1)
            bottom = pyro.sample("bottom", bottom_distribution).unsqueeze(-1)
            mid = pyro.sample("mid", mid_distribution).unsqueeze(-1)
            slope = pyro.sample("slope", slope_distribution).unsqueeze(-1)
            # response already carries the trailing design dimension, so no unsqueeze
            response = pyro.sample("response", response_distribution)
            # combine the model and the response into the observation distribution
            # the .to_event(1) indicates the design points are dependent
            observation_distribution = dist.Delta(
                self.sigmoid(design, top, bottom, mid, slope) + response).to_event(1)
# sample observations for each design point
# observation.shape = [<batch_dims>, <design_size>]
observation = pyro.sample(
self.hparams.observation_label,
observation_distribution)
return observation
def build_guide(self):
guide = PosteriorGuide(
self.hparams.design_size,
(self.hparams.num_parallel,))
guide.to(self.hparams.device)
return guide
| 2.046875
| 2
|
profiles/mxq/mxq-status.py
|
giesselmann/nanopype
| 87
|
12775784
|
#!/usr/bin/env python3
# \HEADER\-------------------------------------------------------------------------
#
# CONTENTS : snakemake mxq status script
#
# DESCRIPTION : none
#
# RESTRICTIONS : none
#
# REQUIRES : none
#
# ---------------------------------------------------------------------------------
# Copyright (c) 2018-2021, <NAME>, Max Planck Institute for Molecular Genetics
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Written by <NAME>
# ---------------------------------------------------------------------------------
import sys, subprocess
if __name__ == '__main__':
job_id = sys.argv[1]
    # mxqdump for job id; a CalledProcessError simply propagates to the caller
    ret = subprocess.run('mxqdump --job-id {job_id}'.format(job_id=job_id), check=True, shell=True, stdout=subprocess.PIPE)
    # parse mxqdump output to dictionary
out = ret.stdout.decode()
status = {key:value for key, value in [line.split('=') for line in out.strip().split(' ')]}
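    # e.g. a dump like "job-id=42 status=running" (the format is assumed here)
    # becomes {'job-id': '42', 'status': 'running'}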
# summarize mxq status to snakemake codes
if 'status' in status:
status_code = status['status']
if 'inq' in status_code or 'running' in status_code or 'loaded' in status_code or 'assigned' in status_code:
print('running')
elif 'finished' in status_code:
print('success')
else:
print('failed')
| 1.390625
| 1
|
usaspending_api/references/constants.py
|
Violet26/usaspending-api
| 0
|
12775785
|
TOTAL_BUDGET_AUTHORITY = 8361447130497.72
TOTAL_OBLIGATIONS_INCURRED = 4690484214947.31
WEBSITE_AWARD_BINS = {
"<1M": {"lower": None, "upper": 1000000},
"1M..25M": {"lower": 1000000, "upper": 25000000},
"25M..100M": {"lower": 25000000, "upper": 100000000},
"100M..500M": {"lower": 100000000, "upper": 500000000},
">500M": {"lower": 500000000, "upper": None},
}
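# Example: a $30M award falls in the "25M..100M" bin (bounds are assumed
# lower-inclusive / upper-exclusive; None means unbounded).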
DOD_CGAC = "097" # DoD's toptier identifier.
DOD_SUBSUMED_CGAC = ["017", "021", "057"] # Air Force, Army, and Navy are to be reported under DoD.
DOD_ARMED_FORCES_CGAC = [DOD_CGAC] + DOD_SUBSUMED_CGAC # The list of ALL agencies reported under DoD.
DOD_ARMED_FORCES_TAS_CGAC_FREC = [("011", "1137"), ("011", "DE00")] # TAS (CGAC, FREC)s for additional DoD agencies.
DOD_FEDERAL_ACCOUNTS = [
("011", "1081"),
("011", "1082"),
("011", "1085"),
("011", "4116"),
("011", "4121"),
("011", "4122"),
("011", "4174"),
("011", "8238"),
("011", "8242"),
] # Federal Account (AID, MAIN)s that are to be reported under DoD.
# Agencies which should be excluded from dropdowns.
EXCLUDE_CGAC = ["000", "067"]
| 1.578125
| 2
|
pyrai/dispatcher/structures/notification_data.py
|
routable-ai/pyrai
| 0
|
12775786
|
<reponame>routable-ai/pyrai
class NotificationData(object):
"""
Class used to represent notification data.
Attributes:
veh_id (int): The vehicle ID.
req_id (int): The request ID.
waiting_duration (str): The waiting duration.
assigned (bool): True if assigned, false if not.
"""
def __init__(self, veh_id, req_id, waiting_duration, assigned):
"""
Initializes a NotificationData object.
Args:
veh_id (int): The vehicle ID.
req_id (int): The request ID.
waiting_duration (str): The waiting duration.
assigned (bool): True if assigned, false if not.
"""
self.veh_id = veh_id
self.req_id = req_id
self.waiting_duration = waiting_duration
self.assigned = assigned
@staticmethod
def fromdict(d):
"""
Converts a python dictionary to a NotificationData object.
Args:
d (dict): The dictionary to convert.
Returns:
NotificationData: A NotificationData object with the attributes
set by values in d.
"""
return NotificationData(
d.get('veh_id'),
d.get('req_id'),
d.get('waiting_duration'),
d.get('assigned')
)
def todict(self):
"""
Converts a NotificationData object to a python dictionary.
Returns:
dict: A dictionary representation of self.
"""
return {
'veh_id': self.veh_id,
'req_id': self.req_id,
'waiting_duration': self.waiting_duration,
'assigned': self.assigned
}
def __str__(self):
return str(self.todict())
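# A small round-trip sketch (all field values below are illustrative):
#   nd = NotificationData(veh_id=1, req_id=7, waiting_duration="30s", assigned=True)
#   assert NotificationData.fromdict(nd.todict()).todict() == nd.todict()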
| 3.109375
| 3
|
tests/errors/test_assign5.py
|
akshanshbhatt/lpython
| 31
|
12775787
|
<reponame>akshanshbhatt/lpython
def f():
x: list[list[i32]]
x = [[1, 2, 3]]
y: list[list[str]]
y = [['a', 'b']]
x = y
| 2.9375
| 3
|
basic_python/encapsulation.py
|
ravic499/blog-tips
| 0
|
12775788
|
"""Module to explain Encapsulation in Python
"""
class SeeMe():
"""Class grouping the public, private varaibles and methods
"""
def __init__(self):
"""Constructor
"""
self.see_me = 'See Me' # public variable
        self._still_see_me = 'Still See Me !' # conventionally private variable
self.__cant_see_me = 'Cant See Me !!' # strictly private
def can_see_me(self):
"""Public Method
"""
return 'Can See Me'
def __cant_see_me_method(self):
"""Private Method
"""
return 'Cant See Me !!'
"""
Output:
check = SeeMe()
print(check.see_me)
See Me
print(check._still_see_me)
Still See Me !
print(check.__cant_see_me)
#AttributeError: 'SeeMe' object has no attribute '__cant_see_me'
check.can_see_me()
Can See Me
check.__cant_see_me_method()
#AttributeError: 'SeeMe' object has no attribute '__cant_see_me_method'
"""
# Getters and Setters, Name Mangling - Explained
class Circle():
"""Class to calculate area of a circle
"""
def __init__(self):
"""Constructor
"""
self.__radius = 3
def get_radius(self):
"""To get radius of a circle
"""
return self.__radius
def set_radius(self, radius):
"""To set radius of a circle
"""
self.__radius = radius
def calculate_area(self):
"""To calculate area of a circle
"""
return 3.14 * self.__radius * self.__radius
"""
Output
c = Circle()
# Name Mangling
print(c._Circle__radius)
3
# Getters and Setters
c.get_radius()
3
c.calculate_area()
28.26
c.set_radius(4)
c.get_radius()
4
c.calculate_area()
50.24
"""
| 4.59375
| 5
|
homework/03/hw-03-youens-clark/model_bias_variance.py
|
kyclark/info521
| 0
|
12775789
|
#!/usr/bin/env python3
# Author: <NAME>
# INFO521 Homework 3 Problem 6
import numpy as np
import matplotlib.pyplot as plt
# --------------------------------------------------
def true_function(x):
"""$t = 5x+x^2-0.5x^3$"""
return (5 * x) + x**2 - (0.5 * x**3)
# --------------------------------------------------
def sample_from_function(N=100, noise_var=1000, xmin=-5., xmax=5.):
""" Sample data from the true function.
N: Number of samples
Returns a noisy sample t_sample from the function
and the true function t. """
x = np.random.uniform(xmin, xmax, N)
t = true_function(x)
# add standard normal noise using np.random.randn
# (standard normal is a Gaussian N(0, 1.0) (i.e., mean 0, variance 1),
# so multiplying by np.sqrt(noise_var) make it N(0,standard_deviation))
t = t + np.random.randn(x.shape[0]) * np.sqrt(noise_var)
return x, t
# --------------------------------------------------
def main():
xmin = -4.
xmax = 5.
noise_var = 6
orders = [1, 3, 5, 9]
N = 25
num_samples = 20
# Make a set of N evenly-spaced x values between xmin and xmax
test_x = np.linspace(xmin, xmax, N)
true_y = true_function(test_x)
for i in orders:
        plt.figure(0)
        plt.clf()  # clear the shared figure so successive orders do not overplot
for _ in range(0, num_samples):
x, t = sample_from_function(
N=25, xmin=xmin, xmax=xmax, noise_var=noise_var)
X = np.zeros(shape=(x.shape[0], i + 1))
testX = np.zeros(shape=(test_x.shape[0], i + 1))
for k in range(i + 1):
X[:, k] = np.power(x, k)
testX[:, k] = np.power(test_x, k)
# fit model parameters
w = np.dot(np.linalg.inv(np.dot(X.T, X)), np.dot(X.T, t))
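            # Note: inverting X.T @ X works at these small sizes, but
            # np.linalg.lstsq(X, t, rcond=None) is the numerically safer equivalent.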
# calculate predictions
prediction_t = np.dot(testX, w)
plt.plot(test_x, prediction_t, color='blue')
# Plot the true function in red so it will be visible
plt.plot(test_x, true_y, color='red', linewidth=3)
plt.xlabel('x')
plt.ylabel('t')
        plt.title(r'Model order {} prediction of {}, $x \in [{},{}]$'.format(
            i, true_function.__doc__, xmin, xmax))
plt.pause(.1) # required on some systems so that rendering can happen
outfile = 'model_bias-{}.png'.format(i)
plt.savefig(outfile, format='png')
plt.show()
# --------------------------------------------------
if __name__ == '__main__':
main()
| 3.984375
| 4
|
modules/batt_health/tools/auto_test/auto_test.py
|
namagi/android_device_motorola_qcom-common
| 1
|
12775790
|
<reponame>namagi/android_device_motorola_qcom-common
import os
import struct
import time
import random
import traceback
import re
state = dict()
nvm_state = dict()
def adb_command(sub_cmd):
    rsp = ''
cmd = 'adb ' + sub_cmd
res = os.popen(cmd, "r")
while 1:
line = res.readline()
if not line: break
rsp += line
return rsp
def adb_wait_for_device():
print "--- Waiting for device"
adb_command('wait-for-device')
def adb_reboot():
print "--- Rebooting phone"
adb_command('reboot')
adb_wait_for_device()
def adb_clear_bhd_data():
print "--- Clearing battery health PDS data"
adb_command('shell rm /pds/batt_health/*')
def adb_pull_bhd_data(local_path):
print "--- Pulling battery health PDS data"
adb_command('pull /pds/batt_health ' + local_path)
def adb_stop_bhd():
adb_command('shell stop batt_health')
def adb_start_bhd():
adb_command('shell start batt_health')
def adb_update_charge_state():
global state
string = struct.pack('<7i', state['ph_is_charging'], state['ph_soc'],
state['ph_cc_uah'], state['ph_real_fcc_batt_temp'],
state['ph_real_fcc'], state['ph_ocv'],
state['ph_rbatt'])
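    # '<7i' packs seven little-endian 32-bit ints; the field order above is
    # presumably what the daemon's override.bin reader expects.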
string = '\\x' + '\\x'.join('%02x' % ord(b) for b in string)
string = 'shell \"echo -e \'' + string + '\' > /sys/devices/platform/msm_ssbi.0/pm8921-core/pm8921-bms/override.bin\"'
rsp = adb_command(string)
if rsp != '':
raise RuntimeError('Invalid response from phone = ' + rsp)
def state_init():
global state
state['ph_is_charging'] = 0
state['ph_soc'] = 100
state['ph_cc_uah'] = -22
state['ph_real_fcc_batt_temp'] = -22
state['ph_real_fcc'] = -22
state['ph_ocv'] = -22
state['ph_rbatt'] = -22
state['charge_cycles'] = 0
state['charge_inc'] = 0
state['file_write_count'] = 0
state['aged_begin_cc_uah'] = -22
state['aged_begin_ocv'] = -22
state['aged_begin_percent'] = -22
state['aged_end_cc_uah'] = -22
state['aged_end_ocv'] = -22
state['aged_end_percent'] = -22
def nvm_state_copy():
global state
global nvm_state
nvm_state['charge_cycles'] = state['charge_cycles']
nvm_state['charge_inc'] = state['charge_inc']
nvm_state['file_write_count'] = state['file_write_count']
if (state['ph_real_fcc_batt_temp'] != -22):
nvm_state['ph_real_fcc_batt_temp'] = state['ph_real_fcc_batt_temp']
if (state['ph_real_fcc'] != -22):
nvm_state['ph_real_fcc'] = state['ph_real_fcc']
if (state['ph_soc'] != -22):
nvm_state['ph_soc'] = state['ph_soc']
if (state['ph_ocv'] != -22):
nvm_state['ph_ocv'] = state['ph_ocv']
if (state['ph_rbatt'] != -22):
nvm_state['ph_rbatt'] = state['ph_rbatt']
if (state['aged_begin_cc_uah'] != -22):
nvm_state['aged_begin_cc_uah'] = state['aged_begin_cc_uah']
if (state['aged_begin_percent'] != -22):
nvm_state['aged_begin_percent'] = state['aged_begin_percent']
if (state['aged_begin_ocv'] != -22):
nvm_state['aged_begin_ocv'] = state['aged_begin_ocv']
if (state['aged_begin_cc_uah'] != -22):
nvm_state['aged_begin_cc_uah'] = state['aged_begin_cc_uah']
if (state['aged_end_percent'] != -22):
nvm_state['aged_end_percent'] = state['aged_end_percent']
if (state['aged_end_ocv'] != -22):
nvm_state['aged_end_ocv'] = state['aged_end_ocv']
if (state['aged_end_cc_uah'] != -22):
nvm_state['aged_end_cc_uah'] = state['aged_end_cc_uah']
def update_nvm_state():
global state
global nvm_state
if (nvm_state['charge_cycles'] != state['charge_cycles']):
print "--- NVM state updated due to charge cycles"
state['file_write_count'] = state['file_write_count'] + 1
nvm_state_copy()
elif ( (state['charge_inc'] - nvm_state['charge_inc']) >= 50):
print "--- NVM state updated due to charge increase"
state['file_write_count'] = state['file_write_count'] + 1
nvm_state_copy()
def execute_batt_health_reset():
adb_stop_bhd()
adb_clear_bhd_data()
adb_reboot()
adb_stop_bhd()
adb_clear_bhd_data()
adb_update_charge_state()
adb_start_bhd()
def execute_discharge_cycle(target_soc, interval, sleep):
global state
print "--- Discharging from",state['ph_soc'],"to",target_soc
state['ph_is_charging'] = 0
adb_update_charge_state()
time.sleep(sleep)
while state['ph_soc'] > target_soc:
state['ph_soc'] -= interval
if (state['ph_soc'] < target_soc): state['ph_soc'] = target_soc
adb_update_charge_state()
time.sleep(sleep)
def execute_charge_cycle(target_soc, interval, sleep):
global state
print "--- Charging from",state['ph_soc'],"to",target_soc
start_soc = state['ph_soc']
state['ph_is_charging'] = 1
if (0 <= state['ph_soc'] <= 5):
state['ph_cc_uah'] = random.randint(1500000, 1766000)
state['ph_ocv'] = random.randint(3200000, 4300000)
begin_cc_uah = state['ph_cc_uah']
begin_ocv = state['ph_ocv']
adb_update_charge_state()
time.sleep(sleep)
while state['ph_soc'] < target_soc:
state['ph_soc'] += interval
if (state['ph_soc'] > target_soc): state['ph_soc'] = target_soc
if ((95 <= state['ph_soc'] <= 100) and (state['ph_soc'] == target_soc)):
state['ph_cc_uah'] = random.randint(0, 10000)
state['ph_ocv'] = random.randint(3200000, 4300000)
end_cc_uah = state['ph_cc_uah']
end_ocv = state['ph_ocv']
adb_update_charge_state()
time.sleep(sleep)
state['ph_is_charging'] = 0
adb_update_charge_state()
state['charge_inc'] += (target_soc - start_soc)
if (state['charge_inc'] > 100):
state['charge_cycles'] = state['charge_cycles'] + 1
state['charge_inc'] = state['charge_inc'] % 100
if ( (0 <= start_soc <= 5) and (95 <= target_soc <= 100)):
print "--- Aged event occurred!"
state['aged_begin_cc_uah'] = begin_cc_uah
state['aged_begin_ocv'] = begin_ocv
state['aged_begin_percent'] = start_soc
state['aged_end_cc_uah'] = end_cc_uah
state['aged_end_ocv'] = end_ocv
state['aged_end_percent'] = target_soc
update_nvm_state()
def test_case_random_event():
global state
global nvm_state
i = random.randint(0, 100)
if (i == 0):
temp_soc = state['ph_soc']
state = nvm_state.copy()
state['ph_is_charging'] = 0
state['ph_soc'] = temp_soc
state['ph_cc_uah'] = -22
state['ph_real_fcc_batt_temp'] = -22
state['ph_real_fcc'] = -22
state['ph_ocv'] = -22
state['ph_rbatt'] = -22
state['aged_begin_cc_uah'] = -22
state['aged_begin_percent'] = -22
state['aged_begin_ocv'] = -22
state['aged_begin_cc_uah'] = -22
state['aged_end_percent'] = -22
state['aged_end_ocv'] = -22
state['aged_end_cc_uah'] = -22
adb_reboot()
adb_update_charge_state()
elif (i == 1):
state['ph_real_fcc_batt_temp'] = random.randint(0, 50)
print "--- Setting real_fcc_batt_temp =",state['ph_real_fcc_batt_temp']
adb_update_charge_state()
elif (i == 2):
state['ph_real_fcc'] = random.randint(0, 4000000)
print "--- Setting real_fcc =",state['ph_real_fcc']
adb_update_charge_state()
elif (i == 3):
state['ph_ocv'] = random.randint(3200000, 4300000)
print "--- Setting ocv =",state['ph_ocv']
adb_update_charge_state()
elif (i == 4):
state['ph_rbatt'] = random.randint(1000, 2000)
print "--- Setting rbatt =",state['ph_rbatt']
adb_update_charge_state()
def test_case_random():
global state
global nvm_state
pds_save_path = "random/"
state_init()
nvm_state = state.copy()
max_cycle = input('Please enter number of cycles to execute: ')
print "- Start random test case"
adb_wait_for_device()
execute_batt_health_reset()
random.seed()
for i in range(1, max_cycle):
target_discharge = random.randint(0, state['ph_soc'])
target_charge = random.randint(target_discharge, 100)
print "- Executing cycle",i,"/",max_cycle
execute_discharge_cycle(target_discharge, 10, 0.1)
test_case_random_event()
execute_charge_cycle(target_charge, 10, 0.1)
test_case_random_event()
adb_pull_bhd_data(pds_save_path)
print "- Random test case completed!"
print "- PDS data files saved in " + pds_save_path
print "- Expected NVM State:"
print "\tFile Write Count:\t\t",nvm_state['file_write_count']
print "\tCharge Cycle Count:\t\t",nvm_state['charge_cycles']
print "\tCharge Increase:\t\t",nvm_state['charge_inc']
print "\tReal FCC Batt Temp:\t\t",nvm_state['ph_real_fcc_batt_temp']
print "\tReal FCC:\t\t\t",nvm_state['ph_real_fcc']
print "\tState of charge:\t\t",nvm_state['ph_soc']
print "\tOCV:\t\t\t\t",nvm_state['ph_ocv']
print "\tRbatt:\t\t\t\t",nvm_state['ph_rbatt']
if (nvm_state['aged_begin_cc_uah'] != -22):
print "\tAged Values:"
print "\t\tBOC - Percent:\t\t",nvm_state['aged_begin_percent']
print "\t\tBOC - OCV:\t\t",nvm_state['aged_begin_ocv']
print "\t\tBOC - CC:\t\t",nvm_state['aged_begin_cc_uah']
print "\t\tEOC - Percent:\t\t",nvm_state['aged_end_percent']
print "\t\tEOC - OCV:\t\t",nvm_state['aged_end_ocv']
print "\t\tEOC - CC:\t\t",nvm_state['aged_end_cc_uah']
print
def test_case_charge_by_1_step():
global state
global nvm_state
state_init()
nvm_state = state.copy()
print "- Start charge by 1 step test"
adb_wait_for_device()
execute_batt_health_reset()
for i in range (0,5):
execute_discharge_cycle(0, 1, 0.1)
for j in range (0, 101):
execute_charge_cycle(j, 1, 0.1)
pds_save_path = "by_1/"
adb_pull_bhd_data(pds_save_path)
print "- Charge by 1 step test case completed!"
print "- PDS data files saved in " + pds_save_path
print "- Expected NVM State:"
print "\tFile Write Count:\t\t",nvm_state['file_write_count']
print "\tCharge Cycle Count:\t\t",nvm_state['charge_cycles']
print "\tCharge Increase:\t\t",nvm_state['charge_inc']
def test_case_reboots():
global state
global nvm_state
state_init()
nvm_state = state.copy()
print "- Start reboots test case"
adb_wait_for_device()
execute_batt_health_reset()
execute_discharge_cycle(0, 10, 0.1)
state['ph_real_fcc_batt_temp'] = 50
execute_charge_cycle(50, 1, 0.1)
adb_reboot()
execute_discharge_cycle(50, 1, 0.1)
state['ph_real_fcc'] = 1500000
execute_charge_cycle(100, 1, 0.1)
adb_reboot()
execute_discharge_cycle(0, 1, 0.1)
state['ph_ocv'] = 4000001
execute_charge_cycle(50, 1, 0.1)
adb_reboot()
execute_discharge_cycle(50, 1, 0.1)
state['ph_rbatt'] = 1400
execute_charge_cycle(99, 1, 0.1)
adb_reboot()
pds_save_path = "reboots/"
adb_pull_bhd_data(pds_save_path)
print "- Reboots test case completed!"
print "- PDS data files saved in " + pds_save_path
print "- Expected NVM State:"
print "\tFile Write Count:\t\t",nvm_state['file_write_count']
print "\tCharge Cycle Count:\t\t",nvm_state['charge_cycles']
print "\tCharge Increase:\t\t",nvm_state['charge_inc']
print "\tReal FCC Batt Temp:\t\t",nvm_state['ph_real_fcc_batt_temp']
print "\tReal FCC:\t\t\t",nvm_state['ph_real_fcc']
print "\tState of charge:\t\t",nvm_state['ph_soc']
print "\tOCV:\t\t\t\t",nvm_state['ph_ocv']
print "\tRbatt:\t\t\t\t",nvm_state['ph_rbatt']
print
def handle_force_nvm_write():
global state
global nvm_state
#To force NVM write, just do a quick 1 -> 100 charge cycle
state_init()
nvm_state = state.copy()
print "- Forcing NVM write"
execute_discharge_cycle(1, 100, 0.1)
state['ph_real_fcc_batt_temp'] = 50
execute_charge_cycle(100, 100, 0.1)
def handle_manual_basic_entry():
global state
global nvm_state
state_init()
nvm_state = state.copy()
print "- Manual entry, actions: "
print "\t'=##' = charge/discharge to ##"
print "\t'-##' = discharge by ##"
print "\t'+##' = charge by ##"
print "\tx = exit"
exit_manual_entry = 0
while not exit_manual_entry:
choice = raw_input('Please enter action: ')
if choice == 'x':
            exit_manual_entry = 1
else:
            soc = state['ph_soc']
choice.replace(' ', '')
m = re.match('(=|-|\+)(\d*)', choice)
if m:
delta = eval(m.group(2))
if (delta > 100): delta = 100
if (delta < 0): delta = 0
if (m.group(1) == '-'):
soc = soc - delta
if (soc < 0): soc = 0
execute_discharge_cycle(soc, 5, 0.1)
elif (m.group(1) == '+'):
soc = soc + delta
if (soc > 100): soc = 100
execute_charge_cycle(soc, 5, 0.1)
else:
if (soc < delta):
execute_charge_cycle(delta, 5, 0.1)
else:
execute_discharge_cycle(delta, 5, 0.1)
else:
print "Invalid input"
def handle_main_menu():
exit_requested = 0
print
print "Battery Health Daemon Tester"
print "============================"
print "1) Execute 'Random' test"
print "2) Execute 'Charge By 1' test"
print "3) Execute 'Reboots' test"
print "f) Force NVM write"
print "m) Manual basic entry"
print "r) Reset phone battery health data"
print "x) Exit"
choice = raw_input('Please enter a value: ')
print
if choice == '1':
test_case_random()
elif choice == '2':
test_case_charge_by_1_step()
elif choice == '3':
test_case_reboots()
elif choice == 'r':
adb_wait_for_device()
adb_stop_bhd()
adb_clear_bhd_data()
adb_reboot()
elif choice == 'f':
handle_force_nvm_write()
elif choice == 'm':
handle_manual_basic_entry()
elif choice == 'x':
exit_requested = 1
else:
print "Invalid value"
return exit_requested
def main():
try:
exit_app = 0
while (exit_app != 1):
exit_app = handle_main_menu()
except Exception, err:
print 'ERROR: ' + str(err)
traceback.print_exc()
if __name__ == '__main__':
main()
| 2.125
| 2
|
framework/components/gen_dataset.py
|
HXX97/rng-kbqa
| 37
|
12775791
|
<filename>framework/components/gen_dataset.py
"""
Copyright (c) 2021, salesforce.com, inc.
All rights reserved.
SPDX-License-Identifier: BSD-3-Clause
For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
"""
from components.utils import *
import os
from components.dataset_utils import LFCandidate
from executor.sparql_executor import get_label
from tqdm import tqdm
from transformers import BartTokenizer
class GenerationExample:
def __init__(self, qid, query, gt, candidates, entity_label_map, answers=[]):
self.qid = qid
self.query = query
self.gt = gt
self.candidates = candidates
self.entity_label_map = entity_label_map
self.answers = answers
def __str__(self):
return '{}\n\t->{}\n'.format(self.query, self.gt.normed_expr)
def __repr__(self):
return self.__str__()
class GenerationFeature:
def __init__(self, ex, src_input_ids, tgt_input_ids):
self.ex = ex
self.src_input_ids = src_input_ids
self.tgt_input_ids = tgt_input_ids
def _vanilla_linearization_method(expr, entity_label_map):
expr = expr.replace('(', ' ( ')
expr = expr.replace(')', ' ) ')
toks = expr.split(' ')
toks = [x for x in toks if len(x)]
norm_toks = []
for t in toks:
# normalize entity
if t.startswith('m.'):
if t in entity_label_map:
t = entity_label_map[t]
else:
name = get_label(t)
if name is not None:
entity_label_map[t] = name
t = name
elif 'XMLSchema' in t:
format_pos = t.find('^^')
t = t[:format_pos]
elif t == 'ge':
t = 'GREATER EQUAL'
elif t == 'gt':
t = 'GREATER THAN'
elif t == 'le':
t = 'LESS EQUAL'
elif t == 'lt':
t = 'LESS THAN'
else:
if '_' in t:
t = t.replace('_', ' ')
if '.' in t:
t = t.replace('.', ' , ')
# normalize type
norm_toks.append(t)
return ' '.join(norm_toks)
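# Rough illustration of the normalization above (entity id and label are
# hypothetical):
#   (ge ?x (JOIN m.0abc 3^^http://www.w3.org/2001/XMLSchema#integer))
# linearizes to
#   ( GREATER EQUAL ?x ( JOIN <label of m.0abc> 3 ) )
# with m.0abc resolved through get_label and cached in entity_label_map.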
def proc_webqsp_gen_exs(candidates_info, data_bank):
qid = candidates_info['qid']
raw_data = data_bank[qid]
query = raw_data['RawQuestion']
gt_expr = candidates_info['genation_target']
entity_label_map = {} # resolve_entity_label(qid, gt, candidates)
norm_gt = _vanilla_linearization_method(gt_expr, entity_label_map)
# print('normed gt', norm_gt)
gt = LFCandidate(gt_expr, norm_gt, True, 1.0, 0.0)
top_candidates = candidates_info['top_candidates']
candidates = []
for c in top_candidates:
c_expr = c['logical_form']
normed_c_expr = _vanilla_linearization_method(c_expr, entity_label_map)
# print('normed c_expr', normed_c_expr)
c_ex = c['ex']
lf_candidate = LFCandidate(c_expr, normed_c_expr, c_ex)
candidates.append(lf_candidate)
return GenerationExample(qid, query, gt, candidates, entity_label_map, answers=[])
def webqsp_read_gen_examples_from_json(dataset_file, candidate_file, is_eval=False):
data_bank = load_json(dataset_file)
data_bank = dict([(str(x['QuestionId']), x) for x in data_bank])
lines = load_json(candidate_file)
examples = []
for l in tqdm(lines, desc='Reading', total=len(lines)):
ex = proc_webqsp_gen_exs(l, data_bank)
if ex is None:
continue
examples.append(ex)
return examples
def proc_grail_gen_exs(candidates_info, data_bank):
qid = candidates_info['qid']
raw_data = data_bank[qid]
query = raw_data['question']
gt_expr = candidates_info['genation_target']
entity_label_map = {} # resolve_entity_label(qid, gt, candidates)
norm_gt = _vanilla_linearization_method(gt_expr, entity_label_map)
# print('normed gt', norm_gt)
gt = LFCandidate(gt_expr, norm_gt, True, 1.0, 0.0)
top_candidates = candidates_info['top_candidates']
candidates = []
for c in top_candidates:
c_expr = c['logical_form']
normed_c_expr = _vanilla_linearization_method(c_expr, entity_label_map)
# print('normed c_expr', normed_c_expr)
c_ex = c['ex']
lf_candidate = LFCandidate(c_expr, normed_c_expr, c_ex)
candidates.append(lf_candidate)
return GenerationExample(qid, query, gt, candidates, entity_label_map, answers=[])
def grail_read_gen_examples_from_json(dataset_file, candidate_file, is_eval=False):
data_bank = load_json(dataset_file)
data_bank = dict([(str(x['qid']), x) for x in data_bank])
lines = load_json(candidate_file)
examples = []
for l in tqdm(lines, desc='Reading', total=len(lines)):
ex = proc_grail_gen_exs(l, data_bank)
if ex is None:
continue
examples.append(ex)
return examples
def _extract_gen_feature_from_example(args, tokenizer, ex, add_prefix_space=False):
# gt_input_ids, gt_token_type_ids, candidates_input_ids, candidates_token_type_ids
qid = ex.qid
q = ex.query
gt_lf = ex.gt.normed_expr
if args.do_lower_case:
q = q.lower()
gt_lf = gt_lf.lower()
candidate_lfs = []
for c in ex.candidates[:args.top_k_candidates]:
c_lf = c.normed_expr
if args.do_lower_case:
c_lf = c_lf.lower()
candidate_lfs.append(c_lf)
src_text = ' ; '.join([q] + candidate_lfs)
dst_text = gt_lf
if add_prefix_space:
batch_encoding = tokenizer.prepare_seq2seq_batch(
[src_text],
[dst_text],
max_length=args.max_source_length,
max_target_length=args.max_target_length,
return_tensors="pt",
add_prefix_space=add_prefix_space,
).data
else:
batch_encoding = tokenizer.prepare_seq2seq_batch(
[src_text],
[dst_text],
max_length=args.max_source_length,
max_target_length=args.max_target_length,
return_tensors="pt",
).data
# batch_encoding["ids"] = torch.tensor([x["id"] for x in batch])
# return batch_encoding # return GrailRankingFeature(qid, ex, gt_input_ids, gt_token_type_ids, candidate_input_ids, candidate_token_type_ids)
input_ids, labels = batch_encoding['input_ids'][0], batch_encoding['labels'][0]
# encoded = tokenizer.pad({'input_ids': [input_ids, input_ids[:20]]},return_tensors='pt')
# encoded = tokenizer.pad({'input_ids': [labels, labels[:5]]},return_tensors='pt')
return GenerationFeature(ex, input_ids, labels)
def generation_collate_fn(data, tokenizer):
all_input_ids = []
all_labels = []
for feat in data:
all_input_ids.append(feat.src_input_ids)
all_labels.append(feat.tgt_input_ids)
src_encoded = tokenizer.pad({'input_ids': all_input_ids},return_tensors='pt')
tgt_encoded = tokenizer.pad({'input_ids': all_labels},return_tensors='pt')
return {
'input_ids': src_encoded['input_ids'],
'attention_mask': src_encoded['attention_mask'],
'labels': tgt_encoded['input_ids']
}
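# A minimal usage sketch (batch size, model, and dataset wiring are assumptions):
# from functools import partial
# from torch.utils.data import DataLoader
# loader = DataLoader(features, batch_size=8, shuffle=True,
#                     collate_fn=partial(generation_collate_fn, tokenizer=tokenizer))
# for batch in loader:
#     out = model(input_ids=batch['input_ids'],
#                 attention_mask=batch['attention_mask'], labels=batch['labels'])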
def extract_gen_features_from_examples(args, tokenizer, examples):
features = []
add_prefix_space = isinstance(tokenizer, BartTokenizer)
for ex in tqdm(examples, desc='Indexing', total=len(examples)):
feat = _extract_gen_feature_from_example(args, tokenizer, ex, add_prefix_space=add_prefix_space)
features.append(feat)
return features
| 1.960938
| 2
|
plot/plot_results_tests.py
|
biomac-lab/COVID_schools_dashboard
| 0
|
12775792
|
import sys
sys.path.append('../')
from matplotlib import figure
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import os
from tqdm import tqdm
from models import model
### Config folders
config_data = pd.read_csv('config.csv', sep=',', header=None, index_col=0)
figures_path = config_data.loc['figures_dir'][1]
results_path = config_data.loc['results_test_dir'][1]
ages_data_path = config_data.loc['bogota_age_data_dir'][1]
houses_data_path = config_data.loc['bogota_houses_data_dir'][1]
### Arguments
import argparse
parser = argparse.ArgumentParser(description='Dynamics visualization.')
parser.add_argument('--population', default=10000, type=int,
                    help='Specify the number of individuals')
parser.add_argument('--type_sim', default='intervention', type=str,
                    help='Specify the type of simulation to plot')
args = parser.parse_args()
number_nodes = args.population
pop = number_nodes
### Read functions
def load_results_ints(type_res,n,int_effec,schl_occup,layer,path=results_path):
read_path = os.path.join(path,'{}_layerInt_{}_inter_{}_schoolcap_{}_{}.csv'.format(str(n),str(layer),str(int_effec),
str(schl_occup),type_res))
read_file = pd.read_csv(read_path)
return read_file
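# e.g. load_results_ints('soln_cum', 10000, 0.2, 1.0, 'work') reads
# '<results_path>/10000_layerInt_work_inter_0.2_schoolcap_1.0_soln_cum.csv'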
### Read file
results_path = os.path.join(results_path,str(pop))
###------------------------------------------------------------------------------------------------------------------------------------------------------
### Bar plots
intervention_effcs = [0.0,0.2,0.4]
school_cap = [1.0] #,0.35]
layers_test = ['work','community','all']
layers_labels = ['Intervention over work','Intervention over community','Intervention over all']
layers_labels = dict(zip(layers_test,layers_labels))
df_list = []
for l, layer_ in enumerate(layers_test):
for i, inter_ in enumerate(intervention_effcs):
for j, schl_cap_ in enumerate(school_cap):
res_read = load_results_ints('soln_cum',args.population,inter_,schl_cap_,layer_,results_path)
for itr_ in range(10):
res_read_i = res_read['iter'] == itr_
res_read_i = pd.DataFrame(res_read[res_read_i])
end_cases = res_read_i['E'].iloc[-1]
df_res_i = pd.DataFrame(columns=['iter','Inter.Layer','interven_eff','end_cases'])
df_res_i['iter'] = [int(itr_)]
df_res_i['Inter.Layer'] = layers_labels[layer_]
df_res_i['interven_eff'] = r'{}%'.format(int(inter_*100))
df_res_i['end_cases'] = end_cases*pop
df_list.append(df_res_i)
df_final_E = pd.concat(df_list)
fig,ax = plt.subplots(1,1,figsize=(9, 6))
sns.catplot(ax=ax, data=df_final_E, y='interven_eff', x='end_cases', hue='Inter.Layer',kind='bar',palette='winter',alpha=0.7,legend=False)
#ax.legend(bbox_to_anchor=(1.02,1)).set_title('')
plt.legend(bbox_to_anchor=(1.02,0.6),title='',frameon=False, fontsize=16)
#plt.setp(ax.get_legend().get_texts(), fontsize='17') # for legend text
plt.ylabel(r'Intervention efficiency ($\%$)',fontsize=17)
plt.xlabel(r'Infections per 10,000',fontsize=17)
plt.title(r'Total infections | schools at {}%'.format(str(int(school_cap[0]*100))),fontsize=17)
plt.xticks(size=16)
plt.yticks(size=16)
save_path = os.path.join(figures_path,'bar_plots','layersInter_totalInfections_n_{}_schoolcap_{}_.png'.format(str(pop),str(school_cap[0])))
plt.savefig(save_path,dpi=400, transparent=False, bbox_inches='tight', pad_inches=0.1 )
# Deaths
school_cap = [0.35] #,0.35]
layers_test = ['work','community','all']
layers_labels = ['Intervention over work','Intervention over community','Intervention over all']
layers_labels = dict(zip(layers_test,layers_labels))
df_list = []
for l, layer_ in enumerate(layers_test):
for i, inter_ in enumerate(intervention_effcs):
for j, schl_cap_ in enumerate(school_cap):
res_read = load_results_ints('soln_cum',args.population,inter_,schl_cap_,layer_,results_path)
for itr_ in range(10):
res_read_i = res_read['iter'] == itr_
res_read_i = pd.DataFrame(res_read[res_read_i])
end_dead = res_read_i['D'].iloc[-1]
df_res_i = pd.DataFrame(columns=['iter','Inter.Layer','interven_eff','end_dead'])
df_res_i['iter'] = [int(itr_)]
df_res_i['Inter.Layer'] = layers_labels[layer_]
df_res_i['interven_eff'] = r'{}%'.format(int(inter_*100))
df_res_i['end_dead'] = end_dead*pop
df_list.append(df_res_i)
df_final_D = pd.concat(df_list)
fig,ax = plt.subplots(1,1,figsize=(9, 6))
sns.catplot(ax=ax, data=df_final_D, y='interven_eff', x='end_dead', hue='Inter.Layer',kind='bar',palette='winter',alpha=0.7,legend=False)
#ax.legend(bbox_to_anchor=(1.02,1)).set_title('')
plt.legend(bbox_to_anchor=(1.02,0.6),title='',frameon=False, fontsize=16)
#plt.setp(ax.get_legend().get_texts(), fontsize='17') # for legend text
plt.ylabel(r'Intervention efficiency ($\%$)',fontsize=17)
plt.xlabel(r'Deaths per 10,000',fontsize=17)
plt.title(r'Total deaths | schools at {}%'.format(str(int(school_cap[0]*100))),fontsize=17)
plt.xticks(size=16)
plt.yticks(size=16)
plt.xlim([0,141])
save_path = os.path.join(figures_path,'bar_plots','layersInter_totalDeaths_n_{}_schoolcap_{}_.png'.format(str(pop),str(school_cap[0])))
plt.savefig(save_path,dpi=400, transparent=False, bbox_inches='tight', pad_inches=0.1 )
| 2.328125
| 2
|
choi/object_search/webcam_train.py
|
HDNua/kwin
| 2
|
12775793
|
"""
webcam_train
Developer: <NAME>, <NAME>
Version: 0.1.0
Release Date: 2017-09-30
"""
import numpy as np
import cv2
import tensorflow as tf
import sys
from kwin import *
import time
import webcam
import dataset
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
########################################################################
#
########################################################################
#
while True:
target_name = input("무엇을 학습할까요? (종료하려면 exit) ")
target_name=target_name.strip()
target_dir = "%s/%s" %(dataset.train_data_path(), target_name)
if target_name == 'exit':
break
if os.path.exists(target_dir) is False:
os.mkdir(target_dir)
webcam.record_avi(target_name=target_name, target_dir=target_dir)
print("[%s]에 대한 동영상 촬영이 완료되었습니다." %target_name)
#
print("학습을 시작합니다. 종료 메시지가 나타날 때까지 잠시 기다리십시오.")
#
import retrain
retrain.do_train()
#
print("학습이 종료되었습니다. bottleneck을 확인하십시오.")
| 2.4375
| 2
|
videoToAudio.py
|
anthonyattard/video-to-audio
| 2
|
12775794
|
#!/usr/bin/env python3
import os
import subprocess
sourceDir = "/Video/File/Directory"
#Choose the file formats below. See https://ffmpeg.org/ffmpeg.html for supported file types.
videoFormat = ".mp4"
audioFormat = ".mp3"
for file in os.listdir(sourceDir):
name = file[:file.rfind(".")]
subprocess.call(["ffmpeg", "-i", sourceDir+"/"+name+".MP4", sourceDir+"/"+name+".mp3"])sourceDir+"/"+name+videoFormat, sourceDir+"/"+name+audioFormat])
| 2.515625
| 3
|
posts/admin.py
|
lakshaykhatter/djangox-project-attempt
| 0
|
12775795
|
from django.contrib.auth import get_user_model
from django.contrib import admin
# Register your models here.
from .models import Post, Link
User = get_user_model()
class LinkInline(admin.StackedInline):
model = Link
extra = 0
# readonly_fields = ['url',]
fields = ['url']
class PostAdmin(admin.ModelAdmin):
inlines = [LinkInline]
list_display = ['title', 'description']
readonly_fields = ['date',]
raw_id_fields = ['author']
admin.site.register(Post, PostAdmin)
| 2.015625
| 2
|
app/gws/lib/test.py
|
gbd-consult/gbd-websuite
| 3
|
12775796
|
<reponame>gbd-consult/gbd-websuite
"""Support tests"""
import datetime
import http.cookies
import inspect
import os.path
import shutil
import sys
import time
import psycopg2
import psycopg2.extras
import pytest
import werkzeug.test
import werkzeug.wrappers
import gws
import gws.base.web.web_app
import gws.config
import gws.core.tree
import gws.lib.feature
import gws.lib.json2
import gws.lib.net
import gws.lib.os2
import gws.lib.password
import gws.lib.vendor.slon
import gws.lib.mpx.config
import gws.server.control
import gws.spec.runtime
fixture = pytest.fixture
# configuration for tests, see bin/_test.py
CONFIG = {}
TEMP_DIR = '/tmp'
MANIFEST_PATH = TEMP_DIR + '/gws_test_manifest.json'
DEFAULT_MANIFEST = {
'withStrictConfig': True,
'withFallbackConfig': False,
}
# GWS configuration defaults
SESSION_STORE_PATH = '/tmp/gws_test_session_store.sqlite'
GWS_CONFIG_PATH = '/gws-var/gws_test_gws_config.json'
GWS_CONFIG_DEFAULTS = {
'server': {
'log': {'level': 'DEBUG'},
'mapproxy': {'forceStart': True},
},
'auth': {
'sessionStore': 'sqlite',
'sessionStorePath': SESSION_STORE_PATH,
},
}
# test runner
def main(args):
CONFIG.update(gws.lib.json2.from_path('/gws-var/TEST_CONFIG.json'))
gws.lib.json2.to_path(MANIFEST_PATH, CONFIG.get('MANIFEST', DEFAULT_MANIFEST))
rootdir = gws.APP_DIR + '/gws'
files = list(gws.lib.os2.find_files(rootdir, r'_test\.py'))
spec = True
if args and not args[0].startswith('-'):
pattern = args.pop(0)
if pattern.startswith('nospec:'):
pattern = pattern.split(':')[1]
spec = False
if pattern:
files = [f for f in files if pattern in f]
if not files:
gws.log.error(f'no files to test')
return
_sort_order = ['/core/', '/lib/', '/base/', '/plugin/']
def _sort_key(path):
for n, s in enumerate(_sort_order):
if s in path:
return n, path
return 99, path
files.sort(key=_sort_key)
if spec:
gws.spec.runtime.create_and_store()
pytest_args = ['-c', CONFIG['PYTEST_INI_PATH'], '--rootdir', rootdir]
pytest_args.extend(args)
pytest_args.extend(files)
gws.log.debug(f'running pytest with args: {pytest_args}')
pytest.main(pytest_args)
##
def setup():
gws.log.debug(f'TEST:setup')
pass
def teardown():
gws.log.debug(f'TEST:teardown')
gws.lib.os2.unlink(SESSION_STORE_PATH)
gws.base.web.web_app.reload()
gws.core.tree.unregister_ext()
gws.config.deactivate()
web_server_command('reset')
##
def configure(config, parse=True):
def _dct2cfg(d):
if isinstance(d, dict):
return gws.Config({k: _dct2cfg(v) for k, v in d.items()})
if isinstance(d, (list, tuple)):
return [_dct2cfg(v) for v in d]
return d
gws.log.debug(f'TEST:configure')
if isinstance(config, str):
config = gws.lib.vendor.slon.parse(config, as_object=True)
dct = gws.deep_merge(GWS_CONFIG_DEFAULTS, config)
config = _dct2cfg(dct)
gws.lib.json2.to_path(GWS_CONFIG_PATH, config, pretty=True)
if parse:
r = gws.config.configure(manifest_path=MANIFEST_PATH, config_path=GWS_CONFIG_PATH)
else:
r = gws.config.configure(manifest_path=MANIFEST_PATH, config=config)
gws.config.activate(r)
gws.config.store(r)
return r
def configure_and_reload(config, parse=True):
def _wait_for_port(service):
while 1:
port = CONFIG[f'service.gws.{service}_port']
url = 'http://' + CONFIG['runner.host_name'] + ':' + str(port)
res = gws.lib.net.http_request(url)
if res.ok:
return
gws.log.debug(f'TEST:waiting for {service}:{port}')
sleep(2)
r = configure(config, parse)
gws.server.control.reload(['mapproxy', 'web'])
for service in 'http', 'mpx':
_wait_for_port(service)
return r
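# A minimal usage sketch in a test module (the config snippet is an assumption):
# def test_config_parses():
#     r = configure('server { log { level "INFO" } }')
#     assert r is not None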
def root():
return gws.config.root()
# requests and responses
def local_request(url, **kwargs):
"""Perform a get request to the local server."""
return gws.lib.net.http_request('http://127.0.0.1' + '/' + url, **kwargs)
class ClientCmdResponse(gws.Data):
status: int
json: dict
cookies: dict
response: werkzeug.wrappers.BaseResponse
def client_cmd_request(cmd, params, cookies=None, headers=None) -> ClientCmdResponse:
gws.log.debug(f'TEST:client_cmd_request {cmd}')
client = _prepare_client(cookies)
resp = client.open(
method='POST',
path='/_/' + cmd,
data=gws.lib.json2.to_string({'params': params}),
content_type='application/json',
headers=headers,
)
js = None
try:
js = gws.lib.json2.from_string(resp.data)
except:
pass
cookie_headers = ';'.join(v for k, v in resp.headers if k == 'Set-Cookie')
response_cookies = {}
mor: http.cookies.Morsel
for k, mor in http.cookies.SimpleCookie(cookie_headers).items():
response_cookies[k] = dict(mor)
response_cookies[k]['value'] = mor.value
return ClientCmdResponse(
status=resp.status_code,
json=js,
cookies=response_cookies,
response=resp,
)
def _prepare_client(cookies):
client = werkzeug.test.Client(
gws.base.web.web_app.application,
werkzeug.wrappers.BaseResponse)
if cookies:
for k, v in cookies.items():
if not v:
client.delete_cookie('localhost', k)
elif isinstance(v, str):
client.set_cookie('localhost', k, v)
else:
client.set_cookie('localhost', k, **v)
return client
# web server
def web_server_command(cmd, params=None):
base_url = f"http://{CONFIG['runner.host_name']}:{CONFIG['service.web.port']}"
params = params or {}
params['cmd'] = cmd
res = gws.lib.net.http_request(
base_url,
data=gws.lib.json2.to_string(params),
method='post'
)
return gws.lib.json2.from_string(res.text)
def web_server_poke(pattern, response):
return web_server_command('poke', {'pattern': pattern, 'response': response})
def web_server_begin_capture():
return web_server_command('begin_capture')
def web_server_end_capture():
res = web_server_command('end_capture')
return [gws.lib.net.parse_url('http://host' + u) for u in res['urls']]
def web_server_create_wms(config):
web_server_command('create_wms', {'config': config})
def web_server_url(url):
base_url = f"http://{CONFIG['runner.host_name']}:{CONFIG['service.web.port']}"
return base_url + '/' + url
# features
def make_features(name, geom_type, columns, crs, xy, rows, cols, gap):
features = []
sx, sy = xy
for r in range(rows):
for c in range(cols):
uid = r * cols + (c + 1)
atts = []
for k, v in columns.items():
val = ''
if v == 'int':
val = uid * 100
if v == 'float':
val = uid * 200.0
if v in ('varchar', 'text'):
val = f"{name}/{uid}"
if v == 'date':
val = datetime.datetime(2019, 1, 1) + datetime.timedelta(days=uid - 1)
atts.append(gws.Attribute(name=k, value=val))
x = sx + c * gap
y = sy + r * gap
geom = None
if geom_type == 'point':
geom = {
'type': 'Point',
'coordinates': [x, y]
}
if geom_type == 'square':
w = h = gap / 2
geom = {
'type': 'Polygon',
'coordinates': [[
[x, y],
[x + w, y],
[x + w, y + h],
[x, y + h],
[x, y],
]]
}
features.append(gws.lib.feature.from_props(gws.Data(
uid=uid,
attributes=atts,
shape={'crs': crs, 'geometry': geom} if geom else None
)))
return features
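# e.g. make_features('pt', 'point', {'p_str': 'varchar'}, 'EPSG:3857', [0, 0], 2, 2, 100)
# builds 4 point features with uids 1..4 on a 2x2 grid spaced 100 map units apart,
# each carrying a p_str attribute like 'pt/3'.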
def geojson_make_features(path, geom_type, columns, crs, xy, rows, cols, gap):
name = gws.lib.os2.parse_path(path)['name']
features = make_features(name, geom_type, columns, crs, xy, rows, cols, gap)
text = gws.lib.json2.to_pretty_string({
'type': 'FeatureCollection',
'crs': {'type': 'name', 'properties': {'name': crs}},
'features': [f.to_geojson() for f in features],
})
write_file_if_changed(path, text)
# postgres
def postgres_connect_params():
return {
'database': CONFIG['service.postgres.database'],
'user': CONFIG['service.postgres.user'],
'password': CONFIG['service.postgres.password'],
'port': CONFIG['service.postgres.port'],
'host': CONFIG['runner.host_name'],
}
def postgres_connection():
return psycopg2.connect(**postgres_connect_params())
def postgres_make_features(name, geom_type, columns, crs, xy, rows, cols, gap):
colnames = list(columns)
coldefs = [f'{c} {columns[c]}' for c in colnames]
features = make_features(name, geom_type, columns, crs, xy, rows, cols, gap)
shape = features[0].shape
if shape:
colnames.append('p_geom')
coldefs.append(f'p_geom GEOMETRY({shape.geometry_type},{shape.srid})')
data = []
for f in features:
rec = [a.value for a in f.attributes]
if f.shape:
rec.append(f.shape.ewkb_hex)
data.append(rec)
conn = postgres_connection()
cur = conn.cursor()
cur.execute(f'BEGIN')
cur.execute(f'DROP TABLE IF EXISTS {name}')
cur.execute(f'''
CREATE TABLE {name} (
id SERIAL PRIMARY KEY,
{','.join(coldefs)}
)
''')
cur.execute(f'COMMIT')
cur.execute(f'BEGIN')
ins = f'''INSERT INTO {name} ({','.join(colnames)}) VALUES %s'''
psycopg2.extras.execute_values(cur, ins, data)
cur.execute(f'COMMIT')
conn.close()
def postgres_drop_table(name):
conn = postgres_connection()
cur = conn.cursor()
cur.execute(f'BEGIN')
cur.execute(f'DROP TABLE IF EXISTS {name}')
cur.execute(f'COMMIT')
conn.close()
# utilities
def make_users_json(lst):
path = '/tmp/gws_test_users.json'
if lst is None:
gws.lib.os2.unlink(path)
return None
for v in lst:
        v['password'] = gws.lib.password.encode(v['password'])
gws.lib.json2.to_path(path, lst)
return path
def register_ext(class_name, cls):
gws.core.tree.register_ext(class_name, cls)
def write_file(path, text):
pp = gws.lib.os2.parse_path(path)
if pp['dirname'].startswith(TEMP_DIR):
gws.ensure_dir(pp['dirname'])
with open(path, 'wt', encoding='utf8') as fp:
fp.write(text)
def read_file(path):
with open(path, 'rt', encoding='utf8') as fp:
return fp.read()
def write_file_if_changed(path, text):
curr = read_file(path)
if text != curr:
write_file(path, text)
def copy_file(path, dir):
shutil.copy(path, dir)
def rel_path(path):
f = inspect.stack(2)[1].filename
return os.path.join(os.path.dirname(f), path)
def sleep(n):
time.sleep(n)
def raises(exc):
return pytest.raises(exc)
def dict_of(x):
if gws.is_data_object(x):
# noinspection PyTypeChecker
return dict(sorted(vars(x).items()))
return x
# div. geodata
class POINTS:
# PT Passy
paris = [254451, 6250716]
# PT Maxplatz
dus = [753834, 6660874]
# Linden x Birken Str
dus1 = [756871, 6661810]
# PT Wehrhahn
dus2 = [756766, 6661801]
# Linden x Mendelssohn Str
dus3 = [757149, 6661832]
# PT Neßler Str.
dus4 = [765513, 6648529]
# PT Gärdet
stockholm = [2014778, 8255502]
# PT Ustinksy Most
moscow = [4189555, 7508535]
# PT Cho Ba Chieu / <NAME>
vietnam = [11877461, 1209716]
# PT Flemington Racecourse / Melbourne
australia = [16131032, -4549421]
# Yarawa Rd x Namara Rd
fiji = [19865901, -2052085]
# Main Road Y junction
pitcairn = [-14482452, -2884039]
# PT Allende
mexico = [-11035867, 2206279]
# Park Av x Carson St
memphis = [-10014603, 4178550]
# PT Broadway & West 3rd
ny = [-8237102, 4972223]
# PT Lime Str
liverpool = [-331463, 7058753]
# PT East India Dock Rd
london = [-48, 6712663]
# PT Tema Harbour
ghana = [201, 627883]
| 1.625
| 2
|
app/comm/psql_wrapper.py
|
viaacode/teamleader2db
| 0
|
12775797
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# @Author: <NAME>
#
# app/comm/psql_wrapper.py
#
# PostgresqlWrapper that wraps postgres connection
# and also allows for our unit and integration tests to more
# easily mock it.
#
import psycopg2
from functools import wraps
class PostgresqlWrapper:
"""Allows for executing SQL statements to a postgresql database"""
def __init__(self, params: dict):
self.params_postgresql = params
def _connect_curs_postgresql(function):
"""Wrapper function that connects and authenticates to the PostgreSQL DB.
The passed function will receive the open cursor.
"""
@wraps(function)
def wrapper_connect(self, *args, **kwargs):
with psycopg2.connect(**self.params_postgresql) as conn:
with conn.cursor() as curs:
val = function(self, cursor=curs, *args, **kwargs)
return val
return wrapper_connect
@_connect_curs_postgresql
def execute(self, query: str, vars=None, cursor=None):
"""Connects to the postgresql DB and executes the statement.
Returns all results of the statement if applicable.
"""
cursor.execute(query, vars)
if cursor.description is not None:
return cursor.fetchall()
@_connect_curs_postgresql
def executemany(self, query: str, vars_list: list, cursor=None):
"""Connects to the postgresql DB and executes the many statement"""
cursor.executemany(query, vars_list)
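# A minimal usage sketch (the connection parameters are assumptions):
# db = PostgresqlWrapper({'host': 'localhost', 'port': 5432,
#                         'dbname': 'mydb', 'user': 'me', 'password': 'secret'})
# rows = db.execute('SELECT id, name FROM items WHERE id = %s', (1,))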
| 3.015625
| 3
|
vectorhub/encoders/video/sampler.py
|
boba-and-beer/vectorhub
| 385
|
12775798
|
<gh_stars>100-1000
from math import ceil
import numpy as np
import os
import tempfile
from ...import_utils import *
if is_all_dependency_installed('encoders-video'):
import librosa
import soundfile as sf
from cv2 import cv2
from moviepy.video.io.ffmpeg_reader import ffmpeg_parse_infos
from moviepy.video.io.VideoFileClip import VideoFileClip
class FrameSamplingFilter():
def __init__(self, every=None, hertz=None, top_n=None):
if every is None and hertz is None and top_n is None:
raise ValueError("When initializing the FrameSamplingFilter, "
"one of the 'every', 'hertz', or 'top_n' must "
"be specified.")
self.every = every
self.hertz = hertz
self.top_n = top_n
def get_audio_sampling_rate(self, filename: str):
infos = ffmpeg_parse_infos(filename)
fps = infos.get('audio_fps', 44100)
if fps == 'unknown':
fps = 44100
return fps
def load_clip(self, filename: str):
audio_fps = self.get_audio_sampling_rate(filename)
self.clip = VideoFileClip(filename, audio_fps)
def initialize_video(self, filename: str):
self.filename = filename
self.load_clip(filename)
self.fps = self.clip.fps
self.width = self.clip.w
self.height = self.clip.h
self.frame_index = range(int(ceil(self.fps * self.clip.duration)))
self.duration = self.clip.duration
self.n_frames = len(self.frame_index)
def get_audio_vector(self, new_sampling_rate: int = 16000):
fd, fp = tempfile.mkstemp()
audio = f'{fp}.wav'
self.clip.audio.to_audiofile(audio)
data, sampling_rate = sf.read(audio, dtype='float32')
os.close(fd)
os.remove(audio)
return np.array(librosa.resample(data.T, sampling_rate, new_sampling_rate))
def transform(self, filename: str):
self.initialize_video(filename)
if (self.every is not None):
new_idx = range(self.n_frames)[::self.every]
elif (self.hertz is not None):
interval = self.fps / float(self.hertz)
new_idx = np.arange(0, self.n_frames, interval).astype(int)
new_idx = list(new_idx)
elif self.top_n is not None:
diffs = []
for i, img in enumerate(range(self.n_frames)):
if i == 0:
last = img
continue
pixel_diffs = cv2.sumElems(cv2.absdiff(
self.get_frame(last), self.get_frame(img)))
diffs.append(sum(pixel_diffs))
last = img
new_idx = sorted(range(len(diffs)),
key=lambda i: diffs[i],
reverse=True)[:self.top_n]
result = []
for index in new_idx:
result.append(self.get_frame(index))
return result
def get_frame(self, index: int):
return self.clip.get_frame(index)
def iter_frames(self):
for i, f in enumerate(self.frame_index):
yield self.get_frame(f)
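# A minimal usage sketch (the video path is an assumption):
# sampler = FrameSamplingFilter(hertz=1)   # roughly one frame per second of video
# frames = sampler.transform('clip.mp4')   # list of sampled frames as numpy arrays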
| 2.625
| 3
|
gigbackend/about/migrations/0002_auto_20201225_1328.py
|
sourabhmandal/goofygig
| 0
|
12775799
|
<gh_stars>0
# Generated by Django 3.1.4 on 2020-12-25 07:58
from django.db import migrations
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.images.blocks
class Migration(migrations.Migration):
dependencies = [
('about', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='aboutpage',
name='about_section',
            field=wagtail.core.fields.StreamField([('Section_Block', wagtail.core.blocks.StructBlock([('section_pictue', wagtail.images.blocks.ImageChooserBlock(blank=True, null=True, required=False)), ('section_title', wagtail.core.blocks.CharBlock(blank=True, max_length=500, null=True, required=True)), ('section_description', wagtail.core.blocks.RichTextBlock(blank=True, max_length=500, null=True, required=True))]))], blank=True, null=True),
),
migrations.AlterField(
model_name='aboutpage',
name='hero_about',
field=wagtail.core.fields.StreamField([('Title', wagtail.core.blocks.CharBlock()), ('subtitle', wagtail.core.blocks.CharBlock()), ('profile_image', wagtail.images.blocks.ImageChooserBlock())], blank=True, null=True),
),
]
| 1.835938
| 2
|
upcfcardsearch/c246.py
|
ProfessorSean/Kasutamaiza
| 0
|
12775800
|
<filename>upcfcardsearch/c246.py<gh_stars>0
import discord
from discord.ext import commands
from discord.utils import get
class c246(commands.Cog, name="c246"):
def __init__(self, bot: commands.Bot):
self.bot = bot
@commands.command(name='Magia_Dance_Sacramentum', aliases=['c246', 'Magia_11'])
async def example_embed(self, ctx):
embed = discord.Embed(title='Magia Dance Sacramentum',
color=0xBC5A84)
embed.set_thumbnail(url='https://www.duelingbook.com/images/custom-pics/2300000/2359467.jpg')
embed.add_field(name='Status (Archetype)', value='Casual:3/Tournament:3 (Magia)', inline=True)
embed.add_field(name='Type', value='Trap/Counter', inline=False)
embed.add_field(name='Card Effect', value='When a card or effect is activated: Banish 2 "Magia" cards from your GY; negate the activation, and if you do, place that card on the bottom of the Deck. You can only activate 1 "Magia Dance Sacramentum" per turn.', inline=False)
embed.set_footer(text='Set Code: ANCF')
await ctx.send(embed=embed)
def setup(bot: commands.Bot):
bot.add_cog(c246(bot))
| 2.90625
| 3
|
config.py
|
durbar/AllSpark
| 1
|
12775801
|
PORT=5000
HOST='127.0.0.1'
DEBUG=True
| 1.117188
| 1
|
tree_classifier.py
|
kubapok/human-motion-classification
| 2
|
12775802
|
import csv
from sklearn import tree
class tree_classifier():
classes_dict = {0: 'going_left',
1: 'going_right',
2: 'falling',
3: 'just_sitting',
4: 'just_standing',
5: 'just_lying',
6: 'sitting_down',
7: 'standing_up'}
    @staticmethod
    def train():
going_left = tree_classifier.load_class('going_left')
going_left_value = [0 for _ in range(len(going_left))]
going_right = tree_classifier.load_class('going_right')
going_right_value = [1 for _ in range(len(going_right))]
falling = tree_classifier.load_class('falling')
falling_value = [2 for _ in range(len(falling))]
just_sitting = tree_classifier.load_class('just_sitting')
just_sitting_value = [3 for _ in range(len(just_sitting))]
just_standing = tree_classifier.load_class('just_standing')
just_standing_value = [4 for _ in range(len(just_standing))]
lying_down = tree_classifier.load_class('lying_down')
lying_down_value = [5 for _ in range(len(lying_down))]
sitting_down = tree_classifier.load_class('sitting_down')
sitting_down_value = [6 for _ in range(len(sitting_down))]
standing_up = tree_classifier.load_class('standing_up')
standing_up_value = [7 for _ in range(len(standing_up))]
X = going_left + going_right + falling + just_sitting + \
just_standing + lying_down + sitting_down + standing_up
Y = going_left_value + going_right_value + falling_value + just_sitting_value + \
just_standing_value + lying_down_value + sitting_down_value + standing_up_value
tree_classifier.clf = tree.DecisionTreeClassifier(max_depth = 10)
tree_classifier.clf.fit(X, Y)
return tree_classifier.clf.predict([[43.48047639929654, 4.3354936021207635, 3.59]])
    @staticmethod
    def predict(sample):
return tree_classifier.clf.predict([sample])[0]
    @staticmethod
    def load_class(class_name):
l2 = []
with open(class_name + '.tsv', 'r') as tsv:
for line in csv.reader(tsv, quotechar='\t'):
l2.append(line[0].split())
l = []
for x in l2:
l.append([float(r) for r in x])
return l
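# A minimal usage sketch (the feature vector must match the 3-value rows in the TSVs):
# tree_classifier.train()
# label_id = tree_classifier.predict([43.48, 4.34, 3.59])
# print(tree_classifier.classes_dict[label_id])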
print(tree_classifier.train())
| 3
| 3
|
examples/example_egscollabE2.py
|
GeoDesignTool/GeoDT
| 0
|
12775803
|
<gh_stars>0
# ****************************************************************************
#### EGS collab example
# ****************************************************************************
# ****************************************************************************
#### standard imports
# ****************************************************************************
import numpy as np
import matplotlib.pyplot as plt
import GeoDT as gt
import pylab
import copy
import math
deg = gt.deg
MPa = gt.MPa
GPa = gt.GPa
yr = gt.yr
cP = gt.cP
mD = gt.mD
mLmin = 1.66667e-8 #m3/s
gal = 1.0/264.172 #m3
# ****************************************************************************
#### model setup
# ****************************************************************************
#full randomizer
for i in range(0,100):
#create model object
geom = []
geom = gt.mesh()
# #rock properties
geom.rock.size = 140.0 #m #!!!
geom.rock.ResDepth = np.random.uniform(1250.0,1250.0) #6000.0 # m #!!!
geom.rock.ResGradient = np.random.uniform(34.0,36.0) #50.0 #56.70 # C/km; average = 25 C/km #!!!
geom.rock.ResRho = np.random.uniform(2925.0,3040.0) #2700.0 # kg/m3 #!!!
geom.rock.ResKt = np.random.uniform(2.55,3.81) #2.5 # W/m-K #!!!
geom.rock.ResSv = np.random.uniform(1900.0,2200.0) #2063.0 # kJ/m3-K
geom.rock.AmbTempC = np.random.uniform(20.0,20.0) #25.0 # C #!!!
geom.rock.AmbPres = 0.101 #Example: 0.01 MPa #Atmospheric: 0.101 # MPa
geom.rock.ResE = np.random.uniform(89.0,110.0)*GPa #50.0*GPa #!!!
geom.rock.Resv = np.random.uniform(0.17,0.28) #0.3 #!!!
geom.rock.Ks3 = 0.26197 #np.random.uniform(0.5,0.5) #0.5 #!!!
geom.rock.Ks2 = 1.05421 #geom.rock.Ks3 + np.random.uniform(0.4,0.6) # 0.75 #!!!
geom.rock.s3Azn = 14.4*deg #!!!
geom.rock.s3AznVar = 5.0*deg #!!!
geom.rock.s3Dip = 27.0*deg #!!!
geom.rock.s3DipVar = 5.0*deg #!!!
#fracture orientation parameters #[i,:] set, [0,0:2] min, max --or-- nom, std
# geom.rock.fNum = np.asarray([int(np.random.uniform(0,30)),
# int(np.random.uniform(0,30)),
# int(np.random.uniform(0,30))],dtype=int) #count
# r1 = np.random.uniform(50.0,800.0)
# r2 = np.random.uniform(50.0,800.0)
# r3 = np.random.uniform(50.0,800.0)
# geom.rock.fDia = np.asarray([[r1,r1+np.random.uniform(100.0,800.0)],
# [r2,r2+np.random.uniform(100.0,800.0)],
# [r3,r3+np.random.uniform(100.0,800.0)]],dtype=float) #m
# geom.rock.fStr = np.asarray([[np.random.uniform(0.0,360.0)*deg,np.random.uniform(0.0,90.0)*deg],
# [np.random.uniform(0.0,360.0)*deg,np.random.uniform(0.0,90.0)*deg],
# [np.random.uniform(0.0,360.0)*deg,np.random.uniform(0.0,90.0)*deg]],dtype=float) #m
# geom.rock.fDip = np.asarray([[np.random.uniform(0.0,90.0)*deg,np.random.uniform(0.0,45.0)*deg],
# [np.random.uniform(0.0,90.0)*deg,np.random.uniform(0.0,45.0)*deg],
# [np.random.uniform(0.0,90.0)*deg,np.random.uniform(0.0,45.0)*deg]],dtype=float) #m
geom.rock.fNum = np.asarray([int(np.random.uniform(0,30)),
int(np.random.uniform(0,30)),
int(np.random.uniform(0,30))],dtype=int) #count
geom.rock.fDia = np.asarray([[50.0,180.0],
[50.0,180.0],
[50.0,180.0]],dtype=float) #m
#EGS Collab #!!!
geom.rock.fStr = np.asarray([[15.0*deg,7.0*deg],
[260.0*deg,7.0*deg],
[120.0*deg,7.0*deg,]],dtype=float) #m
geom.rock.fDip = np.asarray([[35.0*deg,7.0*deg],
[69.0*deg,7.0*deg,],
[35.0*deg,7.0*deg]],dtype=float) #m
#fracture hydraulic parameters
geom.rock.gamma = np.asarray([10.0**-3.0,10.0**-2.0,10.0**-1.2])
geom.rock.n1 = np.asarray([1.0,1.0,1.0])
geom.rock.a = np.asarray([0.000,0.200,0.800])
geom.rock.b = np.asarray([0.999,1.0,1.001])
geom.rock.N = np.asarray([0.0,0.6,2.0])
geom.rock.alpha = np.asarray([2.0e-9,2.9e-8,10.0e-8])
geom.rock.bh = np.asarray([0.00000001,0.00005,0.0001]) #np.asarray([0.00005,0.00010,0.00020]) #!!!
# #fracture hydraulic parameters
# # r = np.random.exponential(scale=0.25,size=2)
# # r[r>1.0] = 1.0
# # r[r<0] = 0.0
# # r = r*(0.100/MPa-0.001/MPa)+0.001/MPa
# # u1 = -np.min(r)
# # u2 = -np.max(r)
# # u3 = 0.5*(u1+u2)
# # geom.rock.alpha = np.asarray([u1,u3,u2])
# geom.rock.alpha = np.asarray([-0.028/MPa,-0.028/MPa,-0.028/MPa])
# # r = np.random.exponential(scale=0.25,size=2)
# # r[r>1.0] = 1.0
# # r[r<0] = 0.0
# # r = r*(0.1-0.001)+0.001
# # u1 = np.min(r)
# # u2 = np.max(r)
# # u3 = 0.5*(u1+u2)
# # geom.rock.gamma = np.asarray([u1,u3,u2])
# geom.rock.gamma = np.asarray([0.01,0.01,0.01])
# geom.rock.n1 = np.asarray([1.0,1.0,1.0])
# # r = np.random.exponential(scale=0.25,size=2)
# # r[r>1.0] = 1.0
# # r[r<0] = 0.0
# # r = r*(0.2-0.012)+0.012
# # u1 = np.min(r)
# # u2 = np.max(r)
# # u3 = 0.5*(u1+u2)
# # geom.rock.a = np.asarray([u1,u3,u2])
# geom.rock.a = np.asarray([0.05,0.05,0.05])
# # u1 = np.random.uniform(0.7,0.9)
# # u2 = np.random.uniform(0.7,0.9)
# # u3 = 0.5*(u1+u2)
# # geom.rock.b = np.asarray([np.min([u1,u2]),u3,np.max([u1,u2])])
# geom.rock.b = np.asarray([0.8,0.8,0.8])
# # u1 = np.random.uniform(0.2,1.2)
# # u2 = np.random.uniform(0.2,1.2)
# # u3 = 0.5*(u1+u2)
# # geom.rock.N = np.asarray([np.min([u1,u2]),u3,np.max([u1,u2])])
# geom.rock.N = np.asarray([0.2,0.5,1.2])
# # u1 = np.random.uniform(0.00005,0.00015)
# # u2 = np.random.uniform(0.00005,0.00015)
# # u3 = 0.5*(u1+u2)
# # geom.rock.bh = np.asarray([np.min([u1,u2]),u3,np.max([u1,u2])])
# geom.rock.bh = np.asarray([0.00005,0.0001,0.003])
geom.rock.bh_min = 0.00000005 #m #!!!
geom.rock.bh_max = 0.0001 #0.02000 #m #!!!
# geom.rock.bh_bound = np.random.uniform(0.001,0.005)
geom.rock.bh_bound = np.random.uniform(0.00000005,0.0001) #!!!
geom.rock.f_roughness = np.random.uniform(0.25,1.0) #0.8
#well parameters
geom.rock.w_count = 4 #2 #wells
geom.rock.w_spacing = 30.0 #np.random.uniform(100.0,800.0) #300.0 #m
geom.rock.w_length = 60.0 #1500.0 #800.0 #m
geom.rock.w_azimuth = 60.0*deg #geom.rock.s3Azn + np.random.uniform(-15.0,15.0)*deg #rad
geom.rock.w_dip = 20.0*deg #geom.rock.s3Dip + np.random.uniform(-15.0,15.0)*deg #rad
geom.rock.w_proportion = 0.8 #m/m
geom.rock.w_phase = 15.0*deg #rad
geom.rock.w_toe = -35.0*deg #rad
geom.rock.w_skew = 0.0*deg #rad
geom.rock.w_intervals = 2 #int(np.random.uniform(1,3)) #3 #breaks in well length
geom.rock.ra = 0.1*0.0191 #limited by a_max, actual is 0.096 m #!!!
geom.rock.rgh = 80.0 #!!!
#cement properties
geom.rock.CemKt = 2.0 # W/m-K
geom.rock.CemSv = 2000.0 # kJ/m3-K
#thermal-electric power parameters
geom.rock.GenEfficiency = 0.85 # kWe/kWt
geom.rock.LifeSpan = 1.0*yr/2 #years #!!!
geom.rock.TimeSteps = 41 #steps
geom.rock.p_whp = 1.0*MPa #Pa
geom.rock.Tinj = 10.0 #95.0 #C #!!!
geom.rock.H_ConvCoef = 3.0 #kW/m2-K
geom.rock.dT0 = 1.0 #K #!!!
geom.rock.dE0 = 50.0 #kJ/m2 #!!!
#water base parameters
geom.rock.PoreRho = 980.0 #kg/m3 starting guess
geom.rock.Poremu = 0.9*cP #Pa-s
geom.rock.Porek = 0.1*mD #m2
geom.rock.Frack = 100.0*mD #m2
#stimulation parameters
if geom.rock.w_intervals == 1:
geom.rock.perf = int(np.random.uniform(1,1))
else:
geom.rock.perf = 1
geom.rock.r_perf = 5.0 #m #!!!
geom.rock.sand = 0.3 #sand ratio in frac fluid
geom.rock.leakoff = 0.0 #Carter leakoff
# geom.rock.dPp = -1.0*np.random.uniform(1.0,10.0)*MPa #-2.0*MPa #production well pressure drawdown
geom.rock.dPp = -2.0*MPa #production well pressure drawdown
geom.rock.dPi = 0.1*MPa #!!!
geom.rock.stim_limit = 5
# geom.rock.Qinj = 0.01 #m3/s
# geom.rock.Qstim = 0.01 #0.08 #m3/s
# geom.rock.Vstim = 1000.0 #100000.0 #m3
geom.rock.bval = 1.0 #Gutenberg-Richter magnitude scaling
# u1 = np.random.uniform(20.0,55.0)*deg
# u2 = np.random.uniform(20.0,55.0)*deg
# u3 = 0.5*(u1+u2)
# geom.rock.phi = np.asarray([np.min([u1,u2]),u3,np.max([u1,u2])]) #rad
geom.rock.phi = np.asarray([20.0*deg,35.0*deg,45.0*deg]) #rad #!!!
# u1 = np.random.uniform(5.0,20.0)*MPa
# u2 = np.random.uniform(5.0,20.0)*MPa
# u3 = 0.5*(u1+u2)
# geom.rock.mcc = np.asarray([np.min([u1,u2]),u3,np.max([u1,u2])]) #Pa
geom.rock.mcc = np.asarray([2.0,10.0,15.0])*MPa #Pa #!!!
geom.rock.hfmcc = np.random.uniform(0.0,0.2)*MPa #0.1*MPa #!!!
geom.rock.hfphi = np.random.uniform(15.0,35.0)*deg #30.0*deg #!!!
#**********************************
#recalculate base parameters
geom.rock.re_init()
#generate domain
geom.gen_domain()
#generate natural fractures
geom.gen_joint_sets()
# ****************************************************************************
#### placed fractures
# ****************************************************************************
data = np.recfromcsv('Well Geometry Info.csv',delimiter=',',filling_values=np.nan,deletechars='()',case_sensitive=True,names=True)
dia = 5.0
for i in range(0,len(data)):
if data['type'][i] == 1:
c0 = [data['x_m'][i],data['y_m'][i],data['z_m'][i]]
dia = dia
strike = data['azn_deg'][i]*deg
dip = data['dip_deg'][i]*deg
geom.gen_fixfrac(False,c0,dia,strike,dip)
# ****************************************************************************
#### common well geometry
# ****************************************************************************
#generate wells
# wells = []
# geom.gen_wells(True,wells)
# well geometry using gyro log for E2-TC and centered on kickoff point of E2-TC, other wells based on original design Jan 2021
#gt.line(x0=0.0,y0=0.0,z0=0.0,length=1.0,azn=0.0*deg,dip=0.0*deg,w_type='pipe',dia=0.0254*3.0,rough=80.0)
wells = []
#monitoring
wells += [gt.line(5.0282856,51.9184128,-0.931164,49.9872,0,1.570796327,'producer',geom.rock.ra, geom.rock.rgh)]
wells += [gt.line(8.019288,53.2199088,0.2785872,10.668,0.788888822,0.093724181,'producer',geom.rock.ra, geom.rock.rgh)]
wells += [gt.line(9.0034872,51.1996944,-0.3691128,59.436,1.752310569,0.67718775,'producer',geom.rock.ra, geom.rock.rgh)]
wells += [gt.line(9.1452192,51.3560568,0.001524,59.436,1.761037215,0.151843645,'producer',geom.rock.ra, geom.rock.rgh)]
wells += [gt.line(-1.00584,41.0135832,-0.3447288,54.864,2.138028334,0.616101226,'producer',geom.rock.ra, geom.rock.rgh)]
wells += [gt.line(-1.0107168,41.192196,0.185928,54.864,2.038544566,0.02443461,'producer',geom.rock.ra, geom.rock.rgh)]
#injector
#wells += [gt.line(0,0,0,77.10353424,0.832584549,0.257981302,'injector',geom.rock.ra, geom.rock.rgh)]
#producers
wells += [gt.line(-0.6257544,0.4255008,-0.0356616,76.2,0.740019603,0.230383461,'producer',geom.rock.ra, geom.rock.rgh)]
wells += [gt.line(-0.7824216,0.4474464,-0.3249168,76.2,0.841248699,0.390953752,'producer',geom.rock.ra, geom.rock.rgh)]
wells += [gt.line(0.1679448,-0.4300728,0.0917448,76.2,0.900589894,0.132645023,'producer',geom.rock.ra, geom.rock.rgh)]
wells += [gt.line(0.2505456,-0.760476,-0.1063752,80.772,1.007054978,0.287979327,'producer',geom.rock.ra, geom.rock.rgh)]
#drift
wells += [gt.line(-4.3290744,-70.1844672,-0.077724,155.7528,0.019024089,0,'producer',geom.rock.ra, geom.rock.rgh)]
wells += [gt.line(-2.1954744,51.7355328,-0.077724,11.5824,1.584060829,0,'producer',geom.rock.ra, geom.rock.rgh)]
#wells += [gt.line(-1.9211544,85.5073728,-0.077724,77.724,6.119124357,0,'producer',geom.rock.ra, geom.rock.rgh)]
#wells += [gt.line(-8.5657944,125.4056928,-0.077724,25.908,0.944223125,0,'producer',geom.rock.ra, geom.rock.rgh)]
#wells += [gt.line(12.4958856,140.6456928,-0.077724,29.8704,5.246459731,0,'producer',geom.rock.ra, geom.rock.rgh)]
#split injector in different locations
for s in range(0,5):
#copy natural fracture geometry
comm = []
comm = copy.deepcopy(geom)
wells2 = []
wells2 = copy.deepcopy(wells)
#select interval
zone_leg = 1.5 #packer interval length
comm.rock.sand = zone_leg #!!!
rati_leg = np.random.uniform(0.3,0.95) #interval center depth
comm.rock.leakoff = rati_leg #!!!
azn = 0.832584549
dip = 0.257981302
leg = 77.10353424
vAxi = np.asarray([math.sin(azn)*math.cos(-dip), math.cos(azn)*math.cos(-dip), math.sin(-dip)])
x0 = np.asarray([0.0, 0.0, 0.0])
x1 = x0 + vAxi*(rati_leg*leg - 0.5*zone_leg)
x2 = x0 + vAxi*(rati_leg*leg + 1.0*zone_leg)
wells2 += [gt.line(x0[0],x0[1],x0[2],rati_leg*leg - 1.0*zone_leg,0.832584549,0.257981302,'producer',comm.rock.ra, comm.rock.rgh)]
wells2 += [gt.line(x1[0],x1[1],x1[2],1.0*zone_leg,0.832584549,0.257981302,'injector',comm.rock.ra, comm.rock.rgh)]
wells2 += [gt.line(x2[0],x2[1],x2[2],leg-(rati_leg*leg + 1.0*zone_leg),0.832584549,0.257981302,'producer',comm.rock.ra, comm.rock.rgh)]
#install
comm.wells = wells2
#stimulate
comm.rock.Qstim = np.random.uniform(500.0*mLmin, 10000.0*mLmin) #0.08 #m3/s
comm.rock.Vstim = np.random.uniform(100.0*gal, 10000.0*gal) #100000.0 #m3
comm.dyn_stim(Vinj=comm.rock.Vstim,Qinj=comm.rock.Qstim,target=[],
visuals=False,fname='stim')
#test multiple randomly selected flow rates
rates = np.random.uniform(100.0*mLmin,10000.0*mLmin,5)
for r in rates:
#copy base parameter set
base = []
base = copy.deepcopy(comm)
#set rate
base.rock.Qinj = r
base.rock.re_init()
#random identifier (statistically should be unique)
pin = np.random.randint(100000000,999999999,1)[0]
try:
# if True:
# #single rate long term flow
# base.dyn_flow(target=[],visuals=False,fname='run_%i' %(pin))
# #stim then flow
# base.stim_and_flow(target=[],visuals=False,fname='run_%i' %(pin))
#Solve production
base.dyn_stim(Vinj=base.rock.Vinj,Qinj=base.rock.Qinj,target=[],
visuals=False,fname='run_%i' %(pin))
#calculate heat transfer
base.get_heat(plot=True)
plt.savefig('plt_%i.png' %(pin), format='png')
plt.close()
except:
print( 'solver failure!')
#show flow model
base.build_vtk(fname='fin_%i' %(pin))
if False: #3D temperature visual
base.build_pts(spacing=50.0,fname='fin_%i' %(pin))
#save primary inputs and outputs
x = base.save('inputs_results_collabE2.txt',pin)
#show plots
pylab.show()
| 2.046875
| 2
|
dev/mixtures/TableA7_to_JSON.py
|
tarment10/CoolProp
| 0
|
12775804
|
from CoolProp.CoolProp import get_fluid_param_string
lines = open('KunzWagner2012_TableA7.txt','r').read()
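# The table file is assumed to contain blank-line-separated chunks, each headed by a
# "Name1/Name2" line and followed by rows of either 4 (i d t n) or
# 8 (i d t n eta epsilon beta gamma) space-separated values.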
template = """{{"Name1" : "{Name1:s}",
"Name2" : "{Name2:s}",
"CAS1" : "{CAS1:s}",
"CAS2" : "{CAS2:s}",
"d" : {d:s},
"t" : {t:s},
"n" : {n:s},
"eta" : {eta:s},
"epsilon" : {epsilon:s},
"beta": {beta:s},
"gamma": {gamma:s}
}},"""
chunks = lines.split('\n\n')
for chunk in chunks:
lines = chunk.split('\n')
D,T,N,ETA,EPSILON,BETA,GAMMA = [0],[0],[0],[0],[0],[0],[0]
names = lines.pop(0)
for line in lines:
vals = line.strip().split(' ')
if len(vals) == 4:
i, d, t, n = vals
eta = 0
epsilon = 0
beta = 0
gamma = 0
else:
i, d, t, n, eta, epsilon, beta, gamma = vals
D.append(int(d))
T.append(float(t))
N.append(float(n))
ETA.append(float(eta))
EPSILON.append(float(epsilon))
BETA.append(float(beta))
GAMMA.append(float(gamma))
name1,name2 = names.split('/')
CAS1 = get_fluid_param_string(name1,'CAS')
CAS2 = get_fluid_param_string(name2,'CAS')
print(template.format(Name1 = name1,
Name2 = name2,
CAS1 = CAS1,
CAS2 = CAS2,
d = str(D),
t = str(T),
n = str(N),
eta = str(ETA),
epsilon= str(EPSILON),
beta = str(BETA),
gamma = str(GAMMA)
))
| 2.3125
| 2
|
graph_generation/attribute_generator.py
|
googleinterns/data-dependency-graph-analysis
| 4
|
12775805
|
"""
This module implements methods for generating random attributes from nodes in a graph based on distribution and range.
Method generate() will create all the necessary attributes for the graph:
Collection: name.
Dataset collection: name.
System collection: name.
System: system criticality, environment, description, name, regex grouping.
Dataset: slo, environment, description, name, regex grouping.
Data integrity: reconstruction time, volatility, regeneration time, restoration time.
Dataset processing: impact, freshness.
"""
import random
class AttributeGenerator:
"""
A class to generate random attributes for nodes based on distribution or range of values.
...
Attributes:
collection_params: Instance of CollectionParams.
dataset_params: Instance of DatasetParams.
system_params: Instance of SystemParams.
data_integrity_params: Instance of DataIntegrityParams.
processing_params: Instance of ProcessingParams.
connection_params: Instance of ConnectionParams.
dataset_attributes: Dictionary with keys as attribute type, and value lists of generated attributes.
system_attributes: Dictionary with keys as attribute type, and value lists of generated attributes.
dataset_processing_attributes: Dictionary with keys as attribute type, and value lists of generated attributes.
data_integrity_attributes: Dictionary with keys as attribute type, and value lists of generated attributes.
Methods:
_generate_time()
Generates time strings from given range in seconds.
_generate_from_proba()
Generates value from given probability map.
_generate_dataset_attributes()
Generates all necessary dataset attributes.
_generate_system_attributes()
Generates all necessary system attributes.
_generate_processing_attributes()
Generates all dataset processing attributes.
_generate_data_integrity_attributes()
Generates all data integrity attributes.
generate()
Generates all the needed attributes for data dependency mapping graph.
"""
def __init__(self, collection_params, dataset_params, system_params, data_integrity_params, processing_params,
connection_params):
self.collection_params = collection_params
self.dataset_params = dataset_params
self.system_params = system_params
self.data_integrity_params = data_integrity_params
self.processing_params = processing_params
self.connection_params = connection_params
self.collection_attributes = {}
self.dataset_collection_attributes = {}
self.system_collection_attributes = {}
self.dataset_attributes = {}
self.system_attributes = {}
self.dataset_processing_attributes = {}
self.data_integrity_attributes = {}
@staticmethod
def _generate_time(n=1):
"""Generates n random time strings in format 1d / 25h / 121m / 46s"""
generated_time = []
time_ranges = {
"d": (1, 30),
"h": (1, 120),
"m": (1, 720),
"s": (1, 360)
}
for i in range(n):
time_metric = random.choice(list(time_ranges.keys()))
time_value = random.randint(time_ranges[time_metric][0], time_ranges[time_metric][1])
generated_time.append(f"{time_value}{time_metric}")
return generated_time
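    # e.g. _generate_time(n=2) might return ['12h', '45s'] (values and units are random).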
@staticmethod
def _generate_from_proba(proba_map, n=1):
"""Generates n random values with replacement from map using their probability."""
population = list(proba_map.keys())
probability = list(proba_map.values())
# Normalise probability
probability = [i / sum(probability) for i in probability]
return random.choices(population, probability, k=n)
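    # e.g. _generate_from_proba({'prod': 3, 'dev': 1}, n=4) might return
    # ['prod', 'prod', 'dev', 'prod']; weights are normalised before sampling.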
@staticmethod
def _generate_description(node_type, node_id):
"""Generates random description for a node (ex. Dataset number 1.)."""
return f"{node_type.capitalize()} number {node_id}."
@staticmethod
def _generate_regex(node_type, node_id):
"""Generates random regex grouping."""
return f"{node_type}.{node_id}.*"
@staticmethod
def _generate_name(node_type, node_id):
"""Generates random node name."""
return f"{node_type}.{node_id}"
def _generate_collection_attributes(self):
"""Generates name for collections."""
collection_names = [self._generate_name("collection", i) for i in range(self.collection_params.collection_count)]
self.collection_attributes["names"] = collection_names
def _generate_dataset_collection_attributes(self):
"""Generates name for dataset collections."""
dataset_collection_names = [self._generate_name("dataset collection", i)
for i in range(self.collection_params.dataset_collection_count)]
self.dataset_collection_attributes["names"] = dataset_collection_names
def _generate_system_collection_attributes(self):
"""Generates name for system collections."""
system_collection_names = [self._generate_name("system collection", i)
for i in range(self.collection_params.system_collection_count)]
self.system_collection_attributes["names"] = system_collection_names
def _generate_dataset_attributes(self):
"""Generates slo, environments, regex groupings and names for datasets."""
dataset_descriptions = [self._generate_description("dataset", i) for i in range(self.dataset_params.dataset_count)]
dataset_regexs = [self._generate_regex("dataset", i) for i in range(self.dataset_params.dataset_count)]
dataset_names = [self._generate_name("dataset", i) for i in range(self.dataset_params.dataset_count)]
dataset_slos = self._generate_time(n=self.dataset_params.dataset_count)
# View counts as probability of being picked
dataset_environments = self._generate_from_proba(self.dataset_params.dataset_env_count_map,
n=self.dataset_params.dataset_count)
self.dataset_attributes["descriptions"] = dataset_descriptions
self.dataset_attributes["names"] = dataset_names
self.dataset_attributes["regex_groupings"] = dataset_regexs
self.dataset_attributes["dataset_slos"] = dataset_slos
self.dataset_attributes["dataset_environments"] = dataset_environments
def _generate_system_attributes(self):
"""Generates system criticality, system environments, regex groupings, names and descriptions for systems."""
system_descriptions = [self._generate_description("system", i) for i in range(self.system_params.system_count)]
system_regexs = [self._generate_regex("system", i) for i in range(self.system_params.system_count)]
system_names = [self._generate_name("system", i) for i in range(self.system_params.system_count)]
system_criticalities = self._generate_from_proba(self.system_params.system_criticality_proba_map,
n=self.system_params.system_count)
# View counts as probability of being picked
system_environments = self._generate_from_proba(self.system_params.system_env_count_map,
n=self.system_params.system_count)
self.system_attributes["regex_groupings"] = system_regexs
self.system_attributes["names"] = system_names
self.system_attributes["descriptions"] = system_descriptions
self.system_attributes["system_criticalities"] = system_criticalities
self.system_attributes["system_environments"] = system_environments
def _generate_processing_attributes(self):
"""Generates dataset impacts and dataset freshness."""
dataset_impacts = self._generate_from_proba(self.processing_params.dataset_impact_proba_map,
n=self.connection_params.dataset_system_connection_count)
dataset_freshness = self._generate_from_proba(self.processing_params.dataset_criticality_proba_map,
n=self.connection_params.dataset_system_connection_count)
self.dataset_processing_attributes["dataset_impacts"] = dataset_impacts
self.dataset_processing_attributes["dataset_freshness"] = dataset_freshness
def _generate_data_integrity_attributes(self):
"""Generates restoration, regeneration, reconstruction times and volatility for each dataset collection."""
data_restoration_time = self._generate_time(n=self.collection_params.dataset_collection_count)
data_regeneration_time = self._generate_time(n=self.collection_params.dataset_collection_count)
data_reconstruction_time = self._generate_time(n=self.collection_params.dataset_collection_count)
data_volatility = self._generate_from_proba(self.data_integrity_params.data_volatility_proba_map,
n=self.collection_params.dataset_collection_count)
self.data_integrity_attributes["data_restoration_time"] = data_restoration_time
self.data_integrity_attributes["data_regeneration_time"] = data_regeneration_time
self.data_integrity_attributes["data_reconstruction_time"] = data_reconstruction_time
self.data_integrity_attributes["data_volatility"] = data_volatility
def generate(self):
"""Generates all needed attributes."""
self._generate_collection_attributes()
self._generate_dataset_collection_attributes()
self._generate_system_collection_attributes()
self._generate_dataset_attributes()
self._generate_system_attributes()
self._generate_processing_attributes()
self._generate_data_integrity_attributes()
| 3.546875
| 4
|
tests/models/test_vtk_sphere_model.py
|
NMontanaBrown/scikit-surgeryvtk
| 1
|
12775806
|
<filename>tests/models/test_vtk_sphere_model.py
# -*- coding: utf-8 -*-
import pytest
import vtk
import six
import numpy as np
import sksurgeryvtk.widgets.vtk_overlay_window as ow
import sksurgeryvtk.models.vtk_sphere_model as sm
def test_sphere_model_invalid_because_null_points():
with pytest.raises(ValueError):
sm.VTKSphereModel(None, 0.5)
def test_sphere_model_invalid_because_points_not_numpy_array():
with pytest.raises(TypeError):
sm.VTKSphereModel(1, 0.5)
def test_sphere_model_invalid_because_points_not_got_3_columns():
with pytest.raises(ValueError):
sm.VTKSphereModel(np.ones((1, 2)), 0.5)
def test_sphere_model_invalid_because_no_points():
with pytest.raises(ValueError):
sm.VTKSphereModel(np.ones((0, 3)), 0.5)
def test_sphere_model_invalid_because_points_not_float():
with pytest.raises(TypeError):
        sm.VTKSphereModel(np.ones((1, 3), dtype=int), 0.5)
def test_sphere_model_invalid_because_radius_not_positive():
with pytest.raises(ValueError):
sm.VTKSphereModel(np.eye(3), -1)
def test_sphere_model_3_points(setup_vtk_overlay_window):
    points = np.eye(3, dtype=float)
vtk_model = sm.VTKSphereModel(points, 0.5)
widget, _, _, app = setup_vtk_overlay_window
widget = ow.VTKOverlayWindow()
widget.add_vtk_actor(vtk_model.actor)
widget.show()
#app.exec_()
| 2.09375
| 2
|
main/__init__.py
|
MrDrache333/InformatiCup2021-spe_ed-Python-Bot
| 0
|
12775807
|
<gh_stars>0
import asyncio
import copy
import json
import logging
import os
import sys
import time
from datetime import datetime
import pygame
import websockets as websockets
from JsonInterpreter import JsonInterpreter
from game.Playground import Playground
from game.graphic.PlaygroundPresenter import PlaygroundPresenter
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
logger = logging.getLogger()
sys.setrecursionlimit(1000000)
def createFolderIfNotExist(path):
path = path[0: path.rindex('/')]
if not os.path.isdir(path):
try:
os.makedirs(path)
return True
except Exception:
logger.error("Could not create Folder \"" + path + "\"")
return False
return False
def saveGameFieldBeforeDeath(path, json):
"""
Saves the current Gamefield as a file to debug them later
:param json: The JSON String to store
:param path: The path where to store the file
:return: Nothing
"""
if json is None:
logger.info("JOSN is None: No GameField JSon will be stored.")
return
try:
created = createFolderIfNotExist(path)
if created:
with open(path, "w") as text_file:
n = text_file.write("[" + json + "]")
if n != len(json):
logger.info("Could not completely write GameField in \"" + path + "\"")
return False
else:
return True
else:
return False
except Exception:
logger.info("Could not store GameField in \"" + path + "\"")
def saveImage(path):
"""
Saves an image of the game after a win/draw/loose
:param path: path to the save location
"""
try:
if createFolderIfNotExist(path):
pygame.image.save(game.playgroundPresenter.gameWindow, path)
except pygame.error:
logger.info("Can't store image at \"" + path + "\"")
class Game(object):
def __init__(self, docker=False, url="", key=""):
self.ownPlayer = None
self.URL = url
self.KEY = key
self.width = 0
self.height = 0
self.clock = pygame.time.Clock()
self.interpreter = JsonInterpreter()
self.playground = None
self.playgroundPresenter = None
self.printedStatistics = False
self.gameStartTime = 0
self.oldData = None
self.oldStateJson = None
if docker:
os.environ["SDL_VIDEODRIVER"] = "dummy"
def printInfo(self, data):
"""
Prints the converted json data
:param data: data loaded out of json
"""
logger.info("Playfield: " + str(self.width) + " x " + str(self.height))
for p in data[0]['players']:
if data[0]['players'][p]['active']:
logger.info("Player " + p + " is on [" + str(data[0]['players'][p]['x']) + "] [" + str(
data[0]['players'][p]['y']) + "], looking " + str(
data[0]['players'][p]['direction']) + " at speed " + str(
data[0]['players'][p]['speed']))
else:
logger.info("Player " + p + " is out.")
logger.info("Your are Player " + str(data[0]['you']))
async def playOffline(self, PlaygroundPath):
"""
Run the simulation offline with x players with the same strategy
:param PlaygroundPath: Path to the playground json file
"""
with open(PlaygroundPath) as f:
data = json.load(f)
self.width = data[0]['width']
self.height = data[0]['height']
self.playground = Playground(self.interpreter.getCellsFromLoadedJson(data),
self.interpreter.getPlayersFromLoadedJson(data))
        # Find our own player in the data
self.ownPlayer = self.playground.players[int(data[0]['you']) - 1]
self.playgroundPresenter = PlaygroundPresenter(self.playground, self.width, self.height)
self.playgroundPresenter.generateGameField()
running = True
self.printInfo(data)
self.gameStartTime = time.time()
while running:
self.clock.tick(20)
# Check if pressed Key to interrupt
keys = pygame.key.get_pressed()
if keys[pygame.K_q]:
pygame.quit()
active = 0
for player in self.playground.players:
if player.active:
active += 1
logger.info("Player " + str(player.id))
player.tryToSurvive(self.playground)
logger.info("Turn: " + player.choosenTurn)
logger.info("Chosen by " + player.turnSetFrom)
logger.info("")
player.fitness += 1
if active == 0 and not self.printedStatistics:
self.printStatistics()
self.printedStatistics = True
else:
self.playground.movePlayer()
self.playgroundPresenter.playground = self.playground
self.playground.addTurn()
self.playgroundPresenter.updateGameField()
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
pygame.quit()
async def playOnline(self):
"""
        Run the game online against the server
"""
wslogger = logging.getLogger('websockets')
wslogger.setLevel(logging.ERROR)
wslogger.addHandler(logging.StreamHandler())
# Wait for the Client to connect to server
async with websockets.connect(f"{self.URL}?key={self.KEY}") as websocket:
logger.info("Connected to server. Waiting in lobby...This can take up to 5 min.!")
self.clock.tick(1000)
while True:
# Wait for the servers response
state_json = await websocket.recv()
if self.gameStartTime == 0:
self.gameStartTime = time.time()
# Store the current time to calculate the time needed for a turn
startTime = time.time_ns()
data = json.loads(state_json)
data = [data]
# If game was just created, create needed objects too
if self.playground is None:
self.width = data[0]['width']
self.height = data[0]['height']
self.printInfo(data)
self.playground = Playground(self.interpreter.getCellsFromLoadedJson(data),
self.interpreter.getPlayersFromLoadedJson(data))
# Get own player out of the Data
self.ownPlayer = self.playground.players[int(data[0]['you']) - 1]
else:
self.playground.update(self.interpreter.getCellsFromLoadedJson(data),
self.interpreter.getPlayersFromLoadedJson(data))
self.playgroundPresenter = PlaygroundPresenter(self.playground, self.width, self.height)
self.playgroundPresenter.generateGameField()
for player in self.playground.players:
if player.active:
player.fitness += 1
                # Check whether a player died since the last round
if self.oldData is not None:
for player in self.oldData:
if player.active != self.playground.players[player.id - 1].active:
logger.info("The Player " + str(player.id) + "[" + self.playgroundPresenter.getColorName(
player.id) + "]" + " died!" + (" <-- WE" if self.ownPlayer.id == player.id else ""))
logger.info("")
# If our player is active and the game is running, try to Survive
if self.ownPlayer.active and data[0]['running']:
self.ownPlayer.tryToSurvive(self.playground)
logger.info("Turn: " + self.ownPlayer.choosenTurn)
logger.info("Chosen by " + self.ownPlayer.turnSetFrom)
self.playground.addTurn()
self.playgroundPresenter.update(self.playground)
self.playgroundPresenter.updateGameField()
self.oldData = copy.deepcopy(self.playground.players)
                # If the game is running and we're still active, log our chosen turn and its duration and send it to the server
if self.ownPlayer.active and data[0]['running']:
action = self.ownPlayer.choosenTurn
action_json = json.dumps({"action": action})
logger.info("Our turn took " + str((time.time_ns() - startTime) // 1000000) + " milliseconds!")
logger.info("")
await websocket.send(action_json)
self.oldStateJson = copy.deepcopy(state_json)
else:
return
def printStatistics(self):
"""
Prints statistics of the played game
How long did it take, who won?
"""
if self.playground is None or self.playground.players is None:
logger.info("Playground must not be None!")
return
        # Sort players based on their fitness value
players = sorted(self.playground.players, key=lambda p: p.fitness, reverse=True)
logger.info("---------Game OVER---------")
logger.info("The game lasts " + str(round(time.time() - game.gameStartTime, 1)) + " Seconds!")
logger.info("Average turntime was about " + str(
round(float((time.time() - game.gameStartTime) / players[0].fitness), 2)) + " Seconds")
if self.ownPlayer.active:
logger.info("WE WON !!! PS: Because we can ;)")
            # Store a screenshot of the GameField
saveImage("results/won/result_" + str(datetime.timestamp(datetime.now())) + ".jpg")
elif self.ownPlayer.fitness == players[0].fitness:
logger.info("It's a draw. Your tried your best...but hey...he died too")
# Store Scrrenshot of the Gamefield
saveImage("results/draw/result_" + str(datetime.timestamp(datetime.now())) + ".jpg")
saveGameFieldBeforeDeath("results/draw/result_" + str(datetime.timestamp(datetime.now())) + ".json",
self.oldStateJson)
else:
logger.info("We lost... :/ Maybe they're hacking...")
            # Store a screenshot of the GameField
saveImage("results/lost/result_" + str(datetime.timestamp(datetime.now())) + ".jpg")
saveGameFieldBeforeDeath("results/lost/result_" + str(datetime.timestamp(datetime.now())) + ".json",
self.oldStateJson)
logger.info("---------Stats---------")
for player in players:
logger.info("Player " + str(player.id) + ": " + str(player.fitness) + " State: " + str(
"ALIVE" if player.active else "DEAD") + " Color: " + self.playgroundPresenter.getColorName(
player.id)
+ (" <---WE" if self.ownPlayer.id == player.id else ""))
logger.info("-------------------------------")
def sleep(secs):
"""
Wait for x seconds
:param secs: seconds
"""
for i in range(secs, 0, -1):
if i <= 3 or i % 10 == 0:
logger.info("WAIT " + str(i) + " SECONDS TO START AGAIN!")
try:
time.sleep(1)
except KeyboardInterrupt:
logger.info("---PROGRAM INTERRUPTED!---")
exit()
docker = False
ONLINE = True
OfflinePath = ""
url = ""
key = ""
try:
ONLINE = os.environ["Online"] == "True"
except KeyError:
logger.info("Online Parameter is not set. DEFAULT=True")
if not ONLINE:
try:
OfflinePath = os.environ["Playground"]
except KeyError:
logger.info("Playground Parameter is not set but Online was set to FALSE")
logger.info("Please set the needed environment variables. Please take a look at our "
"documentation to ensure the proper use of our program")
exit(-1)
else:
try:
url = os.environ["URL"]
key = os.environ["KEY"]
except KeyError:
logger.info("URL or KEY Parameter is not set but Online was set to TRUE")
logger.info("Please set the needed environment variables. Please take a look at our "
"documentation to ensure the proper use of our program")
exit(-1)
try:
docker = os.environ["Docker"] == "True"
except KeyError:
logger.info("Docker Parameter is not set. DEFAULT=FALSE")
if ONLINE:
logger.info("API-SERVER-URL: " + url)
logger.info("API-KEY: " + key)
logger.info("DOCKER: " + str(docker))
while True:
game = Game(docker, url, key)
try:
asyncio.get_event_loop().run_until_complete(game.playOnline())
game.printStatistics()
sleep(5)
except websockets.InvalidStatusCode as e:
if e.status_code == 429:
logger.info("TOO MANY REQUESTS")
sleep(30)
else:
logger.debug(e)
except websockets.ConnectionClosedOK as e:
if e.code == 1000:
logger.debug("Server Closed with Code: 1000 OK")
game.printStatistics()
sleep(5)
except websockets.ConnectionClosedError as e:
if e.code == 1006:
logger.debug("Server Closed with Code: 1006 ERROR")
game.printStatistics()
sleep(5)
except KeyboardInterrupt:
logger.info("\n---Programm wurde unterbrochen!---")
exit()
else:
game = Game(docker)
try:
asyncio.get_event_loop().run_until_complete(game.playOffline(OfflinePath))
while True:
time.sleep(1)
except KeyboardInterrupt:
logger.info("\n---Programm wurde unterbrochen!---")
| 2.65625
| 3
|
Signatures/train_classifier.py
|
angelos-c/OCR-with-Neural-Networks-and-Support-Vector-Machines
| 0
|
12775808
|
import numpy as np
import os
import itertools
import operator
import random
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from skimage.feature import hog
from skimage import color, exposure
from scipy.misc import imread,imsave,imresize
import numpy.random as nprnd
from sklearn.svm import SVC
from sklearn import linear_model
from sklearn.svm import LinearSVC
import matplotlib
import pickle
if __name__ == '__main__':
#paths for the training samples
path_angelos = './training/angelos/'
path_tim = './training/tim/'
path_hank = './training/hank/'
angelos_filenames = sorted([filename for filename in os.listdir(path_angelos) if (filename.endswith('.jpg') or filename.endswith('.png') or (filename.endswith('.bmp'))) ])
tim_filenames = sorted([filename for filename in os.listdir(path_tim) if (filename.endswith('.jpg') or filename.endswith('.png') or (filename.endswith('.bmp'))) ])
hank_filenames = sorted([filename for filename in os.listdir(path_hank) if (filename.endswith('.jpg') or filename.endswith('.png') or (filename.endswith('.bmp'))) ])
#add the full path to all the filenames
angelos_filenames = [path_angelos+filename for filename in angelos_filenames]
tim_filenames = [path_tim+filename for filename in tim_filenames]
hank_filenames = [path_hank+filename for filename in hank_filenames]
print 'Number of training images -> angelos: ' + str(len(angelos_filenames))
print 'Number of training images -> tim: ' + str(len(tim_filenames))
print 'Number of training images -> hank: ' + str(len(hank_filenames))
total = len(angelos_filenames) + len(tim_filenames) + len(hank_filenames)
print 'Total Number of samples: ' + str(total)
#create the list that will hold ALL the data and the labels
#the labels are needed for the classification task:
# 0 = angelos
# 1 = tim
# 2 = hank
data = []
labels = []
for filename in angelos_filenames:
#read the images
image = imread(filename,1)
        #resize to a fixed 200x200 so the HOG feature vector has constant length
image = imresize(image, (200,200))
hog_features = hog(image, orientations=12, pixels_per_cell=(16, 16),
cells_per_block=(1, 1))
data.append(hog_features)
labels.append(0)
print 'Finished adding angelos samples to dataset'
for filename in tim_filenames:
image = imread(filename,1)
image = imresize(image, (200,200))
hog_features = hog(image, orientations=12, pixels_per_cell=(16, 16),
cells_per_block=(1, 1))
data.append(hog_features)
labels.append(1)
print 'Finished adding tim samples to dataset'
for filename in hank_filenames:
image = imread(filename,1)
image = imresize(image, (200,200))
hog_features = hog(image, orientations=12, pixels_per_cell=(16, 16),
cells_per_block=(1, 1))
data.append(hog_features)
labels.append(2)
print 'Finished adding hank samples to dataset'
print 'Training the SVM'
#create the SVC
clf = LinearSVC(dual=False,verbose=1)
#train the svm
clf.fit(data, labels)
#pickle it - save it to a file
pickle.dump( clf, open( "place.detector", "wb" ) )
data_shape = np.array(data)
print "shape of data: " + str(data_shape.shape)
hog_shape = np.array(hog_features)
print "shape of hog_features: " + str(hog_shape.shape)
| 2.21875
| 2
|
prime.py
|
koen1711/pythongportfolio
| 0
|
12775809
|
<gh_stars>0
import math
first = 1
last = 500
prime_numbers = []
if last == 1:
print("Not a prime number.")
else:
for i in range(first, last):
prime_flag = 0
        # trial division: a factor, if any, must appear at or below sqrt(i)
        for x in range(2, int(math.sqrt(i)) + 1):
if (i % x == 0):
prime_flag = 1
break
if (prime_flag == 0) and (i != 1):
prime_numbers.append(i)
    print(prime_numbers)
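
# Illustrative alternative (not part of the original script): a Sieve of
# Eratosthenes finds the same primes in O(n log log n) instead of repeated
# trial division; sieve_primes is a hypothetical helper name.
def sieve_primes(limit):
    # is_prime[i] stays True until a smaller prime marks i as composite
    is_prime = [True] * limit
    is_prime[0:2] = [False, False]
    for i in range(2, int(math.sqrt(limit)) + 1):
        if is_prime[i]:
            for multiple in range(i * i, limit, i):
                is_prime[multiple] = False
    return [i for i, flag in enumerate(is_prime) if flag]

print(sieve_primes(last))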
| 3.546875
| 4
|
src/fuckbot/db.py
|
Zer0-One/fuckbot
| 0
|
12775810
|
<gh_stars>0
from sqlalchemy import create_engine, text
from .config import Config
config = Config()
def db_init():
engine = create_engine("sqlite+pysqlite:///" + config["WORKING_DIR"] + "/" + config["SQLITE_DB"], future=True)
with engine.connect() as con:
        # the original DDL had no column list, which SQLite rejects; the
        # single-column schema below is an assumed placeholder
        con.execute(text('CREATE TABLE IF NOT EXISTS "trigger" (id INTEGER PRIMARY KEY)'))
        con.commit()
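
# Minimal usage sketch (assumed entry point, not in the original module):
# requires config to provide WORKING_DIR and SQLITE_DB.
if __name__ == "__main__":
    db_init()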
| 2.515625
| 3
|
tqt/function/adder.py
|
guohao-fly/TQT
| 2
|
12775811
|
"""
Provide a quantized form of Adder2d, https://arxiv.org/pdf/1912.13200.pdf
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Function
import math
from . import extra as ex
from .number import qsigned
class Adder2d(ex.Adder2d):
def __init__(self,
input_channel,
output_channel,
kernel_size,
stride=1,
padding=0,
bias=False,
weight_bit_width=8,
bias_bit_width=16,
inter_bit_width=32,
acti_bit_width=8,
retrain=True,
quant=False):
super().__init__(input_channel,
output_channel,
kernel_size,
stride=stride,
padding=padding,
bias=bias)
self.weight_bit_width = weight_bit_width
self.bias_bit_width = bias_bit_width
self.inter_bit_width = inter_bit_width
self.acti_bit_width = acti_bit_width
self.retrain = retrain
self.quant = quant
if retrain is True:
self.weight_log2_t = nn.Parameter(torch.Tensor(1))
self.acti_log2_t = nn.Parameter(torch.Tensor(1))
if self.bias is not None:
self.bias_log2_t = nn.Parameter(torch.Tensor(1))
else:
self.weight_log2_t = torch.Tensor(1)
self.acti_log2_t = torch.Tensor(1)
if self.bias is not None:
self.bias_log2_t = torch.Tensor(1)
def static(self):
self.retrain = False
if isinstance(self.bias_log2_t, nn.Parameter):
self.bias_log2_t.requires_grad_(False)
if isinstance(self.weight_log2_t, nn.Parameter):
self.weight_log2_t.requires_grad_(False)
if isinstance(self.acti_log2_t, nn.Parameter):
self.acti_log2_t.requires_grad_(False)
def quantilize(self):
self.quant = True
def floatilize(self):
self.quant = False
def adder_forward(self, input):
input_log2_t = input.abs().max().log2()
weight = qsigned(self.weight, self.weight_log2_t,
self.weight_bit_width)
inter = qsigned(
ex.adder2d_function(input,
weight,
None,
stride=self.stride,
padding=self.padding),
self.weight_log2_t + input_log2_t + math.log2(self.weight.numel()),
self.inter_bit_width)
if self.bias is not None:
inter += qsigned(
self.bias, self.bias_log2_t,
self.bias_bit_width).unsqueeze(1).unsqueeze(2).unsqueeze(0)
return qsigned(inter, self.acti_log2_t, self.acti_bit_width)
def adder_forward_unquant(self, input):
return ex.adder2d_function(input,
self.weight,
self.bias,
stride=self.stride,
padding=self.padding)
def forward(self, input):
return self.adder_forward(
input) if self.quant else self.adder_forward_unquant(input)
if __name__ == '__main__':
add = Adder2d(3, 4, 3, bias=True)
x = torch.rand(10, 3, 10, 10)
print(add(x).shape)
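
    # Hypothetical continuation (not in the original file): switch to the
    # quantized path; the log2 threshold parameters are created uninitialized
    # above, so we assume they need explicit values before quantized inference
    add.weight_log2_t.data.fill_(0.)
    add.bias_log2_t.data.fill_(0.)
    add.acti_log2_t.data.fill_(0.)
    add.quantilize()
    print(add(x).shape)  # same shape, now computed through qsigned tensors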
| 2.765625
| 3
|
src/server.py
|
benob/howto-app
| 0
|
12775812
|
import sys
import asyncio
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
from aiohttp import web
from aiohttp_session import get_session, setup
from aiohttp_session.cookie_storage import EncryptedCookieStorage
import aiohttp_jinja2
import jinja2
import user
import search
#import personal
import friend
import share
import comment
import notification
import playlists
import history
import export
import speech
if '-debug' in sys.argv[1:]:
print('WARNING: running in debug mode')
import debug
import secrets
from util import routes, get_user, add_globals, error_middleware
ssl_context = None
if secrets.USE_SSL:
import ssl
ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
ssl_context.load_cert_chain(secrets.SSL_CRT, secrets.SSL_KEY)
async def update_certificate():
while True:
# let's encrypt certificate is updated every 3 months, we need to reload it
# TODO: only do it if certificate changed
print('reloading SSL certificate')
ssl_context.load_cert_chain(secrets.SSL_CRT, secrets.SSL_KEY)
await asyncio.sleep(3600 * 24) # once a day
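
# Hypothetical handler sketch (not part of the original app): `routes` from
# util is assumed to be an aiohttp RouteTableDef, so handlers declared here
# are registered when run_web_app() calls app.add_routes(routes).
@routes.get('/healthz')
async def healthz(request):
    # lightweight liveness probe returning JSON
    return web.json_response({'ok': True})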
async def run_web_app():
app = web.Application(middlewares=[error_middleware])
setup(app, EncryptedCookieStorage(secrets.SERVER_COOKIE_KEY))
aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader('templates/'), context_processors=[add_globals])
# warning from doc: in production, /static should be handled by apache/nginx
routes.static('/static', 'static', append_version=True)
routes.static('/pictures', 'data/pictures')
routes.static('/qrcodes', 'data/qrcodes')
routes.static('/export', 'data/export')
routes.static('/', 'static/favicon')
app.add_routes(routes)
if secrets.USE_SSL:
asyncio.get_event_loop().create_task(update_certificate())
return app
app = asyncio.get_event_loop().run_until_complete(run_web_app())
print('Running app at http%s://%s:%d' % ('s' if secrets.USE_SSL else '', secrets.HOST, secrets.PORT))
web.run_app(app, ssl_context=ssl_context, host=secrets.HOST, port=secrets.PORT)
| 1.875
| 2
|
plugins/custom_operators.py
|
FrankSchleyCBA/docker-airflow
| 0
|
12775813
|
import logging
from datetime import datetime
from airflow.models import BaseOperator
from airflow.operators.sensors import BaseSensorOperator
from airflow.plugins_manager import AirflowPlugin
from airflow.utils.decorators import apply_defaults
log = logging.getLogger(__name__)
#Test comment
class CustomOperator(BaseOperator):
@apply_defaults
def __init__(self, my_operator_param, *args, **kwargs):
self.operator_param = my_operator_param
super(CustomOperator, self).__init__(*args, **kwargs)
def execute(self, context):
log.info("Hello World!")
log.info('operator_param: %s', self.operator_param)
class CustomSensor(BaseSensorOperator):
@apply_defaults
def __init__(self, *args, **kwargs):
super(CustomSensor, self).__init__(*args, **kwargs)
def poke(self, context):
"""Determines whether the task is successful or not
if True: continue with the dag
if False: call poke again
if Exception: call poke again until the max number of retries has been reached
"""
current_minute = datetime.now().minute
if current_minute % 3 != 0:
log.info("Current minute (%s) not is divisible by 3, sensor will retry.", current_minute)
return False
log.info("Current minute (%s) is divisible by 3, sensor finishing.", current_minute)
return True
class MyFirstPlugin(AirflowPlugin):
name = "my_first_plugin"
operators = [CustomOperator, CustomSensor]
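
# Hypothetical wiring sketch (not part of the original plugin): how these
# operators would typically be instantiated inside a separate DAG file.
# Kept commented out because plugin modules should not define DAGs at import.
#
# from airflow import DAG
#
# with DAG('custom_operator_demo', start_date=datetime(2020, 1, 1),
#          schedule_interval=None) as dag:
#     hello = CustomOperator(task_id='hello', my_operator_param='demo value')
#     wait = CustomSensor(task_id='wait_for_minute', poke_interval=30)
#     hello >> wait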
| 2.375
| 2
|
pilight2mqtt/core.py
|
mcdeck/pilight2mqtt
| 6
|
12775814
|
<gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
core module of pilight2mqtt
"""
from __future__ import print_function
import socket
import sys
import re
import json
import signal
import logging
import paho.mqtt.client as mqtt
from pilight2mqtt.discover import discover
__all__ = ['Pilight2MQTT', 'PilightServer']
DISCOVER_SCHEMA = "urn:schemas-upnp-org:service:pilight:1"
DELIM = b'\n\n'
class ConnectionLostException(Exception):
"""Connection lost exception"""
class Loggable: # pylint: disable=too-few-public-methods
"""base class for objects that need logging"""
@property
def log(self):
"""log message to a logger named like the class"""
return logging.getLogger(self.__class__.__name__)
class PilightServer(Loggable):
"""class to interact with pilight"""
@classmethod
def discover(cls):
"""discover pilight servers in the network"""
log = logging.getLogger('PilightAutoDiscover')
log.debug('trying to discover servers')
responses = discover(DISCOVER_SCHEMA)
if not responses:
log.error('failed to locate any servers - terminating')
sys.exit(1)
locationsrc = re.search('Location:([0-9.]+):([0-9.]+)',
str(responses[0]),
re.IGNORECASE)
if locationsrc:
location = locationsrc.group(1)
port = locationsrc.group(2)
else:
log.error("Whoops, could not find any servers")
sys.exit(1)
log.info('Found server at %s:%d', location, int(port))
return PilightServer(location, int(port))
def __init__(self, address, port):
"""initialize"""
self.log.debug('__init__(%s, %s)', address, port)
self._address = address
self._port = port
self._socket = None
self._should_terminate = True
self._event_handler = None
def _readlines(self):
buffer = b''
while not self._should_terminate:
try:
data = self._socket.recv(1024)
buffer += data
self.log.debug('_readlines buffer is %s', buffer)
while buffer.find(DELIM) != -1:
line, buffer = buffer.split(DELIM, 1)
self.log.debug('_readlines yield line %s', line)
yield line
except socket.timeout:
continue
def _read(self):
"""read data from socket"""
self.log.debug('read')
lines_generator = self._readlines()
text = next(lines_generator)
self.log.debug('_read received %s', text)
return text
def send_check_success(self, msg_dct):
"""send message and check that it was successfull"""
self.log.debug('_send_check_success')
response = self.send_json(msg_dct)
if response.get('status', '') == 'success':
return True
return False
def send_json(self, msg_dct):
"""send json data and read response, which is also json"""
self.log.debug('_send_json')
msg = bytes(json.dumps(msg_dct)+'\n', 'utf-8')
response = self.send_raw(msg)
if self._should_terminate:
return {}
return json.loads(response.decode("utf-8"))
def send_raw(self, msg):
"""send and read raw data"""
self.log.debug('_send_raw')
self._socket.send(msg)
response = self._read()
return response
def _open_socket(self):
"""open a socket to pilight"""
self.log.debug('open socket')
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._socket.settimeout(1)
self._socket.connect((self._address, int(self._port)))
self._should_terminate = False
def connect(self, cb_recv=None):
"""initialize connection progress.
registers handlers as well.
"""
self.log.info('connect')
if cb_recv:
self._event_handler = cb_recv
self._open_socket()
suc = self.send_check_success({
'action': 'identify',
'options': {
'receiver': 1,
'core': 0,
'config': 1,
'forward': 1
},
'uuid': '0000-d0-63-00-000000',
'media': 'all'
})
return suc
def reconnect(self):
"""try to reconnect if the connection got lost"""
try:
connected = False
while not self._should_terminate and not connected:
connected = self.connect()
return connected
except Exception: # pylint: disable=broad-except
pass
return False
def disconnect(self):
"""disconnect from pilight"""
self.log.info('disconnect')
self._should_terminate = True
if self._socket:
self._socket.close()
self._socket = None
def process_events(self, callback):
"""process incoming events from pilight"""
self.log.info('process_events')
while not self._should_terminate:
response = self._read()
if not self._should_terminate:
self.log.debug('call callback')
callback(response)
def terminate(self):
"""indicate that the system should shut down"""
self.log.info('terminate')
self._should_terminate = True
def heartbeat(self):
"""send and read heart beat to/from pilight"""
response = self.send_raw(b'HEART')
if response == b'BEAT':
return True
return False
def set_device_state(self, device, state):
"""update the state of a device in pilight"""
self.log.info('set_device_state: "%s" to "%s"', device, state)
msg = {
'action': 'control',
'code': {
'device': device,
'state': state
}
}
return self.send_check_success(msg)
class Pilight2MQTT(Loggable):
"""translate between pilight events and mqtt messages"""
def __init__(self, # pylint: disable=too-many-arguments
server,
mqtt_host,
mqtt_username=None,
mqtt_password=<PASSWORD>,
mqtt_port=1883,
mqtt_topic='PILIGHT'):
"""initialize"""
self.log.debug('__init__')
self._mqtt_host = mqtt_host
self._mqtt_port = mqtt_port
self._mqtt_topic = mqtt_topic
self._server = server
self._mqtt_username = mqtt_username
self._mqtt_password = <PASSWORD>
def on_connect(client, userdata, flags, result_code):
# pylint: disable=missing-docstring
return self._on_connect(client, userdata, flags, result_code)
def on_message(client, userdata, msg):
# pylint: disable=missing-docstring
return self._on_message(client, userdata, msg)
self._mqtt_client = mqtt.Client()
self._mqtt_client.on_connect = on_connect
self._mqtt_client.on_message = on_message
def _on_connect(self, client, userdata, flags, result_code):
"""execute setup of mqtt, i.e. subscribe to a channel"""
if result_code == 5:
self.log.debug(
"Connection failed: %s: possible authentication failure",
str(result_code))
else:
self.log.debug(
"Connected with result code %s",
str(result_code))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
self.log.info('MQTT Subscribe %s', self._mqtt_topic)
client.subscribe("%s/#" % self._mqtt_topic)
def _on_message(self, client, userdata, msg):
"""process messages received from MQTT"""
self.log.debug("%s %s", msg.topic, str(msg.payload))
match = re.search('%s/set/(.*?)/STATE' % self._mqtt_topic, msg.topic)
if match:
device = match.group(1)
state = msg.payload
self._server.set_device_state(device, state.decode('utf-8'))
def _send_mqtt_msg(self, device, topic, payload):
self.log.info(
'Update for device "%s" on topic "%s", new value "%s"',
device, topic, payload) # flake8: NOQA
(result, mid) = self._mqtt_client.publish(topic,
payload=payload,
qos=0,
retain=False)
assert result == mqtt.MQTT_ERR_SUCCESS, (
"Failed to send message (%s)" % str(result))
self.log.debug('Message send with id %d', mid)
def _mktopic(self, device, reading):
return '%s/status/%s/%s' % (self._mqtt_topic, device, reading)
def _handle_event(self, evt):
"""event handling for message from pilight"""
self.log.debug(evt)
try:
evt_dct = json.loads(evt.decode('utf-8'))
if evt_dct.get('origin', '') == 'update':
evt_type = evt_dct.get('type', None)
if evt_type == 1: # switch
for device in evt_dct.get('devices', []):
self._send_mqtt_msg(
device,
self._mktopic(device, 'STATE'),
evt_dct['values']['state'])
                elif evt_type == 3:  # temperature/humidity sensor
for device in evt_dct.get('devices', []):
self._send_mqtt_msg(
device,
self._mktopic(device, 'HUMIDITY'),
evt_dct['values']['humidity'])
self._send_mqtt_msg(
device,
self._mktopic(device, 'TEMPERATURE'),
evt_dct['values']['temperature'])
else:
raise RuntimeError('Unsupported event type %d' % evt_type)
except Exception as ex: # pylint: disable=broad-except
self.log.error('%s: %s', ex.__class__.__name__, ex)
def run(self):
"""main run method"""
self.log.debug('run')
def stop_server(signum, frame): # pylint: disable=missing-docstring
self.log.debug("SIGINT")
self._server.terminate()
signal.signal(signal.SIGINT, stop_server)
self.log.info('MQTT Connect %s:%d',
self._mqtt_host, self._mqtt_port)
try:
if (self._mqtt_username is not None
and self._mqtt_password is not None):
self._mqtt_client.username_pw_set(
self._mqtt_username,
self._mqtt_password)
self._mqtt_client.connect(self._mqtt_host, self._mqtt_port, 60)
except Exception as ex: # pylint: disable=broad-except
self.log.error('Failed to connect to MQTT server: %s', str(ex))
return 1
self._mqtt_client.loop_start()
suc = self._server.connect()
if not suc:
self.log.error('Could not connect to server')
return 1
assert self._server.heartbeat()
def callback(event): # pylint: disable=missing-docstring
self._handle_event(event)
self._server.process_events(callback)
self._server.disconnect()
self.log.info('disconnect MQTT')
self._mqtt_client.loop_stop(force=False)
self._mqtt_client.disconnect()
return 0
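
# Minimal usage sketch (assumed wiring; not part of the original module):
# discover a pilight daemon on the LAN and bridge it to a local MQTT broker.
if __name__ == '__main__':
    srv = PilightServer.discover()
    bridge = Pilight2MQTT(srv, mqtt_host='localhost')
    sys.exit(bridge.run())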
| 2.40625
| 2
|
SparkAutoML/utils/models_dict_file.py
|
brainalysis/sparkify
| 0
|
12775815
|
# make a dictionary of available models in pyspark
from pyspark.ml.classification import (
LogisticRegression,
GBTClassifier,
RandomForestClassifier,
DecisionTreeClassifier,
MultilayerPerceptronClassifier,
LinearSVC,
NaiveBayes,
FMClassifier,
)
from pyspark.ml.regression import (
LinearRegression,
GeneralizedLinearRegression,
DecisionTreeRegressor,
RandomForestRegressor,
GBTRegressor,
AFTSurvivalRegression,
IsotonicRegression,
FMRegressor,
)
model_dict_classifier = {
"lr": LogisticRegression,
"rfc": RandomForestClassifier,
"gbc": GBTClassifier,
"dtc": DecisionTreeClassifier,
"mlpc": MultilayerPerceptronClassifier,
"svc": LinearSVC,
"nbc": NaiveBayes,
"fmc": FMClassifier,
}
model_dict_regression = {
"lr": LinearRegression,
"glr": GeneralizedLinearRegression,
"dtr": DecisionTreeRegressor,
"rfr": RandomForestRegressor,
"gbr": GBTRegressor,
"sr": AFTSurvivalRegression,
"isor": IsotonicRegression,
"fmr": FMRegressor,
}
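
# Illustrative lookup (hypothetical usage, not part of the original module):
# the short keys map to untrained estimator classes, so callers instantiate
# them with the usual pyspark column arguments once a SparkSession exists.
# clf = model_dict_classifier["rfc"](featuresCol="features", labelCol="label")
# model = clf.fit(training_df)  # training_df: an assumed prepared DataFrame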
| 2.65625
| 3
|
load_affnist.py
|
YuZiHanorz/stacked_capsule_autoencoders
| 0
|
12775816
|
<filename>load_affnist.py
import numpy as np
import tensorflow as tf
import scipy.io as sio
from glob import glob
import os
from monty.collections import AttrDict
def load_data_from_mat(path):
data = sio.loadmat(path, struct_as_record=False, squeeze_me=True)
for key in data:
if isinstance(data[key], sio.matlab.mio5_params.mat_struct):
data[key] = _todict(data[key])
return data
def _todict(matobj):
    # Recursively constructs nested dictionaries from mat_struct objects
    result = {}
    for strg in matobj._fieldnames:
        elem = matobj.__dict__[strg]
        if isinstance(elem, sio.matlab.mio5_params.mat_struct):
            result[strg] = _todict(elem)
        else:
            result[strg] = elem
    return result
def affnist_reader(batch_size):
test_path = glob(os.path.join('../data/affnist/', "test.mat"))
print(test_path)
test_data = load_data_from_mat(test_path[0])
testX = test_data['affNISTdata']['image'].transpose()
testY = test_data['affNISTdata']['label_int']
testX = testX.reshape((320000, 40, 40, 1)).astype(np.float32)
testY = testY.reshape((320000)).astype(np.int32)
X = tf.convert_to_tensor(testX, dtype=tf.float32) / 255.
Y = tf.convert_to_tensor(testY, dtype=tf.int64)
input_queue = tf.train.slice_input_producer([X, Y], shuffle=True)
images = tf.image.resize_images(input_queue[0], [40, 40])
labels = input_queue[1]
X, Y = tf.train.batch([images, labels], batch_size=batch_size)
testset = AttrDict(image=X, label=Y)
return testset
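
# Hypothetical usage sketch (TF1-style queue pipeline, not in the original
# file): slice_input_producer requires queue runners to be started explicitly.
if __name__ == '__main__':
    testset = affnist_reader(batch_size=128)
    with tf.Session() as sess:
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)
        images, labels = sess.run([testset.image, testset.label])
        print(images.shape, labels.shape)  # expected: (128, 40, 40, 1) (128,)
        coord.request_stop()
        coord.join(threads)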
| 2.265625
| 2
|
project_name/settings/prod.py
|
wearespindle/Estrada
| 2
|
12775817
|
from {{ project_name }}.settings.base import * # noqa
DEBUG = boolean(os.environ.get('DEBUG', 0))
TEMPLATE_DEBUG = boolean(os.environ.get('DEBUG', DEBUG))
ALLOWED_HOSTS = ['.example.com']
# Use the cached template loader so template is compiled once and read from
# memory instead of reading from disk on each load.
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]),
]
# HTTPS and Security Settings
SECURE_HSTS_SECONDS = 31536000 # Future requests for the next year should use HTTPS only
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_SSL_REDIRECT = True
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'
| 1.71875
| 2
|
phjokes/jokes/jokes.py
|
Tmpts21/phjokes
| 0
|
12775818
|
pinoy_jokes = {
1: {'dialect': 'bisaya',
'joke': ['Teban: Dok, ngano gasakit man akong dughan kada inum nakug '
'tuba, pero kung libre gani dili mosakit??',
'Doktor: Ah kabalo nako ana, nipis imong BAGA, pero BAGA IMONG '
'NAWONG']},
2: {'dialect': 'bisaya',
'joke': ['Teacher: How many liters does a Coke litro have?',
'Pupil: Four liters Ma’am!',
'Teacher: Are you sure?',
'Pupil: Yes ma’am! Liter C, Liter O, Liter K, and Liter E!']},
3: {'dialect': 'bisaya',
'joke': ['Kumpisalan!',
'Tulume: Bindisyoni ako padre kay ako makakasala.',
'Pari: Unsa man nang imong sala?',
'Tulume: Nangawat ko ug lima ka hiniktang igtataring manok '
'padre.',
'Pari: Ah… Mag-ampo ka ug lima ka Amahan Namo',
'Tulume: Padre. Waloon na lang nako ka Amahan Namo',
'Pari: Ngano man iho?',
'Tulume: Tua pa man guy tulo sweater-hatch ra ba to pang derby '
'ako balikan unyang gabii……']},
4: {'dialect': 'bisaya',
'joke': ['Bon: Day, usa ka basong HOT MILK ang ako beh.',
'Tindera: Atong butangan og gatas Sir?',
'Bon: Ayaw kay luod !',
'Gi-ahak!']},
5: {'dialect': 'bisaya',
'joke': ['Konsehal: Paki acknowledge si Mayor. Late dumating, hayun '
'kararaan lang!',
'Pedro (Emcee): I wud like to acknowledge the late mayor who '
'just passed away.']},
6: {'dialect': 'bisaya',
'joke': ['Correct Pronounciation',
'Anak: Tay, paliti ko bi ug Jucyfruwet',
'Tatay: Anak, dili magbinulok, dili man na jucyfruwet',
'Anak: Unsa man diay na tay?',
'Tatay: BAGOLBAM…']},
7: {'dialect': 'bisaya',
'joke': ['Apo Ug Lolo',
'Apo: Lo, ngano nag kaang2x man kag lakaw?',
'Lolo: Aw, ayaw nagud ni pansina apo oi',
'Apo: ngano lagi na lo?',
'Lolo: ingon mn gud sa akong Doktor na likayan ang itlog kay '
'taas ug kolesterol',
'unsaon na lang kaha kung giingnan si Lolo nga likayan ang '
'hotdog.']},
8: {'dialect': 'bisaya',
'joke': ['Andir-di-saya',
'Kulas: Bay Tasyo, matod sa mga silingan andir-di-saya man kuno '
'ka.',
'Tasyo: Unsay andir-di-saya nga bag-o lang nakong gikasab-an ang '
'akong Misis!',
'Kulas: Ngano man?',
'Tasyo: Gisugo man ko niya sa pamalantsa. Mao nga akong '
'gisinghagan ug UNYA RA KAY MAGLUTO PA KO!!!']},
9: {'dialect': 'bisaya',
'joke': ['Sud-an',
'Bata: Nay, unsay atong sud-an?',
'Inahan: Christmas tree ug lansang, Dong.',
'Bata: Ha, christmas tree ug lansang?',
'Inahan: Kamunggay ba, nga gisubakan ug buwad bulinaw']},
10: {'dialect': 'bisaya',
'joke': ['Kinsay Mas Brayt?',
'Bata: Kinsay mas brayt Pa, ang Amerikano o Pilipino?',
'Amahan: Mas brayt ang Amerikano Dong, kay ang mga Bata didto '
'sa Amerika, gagmay pa gani, maayo na kaayo mo-ininglis.']},
11: {'dialect': 'bisaya',
'joke': ['Gidamgo',
'Caloy: Doc, unsa man nga kada gabii damgohon man ko nga NBA '
'player kuno ko. Ako ang point guard sa Lakers.',
'Doctor: Buweno, tagaan tika ug reseta aron dili ka na '
'damgohon.',
'Caloy: Ayaw lang sa Doc kay championship ron namong gabii.']},
12: {'dialect': 'bisaya',
'joke': ['Flashlight',
'Sa usa ka diskortal, nakigsayaw si Kulas ug gwapa. Tungod kay '
'gwapa man kaayo iyang paris, puwerteng gakus ni Kulas.',
'Daga: Unsa may sulod sa imong bulsa, Dong?',
'Kulas: Ahhh.. .kanang, flashlight ni Day.',
'Daga: Nganong init man?',
'Kulas: Nagsiga man gud!']},
13: {'dialect': 'bisaya',
'joke': ['Ngitngit',
'Anak: Nay, pirme lang kong sugsugon sa akong mga classmates '
'nga anak kuno ko sa liking kawayan. Kinsa man ang akong '
'amahan, Nay?',
'Inahan: Wa gayud ko kahibalo, Dong, kay puwerteng ngitngita '
'adtong gabhiona.']},
14: {'dialect': 'bisaya',
'joke': ['Inday',
'MC: Unsa’y kantahon nimo, Miss?',
'Contestant: Inday.',
'MC: Inday? Wala may kanta nga “Inday”.',
'Contestant: Naa uy! Katong kang <NAME>’g kanta ba nga '
'“INDAAAAY WILL ALWAYS LOVE YOU.”']},
15: {'dialect': 'bisaya',
'joke': ['Anak: Unsay escalator, Tay?',
'Tatay: Hagdan saka kanaog.',
'Anak: Kanang elevator?',
'Tatay: Kahon sakyan saka kanaog.',
'Anak: Calculator, Tay?',
'Tatay: Kana, wa pa ko kasakay']},
16: {'dialect': 'bisaya',
'joke': ['2 ka Misis nangamote…',
'Misis 1: mahinumdon gyug ko sa ITLOG sa akong BANA aning '
'KAMOTEHA.',
'Misis 2: sa KADAK-ON?',
'Misis 1: Dili… sa KABULINGON!']},
17: {'dialect': 'bisaya',
'joke': ['English / Bisaya',
'1) Aspect – Pangbuak Sa Yelo',
'2) City – Bag-o Mag-otso',
'3) Deduct – Ang Itik',
'4) Defeat – Ang Tiil',
'5) Deposit – Ang Gripo',
'6) Detail – Ang Ikog',
'7) Devastation – Stasyonan Ug Bus',
'8) Effort – Tugpahanan Ug Eroplano',
'9) Persuading – Unang Kasal',
'10) Depress – Pari',
'11) Predicate – Buhian Ang Iring',
'12) Protestant – Tindahan Ug Prutas',
'13) Statue – Ikaw Ba Na?']},
18: {'dialect': 'bisaya',
'joke': ['Usaka bisaya miadto sa Manila…',
'Bisaya: Pabili nga ng lemoncito.',
'Tindera: Anong lemoncito?',
'Bisaya: Lemoncito gud… Yong maliit na buongon!']},
19: {'dialect': 'bisaya',
'joke': ['Teacher: give me a tag question.',
'Pupil: My teacher is beautiful, isn’t she?',
'Teacher: Very good! Ibinisaya dong.',
'Pupil: Ang akong maestra gwapa, wa sya kuyapi?']},
20: {'dialect': 'bisaya',
'joke': ['Teacher: Class what are the different colors of bananas?',
'Juan: Mam! Mam! brown!',
'Teacher: Tanga! May brown ba na saging?',
'Juan: Ay op kors mam! Nilung-ag diay?']},
21: {'dialect': 'bisaya',
'joke': ['Kano: (Gisumbag ang pinoy ug nahayang) Take it easy men, take '
'it easy!',
'Pinoy: (Mibakod ug mibalos. Nahayang ang kano) Kisikisi men! '
'Kisikisi!']},
22: {'dialect': 'bisaya',
'joke': ['Customer: Day, kape.',
'Tindera: Tag P10 na ra ba.',
'Customer: Diba tag P8 ra na?',
'Tindera: Nimahal naman gud ang gasolina.',
'Customer: Ah, ayaw na lang butangig gasolina!']},
23: {'dialect': 'bisaya',
'joke': ['Juan: (gikan sa iskwelahan) Tay, akong mga klasmeyt puro zero.',
'Amahan: Hahaha! Ka-brayt jud sa akong anak. Unya ikaw dong?',
'Juan: Aw palupig ba diay ko? Zero pud!']},
24: {'dialect': 'bisaya',
'joke': ['Anak: Tay unsay English sa otot?',
'Tatay: Wind of Change',
'Anak: Ug Otot nga wa tingog?',
'Tatay: Sound of Silence',
'Anak: Ug Otot nga dalang tae tay?',
'Tatay: Dust in the wind',
'Anak: Pag ka bright dyod ning tatay, Liwat dyod ko nimo!']},
25: {'dialect': 'bisaya',
'joke': ['Botyok: Pre, ngano ingon ka nawong kog unggoy?',
'Bruno: Wa ko ga-ingon nawong kag unggoy, akong giingon ang '
'unggoy parihag nawong nimo.',
'Botyok: aw lagi, klaroha!']},
26: {'dialect': 'bisaya',
'joke': ['Tatay: Nak,imo ng gpainom ang kabaw?',
'Anak: Oo tay pro dli mn muinom..',
'Tatay: Ha?Asa mn diay nmo gpainum?',
'Anak: sa baso.. Tatay: buang! sa sunod butangig straw!']},
27: {'dialect': 'bisaya',
'joke': ['Studyante nsakpan may kodigo…',
'Teacher: unsa ni?',
'Student: prayer nakomam!',
'Teacher: unya nganong answers man ang nakasulat?',
'Student: hala, gitubag ang akong prayers!!']},
28: {'dialect': 'bisaya',
'joke': ['Balemtyms prayer',
'Lord,',
'kung dili ko nimo hatagan ug ka date, karong balemtymes '
'palihog be,make all my friends wa pud ka date! tablahay na ni '
'Lord.. tinuuray na ni! huhuhuhuhu ug iapil ang nagbasa.',
'Amen',
'Sugod na ampo ha para makatabla! hehehehehee']},
29: {'dialect': 'bisaya',
'joke': ['Warning: Kung unsa mahitabo nimo di nako sala… lingaw lingaw '
'ra ning ako.',
'Isend ni sa imong mga prends o sa tanan contacts nimo:',
'Kung wala koy uyab karon,kinsa imong ihatag nako nga uyab, ug '
'ngano? (name ha..)',
'Ilang tubag will determine your future hahahahahaha']},
30: {'dialect': 'bisaya',
'joke': ['Dodong: Tay, hain akon grief?',
'Tatay: kaw Bata ka, dili ka gihapon kabalo. Brief lagi dili '
'grief. ge nak dili be.',
'Dodong: Aw, hain man tay?',
'Tatay: Tua sa kwarto. Gi hammer!']},
31: {'dialect': 'bisaya',
'joke': ['Pinaskuhan nga Bahaw',
'It is almost 3 weeks already since Christmas and isa ra akong '
'pinaskuhan nga nadawat. I can’t bilibit! hehehehehehhe Last '
'Christmas is the only Christmas that I have received only 1 '
'gift…just imagine ISA LANG! sounds bitter hahahahaha pero sige '
'lang mudawat man gihapon ko sa pinaskuhan nga bahaw…sige na '
'kay pay day baya 2 days ago hahahahahha….',
'Sa usa ka tawo nga nihatag pasalamat ko. Pasalamat pud ko kay '
'duna manito manita sa office kay ikaw ra intawon ang naghatag '
'nako ug kalipay.. hahahahaha you are my hero.. kung wala imong '
'gift….zero jud uy…so consider yourself blessed! hahahahahaha '
'bahaw nga pinaskuhan still accepted.']},
32: {'dialect': 'bisaya',
'joke': ['Tatay: Unsa imo gusto nak?',
'Anak: Ay superman tay..',
'Tatay: sa hunahuna (hay maayo kay dili Barbie) Nilakaw ang '
'Tatay nga lipay..nibalik gihatag ang dulaan nga superman sa '
'anak.',
'Anak: ay si superman! gwapo lagi ka uy! Bugdo kaayo!',
'Ang Tatay na heart atak! Hayang!']},
33: {'dialect': 'bisaya',
'joke': ['Hubog 1: Kabalo ka pre, ako man uyab imo mama sa una.',
'Hibog2 2: (hilom, naminaw lang)',
'Hubog1: naminaw ba ka fre? ingon ko ba uyab nako imo mama sha '
'una!',
'Hubog 2: Hilom na Pa uy! Hubog na kaayo ka…']},
34: {'dialect': 'bisaya',
'joke': ['Si Dodong nikarga ug kanding sa bus. Nagutana ang conduktor, '
'Kinsa Ning kanding diri? Tubag si Dodong “Akua nang kanding '
'bai”. Ug ingon ang conduktor, “plitihan ta ni ha bai”…tubag si '
'Dodong “OO bai sige, pangutan-a lang na sya kung duna na sya '
'kawarta bai”.',
'Anak: Nay nagtambling tambling mi sa among P.E. sa skul ganiha '
'ba.',
'Nanay: ingon baya ko nga ayaw jud ug pag tambling tambling nak '
'kay makita imo panti. Kaulaw pud.',
'Anak: Nay wala man nakita akong panti kay ako man gisulod sa '
'akong bag akong panti.']},
35: {'dialect': 'bisaya',
'joke': ['Osa ka pirata gi interview sa reporter.',
'Reporter: Nganong imong pikas tiil kahoy man?',
'Pirata: Ah, naigo ni sa bala unya giputul giilisan na lang ug '
'kahoy.',
'Reporter: Imong toong kamot naa may hook?',
'Pirata: Ah, naputol ni sa espada diha nga duna kuy kaaway.',
'Reporter: Unya imong pikas mata duna man nai itum nga tabon? '
'(eye patch).',
'Pirata: Ah, naithan ni ug langgam unya nabuta.',
'Reporter: Ha? Makabuta diay nang iti sa langgam?',
'Pirata: Gilugud man gud nako sa akong toong kamut.']},
36: {'dialect': 'bisaya',
'joke': ['Anak: Mama, Mama, dili nako mokaun og itlog kay manimaho ko og '
'iti ingon atong silingan',
'Mama: motoo man ka nila na botbot mana',
'Anak: tinood no na mama',
'Mama: so og mukaon ka og itlog manimaho ka og iti,so kaon '
'nalang iti para maniho ka og itlog']},
37: {'dialect': 'bisaya',
'joke': ['Si Danilo niadto sa simbahan kay magpabunyag sa iya anak.',
'Danilo: Padre, magpabunyag ko sa akong anak',
'Pari: Unsay may gusto mong pangalan sa imong anak',
'Danilo: Toyota, dre.',
'Pari: Di na mahimo, ngan nag awto.',
'Danilo: Mercedes, lagi dre ngan man nag awto mahimo man.',
'Pari: O sigi sugot ko pero unsa may gusto nimong ibendita nato '
'sa imong anak , crudo o gasolina?']},
38: {'dialect': 'bisaya',
'joke': ['Laki: Kuhaon ko ang mga bitoon og ihatag ko kanimo.',
'Babae: saba diha! wa man gali ka kakuha anang kugmo gatambisay '
'sa imong ilong…',
'Laki: Aw! sorry day ha..wala man gud ko nasayod nga gusto sad '
'ka ani…..']},
39: {'dialect': 'bisaya',
'joke': ['Atty: Iha, mahimo bang ihulagway namo sa korte ang naglugos '
'nimo.',
'Biktima: Itumon, bugason, pislat ilong, pangag, putot iya nga '
'pisut…',
'Suspek: Bwesiit… Sigeee… Panaway gyud!']},
40: {'dialect': 'bisaya',
'joke': ['Killer: Padre mngumpsal ko',
'Pari: Unsa imu sala?',
                  'Killer: Nagpatay ko 20 ka tao',
'Pari: Unsa!Ngano!?',
'Killer: Mutu-o man gud sila GINOO! kaw Padre Tuo baka GINOO?',
'Pari: Ha…Aw Sauna,karon JAM2X nalang.']},
41: {'dialect': 'bisaya',
'joke': ['Tindero: Uy, dia tay gold nga relo, palit na mo! Kon moputi '
'ni, white gold! Kon mourong, stopwatch!']},
42: {'dialect': 'bisaya',
'joke': ['Kustomer: I-hard lang ang masahe Day.',
'Masahista: Sir baya, pa-hard-hard unya pa-soft-soft ra gihapon '
'diay.']},
43: {'dialect': 'bisaya',
'joke': ['Negrense: Sa amo gid sa Bacolod, kada pusod may pulis.',
'Sugboanon: Sa amo sa Sugbo, kada pulis may pusod!']},
44: {'dialect': 'bisaya',
'joke': ['Dad: Anak, bili mo ko softdrinx!',
'Anak: Coke o Pepsi?',
'Dad: Coke',
'Anak: Diet o regular?',
'Dad: Regular',
'Anak: Bote o can?',
'Dad: Bote',
'Anak: 8 oz o litro?',
                  'Dad: PUNYETA! tubig na lang!',
'Anak: natural o mineral?',
'Dad: mineral',
'Anak: bugnaw o dili?',
'Dad: lambusan ta man ka aning silhig ron…',
'Anak: lanot o tukog?',
'Dad: animal man cguro ka!',
'Anak: baka o baBoy?',
'Dad: LAYAS!!!!',
'Anak: karon o ugma?',
'Dad: karon na!!!',
'Anak: alas 11 o alas 12?',
'Dad: Yawa!!!',
'Anak: lake o baye?',
'Dad: letche!!',
'Anak: liquid o powder?',
'Dad: kanang powder ilambos sa imong dagway!',
'Anak: sa agtang o aping?',
'Heart attack ang amahan, patay buhi? Buhi….baskog '
'dili….paralize…hol body hap body? asus!']},
45: {'dialect': 'bisaya',
'joke': ['Pare 1: Asa ka gikan Pre?',
'Pare 2: Sa lubong sa akong ugangan Pre.',
'Pare 1: Unya nganong daghan man kaayo kang kinawrasan ug '
'bun-og?',
'Pare 2: Misukol man.']},
46: {'dialect': 'bisaya',
'joke': ['Lalaki nga boarder: Miga, excuse sa bi, kuhaon nako ang akong '
'brief diha sa banyo.',
'Babaye nga boarder: Taym sa kay nag-panty pa ko.',
'Lalaki: Okey, hulat lang ko.',
                  'Babaye: Okey na, wa nako mag-panty.']},
47: {'dialect': 'bisaya',
'joke': ['Patient: Doc, regular lagi ko malibang kada alas 7 sa buntag.',
'Doc: o di maau nuon! Unsay problema ana?',
'Patient: Alas 8 man ko mahigmata doc!']},
48: {'dialect': 'bisaya',
'joke': ['Lalake: Dong, mabaw ning suba?',
'Bata: Oo nong!',
'Lalake: (nilukso sa suba) Pisti tabang! Kalalum, ingon ka dong '
'mabaw!',
'Bata: Mabaw bitaw nong nitabok gani ang itik ganina!']},
49: {'dialect': 'bisaya',
'joke': ['Mag Uyab',
'Nag-istoryahanay sila ug dugay ug taod taod nihilum silang '
'duha ug niingon si babae.',
'Girl: nihilom lagi ka, ngano man?',
'Boy: nihilom lang!',
'Girl: unsa diay imong gihuna-huna?',
'Boy: akong gihuna huna? pareho lang sa imong gihuna-huna.',
'Girl: Aaaaaaaay Bastos…']},
50: {'dialect': 'bisaya',
'joke': ['Guwapo',
                  'Photographer: Dia ra imong letrato boss.',
'Customer: Di ko ani uy! Bati kaayo akong nawong aning '
'letratoha.',
'Photographer: <NAME>ud. Tan-awa ang imong back view morag si '
'<NAME>, ang side view, Paquito Diaz',
'Customer: Unya ang front view?',
'Photographer: Aw, <NAME>!']},
51: {'dialect': 'bisaya',
'joke': ['Egypt',
                  'Maestra: Miguel, where is Egypt?',
'Miguel: Egypt is parked across the street.']},
52: {'dialect': 'bisaya',
'joke': ['Asawa: Hon, bisan taas na imong edad, nakapaanak pa ka.',
'Bana: Super engine gud ni',
'Asawa: Ipa check-up na.',
'Bana: Ngano man?',
'Asawa: Smoke belching! Itom kaayong Bata.']},
53: {'dialect': 'bisaya',
'joke': ['Sir: Inday, si Sir mo ito. Bangga kotse ko and I needd cash!',
'Inday: Aru!!! Dugo-dugo gang ka anoh!',
'Sir: Gaga! Si Sir mo talaga to!',
'Inday: Gago! Si Sir ang tawag saken CUPCAKE!!!!']},
54: {'dialect': 'bisaya',
'joke': ['Inday: koya, karamehan pala nakalebeng sa simintiryu…. '
'GENAHASA!!!',
'Kuya: Pano mo nalaman???',
'Inday: eh kase, nakalagay sa lafeda nela… RIP!!!!']},
55: {'dialect': 'bisaya',
'joke': ['In a restaurant…',
'Man: Waiter? Bakit ng inorder ko?ilan ba ang cook ninyo rito?',
'Waiter: Ay suri sir, la man kami cuk, pipsi lang.']},
56: {'dialect': 'bisaya',
'joke': ['Teacher: Class, Draw a fish.',
'Class: Yeeesss maam!',
'Teacher: Ruby! Kahigko higko kag ka itom gid na ya sang '
'drawing mo haw?',
'Ruby: kalma bala maam! Daw manul ka! Kita mo sinugba ni!']},
57: {'dialect': 'bisaya',
'joke': ['Anak: Tay unsa nang naga Lopad sa ibabaw sa langit nga mura ug '
'Krus na dako?',
'Tatay: Baw linti ka na Bata ano gali na tun-an mo sa '
'iskwelahan nyo?',
'Anak: Ano gani na tay??!!',
'Tatay: Amo na ginatawag na Temprano!! 🙂']},
58: {'dialect': 'bisaya',
'joke': ['Tawo: Padre, nganong naa man ka’y daghan hinayhay nga bra, '
'panty, ug blouses? Naa ka’y asawa?',
'Pari: Sus! Kung and inyong limos ug amot ra ang akong saligan, '
'dili ko mabuhi. Nanglabada ko no!!!!!!!!!']},
59: {'dialect': 'bisaya',
'joke': ['Sa hospital…',
                  'Mrs: Dok, komosta na man ang akong bana?',
'Dok: Amoa na gyod giputol ang iyang tiil ug kamot.',
'Mrs: Ha? Dok, dili nako madawat!',
'Dok: Joke, joke, joke! Patay na bitaw uy.']},
60: {'dialect': 'bisaya',
'joke': ['Pari: Ang maigo ning bola maoy labing dakog sala nato! '
'(Miuntol ang bola og naigo ang pari.)',
'Pari: Uy, praktis pa to ha!']},
61: {'dialect': 'bisaya',
'joke': ['Sa hospital…',
'Doktor: Mrs. kinahanglan na jud samentuhon ang tiil sa imong '
'anak! grabe na ang diperensya!',
'Mrs: Hala oi! mga pila kaha ka sako dok kay palit na ko '
'hardware? Karon dayon aron mauga na ugma.',
'Anak: Nay oi! pakauwaw ra man ka! sagulan pa gani ug balas!',
'Doktor: Ayaw kalimti ang hollow block ha!!']},
62: {'dialect': 'bisaya',
'joke': ['Mrs: Sir,pwede ko manawagan s akong bna s radyo? Gidala among '
'lima ka anak.',
'Announcer: Go ahed, Mrs!',
'Mrs: Hoy, amaw! I-uli ang mga bta,usa ra imo ana!']},
63: {'dialect': 'bisaya',
'joke': ['Helper: Padre, gi texan ko sa akong amo nga naa ron sa abroad '
'nga pamisahan kunu ang ilang iro nga namatay.',
'Pari: Inday, tawo ra intawon ang misahan walay labot ang iro! '
'Nabuang na sila?',
'Helper: Na! Unya unsaon ta man ni rong gi padala nga $10,000 '
'para sa misa?',
'Pari: Aw! Wala man ka mo ingon nga katoliko diay ning inyong '
'IRO! Dad-a dire!']},
64: {'dialect': 'bisaya',
'joke': ['Apo: Lola, attend ako tipar ha.',
'Lola: Unsa ng tipar bah?',
'Apo: Tipar gud party bah, sa binali..',
'Lola: Puro sturya istambay! Mga PS mo tanan!',
'Apo: Unsa ng PS la?',
'Lola: Pakeng shet!']},
65: {'dialect': 'bisaya',
'joke': ['Q: Unsa ang ginaingon sa Americano kon nakautot?',
'A: Excuse me',
'Q: British?',
'A: Pardon me',
'Q: Pinoy?',
'A: Not me!']},
66: {'dialect': 'bisaya',
'joke': ['Unsaun nimo mahibal-an kon ang siopao dunay karne sa iring, '
'ilaga o iro?',
'Ipasimhot gamay ang karne sa iring. Kon ganahan ang iring '
'ilaga ni, kon dili iring ni, kon mudagan karne sa iro.']},
67: {'dialect': 'bisaya',
'joke': ['HOLIDAYS:',
'Sa mga inahan >>> MOTHERS DAY',
'Sa mga Amahan >>>> FATHERS DAY',
'SA MGA BabayeNG GASABAK >>> LABOR DAY',
'SA MGA ULITAWO >>>> PALM SUNDAY']},
68: {'dialect': 'bisaya',
'joke': ['Anak: ‘Tay, unsay kalainan sa Supper ug Dinner?',
'Tatay: Anak, kon mukaun ta sa gawas mao na ang Dinner. Kon '
'mukaon ta sa luto sa imo inahan mao na ang Suffer.']},
69: {'dialect': 'bisaya',
'joke': ['Ngano halos tanan nga ginakidnap instsik? Kay kung',
'Pinoy – hulugan!',
'kon Bumbay – 5-6!',
'kon Kano – credit card!',
'kon Intsik – C.O.D.!!!! Bongga talaga…Cash on delivery.',
'hala panago na mo nga Instsik!']},
70: {'dialect': 'bisaya',
'joke': ['Question: Unsa ang lahi sa corruption sa US ug sa Pinas?',
'Answwer: Sa US maPriso ang corrupt, sa Pinas ang corrupt '
'ga-adto sa US. pinoy jokes, bisaya jokes']},
71: {'dialect': 'bisaya',
'joke': ['Nanay: Grabe man ka mukaun nak uy!Di naka masugo.',
'Anak: Nay ang atong baBoy kung kusog mukaon ganahan kaayo ka. '
'Kinsa ba gyud imong anak ako o ang baBoy?']},
72: {'dialect': 'bisaya',
'joke': ['Lalaki: Love mao na siya akong ex Girlfriend.',
'Babae: Kamaut man sa iyang nawong love uy!',
'Lalaki: Wala koy mahimo love, mao gyud na akong weakness '
'sukad..']},
73: {'dialect': 'bisaya',
'joke': ['Bob: Pila pud imong kita sa usa ka adlaw nong?',
'Manlilimos: Sugod ko limos ala ocho sa buntag. Karon alas '
'nuEve duna na koy 80 pesos.',
'Bob: uy ok man diay pud no? Unsa pud imong palitun ana?',
'Manlilimos: aw palit lang ko espresso macchiato sa '
'starbucks.']},
74: {'dialect': 'bisaya',
'joke': ['Divorcees',
'X bana: Anak pag-uli nimo ihatag ang tseke sa iya ug ingna nga '
'mao na ang last nga tseke nga madawat niya kay 18 naka ha. Ug '
'ayaw kalimot ug ta-awa iyang expression.',
'Anak: (pagabot) Nay ingon tatay mao na ni ang last nga tseke '
'madawat nimo para child child support kay 18 nako ug lantawon '
'daw nako imo expression.',
'X Asawa: Aw diay, ingna imo tatay pagkita ninyo nga salamat sa '
'iyang suporta maskin dili siya imo tatay ha. Tapos tan-awa '
'gyud ang expression sa iynag nawong.']},
75: {'dialect': 'bisaya',
'joke': ['Ungo Napangag',
'Ungo #1: Mare, nganong napangag ka man?',
'Ungo #2: Unsaon mare nga nangabat man gud ko gabii.',
'Ungo #1: Pero nganong napangag ka man?',
'Ungo #2: Ang liog nga akong napaak sa estatuwa man gud ni '
'Bonifacio.']},
76: {'dialect': 'bisaya',
'joke': ['<NAME>',
'Maestra: Nganong nag-away man mo?',
'Pedro: Si Juan man gud Maam, iya kong gihapak sa Scrub The '
'Floor.',
'Juan: Si Pedro: biyay nanguna ha. Iya kong gilabayan sa Erase '
'The Board.',
'Maestra: Kung dili gani mo mopoyong duha, bitayon ta mong duha '
'sa Bayang']},
77: {'dialect': 'bisaya',
'joke': ['Bukol',
'Kulas: Nganong nabukol man na imong ulo, Bay?',
'Badoy: Nangharana man gud ko gabii didto sa ilang Marilou, Bay '
'Kulas. Pagsugod nako’g kanta, giitsahan dayon ko ni Marilou ug '
'buwak.',
'Kulas: Pero nganong nabukol man ka?',
                  'Badoy: Ang buwak gisud man gud ug kaang.']},
78: {'dialect': 'bisaya',
'joke': ['<NAME>',
'Si Bosyo, nga primero pang sakay og eroplano, kuyog ni Onyot '
'nga iyang amigo, diha nilingkud sa window seat',
'Bosyo: Bay Onyot, taas na gyud tag giluparan no? Tan-awa gud '
'nang mga tao sa ubosgagmay kaayo morag holmigas.',
'Onyot: Holmigas na sila nga tinooray, Bay. Wala pa gani '
'molupad ang eroplano.']},
79: {'dialect': 'bisaya',
'joke': ['Shy',
'Pasing: Mare, naunsa man na imong son-in-law nga pila ka tuig '
'na mang walay klarong trabaho?',
'Claring: Unsaon nga shy man gud na kaayo siya.',
'Pasing: Shy gud nga tabian man kaayo na!',
'Claring: Shy tiglaba ba, shy tiglimpyo sa balay, shy say '
'tigluto.']},
80: {'dialect': 'bisaya',
'joke': ['Panawagan Sa Radio',
'Mrs : Sir, pwede ko manawag sa akong bana sa radyo, gidala '
'among lima ka anak',
'Announcer : Go ahead, Mrs.',
'Mrs : Hoy, amaw! I-uli ang mga Bata! Usa ra imo ana! Bagag '
'nawong!']},
81: {'dialect': 'bisaya',
'joke': ['Away sa Pamilya',
'Husband: Pastilan sige ta ug away; magbuwag ta!',
'Wife: Sige! Ato bahinon atong anak!',
'Husband: Ako ang gwapo ug gwapa!',
'Wife: Nah! Gipili pa gyod ang dili iya!']},
82: {'dialect': 'bisaya',
'joke': ['Padulungan',
'Pare 1: pre ug mamatay ta unsa kaha mahitabo sa ato?',
'Pare 2: pre, ayaw lang ug kabalaka kay duha ray atong '
'padulngan, ikaw unsa imo pili-on? magpaabot ra sa paghukom o '
'mabuhi pag-usab?',
'P1: aw, mabuhi pag-usab',
'P2: kung mao na, duha ra pud imong padulngan. Mamahimong hayop '
'o mamahimong punuan sa kahoy.',
                  'P1: aw, mamahimong punuan nga kahoy.',
'P2: pero ug kahoy imong pili-on ayaw ug kabalaka kay duha ray '
'imong padulngan. Ang mamahimong gamit nga salog o gamit nga '
'haligi sa balay.',
'P1: aw, mamahimong gamit nga haligi.',
'P2: Apan ug ikaw mamahimong haligi, ayaw ug kabalaka kay duha '
'ray imong padulngan. Ang mamahimong papel nga sulatanan o '
'gamit sa kaselyasan.',
'P1: aw, mamahimong gamit sa kaselyasan.',
'P2: Apan ug ikaw mamahimong gamit sa kaselyasan, ayaw ug '
'kabalaka pare kay duha ra ang imong padulngan. Gamiton ka sa '
'kaselyasan sa kaLalakihan o kaselyasan sa kababaihan.',
'P1: aw, mamahimong gamit kaselyas sa kababaihan(hehe)',
'P2: Apan ug ikaw mamahimong gamit sa kaselyasan sa kababaihan, '
'pare ayaw ug kabalaka kay duha ray imong padulngan. pangpahid '
'gamit para sa ibabaw o gamit sa ubos.',
'P1: aw, mamahimong gamit sa ubos (hehehe)',
'P2: Apan ug ikaw mamahimong gamit pangpahid sa ubos, pare ayaw '
'ug kabalaka kay duha ra imong padulngan. Gamit sa likod o '
'gamit sa atubangan.',
'P1: aw, mamahimong gamit sa atubangan (hehehehehehe)',
'P2: Apan ug ikaw mamahimong gamit sa atubagan, pare ayaw '
'kabalaka kay duha ray imong padulngan. Ang mamahimong gamit sa '
'bangag ihi-anan o bangag sudlanan',
'P1: aw, mamahimong gamit sa bangag sudlanan. '
'(hehehehehehehehehe)',
'P2: Apan ug ikaw mamahimong gamit sa bangag sudlanan, pare '
'ayaw kabalaka kay duha ray imong padulngan. ITAGO KAY TIPIGAN '
'O ILABAY SA BASURAHAN.']},
83: {'dialect': 'bisaya',
'joke': ['Luis: bay tonyo, sigurado na jud nga lalake ang mahimong sunod '
'nga presidente sa pilipinas',
'Tonyo: ngano nakasulti man ka ana bay luis?',
'Luis: klaro naman kaayo na bay oi. syaro ug wala ka kadungog '
'sa balita sa radyo.',
'Tonyo: gibalita na diay daan nga layo pa ang eleksyon? sunod '
'tuig pa gani.',
'Luis: gibalita lagi. ingon ang balita ay. presidentiables for '
'twenty o ten (2010). lalake ra man ang naay oten. di lalake '
'jud ang presidente nato sunod. alangan man ug Babaye nga wala '
'man na silayu oten.']},
84: {'dialect': 'bisaya',
'joke': ['Teban: Kinsa imo idol Goliath?',
'Goliath: Si <NAME>.',
'Teban: Sige e spell kono ang Schwarzenegger.',
'Goliath: Joke ra bitaw nong Teban, si Jet Li bitaw…']},
85: {'dialect': 'bisaya',
'joke': ['Dodong: Tay,bakasyon naman. Magpatuli ko Tay kay free man sa '
'barangay health center.',
'Nanay: Hulata si Tatay dong aron mag-uban mo.',
'Dodong: Kita lang Nay……',
'Nanay: Dili….,kamo ang mag-uban aron magdungan mo ug patuli sa '
'imong Tatay.',
'Dodong : he..he… he… Roll Eyes']},
86: {'dialect': 'bisaya',
'joke': ['Wa Kaabot',
'Usa ka gwapa kaayo nga Dalaga ang milingkod sa front seat '
'tupad sa Driver. Mibyahe sila gikan sa Alcoy. Pagkataud-taod '
'naglain ang tiyan sa Dalaga tungod sa iyang gikaon nga kamote '
'sa probinsya. Kusog ang pag padagan sa drayber busa nag-ampo '
'ang Dalaga nga moagi sila ug libaong. (Nagpasalamat siya sa '
'hilom nga gisira ang SRP).',
'Dalaga: (Naghuna-huna: Pastang paita uy. Naa na pod mogawas na '
'pod. Pls. naa untay libaong sa unahan)',
'Tuod man dunay libaong ug nasalbar ang Dalaga. Mipahiyum siya '
'sa Driver sama nga way nahitabo.',
'Dalaga: (Naghuna-huna: Sus, maayo gani. Sakpan unta ko. Kauwaw '
'gyud)',
'Apan duna na po’y dautan nga hangin nga nakigbisog nga '
'mogawas. Busa nag-ampo na usab ang Dalaga nga aduna na usa’y '
'libaong ug tuod man nasalbar siya. Iyang gikihatan ang Driver '
'susama nga way nahitabo.',
'Dalaga: (Naghuna-huna: Sus, hapit gyud ko mabisto da. Maayo '
'gani wa kabantay ang Driver.)',
'Sa ikatulong higayon, gisakit na sab sa kapalaran ang Dalaga, '
'kay mogawas na usab ang di maayong hangin. Nag-ampo na usab '
'siya nga moagi ug libaong. Ug tuod man dunay libaong sa unahan '
'apan halayo pa. Miutong ang Dalaga aron lang gyud mapug-ngan '
'ang paghuyop sa dautang hangin. Gipaningot nga nagpamaak sa '
'ngabil. Gamay na lay kulang apan wa damha sa makusog nga '
'tingog mibulhot kini, POOOOOOT!',
'Driver: WA NA KA KAABOT SA LIBAONG NO!']},
87: {'dialect': 'bisaya',
'joke': ['Gusto ng Magka-anak',
'Naay magtiayon nga gusto na gyud ug anak, kay sa ilang '
'probinsya daku kaayo sila ug kayotaan, unya gusto na sila nga '
'anak nga Lalaki para naay maka ugmad sa ilang yuta.',
'Asawa: Hon daku na gyud ang akung tiyan.',
'Bana: hapit na gyud ka manganak day',
'Asawa: Maayo unta Lalaki ni atong anak hon. kay atong '
'padaruhon.',
'Bana: maayo unta dya uy kay atong padaruhon.',
'(naglabay ang pila ka buwan ug nilapas na sa iyam ka bulan ang '
'tiyan sa asawa)',
'Asawa: Hon lapas naman sa bulan akong tiyan pero wala pa ko '
'manganak',
'Bana: Bitaw day no lapas naman kaayo. hay mas maayo day nga '
'ako ning bulikaton.',
'(Tuod man gibulikat sa bana iyang asawa)',
'nagyawyaw ang Bata nga nagkanayon…. PWEEEE, di ko mugawas kay '
'inyo kung padarohun.']},
88: {'dialect': 'bisaya',
'joke': ['Collector',
'Estoria ni sa usa ka Collector sa floorwax: usa ka adlaw ana '
'sa dihang nanguleksyon ang usa ka Collector ngadto sa naka '
'utang ug floorwax:',
'Collector: Ayo…ayo..naay tawo maningil unta ko.',
'(Sa dihang gitunga ang BABAE nga naka utang labihan ka sexy '
'nag short pants ra ug mobo pa kaayo ang blouse)',
'Babae: Uy… nong, unsay ato?',
'Collector: Maningil unta ko mam sa floorwax.',
'Babae: Agoy nong balik nalang ugma kay wala ang akong Bana.',
'Collector: Sigilang balik rako ugma',
'(Pag ka ugma gibalik ang Collector)',
'Collector: Ayooo.. mam nia na pud ko kay maningil sa floorwax',
'( Sa dihang gi tunga ang BABAE ng naka utang, nag soot lang ug '
'night gown askang nipisa murag halos makita na ang kalag)',
'Babae: Uy nong, pastilan wala pa gyud ako bana niuli. balik '
'nalang ka ugma.',
'Collector: Sigilang mam balik lang ko ugma.',
'( Ang Collector nakahuna huna ug dautan sa Babae, kay sa '
'pirmiro nga paningil nag short ug mobo kaayo, unya sa ikaduha '
'nag night gown, namasin ang Collector nga sa ikatulo niya nga '
'balik MAG HUBO na giyud, nag hinam hinam ang Collector bahala '
'dili siya bayaran maka TARI lang, tuod man gibalik siya ug '
'paningil)',
'Collector: (Huna huna) Ah karon hubo-on nako sa gisoot wala '
'giyuy mabilin para kay sayod ko mo tunga si MAM HUBO na gyud '
               'tanan. Tuod man wala nay sanina ang Collector unya gabitay '
'bitay ra ang iyang pikoy)',
'Collector: Ayooo…. mam…yohooo….mam balik nasad ko kay maningil '
'sa floorwax…ayooo… yohooo….nakadungog ang Collector nga naa '
'nay mo abri sa pultahan…..sobra kaayo ka excited ang '
'Collector…ana kay pag abri sa pultahan ang BANA man sa BABAE!)',
'Bana sa Babae: Hoy!!! nag unsa man ka diha nanuman nag hubo '
'ka!',
'Collector: Unsa man, mo bayad mo o dili? kay ako IHIAN inyo '
'balay.']},
89: {'dialect': 'bisaya',
'joke': ['SelBoy: Lo, sa tanang mga lolo ug lola kamo ra jud ang sweet '
'kaayo.',
'Lolo: Nganong nakaingon man ka ana dong?',
'SelBoy: Gatusan na gud mo katuig nag-uban pero hangtod ron '
'honey pa gihapon inyo tawagan.',
'Lolo: Ayaw saba dong. Secret lang nato. Nakalimot man gud ko '
'sa ngalan sa imong lola.']},
90: {'dialect': 'bisaya',
'joke': ['usahay kung maghilak ka, way nakakita sa imong mga luha…',
'kung malipayon ka, way nakakita sa imong pahiyom…',
'kung nasakitan ka, way nakakita sa imong kasakit…',
'pero testingig PANGUTOT, lingi lagi na sila tanan!']},
91: {'dialect': 'bisaya',
'joke': ['Mama: Iha, di maayo dugay ka muuli, mga Lalaki ra ba imo '
'kuyog.',
'Doter: Yaw lang kabalaka ma, semenarista man akong mga kauban.',
'Mama: Uy! basig nalimot ka, imong Papa obispo!']},
92: {'dialect': 'bisaya',
'joke': ['Bisyo',
'Girl: Kun kasal nata makabiya ka sa sigarilyo?',
'Boy: Yes!',
'Girl: Sa laag?',
'Boy: Yes!',
'Girl: Unsa pa may imong biyaan?',
'Boy: Ikaw!']},
93: {'dialect': 'bisaya',
'joke': ['Maestra: Mokyo, naa koy lima ka mansanas sa tu-o nga kamot og '
'napo sa wala. sa ato pa naa koy…..?',
'Mokyo: mam, naa kay dako nga kamot, mam!']},
94: {'dialect': 'bisaya',
'joke': ['ETC',
'Judge: tinood ba nga ikaw ang nagkawat sa alahas, tV, CD, DVD '
', MP3 ug ETC?',
'Juan: angkonon nako ang tanan pero wala jud ko nikawat sa ETC! '
'wala ko kakita ana',
'sa ilanga balay..']},
95: {'dialect': 'bisaya',
'joke': ['Pupil: Ma’am, unsa ng chicken fox?',
'Teacher: English na sa manok gikaon sa irong buang.',
'Pupil: Kanang birdflu?',
'Teacher: Ka bugo gyud nmo oi, past tense na sa fly!']},
96: {'dialect': 'bisaya',
'joke': ['Sa math class',
'Teacher: Juan, kung ako aduna’y 5 ka anak sa una ka bana, 11 '
'sa ika duha, ug 8 sa ika tulo, aduna ko’y?',
'Juan: PAGKAUWAGAN MA’AM!.']},
97: {'dialect': 'bisaya',
'joke': ['Doktor: Pedro, nganong hinay-hinay man jud ka anang pagdala '
'anang kahon nga tambal raman nah.',
'Pedro: perte jud nkong amping ani dok kay basin makamata ang '
'mga sleeping pills!!!']},
98: {'dialect': 'bisaya',
'joke': ['Husband: Love, nganong gahilak man ka?',
'Wife: Hon, gibugal-bugalan ko sa mga silingan. doberman kuno '
'kog nawong…huhuhuhu!!!',
'Husband: yawa! nganong wa nimo paaka!!!']},
99: {'dialect': 'bisaya',
'joke': ['Jeep puno kaayo ug karga na-flat ang ligid.',
'Lalaki ngadto sa drayber: Noy, flat lagi na imong ligid!',
'Drayber: Natural ra na dong, bug-at gud ug karga. flat gani '
'nang imong ilong nga kugmo ray gidala!']},
100: {'dialect': 'bisaya',
      'joke': ['I thought I’m sad until I saw a man without both',
'arms shaking his shoulders happily, I asked why',
'he’s happy. He replied “Dili ko hapi dong! Katol akong',
'itlog dili nako makalot!']},
101: {'dialect': 'bisaya',
'joke': ['Samtang naligo, si ERAP mitawag ni LOI',
'ERAP: Wala bay shampoo?',
'LOI: Daghang shampoo diha!',
'ERAP: Puros mani para dry hair basa na akong buhok!',
'Bisdak na diay si Erap karon?']},
102: {'dialect': 'bisaya',
'joke': ['Teacher: Kitang tanan descendants ni adan & Eve',
'Mokyo: Di na tinood mam! ingon si tatay descendants man ta sa '
'UNGGOY!',
'Teacher: Mokyo, wa man ta naghisgot diri sa imong pamilya!']},
103: {'dialect': 'bisaya',
'joke': ['Atty: Iha, mahimo bang ihulagway nimo sa korte ang itsura sa '
'naglugos nimo?',
'Biktima: Itumon, bugason, pislat ug ilong, pangag ug putot.',
'Suspek: Sigeee… Panaway gyud!']},
104: {'dialect': 'bisaya',
'joke': ['Titser: unsay past tense sa “hikap?”',
'Boy1: nahikapan mam',
'Titser: present tense?',
'Boy2: gihikap-hikap mam.',
'Titser: future tense?',
'Boy3: jer-jer na jud na mam ba!']},
105: {'dialect': 'bisaya',
'joke': ['G1: Haaay, pagka thoughtful sa akong BF! ispoko mo na, '
'kada-adlaw manawag, way sipyat!',
'G2: Unsa man sad inyo istoryahan?',
'G1: Ay dah, mangutana kung ge-dugo na ba ko!']},
106: {'dialect': 'bisaya',
'joke': ['Mister: Hon, asa man tong cheese nga giPapalit nako nimo?',
'Misis: Naa sa lamesa.',
'Mister: Floorwax man lagi ni?!',
'Misis: Cheese na oi. Ako lang gibutangan ug ‘Floorwax’ para '
'dili kitkiton sa ilaga. Wais sa?']},
107: {'dialect': 'bisaya',
'joke': ['Girl: Love, unsa diay imung mindle name?',
'Boy: Buloka pud nimu oi! Middle name man na! Buwag na ta kay '
'bulok man ka…',
'Girl: Ayaw pag ingon ana, love!',
'Boy: Buwag na gyud lagi ta! Ambi to ang welding ring!']},
108: {'dialect': 'bisaya',
'joke': ['2 ka buang sa mental hospital…',
'Buang1: Oi! Unsaon diay pag-ikyas diri?',
'Buang2: Gamit lang ta ug flashlight. Latay ta sa suga…',
'Buang1: Palungun pa lang nimo ang suga, mahulog ko! Unsay '
'pagtuo nimo nako, buang?']},
109: {'dialect': 'bisaya',
      'joke': ['Usa ka bayot namatay. Gi istorya ni <NAME>.',
'<NAME>: Dili man ka pwede diri sa langit.',
               'Bayot: Hah? Ngano man <NAME>?',
'<NAME>: Basta dili gyud pwede ang mga bayot diri sa '
'langit.',
'Bayot: Aw cge gud. Adto nlng ko sa rainbow mag slide2x.']},
110: {'dialect': 'bisaya',
'joke': ['Kustomer: Day, pila man ang kilo aning imong kamatis?',
'Tindera: ay, tag biente ang kilo ani Sir.',
'Kustomer: brato ra diay ba?',
'Tindera: Oo, brato ra gayod ni kay panahon man karon sa '
'kamatis.',
'Kustomer: Kung mao kana, ako na pakyawon tanan.',
'Tindera: Ayaw lang pud tawon Sir…',
'Kustomer: kay ngano man dili puwede?',
'Tindera: Nah… walay nakoy itinda?',
'Pastilan!']},
111: {'dialect': 'bisaya',
'joke': ['Apo: Lola, kaon na diha ba .. bisan duha ra ka kutsara..',
'Lola: Mga buang man cguro mo. Lugaw gani dli nako matulon, '
'kutsara pa kaha?!']},
112: {'dialect': 'bisaya',
'joke': ['Petra: Kaayo buwagan ani akong bana!',
'Takya: Why man?',
'Petra: Nakadaug siyag free trip to Hongkong for 2..',
'Takya: Unya?',
'Petra: Ang amaw nilargag kaduha. Wa jud ko paubana!']},
113: {'dialect': 'bisaya',
'joke': ['Doc: Paghubo na dai, ayaw kahadlok, i’ll not take advantage '
'of u. General check-up ra ni.',
'Girl: Asa nako ibutang akong panty?',
'Doc: Dinha lang tapad sa akong brief!']},
114: {'dialect': 'bisaya',
'joke': ['When the clerk of court read the case… The accused '
'shouted…………',
'Buang mong tanan. Usa ra akong gi-rape!!!',
'Nganong People of the Philippines man akong kontra.']},
115: {'dialect': 'bisaya',
'joke': ['Girl: asa ang inyong vibrator dire?',
               'Clerk: naa sa bubong naka-dikit maam.',
               'Girl: Ok, cge paliton ko nang pula nga dako.',
               'Clerk: maam FIRE EXTINGUISHER mana.']},
116: {'dialect': 'bisaya',
'joke': ['Teban: Tigulanga nang Tatay Goliat sakop na sa pirata.',
'Goliat: Unsay pirata?',
'Teban: Nagpirat pirat nang mata.',
'Goliat: Ngeeeeek.']},
118: {'dialect': 'bisaya',
'joke': ['Pasyente: Dok, ngano man ni nga kong malibang ko naa may '
'plima?',
'Doktor:Ok lang na sya. Ang delikado kong inig sikma nimo naay '
'tae!']},
119: {'dialect': 'bisaya',
'joke': ['Lawyer: Who stabbed you?',
'Client: Mahimo binisay-on imo pangutana, Sir?',
'Judge: Interpreter, translate the question.',
'Interpreter: Kinsa kuno si Tabyo?']},
120: {'dialect': 'bisaya',
'joke': ['In the Paradise of Eden…',
'Eve: Adam, do you really love me?',
'Adam: Gaga! Naa pa ba diay lain!?']},
121: {'dialect': 'bisaya',
'joke': ['Operator: AT&T, How may I help you?',
'Pinoy: Heyloow. Ay wud like to long distans da Pilipins, '
'plis.',
'Operator: Name of the party you’re calling?',
'Pinoy: Aybegurpardon? Can you repit agen plis?',
'Operator: What is the name of the person you are calling?',
'Pinoy: Ah, yes, tenkyu and sori. Da name of my calling is '
'<NAME>. Sori and tenkyu.',
'Operator: Please spell out the name of the person you’re '
'calling phonetically.',
'Pinoy: Yes, tenkyu. What is foneticali?',
'Operator: Please spell out the letters comprising the name a '
'letter at a time and citing a word for each letter.',
'Pinoy: Ah, yes, tenkyu. Da name of <NAME> is '
'<NAME>.',
'I will spell his name foneticali, Elpidio: E as in Elpidio, L '
'as in lpidio, p as in pidio, i as in idio, d as in dio, i as '
'in io, and o as in o.',
'Operator: Sir, can you please use English words.',
'Pinoy: Ah, yes, tenkyu. Abanquel: A as in Airport agen, B as '
'in Because, A as in airport agen, N as in enemy, Q as in '
'Cuba, U as in Europe, E as in important, and L as in '
'elephant.']},
122: {'dialect': 'bisaya',
'joke': ['Anak nangayo ug money para pang Dota…..',
'Anak: Ma, ngau ta kug 500',
'Mama: Unsa? 400? Dakoa pud anang 300!',
'unsaon mana nimong 200?.',
'Abi nimog saun ra mangitag 100?',
'50 gani lisod kitaon.',
'20 pa kaha?…..',
'niay 5 oh!…….',
'Nice strategy!']},
123: {'dialect': 'bisaya',
'joke': ['Si Meyor ug Pito nya ka mga konsehal nangadto sa usa ka '
'imnanan/hubo2x….',
'sa wala pa sila nka sulod, nabasahan nila ang karatola na '
'naga-ingon…..',
'BELOW 18 NOT ALLOWED …',
'Meyor : ATRAS!…Di nalang ta madayun.',
'Mga konsehal : Ngano man Meyor?',
'Meyor : Walo ra ta!…']},
124: {'dialect': 'bisaya',
'joke': ['Teacher: Class, pagdala mo ug chip ahoy ha.',
'Student: Mam may “s” to and spelling maam…',
'Teacher: Ah, sorry. Chip ahoys!']},
125: {'dialect': 'bisaya',
'joke': ['Teacher: What is ur name?',
'Student: Dell.',
'Teacher: What is ur old?',
'intawon nibudlat ang mata sa Student kay wa to sa lesson']},
126: {'dialect': 'bisaya',
'joke': ['Inday gikan nag pa check-up sa Doktor',
'Ma’am: Inday unsa na may balita sa imong check up?',
'Inday : grabe ma’am kay kanser man',
'Ma’am: Ha, kanser unsa man imong balak karon?',
'Inday : Ay ako na lang pahibalu-on si ser kay kan’ Ser man '
'ning Bata.']},
127: {'dialect': 'bisaya',
'joke': ['Gitudluan sa akong sister kung unsaon siya pagtawag sa akong '
'2-year-old na pag-umangkon.',
'Sister: Mom<NAME> lagi. Dili kay Nene lang. Sige, sunod nako '
'ha. Mo–mmy. Ikaw.',
'Bata: Mo–mmy.',
'Sister: Ne–ne.',
'Bata: Ne–ne.',
'Sister: Mo–mmy Ne–ne.',
'Bata: ‘My Ne–ne.',
'Sister: Very good. Sige, tawaga daw ko beh.',
'Bata: Psssst!']},
128: {'dialect': 'bisaya',
'joke': ['Colegiala inside passenger jeepney. Covering her mouth with a '
'very dainty hanky. She appears really sossy and '
'sophisticated. Guy beside her trying to start a conversation.',
'Guy: Taga-Sanjo (San Jose) ka miga?',
'Colegiala: (irked, almost shouting): Of course ba?',
'(Nisiga ang mata sa passengers. Quiet chuckles ensued.)']},
129: {'dialect': 'bisaya',
'joke': ['In a youth gathering:',
'Guy 1: (catching his breath) Hahahaha! du^! Ay na sig katawa '
'du^! Di na ko kaginhawa du! Hahaha!',
'Guy 2: Gi lang du^! Tagaan lang tikag mouth to mouth breath!',
'Guy 1: HAHAHA! Bogo! Unsay mouth to mouth breath? Mouth to '
'mouth RECITATION na oi!']},
130: {'dialect': 'bisaya',
'joke': ['Me: Bai, naa lagi leak ang elbow sa tubo nga gitaod.',
'Plumber from MCWD: Factory effect na, sir.',
'Me: Factory defect siguro na, bai.',
'Plumber: Dili, sir, factory effect gyud na cya.']},
131: {'dialect': 'bisaya',
'joke': ['Ang akong 3 years old na pag-umangkon, iya jud gipugos ang '
'akong Auntie mo tan-aw ug TV.',
'Bata: Ma, tan-aw na lagi ta TV ba kay Lobo na.',
'Auntie: Unsa ba na ang Lobo ba?',
'Bata: Kana gud mahimo ug dog ang Babaye.',
'Auntie: Kinsa man na ang artista diha?',
'Bata: Si Angel Locsin.',
'Auntie: Unya, kinsa iya pares?',
'Bata: Si Lolo Pascual.']},
132: {'dialect': 'bisaya',
'joke': ['Nadawat nga text message gikan sa trabahante sa akong amiga '
'nga gisugo Papalit ug ticket para Bohol:',
'“Helo mam okey na mam alas syes sa bontag man adto sa pear '
'ono mam back and port nato mam unya inig abot nato sa '
'tagbilaran adto lang pa bock didto mam ang teket to a ni elvi '
'mam alas sayes mam.“']},
133: {'dialect': 'bisaya',
'joke': ['Girl 1: Sa imong tan-aw, naa pa siya’y na-feel sa ako?',
'Girl 2: He do! He do!',
'hahahahahha mao na jud ni!']},
134: {'dialect': 'bisaya',
'joke': ['Titser: Class, what are the different colors of bananas?',
'Juan: Mam, green, yellow, red, and brown.',
'Titser: Gago jud ka, naa bay brown nga saging?',
'Juan: Gaga sad ka, ang linung-ag diay piki na!?']},
135: {'dialect': 'bisaya',
'joke': ['Sexy Girl nangumpisal…',
'Pari: Iha, unsa may imong sala?',
'Girl: Father, ug makabati kog Lalaki nga mamalikas, di nako '
'mapugngan makig-sex niya!',
'Pari: Buang, ka leche gud anang sala-a, peste! Ataya nuh!']},
136: {'dialect': 'bisaya',
'joke': ['Si Pedro naulahig sulod sa klase.',
'Titser: Pedro, ulahi na pod ka..',
'Pedro: Ulahi man god ang akong relo Ma’am.',
'Titser: Problema ba na? i-adbans god.',
'Pedro: Sige Ma’am.',
'Taudtaod…',
'Titser: O, Pedro, asa man ka?',
'Pedro: Ting-uli na Ma’am!']},
137: {'dialect': 'bisaya',
'joke': ['Pulis gidakop ang prostitute',
'Prosti: Wala ko nagabaligya og sex oi',
'Police: Unsa man diay ng ginabuhat nimo?',
'Prosti: Saleswoman ko oi..nagabaligya og condom w/ free '
'demo.']},
138: {'dialect': 'bisaya',
'joke': ['Tindero: Oi suki! palit na og gatas sa baka tag dyes pesos '
'lng.',
'Manong: Ah! mahala gud! wala kay tagpiso lang diha?',
'Tindero: Naa man, pero ikaw pa ang mo supsop sa baka.']},
139: {'dialect': 'bisaya',
'joke': ['Unsay English sa cat?',
'Mitubag si Dodong,” cat”',
'nganong cat man ang english sa iring Ma.?',
'Mituba si Mama, “kay mocatcat man sa bongbong”',
'Nganong dog man ang English sa iro, Ma?',
'Kay mo dog man ang mga itoy sa inahan.',
'Bright diay ka ma',
'Ikaw bright pod ka Dong?',
'O, kay liwat man ka.']},
140: {'dialect': 'bisaya',
'joke': ['First day of work ni Huling as secretary sa office ni Atty. '
'Bukdoy',
'Diay mi abot nga ambongang laki.',
'Wait sa sir ha. dayon punit si Huling sa telepono. Pa cute '
'cute dayon ang baje, pa as if nga naay gika-istorya…',
'Hulat lang tawon ang laki, hilom nga naglingkod.',
'Pagkataod-taod, gibutang ni Huling ang phone. Lami kaayog '
'smile.',
'What can I do for you sir?',
'Ah, wala mam. Taoran lang unta nkug linya inyong telepono!']},
141: {'dialect': 'bisaya',
'joke': ['Naay pilosopo nga Lalaki ni palit og siopao',
'Kulas: Miss, isa ka siopao… kanang babae.',
'Waitress: Babae nga siopao?',
'Kulas: Oo. kanang naay papel nga hapin.. murag napkin..haha',
'Waitress: Ahh, mao ba? Lalaki mani nga siopao',
'Kulas: Lalaki?',
               'Waitress: naa man gud niy itlog sa sulod sir.']},
142: {'dialect': 'bisaya',
'joke': ['Bugo: Pre tagnaa unsa akong kinaiya, nagsugod sa letter A',
'Pare: Approachable?',
'Bugo: Mali',
'Pare: Amiable',
'Bugo: Hehe mali japun',
'Pare: O sige na sirit na',
'Bugo: Anest pre.']},
143: {'dialect': 'bisaya',
'joke': ['Asawa: Dong, maunsa ka kng ma2tay ko?',
'Bana: Tingalig mamatay sad ko!',
'Asawa: Ha! ngano man?',
'Bana: Usahay makapatay baya nang sobra sa KALIPAY!..ahaha']},
144: {'dialect': 'bisaya',
'joke': ['2 ka mg amiga nagpasikatay…',
'Girl1: Our family spent d whole summer in Europe, it was '
'great! jw bout u? wr dd u spend ur summer vcation?',
'Girl2: I just spnt it hir n d philippines..',
'Girl1: Rilly? uggghh! wer? hw poor!',
'Girl2: At ur Boyfrnd’s house..he was great! =)']},
145: {'dialect': 'bisaya',
'joke': ['Tagalog vs Cebuano… Mas grabe gyud ta!',
'Kon sa tagalog ang LIBANG nalingaw pa, diri sa ato pwerte '
'nang baho-a!',
'Kon sa tagalog ang LANGGAM nagkamang pa, diri sa ato naglupad '
'na!',
'Pero ang pinakagrabe gyud…',
'Kon sa tagalog ang BUNGI ang ngipon nangatangtang ra '
'(pangag), diri sa ato ang ngabil nangawaksi na!',
'Chino: kahinay ba sa akong kita sa akong nigosyo uy!',
'Janno: unsaman diay imong nigosyo bai?',
'Chino: Bay and Sil ra uy..',
'Janno: aw diay! Kusog mana bai nga nigosyo.',
'Chino: BAYabas and SILi bah….',
'Janno: butangi…..']},
146: {'dialect': 'bisaya',
'joke': ['Tatay: Anak pa ningkamut dyud ug iskwila.. unsa man d ay '
'imung corz na kuhaun anak?',
'Anak: Gusto ko mag seaman tay!',
'Tatay: Kana jud anak.. puhun puhun makla human ka sa imung '
'kurso anak. Naa ku barkada nga pwede mag pasakay sa imuha',
'Anak: Tinuud tay kinsa mana imung barkada tay.',
'Tatay: Akung barkada nga ukoy anak pa sakyun ka niya sa iyang '
'buko buko puhun puhun.',
'Anak: Nyahahaha tatay jud ohhh']},
147: {'dialect': 'bisaya',
'joke': ['Donya: Kay bag-o man ka dinhi, gusto ko masayod ka nga ang '
'pamahaw diri alas sais impunto!',
'Katabang: Way problema Sinyora! Kung tulog pa ko anang orasa, '
'una nalang mo og ka-on!!']},
148: {'dialect': 'bisaya',
      'joke': ['Tulo ka kumare ang nag-istorya.',
'Mare1: Sus, ako mare pwerte gyud nako ka limtanon kay ang '
'akong pustiso ako mang isud sa ref.',
'Mare2: Ay wala ra ka nako mare. Mas limtanon pa ko nimo kay '
'kon moagi gani ko sa hagdan, inig abot nako sa tunga-tunga '
'makalimot ko kon pasaka ba ko o panaog.',
               'Mare3: (Dayong pangyam-id) Sus, ako mga mare, simbako lang '
               'gyud (dayong tuktok sa bungbong as in ‘knock-on-wood’ effect) di '
'ra gyud pod ko intawon limtanon! (Dayong talikod). Kadyot usa '
'mga mare ha, kay ako usang ablihan ang pultahan kay naay '
'nanuktok.']},
149: {'dialect': 'bisaya',
'joke': ['Iro1: Pre, ingon sila kaning atong laway naay rabies, ang '
'rabies makamatay.',
'Iro2: Unya, unsay problema?',
'Iro1: Gitulon naku akong laway. Kulbaan ko!']},
150: {'dialect': 'bisaya',
'joke': ['Ang una namatay c “DA KING”..',
'Sunod namatay c “DA Boy”',
'sunod nsad c “DA MASTERRAPPER”..',
'wala kaha ma kulbaan si..',
'DAGUL? AHAHA….']},
151: {'dialect': 'bisaya',
'joke': ['Dear Tay,',
'Padad-i ko ug brief ky akong brief buslot na.',
'ang 2bag:',
'Anak, agwantahi lng usa ky ako gani garter nlng! 😀']},
152: {'dialect': 'bisaya',
'joke': ['Lolo: Kaniadto, akong 5 pesos inig adto nako sa department '
'st0re,makadala nakog gatas, pan, medyas, polo, ug pantalon.',
'Apo: Karon diay lo?',
               'Lolo: Lisod na karon kay naay surveillance CAMERA. :>']},
153: {'dialect': 'bisaya',
'joke': ['One day… sa tindahan..',
'Bata: Ayooh!',
'Tindera: Unsa man?',
'Bata: Naa moyload?',
'Tindera: Oh! naa!',
'Bata: Pateksa ko beh!…ehehe']},
154: {'dialect': 'bisaya',
'joke': ['Pari: Muapil ka sa Army of God ?',
'Juan: Member naku ana, Padre .',
'Pari: Ngano wla man ka sa misa permi ?',
'Juan: Secret Agent man gud koh pader .!',
'Pari: Atay rah !']},
155: {'dialect': 'bisaya',
'joke': ['Apo: Lo kaon na intawon, bisag duha lang ka kutsara.',
               'Lolo: Atay! Giango-ango na mo? Lugaw gani maglisod kog '
'tulon, kutsara pa kaha? Duha pa jud kabook!']},
156: {'dialect': 'bisaya',
'joke': ['Nag conduct og evaluation ang Doctor aron mahibal-an kung '
'duna bay improvement sa iyang mga Pasyente sa Mental '
'Hospital…',
'Ang doctor nag drawing og purtahan sa pader. Ni ingon dayon '
'ang doctor.',
'O kinsa tong na nga ayo na hala pwede na mo gawas ablihi lang '
'ninyo ning purtahan…',
'So ang mga boang nga ganahan na mo uli nag inilugay og adto '
'sa pader unya namatikdan sa doctor nga naay usa nga wla ni '
'duol sa gi drawing nga purtahan sa pader. Naka ingon ang '
'doctor nga arang-arang naa gyuy usa nga tarong. Gipa ngutana '
'sya sa doctor…',
'Ngano wla mn ka ni duol sa pader dli ka gnahan mo gawas diri?',
'Og kalit nga gi tubag sa buang ang maong Doctor…',
'Ayaw ko ilara doc…kabalo ko oi….nga naa nimo ang yabi…',
'Dakong dismaya sa Doctor kay buang lang diay gihapon…']},
157: {'dialect': 'bisaya',
'joke': ['Samtang nagklase si Maam Isyat, siya nagkanayon…',
'Maam Isyat: Kinsa ninyo ganahan muadto sa langit?!',
'…Ug ang tanang mga estudyante niisa sa ilang tuong kamot, '
'gawas lang kang Pedro…',
'Maam Isyat: O Pedro, nganong dili man ka ganahan muadto sa '
'langit?!',
'Pedro: Maam, nitugon man gud ‘to si mama nga paulion ko niya '
'ug sayo!']},
158: {'dialect': 'bisaya',
'joke': ['Mga Pinoy, Intsek ug Hapon sa Saudi nag pustahay kon',
               'kinsa ang maka pronawns sa pulong nga “Bulaklak at '
               'Paroparo”.',
               'Ang mga Pinoy mipusta ug dako sa ilang paisano.',
'Intsek: “Bulaklak at Palopalo”, ang intsek pildi kay dili '
'maka pronawns ug litra nga “R”.',
'Hapon: “Burakrak at Paruparu”, ang hapon pildi kay dili '
'makalitok ug litra nga “L”.',
'Pinoy: “Buyakyak at Payopayo”, labaw pang napildi, kay taga '
'Surigao man diay ang kontestant.']},
159: {'dialect': 'bisaya',
'joke': ['Babaye: Naglagot ko sa photographer !',
'Lalaki: Ngano man??',
'Babaye: Kay nagpapicture ko nagsandig lubi.',
'Lalaki: Unya?',
'Babaye: Kalagot oy ko… Kay pagkadevelop… Nagkagod naman ko ug '
'lubi!']},
160: {'dialect': 'bisaya',
'joke': ['Nakit an sa ko barkada ang iyang ex-boyfriend …',
'Ge pangutana sya sa iyang ex-boyfriend na…',
'Love pa ba kaha ko nimo??',
'Gi butang sa iyang ex-boyfriend ang kamot sa ako barkada sa '
'doghan…',
               'Gi sagpa bitaw sa ako barkada kay naa nay boobs ang '
'inatay!!!!']},
161: {'dialect': 'bisaya',
'joke': ['Inahan: Daghana og lung-ag, apila ang iring ug iro..',
'Anak: OK, Ma.',
'(pagkataud-taud)',
'Inahan: Hoi Inatay! Nganong duna’y iring sa nilung-ag?',
'Anak: Apil gani unta ang iro, wala lang jud masud..']},
162: {'dialect': 'bisaya',
'joke': ['Lets learn Japanese!',
'rice-HUKARA,',
'…lubi-KAGURA,',
'pagka0n-KUTSARAA,',
'cute-AKURA,',
'pangit-GABASA',
'Ehehe']},
163: {'dialect': 'bisaya',
'joke': ['Manager: Dawat kana. unang buwan nimo nga sweldo Php.5k. '
'After 6 mos. 15k na.',
'Applicant: Sir tenk yu kaayo! Sir, after 6 mos. Na lng ko '
'sulod?? Hahaha…']},
164: {'dialect': 'bisaya',
'joke': ['Anak: Tay.. Tay… Urine Test nako ugma…',
'Tatay: Ahw… Maayo na nak',
'Anak: Unsa man buhaton nako ana tay??…',
'Tatay: Bulok… Unsa pa man diay? Pag study na didto']},
165: {'dialect': 'bisaya',
'joke': ['Grabe ang ilongo kon manghagad og kaon “KAON TA ANAY”',
'Kng d ka mo sogot ingnon ka “KAON KA BALA”',
'Unia ug mo sugot ka “CGE KAON TA-E”',
'Nia kng nagkaon nka ingnon ka “KAON KA BALA”',
'…hahaha…unsa man gyud???']},
166: {'dialect': 'bisaya',
'joke': ['Boy: Naay pilok natagak oh! Make a wish dayon.',
'Girl: Sana magkaroon ako ng maraming wishes!',
'Tentenenen……',
'Natagak tanang pilok! 😀']},
167: {'dialect': 'bisaya',
'joke': ['Nanay: Hagbong na sad ka??? Ngano di man nimo sundon si '
               'Pedro. Permanente honor!',
'Anak: Unfair sad kaayo nay kung imo ming ikompara.',
'Nanay: Kay ngano man?…',
'Anak: Bright baya to iyang inahan.']},
168: {'dialect': 'bisaya',
'joke': ['Bata: Pa2x,naay man0k sa kusina dli magpabugaw..',
'Papa: Hadloka dong!',
'Bata: Hala ka manok!naay kalaha dha!!']},
169: {'dialect': 'bisaya',
      'joke': ['3 kah vampire nisod ug bar ug ni order',
               'v1: Ahmmmm fresh blood lang ako beh',
               'Waiter: ok here’s your fresh blood',
               'v2: dugo-dugo lang ako beh kay kuwang ako kwarta kung mag '
               'fresh blood pah koh!!',
               'Waiter: w8 for ah minute giluto pah ikaw sir unsa man imo???',
               'v3: tubig init lang ako beh kay nakapunit kug napkin mag tea '
               'lang ko………..lolxD']},
170: {'dialect': 'bisaya',
'joke': ['Kinutlo sa basahon ni EMPERADOR BRANDY kapitulo GENEROSO '
'BEERsikulo KULAFO hangtod REDHORSE…',
'Ang tawo nga dili mosubay sa matarong nga dalan…..',
'.”HUBOG”.. . oH yeah!.']},
171: {'dialect': 'bisaya',
'joke': ['Ang TAGAY murag salida..',
'Kung naay Mohilak, DRAMA.',
'Kung naay Magsumbagay, ACTION.',
'Kung naay Mangatawa, COMEDY.',
'Kung naay Mawa, SUSPENSE.',
'Kung naay Moligid, HORROR.',
'Kung naay Magchula, ROMANCE.',
'Kung naay Magkwarto? Aw wai lain, SCANDAL nana.',
'Nya kung manguli na ang tanan']},
172: {'dialect': 'bisaya',
'joke': ['Usa ka Foreigner nasakpan sa Citom naay traffic violation.',
'Citom: (nag gunit ug ballpen ug ticket, pinaisog) name sir!',
'Foreigner: <NAME>',
'Citom: ahhh… cge next time, be careful ha….']},
173: {'dialect': 'bisaya',
'joke': ['Singing Contest',
'1st contestant: Akong kantahun “dahong laya”',
'2nd contestant: Akong kantahun “d falling leaves”',
'3rd contestant: Dli ko mukanta!',
'Judges: Nganu man..?',
'3rd contestant: manilhig nalang ko kay daghan sagbot!']},
174: {'dialect': 'bisaya',
      'joke': ['Sa mall of Asia naay foreigner na nag shopping ug mga '
               'tshirt, tapos gi duol daun cya sa usa ka bisayang dakong '
               'saleslady',
               'Foreigner: How much is this hatton shirt Miss?',
               'Saleslady: (Wa katubag) Ahhh, uuuhmmmmm',
'Foreigner: How much is this?',
'Saleslady: Wait sa sir, ill think the match first! (nag '
'huna2x kng unsa pasabot sa amerikano)',
'Foreigner: How much?',
'Saleslady: Yes is it is no match for our bokser manny pakyaw '
'sir, hatton no much (match) with manny pram gen san sir.']},
175: {'dialect': 'bisaya',
'joke': ['Anak: Tay, tinuod ba ang ” FIRST LOVE NEveR DIES” ?',
'Tatay: Korek! ka jan nak! tan-awa nang imong NANAY hantud '
'karon wala pajud namatay ang ANIMAL!!']},
176: {'dialect': 'bisaya',
'joke': ['Mare 1: Grabe na jud ko ka kalimtanon oi, misaka gani ko ug '
               'hagdan, mo-hunong ko kay malimot ko kung paingon ba ko sa taas o '
'sa ubos.',
'Mare 2: Ako? Simbako lang…(with matching knock 3 times on '
'wood), dili jud ko limtanon. Excuse me sa ha kai murag naay '
'nanuktok!!!']},
177: {'dialect': 'bisaya',
'joke': ['After sex with a college Girl…',
'Mayor: Hmmmmm how much?',
'Girl: P200 pesos lang sir.',
'Mayor: What? how can you live with P200 pesos?',
'Girl: Ay sir, sideline ra ni nako, blackmail man jud ako '
'business…bantay ka ni mam ha?']},
178: {'dialect': 'bisaya',
'joke': ['Kapayason 1: Pre, grabe kau na akong Papa.. kabalo ka anang '
'Pacific Ocean? sya nag kalot ana!!!',
'Kapayason 2: Layo rana sa akong Papa pre.. kabalo ka anang '
'Dead Sea? sya nag patay ana!!']},
179: {'dialect': 'bisaya',
'joke': ['Adik: will u marry me?',
'Burikat: YES! i do! madawat ra nimo nga naa koi past?',
'Adik: cge lang.. wala man sad koi future..']},
180: {'dialect': 'bisaya',
'joke': ['GF: Leche ka! kit-an taka naa kuyog ug nag holding hands '
'pamo! kiss daun! gi binuangan rako nimo..',
'BF: nah! basta.. wala jud taka binuangi.. kato akong kauban '
'ako gi binuangan.']},
181: {'dialect': 'bisaya',
'joke': ['Sa usa ka layo nga baryo…',
'Bata: Tang, asa man paingon ni nga dalan?',
'Lolo: Ambot Dong, sukad2 wala man ko kakita nga nilakw ng '
'dalana.']},
182: {'dialect': 'bisaya',
'joke': ['Studyante: Noy, plete oh.',
'Driver: Asa gikan.',
'Studyante: Gikan nako.',
'Driver: Asa padong.',
'Studyante: Padong nimo bogo!']},
183: {'dialect': 'bisaya',
'joke': ['Pare 1: Pre, dakoag ngisi nimu gud?',
'Pare 2: Nagdamgo ko gabie pre! Nagkuyog kuno ta!',
'Pare 1: Nya unsa may naa ana?',
'Pare 2: Wa ra gud! GIKILIG RA KO!',
'wahahaha! 😀']},
184: {'dialect': 'bisaya',
      'joke': ['BF: Naa koy ihatag nga gift sa imo, pero tag-ana sa kung '
               'unsa!',
               'GF: Sige, gai ko ug clue…',
'BF: Kinahanglan ni sa imong liog….',
'GF: Kwintas?',
'BF: Dili… LUGOD!!!']},
185: {'dialect': 'bisaya',
      'joke': ['Teacher: what s d capital of d phils?',
               'Chinese Student: mam, kahit ako intsik, ako alam '
               'pilipinas, pilipinas wala capital, pulo utang! ahahahha…']},
186: {'dialect': 'bisaya',
      'joke': ['Ang bana nitok2 sa pultahan:',
               'Mr: Luv abot nko! ablihing pultahan!',
               'Mrs: dko. wla koy gisuot. naghubo ko.',
               'Mr: Ok rah. wla btaw koh uban…..',
               'Mrs: ikaw wla. ako naa!']},
187: {'dialect': 'bisaya',
'joke': ['Pasyente : Dok! naga’cremate pa ba mo ug patay diri?',
'Dok : Oo, pero 30 thousand amo singil.',
'Pasyente : Ha? Unya kay 15 thousand ra mani akong dala na '
'kwarta dok?',
'Dok : Puede man gihapon, i’half cook lang na nato. Ikaw na '
'lang human sa inyong oven.']},
188: {'dialect': 'bisaya',
      'joke': ['Anak: Nay gipa-tumbling ko sa skwelahan ganiha',
               'Nanay: Gaga, gusto ra nila makit-an imung panty!',
               'Anak: Kahibawu ko, mao bitaw ahu gitaguan sa bag akong '
               'panty']},
189: {'dialect': 'bisaya',
'joke': ['Dok : Mam, naa man diay ka breast cancer.',
'Pasyente : Ha? taka ka diha dok uy, I’m healthy! I’m healthy! '
'Basig naa pa kay lain option dok?',
'Dok : Bati pa jud ka ug nawong! tuo ka o dili?']},
190: {'dialect': 'bisaya',
'joke': ['Teacher: Class, use DERMATOLOGIST in a sentence.',
'Juan: <NAME>, TOLO DIYES na karon ang itlog sa manok, upat '
'diyes kung gagmay.',
'Teacher: Juan…get out!!!!!!!!!']},
191: {'dialect': 'bisaya',
'joke': ['Anak: Tay, magpalit ta ug de lata tay.',
'Tatay: Anak, ayaw ra gud pag’ingon ug de lata kay mura ka ug '
'taga bukid ana.',
'Anak: Aw, unsa man diay na tay?',
'Tatay: KANGGUDS!']},
192: {'dialect': 'bisaya',
'joke': ['Baye : Dok, unsa may akong buhaton na niwang man ko kaayo?',
'Dok : Pag’maintain lang ug 3 meals kada adlaw day, tapos '
'after 1 month, balik ka diria sa ako.',
'(after 1 month, nibalik ang babae sa Doktor)',
'Dok : Hoy! Naunsa na man hinuon ka na perte naman hinuon '
'nimong niwanga!',
'Baye : Alangan dok, nag’ingon man ka na mag’maintain ko ug 3 '
'ka Lalaki kada adlaw.',
'Dok : Nabuang nah! Kaon akong pasabot dili Lalaki! Kinsa may '
'dili mag’niwang ana labi na ug makasugat ka ug Lalaki na '
'dagkug karga!',
'ay yay yay!']},
193: {'dialect': 'bisaya',
'joke': ['Titser: Kitang tanan descendants ta ni ADAN & EVA.',
'Juan: D na tinuod mam uy, ingon ni tatay descendants daw ta '
'sa UNGGOY!']},
194: {'dialect': 'bisaya',
'joke': ['Boy1: Bai, itom man kaayo ka sa una bai, pero karon puti na '
'man kaayo ka! unsa may sekreto nimo bai??',
'Boy2: Aw sayon rah! Wala ra naq gitambalan aq ap-ap bai! '
'hek3….']},
195: {'dialect': 'bisaya',
      'joke': ['Tatay: Bogo ka anaka ka! Tan’awa ng card nimo puro F ang '
'nakabutang!!! FAILED KA!!!! BAGSAK KA! BAGSAK KA!',
'Anak: Pataka raman ka diha tay uy! FASAR man tawon ng F sa '
'CARD… bogo jud!']},
196: {'dialect': 'bisaya',
'joke': ['Nagpa-blood test si Jose…. Gikuha-an xa ug sample sa nurse….. '
'kay wala may coton, gisup’sup sa nurse ang tudlo niya…',
'Ingon ni Jose: Magpa’Urine test nalang pud ko daUn mam… '
'hehehe']},
197: {'dialect': 'bisaya',
'joke': ['Cus2mer: AyoOoO! Papalita ko ug safeguard!!!!!!!!',
               'Tindero: Ayaw cgeg syagit syagit diha dong kay di ko bungol! '
'Unsa man na simkard? Globe o Smart?']},
198: {'dialect': 'bisaya',
'joke': ['Carlo: Hoy yaku! nganung cge man ka ug katawa diha?',
'Yaku: Hehehehe… kabalo naq sa imo pin number. hehehe…',
'Carlo: Unsa man aq pin number b?',
'Yaku: Upat ka asterisk… hehehe']},
199: {'dialect': 'bisaya',
'joke': ['2 ka mg amigo:',
'Boy1: Pre ngano ng hilaka ka man?',
'Boy2: Huhu kay gibiya.an ko sakong uyab pre.',
'Boy1: Aw mao bah? asa man diay paingon emu uyab pre?']},
200: {'dialect': 'bisaya',
'joke': ['Warden: Karong adlawa, kamong tnan naay BAG-ONG BRIEF!',
'Priso: Yeheeey! pagka buotan ba lamang ni Warden oi!',
'Warden: Ok..Selda A and Selda B.. EXCHANGE BRIEF!',
'hahahahahhahahhaha']},
201: {'dialect': 'tagalog',
'joke': ['Boy: Tandaan mo lahat ng sasabihin ko dahil importante ito?',
'Girl: Ok! ano ba sasabhin mo?',
'Boy: Mahal na mahal kita lagi mong tandaan na nandito lang '
'ako, lagi sa tabi mo!',
'Boy: ano natandaan mo ba?',
'Girl: (kinilig) ah oo naman',
'Boy: Good! pakisabi yan sa bestfriend mo ah. Thank You! '
'Wahahaha']},
202: {'dialect': 'tagalog',
'joke': ['Teacher: Ang unang maka sagot ng tanong ko, makakauwi agad.',
'Juan: (hinagis ang bag sa labas)',
'Teacher: Kaninong bag yon?',
'Juan: Sa akin po mam! Bye guys!']},
203: {'dialect': 'tagalog',
'joke': ['Pakiusap huwag mo na akong BILUGIN ~ Kulangot', '']},
204: {'dialect': 'tagalog',
'joke': ['Isang araw si Pedro umuwi ng bahay.',
'Pedro: Itay, (padabog) pinapatawag daw po kayo sa school!',
               'Itay: Bakit Pedro? may ginawa ka na namang kalokohan noh?',
'Pedro: Ako po ba? baka po kayo Itay, ikaw nga po pinapatawag '
'di ba? lagot ka']},
205: {'dialect': 'tagalog',
'joke': ['Ang gulo, change number na naman ako.',
'Nakakabadtrip, sinong namigay ng number ko?',
'May laging nagtetext at nagmessage sa akin at tinawagan ko..',
'Ang sinagot lang ay. “hello, ako budoy!”.']},
206: {'dialect': 'tagalog',
'joke': ['Gwapong nagtext: Hi babe, paload naman P100.',
'Beking Jowa: Ok Babe',
'(nagmamadaling maghanap ng loading area)',
'Beking Jowa: nareceive mo na babe?',
'Gwapong nagtext: Hu U? nyahahaha']},
207: {'dialect': 'tagalog',
'joke': ['Hindi ko naman hinahangad na ipagmalaki mo ko!',
'Ang kinasasama lang nang loob ko...',
               'ay sa harap ng ibang tao ganun mo na ako kung itanggi! ~ '
'Utot',
'',
'']},
208: {'dialect': 'tagalog',
'joke': ['Teacher: Juan?',
'Juan: yes ma’am?',
'Teacher: 1+3?',
'Juan: 4 ma’am..',
'Teacher: very good! How about you Pedro?',
'Pedro: yes ma’am?',
'Teacher: 3+1?',
'Pedro: ayan ka na naman Ma’am, kapag mahirap yung tanong, ako '
'ipapasagot niyo!']},
209: {'dialect': 'tagalog',
'joke': ['Sa party, nilapitan ng isang gwapong lalaki ang isang babaeng '
'nkaupo sa isang tabi:',
'Lalaki : sasayaw ka ba ?',
'(tuwang tuwa ang babae at tumayo)',
'babae : oo, sasayaw ako!',
'lalaki : hay salamat! paupo ako ah! XD Hahaha',
'']},
210: {'dialect': 'tagalog',
'joke': ['Dionisia: Manny anak, sabi ng mga tambay sa labas, pangit daw '
'ako.',
'Manny: Ma, alam mo ang kagandahan ay nasa loob. Kaya huwag ka '
'ng labas ng labas! XD',
'Hahaha']},
211: {'dialect': 'tagalog',
'joke': ['boy:may pick up ako sayo',
'babae:ano..? (kinikilig)',
'boy:gus2 ko lng ngayong pasko PSP mo',
'babae:bkit...?',
'boy:Pasko Sa Piling mo']},
212: {'dialect': 'tagalog',
'joke': ['sperm1 : pag nkalabas aq d2. ang kukunin kong course ay '
'doctor.',
'sperm2 : ako nmn pag nkalabas ako d2 ang kukunin kong course '
'ay seaman.',
'sperm3 : asa pa kau na ma22pad yan mga pangarap nyu. eh nsa '
'bunganga kau...']},
213: {'dialect': 'tagalog',
'joke': ['Teacher: ano ang ating pambansang hayop? Nagsisimula sa '
'letter K ',
'Student: Kuto?',
'Teacher: mali, nagtatapos sa letter W!',
'Student: Kutow!',
'Teacher: mali, may sungay to.',
'Student: Demonyong kutow!',
'Teacher: GET OUT!']},
214: {'dialect': 'tagalog',
'joke': ['ANAK: nay! nay! si kuya nagbigti sa cr!',
'(tumakbo ang nanay sa cr ngunit wala ang kuya).',
'NANAY: nako anak wag kang magbibiro ng ganyan!',
'ANAK: hehe joke joke sa sala siya nagbigti.',
'']},
215: {'dialect': 'tagalog',
'joke': ['Nanay : Anak, bumili ka nga ng asin sa kanto.',
'Anak : Yoko nga! Ang dilim kaya. Nakakatakot na lumabas.',
'Nanay : Wag ka mag-alala, kasama mo naman angel mo e.',
'Anak : Eh di siya na lang utusan mo! WALANJO, dalawa pa kame? '
'Parang asin lang bibilhin?',
'Nanay : Aba! Bastos kang bata ka ah?',
'Anak : Ang bastos nakahubad!',
'Nanay : *hinimatay*.',
'Anak : Yan ang bastos! Kinakausap mo, tutulugan ka. Umayos ka '
'nay ah? Baka di kita matantiya! Argghhhh!']},
216: {'dialect': 'tagalog',
'joke': ['NANAY: Anak ang bait mo naman simula ng makalabas ka sa '
'Mental Hospital pinuno mo na ng tanim itong bakuran natin. '
'Bakit mo nga pala pinupuno ng halaman ang bakuran natin?',
'ANAK: A HUGE WAVE OF ZOMBIES IS APPROACHING!',
'']},
217: {'dialect': 'tagalog',
      'joke': ['Sa farm may manok na manyakis: kahit kabayo, kambing, '
               'pato, kalabaw, lahat ay kanyang kinakastahan. (Isang araw '
               'nangingisay ang manok sa lupa)',
               'Pabo: O yan, buti nga sayo, manyakis ka kasi. Nalason ka, '
               'mamatay ka na. Karma, manyakis!',
               'Manok: Anung pinagsasabi mong mamatay ha? Lason? Maghintay '
               'ka ha, pag natapos ako dito sa bulati, ikaw nanaman. Wag '
               'kang lalayo!']},
218: {'dialect': 'tagalog',
      'joke': ['Japan’s prime minister, Yoshihiko Noda, was poor in the '
               'English language. Hence, one month before going to the USA '
               'to visit President Obama, he was given some basic English '
               'conversation training.',
               'The instructor told Yoshihiko, “Prime Minister, when you '
'shake hands with President Obama, please say, ‘How are you?’',
'Then Mr. Obama will say, ‘I’m fine, and you?’',
'Now you should say, ‘Me too.’',
'Afterwards we, translators, will do all the work for you.”',
               'It looked quite simple and Yoshihiko was quite confident.',
'When he met Obama, he mistakenly said, “Who are you?”',
               'Mr. Obama was obviously shocked but still managed to react '
               'with humor: “Well, I am Michelle’s husband, hahahahaha…..”',
'Then Yoshihiko replied confidently, “Me too, hahahaha…. '
'hahaha….”',
'Then there was a long silence in the meeting room.']},
219: {'dialect': 'tagalog',
'joke': ["BOY: Miss,pwde ko bng ipasok ang 'MATIGAS' kong",
"'Pag-ibig'",
'sa MADULAS at MALAWAK na butas ng iyong',
"'pagmamahal'",
'',
'at isagad ang aking pagka',
"'seryoso'",
'at handa aqng iputok at isabog sa loob nang NAPAKALAKI mong',
"'PUSO'",
'ang KATAS ng aking',
"'PAG-IBIG'",
'GIRL: Sobra ka naman kung manligaw, nakakabuntis :',
'']},
220: {'dialect': 'tagalog',
'joke': ['Girl: Ano ba yang Boyfriend mo ang Pangit eh ikaw ang ganda '
'mo!',
'Gf: Hindi ka ba nanonood ng Beauty and the Beast?? Magiging '
'Gwapo din yan!!',
'(Nainis si Bf at sumagot)',
'',
"Bf: Hindi ka rin ba nanonood ng 'shrek' ??? Papangit ka "
'rin!!']},
221: {'dialect': 'tagalog',
'joke': ['AMA: Hoy Brando! Huwag kang babakla-bakla ha!',
'ANAK: Di po itay. Punta nga ako sa basketball court ngayon.',
'AMA: Yan, astig!',
'ANAK: Mama, nakita mo pompoms ko?']},
222: {'dialect': 'tagalog',
      'joke': ['LASING 1: Pare, ang bilog ng buwan!',
               'LASING 2: Di yan buwan, araw yan! Tanungin natin sa ale. '
               'Ms, araw ba yan o buwan?',
               'GIRL: Di po ako tagarito!!']},
223: {'dialect': 'tagalog',
'joke': ['GURO: Imagine na kayo ay MILYONARYO. Isulat ang iyong '
'activities.',
'ALL: Yes mam!',
'GURO: Juan bat di ka nagsusulat?',
'JUAN: Intay ko po ang SECRETARY ko']},
224: {'dialect': 'tagalog',
'joke': ['PEDRO: Dear pwede k b ngayon?',
'SEXY: Di pwede pagod ako!',
'PEDRO: Is that your final answer?',
'SEXY: Final answer!',
               'PEDRO: Ok, can i call a friend?']},
225: {'dialect': 'tagalog',
'joke': ['KILLER: Pangalan mo Mrs?',
'MRS: Inday po!',
'KILLER: Kapangalan mo inay ko, di na kita papatayin! Ikaw '
               'Mr?',
               'MR: Juan po, but my friends call me Inday!']},
226: {'dialect': 'tagalog',
'joke': ['TELEPONO: Krrringg! Krrringg!',
'AMO:Inday sagutin mo ang telepono baka yung chicks na naman '
'ng sir mo yan!',
               'INDAY: Si Maam talaga, pinapagselos ako!']},
227: {'dialect': 'tagalog',
'joke': ['JUAN: Tuwing magdadala ako ng GF s bahay,di nagugustuhan ni '
'inay!',
'PEDRO:Mgdala ka ng kamukha ng inay mo!',
'JUAN: Natry ko na,ayaw naman ni itay!']},
228: {'dialect': 'tagalog',
'joke': ['JUAN:Nay,ako lang po nakakuha ng line of 9 sa test namin!',
'NANAY:Wow, yan ang anak ko! Ilan b nakuha ng mga klasmeyts '
'mo?',
'JUAN: Lahat po 100!']},
229: {'dialect': 'tagalog',
'joke': ['JUAN: Alam mo, ayaw na ayaw kong makakita ng nakatayong babae '
'sa bus',
'habang ako eh nakaupo!',
'PEDRO: Kaya pinapaupo mo?',
'JUAN: Hindi, natutulog ako!']},
230: {'dialect': 'tagalog',
'joke': ['JUAN: Dok, ako po yung pasyente nyo LAST YEAR!',
'DOC: Oo naaalala ko! may problema ba?',
'JUAN: Itatanong ko lng po sana kung pwede na akong maligo!']},
231: {'dialect': 'tagalog',
      'joke': ['Maitim na nga, grabe pa mag pulbo.. tsk crinkles ka ba?']},
232: {'dialect': 'tagalog',
'joke': ['Mga uri ng gamot sa mga broken💔',
'OPTIEN - Para Sa Nabulag Na Pag-Ibig',
'PLEMEX - Para Sa Mga Alaalang Bumabara Sayong Isip',
'ALAXAN - Para Sa Sakit Na Nararamdaman',
'BIOFLU - Para Muling Makabangon',
'MEFENAMIC - Para Sa Pusong Kumikirot',
'NEOZEP - Para Sa Naipong Sipon Dahil Sa Pag Iyak',
'At Higit Sa Lahat',
'BIOGESIC - Para Kung Mag Mamahal Ka Ulit Kailangan Mo Ng Mag '
"'INGAT'"]},
233: {'dialect': 'tagalog',
'joke': ['Wag ka maghanap ng',
'taong makakaintindi',
'sayo. Ang hanapin mo',
'yung taong kahit hindi',
'ka naiintindihan, hindi',
'ka pa rin iniiwan.',
'']},
234: {'dialect': 'tagalog',
'joke': ['DAPAT PALITAN NA',
"YUNG 'SEEN' NG",
"'IGNORED'",
'',
'Para magising ako sa',
'katotohanang',
'binabalewala lang ako :(',
'',
'']},
235: {'dialect': 'tagalog',
'joke': ['Nag simula kami. sa simpleng asaran.',
'',
'Hindi nagtagal,',
'',
'SINAPAK KO NA']},
236: {'dialect': 'tagalog',
'joke': ['Peralyzed',
'',
'walang pera at di makagala dahil walang ipon',
'']},
237: {'dialect': 'tagalog',
'joke': ['Kahit hindi mo na',
'tuparin yung peksman.',
               'mamatay ka nalang']},
238: {'dialect': 'tagalog',
'joke': ['‘Alam mo ba kung bakit',
'hindi ka niya type?',
'Una, hindi ka keyboard.',
'Pangalawa, mukha kang mouse. ',
'']},
239: {'dialect': 'tagalog',
'joke': ['May nakapagsabi',
'na ba sayo na',
'ang CUTE mo?..',
'Kung wala pa,',
'',
'Eh wala tayong',
'',
'magagawa',
'ganun talaga']},
240: {'dialect': 'tagalog',
'joke': ['“Saan tayo kakain?!”',
'“KAHIT SAAN.”',
'',
'Pag ako naging mayaman, papagawa ako ng',
'fast food chain na “Kahit Saan”',
'',
'“Bahala na.” “Yung mura.” “Kahit ano.”',
'Benta siguro ‘yon ‘no?',
'']},
241: {'dialect': 'tagalog',
'joke': ['Anong goat ang pinaka maliit ? Edi kapirangGOAT ! ']},
242: {'dialect': 'tagalog', 'joke': ['Sementeryo nasunog lahat patay! ']},
243: {'dialect': 'tagalog', 'joke': ['Barko lumubog di nakatiis lumutang !']},
244: {'dialect': 'tagalog',
'joke': ['MAY DALAWANG MAGKASINTAHANG PIPI ANG NAG-AAWAY.',
'BF:',
'GF:',
'BF:',
'GF:',
'BF:',
'GF:',
'HOW SAD..BREAK NA SILA..XD']},
245: {'dialect': 'tagalog',
'joke': ['PROF: CLASS DO YOU SEE GOD?',
'CLASS: NO.',
'PROF: HAVE YOU TOUCHED GOD?',
'CLASS: NO.',
"PROF: THAT'S WHY THERE IS NO GOD.",
'JUAN: BUT SIR!',
'PROF: YES JUAN.',
'JUAN: DO YOU SEE YOUR BRAIN?',
'PROF: NO.',
'JUAN: HAVE YOU TOUCHED YOUR BRAIN?',
'PROF: NO',
"JUAN: OK CLASSMATES LET'S GO HOME. OUR PROF HAS NO BRAIN.",
'NGANGA SI PROF..']},
246: {'dialect': 'tagalog',
      'joke': ['DAUGHTER: dad pwede n po b ako mgkabf?',
               'DAD: no, ur too YOUNG to have a boy friend.',
'DAU:hmm.. Ok dad',
'--------------------------------',
'brown out',
'DAU: dad im scared, samahan m k sa room.',
               'Dad: ano k b nmn ang LAKILAKI mo na tkot k p sa dlim',
'DAU: :3']},
247: {'dialect': 'tagalog',
'joke': ['Juan:Pedro, anong pangalan mo?',
'Pedro:Pedro.',
'Juan:Ah akala ko Pedro.',
'Haha ano daw?']},
248: {'dialect': 'tagalog',
'joke': ["'Hindi ko pa naranasang magmahal kahit minsan!' -FISHBALL. "
'Php 0.50 since 1992.',
'']},
249: {'dialect': 'tagalog',
'joke': ['Pera ka ba? Naghihirap ksi ako pag wla ka.']},
250: {'dialect': 'tagalog',
'joke': ["May ishe'share lang ako. Nung",
'nakaraan galing ako sa isang Mall.',
'Nakakita ako stuff toy na pokemon,',
"life-size 'yon. Tinanong ko agad yung",
'nasa counter. Miss, magkano yung',
"Pokemon'g malaki? Nagalit na",
"naman sa'kin. Ano bang ginawa ko??"]},
251: {'dialect': 'tagalog',
'joke': ['JUAN, NAGPA-VULCANIZE NG GULONG..',
'VULCANIZER: NAKU JUAN, WLA NA AKONG PANTAPAL NG BUTAS PARA SA '
'GULONG MO.',
'JUAN: AKO NALANG ANG IPANTAPAL NYO. TUTAL PANAKIP-BUTAS LANG '
'NAMAN AKO.']},
252: {'dialect': 'tagalog',
'joke': ['(Kidnaper, tumawag kay Juan)..',
'Kidnaper: Juan, hawak ko ang anak mo! Kung gusto mo siyang '
'makuha, bigyan mo muna ako ng ransom!',
'Juan: Ah Eh.. 500,000 , pwede na?',
'Kidnaper: Hndi! Gusto ko ung may milyon!',
'Juan: Kalahating milyon.',
'Kidnaper: Cge. Salamat!']},
253: {'dialect': 'tagalog',
'joke': ['When the tears begins to fall,', 'Sipon will follow...']},
254: {'dialect': 'tagalog',
'joke': ['Boyfriend : Mahal, nakikita mo ba',
'yung babaeng yun?',
'Girlfriend : Oo, bakit?',
'Boyfriend : Iyan ang ex-girlfriend ko.',
'Girlfriend : Hindi naman sexy, at ang',
'pangit-pangit!',
'Boyfriend : Talagang ganun, ganyan',
'talaga ang kahinaan ko noon pa',
'man…']},
255: {'dialect': 'tagalog',
'joke': ['Mare 1: Ayoko na uminom, mare.',
'Mare 2: Bakit naman?',
'Mare 1: Kasi nakakalaki ng tiyan ang',
'alak.',
'Mare 2: Hindi naman ata, mare.',
'Mare 1: Oo mare, huling beses na',
'nalasing ako, nabuntis ako.']},
256: {'dialect': 'tagalog',
'joke': ['Judge:Ano ba talaga nangyari?',
'Erap : ?????? (di nagsasalita)',
'Judge: Sumagot ka sa tanong.',
'Erap: Naman eh!!! Kala ko ba hearing',
'lang to? Bakit may speaking?']},
257: {'dialect': 'tagalog',
'joke': ['Dahil sa hirap ng buhay,',
'Pasahero: Mamang tsuper, may',
'bayad po ba kapag bata?',
'Driver: Wala',
'Pasahero: Kapag kandong?',
'Driver: Wala din',
'Pasahero: Ok anak umupo kana at kakandong ako.']},
258: {'dialect': 'tagalog',
'joke': ['BOY: I LOVE U.',
'GIRL: SHUT UP.',
'BOY: I MISS U.',
'GIRL: SHUT UP!',
'BOY: YOURE SO BEAUTIFUL.',
'GIRL: OH? REALLY? 🙂',
'BOY: SHUT UP!']},
259: {'dialect': 'tagalog',
'joke': ['Ang pag-ibig ay parang utot, kahit anung gawin mo ay '
               'napakahirap itago. At pag-ibinuga mo ang kinimkim na '
               'damdamin, maamoy ng lahat kahit hindi ka man umaamin.']},
260: {'dialect': 'tagalog',
'joke': ['Mahal mo kasi maputi? It’s not love, it’s Dove!']},
261: {'dialect': 'tagalog',
'joke': ['Niligawan ka sa text tapos sinagot mo, asan yung love dun? '
'Nasa simcard?']},
262: {'dialect': 'tagalog',
'joke': ['Kung tinanong ka ng manliligaw mo kung chocolates o flowers… '
'Be practical! Bigas men, bigas!']},
263: {'dialect': 'tagalog',
'joke': ['Pag may mahal ka ipaglaban mo. Pag dalawa naman mahal mo '
'paglabanin mo, matira matibay kamo.']},
264: {'dialect': 'tagalog',
'joke': ['Mahal ka niya, mahal mo siya. Pero mas mahal ang tuition, '
'ga-graduate ka ba?']},
265: {'dialect': 'tagalog',
'joke': ['Pag gusto may paraan, pag ayaw ilibre mo muna, sasama na '
'yan!']},
266: {'dialect': 'tagalog',
'joke': ['Para kang dessert. Ang sweet mo sakin, ang sweet mo sa kanya, '
'ang sweet mo sa lahat. Anong flavor ka?',
'Crema de Puta?']},
267: {'dialect': 'tagalog',
'joke': ['Ang bilis ng panahon no? Parang last year lang nene ka pa, '
'ngayon nanay ka na? Landi mo talagang bata ka eh!']},
268: {'dialect': 'tagalog',
'joke': ['Sa panahon ngayon mas tumatagal pa ang UTANG kesa sa '
'RELASYON.']},
269: {'dialect': 'tagalog',
'joke': ['Once there was an angel who wants to take everything away '
'from me, then nakita ko sya tumingin sayo…”Oist”',
'Pag yan ginalaw mo gagawin ko shuttlecock ang pakpak mo!']},
270: {'dialect': 'tagalog',
'joke': ['Kung wala kang maisip na iregalo sa taong mahal mo',
'Halikan mo na lang. Tapos sabihin mo… “Kung ayaw mo, ibalik '
               'mo na lang.”']},
271: {'dialect': 'tagalog',
'joke': ['Bakit pag umiinom tayo ng isang basong tubig',
'parang ang hirap? Pero pag umiinom tayo ng redhorse',
'kahit isang case parang kulang pa? Bakit ganon?']},
272: {'dialect': 'tagalog',
'joke': ['Ang pagmamahal ko sa mga EX ko ay parang ulam',
'namin kanina… UBOS NA!']},
273: {'dialect': 'tagalog',
'joke': ['Masakit sabihin ang “I hate you”',
'Mahirap sabihin ang “I’m sorry”',
'Lalo na ang “I love you”',
'Pero pinakamahirap sabihin ang…',
'“iskibiritsiboooop',
'iskiribaaboap',
               'blooopikiribitkiribit”',
'Ikaw nga?']},
274: {'dialect': 'tagalog',
      'joke': ['Nag-aaway na naman ang utak at puso.. Sabi ng utak sa '
               'puso, Kalimutan mo na sya.. “T@ng@ mo talaga!” sagot ng '
               'puso… “Kala ko ba matalino ka? Paano ko kakalimutan eh '
               'lagi mong iniisip!”']},
275: {'dialect': 'tagalog',
'joke': ['Ang PAG-IBIG ay parang utot. Kahit anong gawin ay napakahirap '
'itago. At pag-ibinuga mo ang kimkim na damdamin, maaamoy ng '
               'lahat kahit hindi ka man umamin!']},
276: {'dialect': 'tagalog',
'joke': ['Kung ayaw mong mainlove ng todo',
'Ay huwag mo ng susubukang tingnan pa ako,',
'dahil baka mabaliw ka ng husto!!!']},
277: {'dialect': 'tagalog',
'joke': ['Lahat naman tayo may kapintasan,',
'Lahat tayo hindi perpekto…',
'Kaya wag kang mag-alala kung ganyan',
'ka pinanganak…',
'Normal lang yan…',
'Hindi mo naman kasalanan na maging kamukha mo si…',
'KOKEY.']},
278: {'dialect': 'tagalog',
'joke': ['Dahan-dahan ka sa pagpili',
'ng MAMAHALIN mo ..',
'Baka kasi MALAGPASAN mo ako!']},
279: {'dialect': 'tagalog',
'joke': ['Nakakainis kayo lagi niyo na lang ako tinatapakan.',
'Hindi na ba magbabago ang pagtingin niyo sakin? -Doormat']},
280: {'dialect': 'tagalog',
'joke': ['Bakit pag late ka, pumapasok yung prof mo?',
'Pero pag hindi ka late wala naman yung prof mo?',
'Bakit ganon???']},
281: {'dialect': 'tagalog',
'joke': ['Wala naman talagang taong panget,',
'nagkataon lang na ang mukha nila ay di pa',
'uso sa panahon ngayon.']},
282: {'dialect': 'tagalog',
      'joke': ['Paano ba nasusukat ang pag-ibig?',
'Paano ba malalaman kung mahal mo ang isang tao?',
'Pano ko malalaman kung siya na ba talaga?',
'Hindi ko alam pero dapat tandaan mo na wag kang',
'sisigaw pag nakasalubong mo si Sadako.']},
283: {'dialect': 'tagalog',
'joke': ['Pag masaya ka, masaya rin ako.. pag badtrip ka, badtrip din '
'ako,',
'Pag malungkot ka, malungkot din ako..pag nasa2ktan ka, '
'nasa2ktan dn ako..',
'wala lang….gusto lang kitang gayahin ahihihi…']},
284: {'dialect': 'tagalog',
'joke': ['Ang bawat piso ay pinaghihirapan',
'Dugo’t pawis ang puhunan.',
'Mahalaga ang bawat piraso',
'kaya sana…',
'Magreply ka naman pag nagtetext ako!',
'Mahiya ka sa parents ko na nagtitiyagang magload saken!']},
285: {'dialect': 'tagalog',
'joke': ['Tom, nagitim ka ha, nag-outing ka siguro no?',
'Hindi po, nagsimento po ako maghapon.',
'Ang sipag ah! Ano naman sinimento mo?',
'Relasyon po namin, baka sakaling tumibay.']},
286: {'dialect': 'tagalog',
'joke': ['bakit laging challenging ang pasko? kase laging may hamon ']},
287: {'dialect': 'tagalog',
'joke': ['Anong tawag sa babaeng cowboy ? edi I-HAAAAAAA! ']},
288: {'dialect': 'tagalog',
'joke': ['Q: Bakit laging nakayuko ang mga biik ? ',
'A: Kase ang taba ng nanay nila ']},
289: {'dialect': 'tagalog',
'joke': ['May Joke ako about sa mayonnaise! , Kaso ayaw ko baka '
'i-spread nyo eh ']},
290: {'dialect': 'tagalog',
'joke': ['Kapitan : Lulubog na ang barko kumapit kayo ',
'Pasahero: (Lumapit lahat sa kapitan) ',
'Kapitan : Oh bat lahat kayo nalapit sakin ? ',
'Pasahero: Wow diba kapitan ka ? ']},
291: {'dialect': 'tagalog',
'joke': ['Q: Anong tawag sa damit na maraming bulaklak ?',
'A: Floral',
'Q: Paano naman tawag sa damit na isa lang bulaklak?',
'A: Singular']},
292: {'dialect': 'tagalog',
'joke': ['May joke ako about unemployment, Kaso baka di mag work']},
293: {'dialect': 'tagalog',
'joke': ['May joke ako about business, Kaso baka di bumenta']},
294: {'dialect': 'tagalog',
'joke': ['May joke ako about basura, Kaso baka ikalat mo']},
295: {'dialect': 'tagalog',
'joke': ['May joke ako about sa medyas, But it really socks']},
296: {'dialect': 'tagalog',
'joke': ['May joke ako about sa slow, Kaso baka tamaan PLDT.']},
297: {'dialect': 'tagalog',
'joke': ['May joke ako about Tito Sotto, Kaso baka kopyahin niya.']},
298: {'dialect': 'tagalog',
'joke': ['may joke ako about sa grades ko, kaso baka hindi pumasa']},
299: {'dialect': 'tagalog',
'joke': ['May joke ako about pizza, Kaso baka di ko ma-deliver']},
300: {'dialect': 'tagalog',
'joke': ['may joke ako about chemistry, kaso baka walang mag react']},
301: {'dialect': 'tagalog',
      'joke': ['Boy: miss taga saan ka?',
               'Girl: bakit?',
               'Boy: gusto ko lang malaman kung saan ka nakatira, ibig '
               'kitang haranahin mamayang gabi...',
               'Girl: naku hindi na uso yun....',
               'Boy: ano na uso ngayun?',
               'Girl: halika hatid mo ako sa sogo..']},
302: {'dialect': 'tagalog',
'joke': ['GIRL: Hubarin mo na bra ko...',
'BOY: O, ayan...',
'GIRL: Hubarin mo na panty ko...',
'BOY: O, ayan, hinubad na...',
"GIRL: Sige, next time, 'wag mo na isusuot mga gamit ko ha!"]},
303: {'dialect': 'tagalog',
      'joke': ['Si Pedro bumili ng cond0m sa mini stop.',
'Cashier(girl): Sir, ipaplastik ko pa po ba?',
'Pedro: Hindi na, susuotin ko na e.']},
304: {'dialect': 'tagalog',
'joke': ['Ayoko nang madidi-dikit sa pintuan...',
'sawang-sawa na akong tawaging boy next door.']},
305: {'dialect': 'tagalog',
'joke': ['Boy: alam mo, para kang albatros deodorizer.',
'Girl: bakit naman?',
'Boy: kasi binigyan mu ng halimuyak ang mala-inodoro kong '
'buhay..']},
306: {'dialect': 'tagalog',
'joke': ['Pedro: Nakabili na ko ng hearing aid. Grabe! ang linaw na ng '
'pandinig ko!',
'Juan: Talaga?! Magkano bili mo?',
'Pedro: Kahapon lang']},
307: {'dialect': 'tagalog',
'joke': ['Banat ng nanay sa anak',
'Aanuhin mo pa ang alak kung sa akin pa lang tatamaan ka '
'na!!!']},
308: {'dialect': 'tagalog',
'joke': ['Girl: Hatid mo ko.',
'Boy: Ayoko. Wala ako pera ngayon.',
'Girl: Ayaw mo?! Bahala ka! Wala pa naman tao sa bahay ngayon.',
'Boy: Aba! Tingnan mo nga naman. May naipit pa pala akong '
'bente sa wallet.']},
309: {'dialect': 'tagalog',
      'joke': ['Q: Ano ang similarity ng UTOT at TULA?',
'A: Pareho silang nagmula sa POET']},
310: {'dialect': 'tagalog',
'joke': ['Q: Ano ang pwede mong gawin sa GABI na hindi mo pwedeng gawin '
'sa UMAGA?',
'A: eh di MAGPUYAT.']},
311: {'dialect': 'tagalog',
'joke': ['Q: Ano ang pagkakaiba ng Biology at Sociology?',
'A: ‘Pag ang sanggol kamukha ng tatay Biology yun, Pag kamukha '
'naman ng kapitbahay ninyo ang sanggol, sociology yun.']},
312: {'dialect': 'tagalog',
'joke': ['Q: May tatlong lalake ang tumalon sa tubig, ilan ang nabasa '
'ang buhok?',
'A: eh di..,,wala kalbo silang lahat eh..,,ngeekkkk..!!!']},
313: {'dialect': 'tagalog',
'joke': ['Q: Ano ang maraming sakay jeepney o ambulansya?',
'A: Syempre ang ambulansya! Kasi, ang jeepney ay 10-10 lang '
'ang bawat side; samantalang sa ambulansya, madalas na 50-50 '
'ang sakay.']},
314: {'dialect': 'tagalog',
'joke': ['Q: Bakit gising magdamag ang mga bampira?',
'A: Kasi nag-aaral sila para sa kanilang blood test!']},
315: {'dialect': 'tagalog',
'joke': ['Q: Ano ang makukuha mo sa baboy na magaling mag karate?',
'A: Eh di PORK CHOP!']},
316: {'dialect': 'tagalog',
'joke': ['Q: Bakit kailangang lagyan ng gulong ang rocking chair ni '
'lola?',
'A: Para makapag-rock and roll siya!']},
317: {'dialect': 'tagalog',
'joke': ['Q: Ano ang binibigay ng doctor sa ibon na may sakit?',
'A: Eh di TWEETMENT!']},
318: {'dialect': 'tagalog',
'joke': ['Q: Ano ang mas nakakadiri sa uod na nakita mo sa iyong '
'prutas?',
               'A: Eh di yung kalahating uod nalang! pwe! pwe! pwe!']},
319: {'dialect': 'tagalog',
'joke': ['Knock knock',
'Whos there',
'Meatloaf',
'Meatloaf who',
'Sa yong ngiti akoy nahuhu MALING']},
320: {'dialect': 'tagalog',
'joke': ['Knock knock',
'Whos there?',
'UST',
'UST who?',
'UST call me on my cellphone late night when you need my '
'love']},
321: {'dialect': 'tagalog',
'joke': ['Knock knock',
'Whos there?',
'This guy’s in love with you pare',
               'If ever you’re in my arms again, this guy’s in love with you '
'pare']},
322: {'dialect': 'tagalog',
'joke': ['Knock knock',
'Whos there?',
'My thoughts',
'My thoughts who?',
'My thoughts.. my knees.. my shoulder.. my head.']},
323: {'dialect': 'tagalog',
'joke': ['Knock knock',
'Whos there?',
'Angono',
'Angono who?',
'Angono swing from the chandelieeeeeer']},
324: {'dialect': 'tagalog',
'joke': ['Knock knock',
'Whos there?',
'Silver swan',
'Silver swan who',
'My mama dont like you, she likes silver swan']},
325: {'dialect': 'tagalog',
'joke': ['Knock knock',
'Whos there?',
'Nanay ni wally',
'Nanay ni wally who?',
'Nanay ni wally na ko sa forever.']},
326: {'dialect': 'tagalog',
'joke': ['Knock knock',
'Whos there?',
'Bwisit to!',
'Bwisit to! Who',
'Bwisit to! Late now to say sorry']},
327: {'dialect': 'tagalog',
'joke': ['Knock knock',
'Whos there?',
'Yemen',
'Yemen who',
'What do yemen when you nod your head yes but you wanna say '
'no']},
328: {'dialect': 'tagalog',
'joke': ['Knock knock',
'Whos there?',
'ginabi sa road',
'Ginabi sa road who?',
'why you ginabi sa road? Dont you know im human too.']},
329: {'dialect': 'tagalog',
'joke': ['Knock knock',
'Whos there?',
               'Gangbang',
               'Gangbang who?',
'Gangbang into the room i know you want it lol SPG']},
330: {'dialect': 'tagalog',
'joke': ['Knock knock',
'Whos there?',
'pekpek',
'Pekpek who',
'You look so pekpek standing there in my american apparel '
'underwear hhahaha mukhang pekpek ampota']},
331: {'dialect': 'tagalog',
'joke': ['Knock knock',
'Whos there?',
'Mayonnaise',
'Mayonnaise who?',
'My toes, mayonnaise, my shoulder, my head hahahaha okey']},
332: {'dialect': 'tagalog',
'joke': ['Q: Bakit maswerte ang kalendaryo?',
'A: Dahil marami siyang date.']},
333: {'dialect': 'tagalog',
'joke': ['Question: Kung ang suka ay vinegar, ano naman ang Inggles ng '
'toyo?',
'Answer: Baliw! XD']},
334: {'dialect': 'tagalog',
'joke': ['Q: Bakit malungkot ang kalendaryo?',
'A: Kasi bilang na ang araw niya.']},
335: {'dialect': 'tagalog',
'joke': ['Q: Anong puno ang hindi pwedeng akyatin?',
'A: eh di yung nakatumba!']},
336: {'dialect': 'tagalog',
'joke': ['Q: Anong isda ang bumabaril ', 'A: Edi BANG-us!!']},
337: {'dialect': 'tagalog',
'joke': ['Q: Ano ang maraming sakay jeepney o ambulansya?',
'A: Syempre ang ambulansya! Kasi, ang jeepney ay 10-10 lang '
'ang bawat side; samantalang sa ambulansya, madalas na 50-50 '
'ang sakay.']},
338: {'dialect': 'tagalog',
'joke': ['Question: Ano ang karaniwang kasunod ng kidlat?',
'Answer: Sunog! XD']},
339: {'dialect': 'tagalog',
'joke': ['Question: Kung vegetarian ang tawag sa kumakain ng gulay, ano '
'ang tawag sa kumakain ng tao?',
'Answer: Humanitarian? XD']},
340: {'dialect': 'tagalog',
'joke': ['Question: Kung vegetarian ang tawag sa kumakain ng gulay, ano '
'ang tawag sa kumakain ng tao?',
'Answer: Humanitarian? XD']},
341: {'dialect': 'tagalog',
'joke': ['Question: Ano ang tinatanggal sa itlog bago ito kainin?',
'Answer: Buhok? XD']},
342: {'dialect': 'tagalog',
'joke': ['Question: Ano ang tawag mo sa anak ng taong grasa?',
'Answer: Baby oil? XD']},
343: {'dialect': 'tagalog',
'joke': ['Question: Saan nakukuha ang sakit na AIDS?',
'Answer: Sa motel? XD']},
344: {'dialect': 'tagalog',
'joke': ['Question: Saan karaniwang ginagawa ang mga sweets na '
'ginagamit sa halu-halo?',
'Answer: Sweetserland? XXD']},
345: {'dialect': 'tagalog',
'joke': ['Question: Sinong cartoon charcater ang sumisigaw ng yabba '
'dabba doo?',
'Answer: Si scooby dooby doo? XD']},
346: {'dialect': 'tagalog',
'joke': ['Q: Anong fish ang may lahing insecto?',
'A: eh di i-FISH (Ipis)']},
347: {'dialect': 'tagalog',
'joke': ['Q: Anong buwan ang fiesta ng mga fish?',
'A: eh di May 1, kasi FISH-tang Dagat.']},
348: {'dialect': 'tagalog',
'joke': ['Q: Sinong fish ang pumapalit pag wala ang Boss?',
'A: eh di Ang o-FISH-er in charge']},
349: {'dialect': 'tagalog',
'joke': ['Q: Saang bansa ang paboritong pasyalan ng mga fish?',
'A: eh di FIN-land']},
350: {'dialect': 'tagalog',
'joke': ['Q: Bakit pumupunta ang mga fish sa pari?',
'A: eh di Para magkum-FISH-al']},
351: {'dialect': 'tagalog',
'joke': ['Q: Anong tawag sa fish na peke?', 'A: eh di Arti-FISH-al']},
352: {'dialect': 'tagalog',
'joke': ['Patient: Doc tulungan niyo po ako kasi naiisip ko po I’m a '
'king ',
'Doc: Talaga anong pangalan mo!!! ',
'Patient: JOE po bakit doc? ',
'Doc: Ha!!? You’re must be JOEking.']},
353: {'dialect': 'tagalog',
'joke': ['Inday: Mam, lahat pu pala ng nakalibing ditu.. Ginahasa.. ',
'Amo: Pano mo naman nalaman Inday? ',
'Inday: Tegnan nyu pu ung Lapeda.. Nakasulat.. RIP']},
354: {'dialect': 'tagalog',
'joke': ['Tatay : Anak! anu tong F sa card mo ha! Anak : (*nag-iisip*) '
'Tatay… Fasado po ibig sabihin nyan. . . . . . . . . ',
'Tatay : Ahh… kala ko Ferpect! ']},
355: {'dialect': 'tagalog',
'joke': ['BIRD OF PRIEST',
'Isang araw nawala ang bird ng pari, dahil sa sobrang mahal '
'niya ito nanawagan siya sa kanyang misa.',
'Pari : Anyone got a bird?',
'Lahat ng mga lalaki tumayo.',
'Pari : I mean, anyone seen a bird?',
'Lahat ng babae tumayo.',
'Pari : I mean anyone seen my bird?',
'Lahat ng madre tumayo.',
' ']},
356: {'dialect': 'tagalog',
'joke': ['THERMOMETER',
'Nars : Doc bakit po may thermometer kayo sa tenga?',
'Doktor : Naku! Kaninong pwet kaya ng pasyente naiwan ko ang '
'ballpen ko.',
'Nyahahahahaha.',
' ']},
357: {'dialect': 'tagalog',
'joke': ['YAYA AT ANG ALAGA',
'Alaga : Yaya look, boats!',
'Yaya : Dows are not boats, dey’re yatchts.',
'Alaga : Yaya, spell yatch',
'Yaya : Yor rayt, they are boats.',
' ']},
358: {'dialect': 'tagalog',
'joke': ['<NAME>',
'Barbero : Sir, anong klase gupit po?',
'Lalaki : Yung uka-uka, masagwa at hindi pantay.',
'Barbero : Sir anu po yun? Hindo ko alam yun.',
'Lalaki : Anung hindi, ganun ang ginupit mo sa akin last '
'time!!!',
' ']},
359: {'dialect': 'tagalog',
'joke': ['<NAME>',
'Mister : Honey, pwede ka ba ngayon?',
'Misis : Hindi, pagod ako!',
'Mister : Is that your final answer?',
'Misis : Final answer!',
'Mister : Can i call a friend?',
' ']},
360: {'dialect': 'tagalog',
'joke': ['JUAN IN ENGLISH SUBJECT',
'Teacher : Juan, give me a sentence.',
'Juan : My teacher is beautiful, isn’t she?',
'Teacher : Very good!! Please translate in Tagalog.',
'Juan : Ang aking guro ay maganda, hindi naman di ba?',
' ']},
361: {'dialect': 'tagalog',
'joke': ['JUAN NAKAKUHA NG 99% SA EXAM',
'Teacher : Ang score ni Juan sa exam ay 99%.',
'Juan : Ohh anu!!! Kaya niyo yan? Hindi pa ako nag-rereview '
'nyan. Huwag na kayo mag-aral kung ako sa inyo umuwi na lang '
'kayo. Low IQ!! Mga utak manok kayo! Nangingitlog na naman '
'kayo, sinasayang niyo lang tuition niyo. (mayabang na sabi ni '
'Juan sa kanyang mga kaklase).',
'Teacher : The rest 100%',
' ']},
362: {'dialect': 'tagalog',
      'joke': ['ANAK SA LABAS',
'Pedro : pare anung gagawin mo kapag nalaman mong may anak ka '
'sa labas?',
'Juan : huh?? Anung klaseng tanung yan pare? Syempre papasukin '
'ko sa loob ng bahay.',
' ']},
363: {'dialect': 'tagalog',
'joke': ['NAWALANG BATA',
'Nanay : Oh! Anak kahit anu mangyari huwag kang bumitaw sa '
'pagkakapit sa palda ko.',
'Mahigit ng isang oras ng mapansin ng nanay na wala na ang '
'kanyang anak.',
'Nanay : Manong may nakita po ba kayong bata?',
'Sekyu : Ano po ba ang itsura.',
'Nanay : May dalang palda po.',
' ']},
364: {'dialect': 'tagalog',
'joke': ['YAYA NAGPAALAM SA AMO',
'Yaya : Ma’am magpapaalam po sana akong magbaksayon sa aming '
'probinsiya.',
'Amo : Oh sige, nakapagpaalam kana ba sa sir mo?',
'Yaya : Nauna na po siya, doon na lang daw po kami magkikita.',
' ']},
365: {'dialect': 'tagalog',
'joke': ['GLOBE',
'Lalaki : Miss, Globe ka ba?',
'Babae : Ay alam ko na yan, kasi ako lang ang mundo mo.',
'Lalaki : Makikitext lang ako! Tanga! ang landi mo.',
' ']},
366: {'dialect': 'tagalog',
'joke': ['<NAME>KE',
'Pedro : Tara pare, harlem shake tayo.',
'Juan : Ay!! Ayoko.',
'Pedro : bakit naman?',
'Juan : baka mahal eh, coke float na lang.',
' ']},
367: {'dialect': 'tagalog',
'joke': ['PIZZA TIME',
'Si Juan umorder ng pizza',
'Clerk : Sir ilang slice po ang gagawin naming sa pizza mo, 6 '
'or 8?',
'Juan : 6 slice na lang, baka hindi ko maubos kapag 8 kasi.',
' ']},
368: {'dialect': 'tagalog',
'joke': ['SIKAT NA SI MANNY PACQUIAO',
'Pedro : Alam mo pare sobrang sikat na talaga sa Manny '
'Pacquiao noh?',
'Juan : Bakit naman pare?',
'Pedro : bumili kasi ako ng brand new cellphone, may option na '
'send to many.',
'Juan : Ang bobo mo talaga, matagal nay an ngayon mo lang '
'napansin? Hindi naman nagrereply yan eh.',
' ']},
369: {'dialect': 'tagalog',
'joke': ['<NAME>',
'Bf : Kainis si Juan, mukha daw akong magsasaka kapag katabi '
'kita.',
'Gf : Hahaha, huwag muna pansinin,nag bibiro lang yun. Bakit '
'niya naman daw nasabi?',
'Bf : Kasi mukha ka daw kalabaw.',
' ']},
370: {'dialect': 'tagalog',
'joke': ['MILYONARYO',
'Teacher : Class imagine niyo na kayo ay MILYONARYO, isulat '
'niyo sa papel ang inyong mga activities.',
'Students : Yes Ma’am...',
'Teacher : Juan bakit hindi ka pa nagsusulat?',
'Juan : Ma’am, inaantay ko pa po ang secretary ko.',
' ']},
371: {'dialect': 'tagalog',
'joke': ['JUAN SA BUS',
'Juan : Nay alam niyo pinatayo ako ni itay sa bus kanina kasi '
'may pinaupo siyang babae.',
'Nanay : Anak magandang asal yun.',
'Juan : Kahit nakakandong po ako kay itay?',
' ']},
372: {'dialect': 'tagalog',
'joke': ['SAD STORY',
'Gf : Babe, mamaya na tayo magchat kasi ingungudngod na daw ni '
'papa ang mukha ko kapag hindi pa ako umalis dito.',
'Bf : Ayoko pa gusto pa kita kausap.',
'Gf : Babe mamaya na, baka ingudngod na ako dito ni tatay '
'sa...mawdjkhsh vldhfdeifcjcisjlo hsdhhdhcliosdhjklj '
'klhsdhldhkdshcjkdshd bbmbmbnmbbbbm bbbwbeakbdjbj '
'bmbmuuwa,mabm',
' ']},
373: {'dialect': 'tagalog',
      'joke': ['<NAME>',
'Dalawang magka officemate nag-uusap sa loob ng opisina.',
'Juan : Mauna na akong umuwi pare at gustong-gusto ko na '
'HUBARIN ANG PANTY ng misis ko.',
'Pedro : Wow! Pare, sobrang hot na hot ka ngayon ah.',
'Juan : Hindi noh! Sobrang masikip sa akin eh!',
' ']},
374: {'dialect': 'tagalog',
'joke': ['<NAME>',
'Dear Diary,',
'I’m so happy talaga. Nahuli kasi ako ng crush ko nakatingin '
'sa kanya. Minura niya ako, oh my gosh! Narinig ko na din ang '
'boses niya. Ang gwapo niya talaga, minsan nga tinulak niya '
'ako, dumugo ang ilong ko kasi sinadya ko siyang banggain, '
'nakakakilig diba? At least nagkadikit ang aming katawan. '
'Lumapit siya sa akin at humingi ng picture ko, ipapasalvage '
'niya daw ako, so sweet!! At ang pinaka the best pa, sabi ko '
'sa kanya “love you”, sagot niya “puck you”, shet na-horny '
'ako. Hahahaha',
' ']},
375: {'dialect': 'tagalog',
'joke': ['ANG MADRE AT ANG SAKRISTAN',
'Madre: Iho, anu ang apelyido mo?',
'Sakristan : Alam niyo na po yun sister, ang lagi niyo '
'hinahawakan.',
'Madre : Susmaryosep! BAYAG ba ang apelyido mo?',
'Sakristan : Sister naman, Rosario po.',
' ']},
376: {'dialect': 'tagalog',
'joke': ['BATANG MATIGAS ANG ULO',
'Isang araw may isang batang lalaki na ubod ng kulit. Kahit '
'Anung gawin ng ama hindi pa din ito tumitino. Sa sobrang '
'galit ng ama pinasok sa sako ang anak at binitin sa sanga ng '
'puno.',
'Bata : Tay!',
'Tatay : Ano? (naawa sa anak)',
'Bata : Taaaaayyy!!!',
'Tatay : (Lumapit sa sakong nakabitin) Ano??? Magbabago kana '
'ba?',
'Bata : Paki swing naman...',
' ']},
377: {'dialect': 'tagalog',
'joke': ['<NAME> SA MISTER',
'Misis : Hon, wala na akong bra, bilhan mo naman ako',
'Mister : Hon, huwag kana magbra kasi maliit lang naman ang '
'dede mo.',
'Misis : Eh! Bakit ikaw nagbebrief??',
' ']},
378: {'dialect': 'tagalog',
'joke': ['KURTINA',
'Tindera : Sir, bili na po kayo ng kurtina..',
'Juan : Ale, pabili nga ako ng isa, para sa computer ko.',
'Tindera : Sir, bakit po para sa computer niyo?',
'Juan : Ang computer ko kasi may windows eh!!',
' ']},
379: {'dialect': 'tagalog',
'joke': ['ANTIQUE VASE',
'Si Juan nakabasag ng malaking vase sa museum, nataranta ang '
'attendant.',
'Attendant : Naku po sir, more 50years na po ang vase na yan.',
'Juan : Hay! Salamat naman, akala ko kasi bago eh!!',
' ']},
380: {'dialect': 'tagalog',
      'joke': ['BOOBS',
'Misis : Sweety, may mga kaibigan ako na nagpa-enhance ng '
'boobs, gusto mo magpadagdag din ako?',
'Mister : Ewan ko, parang hindi ata bagay sayo ang tatlong '
'suso.',
' ']},
381: {'dialect': 'tagalog',
'joke': ['DALAWANG ESTUDYANTE SA LOOB NG CLASSROOM',
'Naiwan sa classroom ang dalawang estudyante (babae at lalaki)',
'Lalaki : Wala na ang mga classmate natin, tayo na lang ang '
'naiwan dito. Anu tara?',
'Babae : Anong tara?',
'Lalaki : Sus!! Anu bay an bilisan mo na!!',
'Babae : Ahhh, ganun! Bakit dito? Sige na nga! (nagmamadaling '
'nagtanggal ng uniporme). Tara na....',
'Lalaki : Ohhh!! Bakit ka nakahubad?? Tara uwi na din tayo!! '
'Tanga!!!',
' ']},
382: {'dialect': 'tagalog',
'joke': ['SEX WITH GHOSTS',
'Teacher : Sino sa inyo nakaexperience having sex with GHOSTS?',
'Itinaas ni Pedro ang kanyang kamay..',
'Teacher : Talaga!?? Ano feeling having sex with ghosts??',
'Pedro : Ay Putcha!!! Akala ko goats.',
' ']},
383: {'dialect': 'tagalog',
'joke': ['GF KINAPKAPAN NG BF',
'Isang gabi habang naglalakad sa park ang magsyota.',
'Girlfriend : Love, ihi muna ako ha.',
'Boyfriend : Dyan ka na lang sa damuhan umihi, madilim naman '
'eh.',
'Habang umiihi kinapkap ni boyfriend ang legs ni girlfriend, '
'nang may makapang mahaba sa gitna nito.',
'Boyfriend : Putcha!!! Bakla ka??',
'Girlfriend : Sira!!! Nagbago na ang isip ko..Tumatae na ako.',
' ']},
384: {'dialect': 'tagalog',
      'joke': ['VAMPIRE IN RESTO',
'Rich Vampire : Waiter, order ako ng fresh blood.',
'Ordinary Vampire : Sa akin isang order ng dinuguan.',
'Poor Vampire : Waiter, sa akin hot water na lang.',
'Waiter : Huh! Bakit hot water lang po?',
'Poor Vampire : Nakapulot kasi ako ng napkin sa kanto, '
'mag-tsaa na lang ako.',
' ']},
385: {'dialect': 'tagalog',
'joke': ['<NAME>',
'Misis : Love, malapit na tayong maging tatlo dito sa bahay.',
'Mister : Talaga love?? Magiging daddy na ako?',
'Misis : Hindi love, dito na titira nanay ko.',
' ']},
386: {'dialect': 'tagalog',
'joke': ['<NAME>',
'Girl : <NAME>',
'Boy : Ganun ba',
'Girl : May gusto ka ba sa akin?',
'Boy : Wala.',
'Umiyak si girl habang paalis, hinabol siya ni boy at niyakap.',
'Boy : Hindi mo pa nga tinatanung kung mahal kita.',
'Girl : (nabuhayan) Bakit? Mahal mo ba ako?',
'Boy : Hindi rin! Sige iyak kana ulit.',
' ']},
387: {'dialect': 'tagalog',
'joke': ['HULI KA!!!',
'Girl : Sweety, nasaan ka?',
'Boy : Dito lang sa haws sweety nagpapahinga, patulog na din. '
'Ikaw?',
'Girl : Nandito sa BAR, pinagmamasdan ka. Sige sayaw pa kayo '
'ng babae mo.',
' ']},
388: {'dialect': 'tagalog',
'joke': ['WRONG SEND',
'Juan : (tinext ang syota) Break na tayo!',
'Tikla : (reply kay Juan) Huh? Bakit love? Huhuhuhu',
'Juan : Ayyy!! Sorry love wrong send lang. Love you',
'Tikla : ayyy! Akala ko ako! Hehehe.. okay lang love, love u '
'more.',
' ']},
389: {'dialect': 'tagalog',
'joke': ['PICK UP LINE',
'Simon : Miss, talon ka na dyan!',
'Ana : Bakit naman?',
'Simon : Para mahulog ka sa akin',
'Ana : Ahhh!!! Ganun!!! Alam mo para kang manhole.',
'Simon : Naks!! Gumaganti ah. Bakit naman?',
'Ana : Tanga lang ang mahuhulog sayo.',
' ']},
390: {'dialect': 'tagalog',
'joke': ['ULAM SA LAMESA',
'Juan : Alam mo pare ang bait ng pusa namin.',
'Simon : Bakit naman pare??',
'Juan : Kahit na pinapabayaan lang namin ang ulam sa lamesa, '
'hindi ginagalaw.',
'Simon : Wow! Ang bait nga, ano ba ulam niyo?',
'Juan : Asin..',
' ']},
391: {'dialect': 'tagalog',
'joke': ['TRANSLATE TO ENGLISH',
'Teacher : Juan, itranslate mo ito sa English.',
'Juan : Wat ma’am?',
'Teacher : Ang uwak ay hinang hinang naglakad.',
'Juan : The wak wak..weak weak...wok wok...',
' ']},
392: {'dialect': 'tagalog',
'joke': ['MABAIT NA ANAK',
'Nanay : Mare, swerte ko sa anak kong lalaki, ang bait.',
'Kapitbahay : Naninigarilyo ba sya?',
'Nanay : Hindi, mare.',
'Kapitbahay : Umiinom ba sya ng alak?',
'Nanay : Hindi din.',
'Kapitbahay : Pero umuuwi siya ng late?',
'Nanay : Hindi eh.',
'Kapitbahay : Tama ka nga, ang bait ng anak mo. Ilan taon na '
'ba siya mare?',
'Nanay : Mag pitong buwan na siya bukas.',
' ']},
393: {'dialect': 'tagalog',
'joke': ['MASAKIT SA BUONG KATAWAN',
'Pasyente : Dok, kahit saan ako humawak masakit po.',
'Doktor : Anong ibig mo sabihin?',
'Pasyente : Kapag hinawakan ko ang balikat ko, ang sakit. '
'Kapag sa tuhod ko, araayyy ang sakit din, kahit pos a ulo ko '
'sobrang sakit.',
'Doktor : alam ko na kung ano ang problema, nabali ang kamay '
'mo.',
' ']},
394: {'dialect': 'tagalog',
'joke': ['INIWAN',
'Pedro : Ang sakit sakit tol!! Iniwan niya ako.',
'Juan : Bakit? Saan ba dapat kayo pupunta?',
' ']},
395: {'dialect': 'tagalog',
'joke': ['MASAKIT ANG MATA',
'Pasyente : Doctor sa tuwing iinom po ako ng kape, ang sakit '
'ng mata ko.',
'Doktor : Tanggalin mo muna ang kutsara sa tasa mo, bago ka '
'humigop.',
' ']},
396: {'dialect': 'tagalog',
'joke': ['UMIIYAK DAHIL SA ELEPANTE',
'Pedro : Bakit ka umiiyak?',
'Juan : Namatay kasi ang elepante.',
'Pedro : Bakit alaga mo ba yon?',
'Juan : Hindi, pero ako ang huhukay para sa libingan niya.',
' ']},
397: {'dialect': 'tagalog',
'joke': ['TAMAD MAG-ARAL',
'Nanay : Anak, kamusta ang first day mo sa school?',
'Anak : Ibig sabihin po nay, babalik pa ako bukas????',
' ']},
398: {'dialect': 'tagalog',
'joke': ['SAAN GALING ANG ASUKAL',
'Teacher : Juan, saan probinsya ka galing?',
'Juan : Negros po ma’am',
'Teacher : Anung produkto meron sa negros?',
'Juan : Hindi ko po alam.',
'Teacher : Siyempre alam mo yun, saan kayo kumukuha ng asukal?',
'Juan : Humihingi lang po kami sa kapitbahay.',
' ']},
399: {'dialect': 'tagalog',
'joke': ['GOOD NEWS AT BAD NEWS',
'Si Juan nakatanggap ng tawag galing sa doctor.',
'Doktor : Meron akong ibalita sayong good news at bad news.',
'Juan : Anu po ang good news?',
'Doktor : May 24 hours ka pa na mabuhay.',
'Juan : Huh!! Good news na ba yan? Anu po ang bad news.',
'Doktor : Ang bad news, nakalimutan kitang tawagan kahapon '
'tungkol dito.',
' ']},
400: {'dialect': 'tagalog',
'joke': ['MAPA NG PILIPINAS',
'Teacher : Jose, ituro mo sa mapa ang Pilipinas.',
'Jose : Ito po ma’am (sabay turo ng Pilipinas).',
'Teacher : magaling, mga bata sino ang nakatagpo ng Pilipinas?',
'Mga bata : Si Jose po ma’am.',
' ']},
401: {'dialect': 'tagalog',
'joke': ['TUMAMBLING SA SKOL',
'Anak : Tay, tumambling po ako sa skol.',
'Tatay : Di ba sinabi ko sayo na huwag kang tumambling kasi '
'makita panty mo.',
'Anak : Hindi naman eh!! Nilagay ko kaya sa bag ang panty ko.',
' ']},
402: {'dialect': 'tagalog',
'joke': ['PAUTANG',
'Juan : Tol pahiram naman dyan 200 pesos.',
'Jose : 100 pesos lang dala ko tol.',
'Juan : Ohh sige!! 100 pesos na lang muna, basta may utang ka '
'pa sa akin 100 pesos ha.',
' ']},
403: {'dialect': 'tagalog',
'joke': ['GUMAWA NG EKSENA',
'Nabalitaan ni Juan na may namatay sa kanto, dali dali siyang '
'pumunta pero hindi niya makita ang namatay dahil sa dami ng '
'tao. Gumawa si Juan ng eksena.',
'Juan: Tabi...tabi...tumabi kayo, kapatid ko ang namatay.',
'Agad na nagsitabi ang mga tao, hanggang sa nakita na ni Juan '
'ang namatay...',
'...isang duguang unggoy...',
' ']},
404: {'dialect': 'tagalog',
'joke': ['DUMALAW SI GMA',
'Isang araw dumalaw si GMA sa mental hospital, pagdating na '
'pagdating ni GMA pumalakpak agad ang mga pasyente maliban sa '
'isa na nasa sulok.',
'GMA : Anung nangyari sa isang pasyente bakit hindi siya '
'pumalakpak?',
               'Doktor : Magaling na po kasi siya.']},
405: {'dialect': 'tagalog',
'joke': ["Binata: Tandaan mo ang mga sasabihin ko sa iyo ha. 'Wag mong "
'kalimutan, importante.',
'Dalada: Ah, bakit ano ba ang sasabihin mo?',
'Binata: Ah, mahal na mahal kita, lagi mong tandaan yan. '
'Andito lang ako para sa iyo. Dito lang ako sa tabi mo palagi.',
'Binata: Ano natandaan mo ba? ',
'Dalaga: (kinikilig) Oo naman...',
'Binata: Mabuti naman. Paki sabi yan sa best friend mo ha. '
'Salamat!']},
406: {'dialect': 'tagalog',
'joke': ["Pedro: Ah di ko po alam ma'am e. ",
'Guro: Ay sus, bata ka, simpleng English word lang di mo '
'alam... Ang aso may 4 nito at ako may 2 nito.',
'Pedro: Ahhh.... Dede?']},
407: {'dialect': 'tagalog',
'joke': ['Mare1: Alam mo mars kapag nakita ko ang patatas, naaalala ko '
'ang bayag ng mister ko.',
'Mare2: Ohh talaga!! Bakit kasinglaki ba ng patatas ang bayag '
'ng mister mo?',
'Mare1: Hindi mars, ganyan kalibag!!!',
' ']},
408: {'dialect': 'tagalog',
'joke': ['Teacher: Class, sino sa inyo nakakilala kay Jose Rizal? Ikaw '
'Pedro?',
'Pedro: Hindi po teacher.',
'Teacher: Eh! Ikaw Simon, kialla mob a si <NAME>?',
'Simon: Hindi din po Teacher.',
'Juan: Baka nasa kabilang section yan teacher!',
' ']},
409: {'dialect': 'tagalog',
'joke': ['Juan: Tao ba to?',
'Pedro: Hindi.',
'Juan: Lugar ba to?',
'Pedro: Hindi.',
'Juan: Bagay ba to?',
'Pedro: Oo..oo',
'Juan: Ginagamit sa loob ng bahay?',
'Pedro: Oo..oo',
'Juan: Makikita sa kusina?',
'Pedro: Oo..',
'Juan: Matalim bai to?',
'Pedro: Oo..oo',
'Juan: Ginagamit pang-hiwa sa sibuyas, bawang at mga gulay?',
'Pedro: Oo…oo…oo',
'Juan: Pass!!!',
'Toink…..',
' ']},
410: {'dialect': 'tagalog',
'joke': ['Pedro: Pabili ng apo…',
'Juan: Ano?',
'Pedro: Kendi ng apo.',
'Juan: Wala.',
'Pedro: May softdrinks kayo?',
'Juan: Wala!!! Nakita mong Ice for sale lang ang tinda naming, '
'kung anu-ano pa binibili mo. Tanga mo talaga..',
'Pedro: Ikaw ang tanga!! Alam mo naman na yelo lang pala tinda '
'niyo nagtanong kapa kung ano bibilhin ko.',
' ']},
411: {'dialect': 'tagalog',
'joke': ['Amo: Inday huwag mong pakialaman ang condom naming ni sir mo '
'ha!',
'Inday: Ma’am huwag po kayo mag-alala hindi po kami sanay '
'gumamit ni sir ng condom.',
'Ouch….',
' ']},
412: {'dialect': 'tagalog',
'joke': ['Pulis: Hoy! Umuwi kana lasing ka..',
'Lasing: Hindi pa ako lasing noh!!',
'Pulis: Kilala mob a kung sino ako??',
'Lasing: Oo naman, pulis ka. Oh di ba kilala kita. Eh ako '
'kilala mob a?',
'Pulis: Hindi..',
'Lasing: Ikaw ang umuwi, ikaw pala ang lasing eh…',
' ']},
413: {'dialect': 'tagalog',
'joke': [' Pedro: Grabe ang sakit tol, iniwan niya ako.',
'Juan: Huh! Bakit saan ba dapat kayo pupunta?',
' ']},
414: {'dialect': 'tagalog',
'joke': ['Pedro: Pre, bakit ka pala nakabraces?',
'Juan: Ah! Ba tol, pangait kasi ang ngipin ko kaya kailangan '
'ayusin.',
'Pedro: Bakit ngipin mo lang nakabraces? Hindi ang buong mukha '
'mo??',
' ']},
415: {'dialect': 'tagalog',
'joke': ['Juan: Pre, mauna na akong umuwi sayo at gusting-gusto ko nang '
'hubarin ang panty ng misis ko.',
'Pedro: Wow!!! Pare libog na libog kana noh!!!',
'Juan: Hindi pre, masyadong masikip kasi sa akin eh!!',
' ']},
416: {'dialect': 'tagalog',
'joke': ['Gf: Hindi na nagwo-work ang relationship natin, mas mabuting '
'pang maghiwalay na tayo.',
'Bf: Huwag mo akong iwan parang awa mon a..',
'Gf: Sorry, pero break na tayo.',
'Bf: Sige! Kapag iniwan mo ako maglalaslas ako.',
'Gf: Sus!!! Magpatuli nga hindi mo kaya, maglaslas pa kaya!!! ',
' ']},
417: {'dialect': 'tagalog',
'joke': ['Si Juan at ang Kumpare niyang Unano',
'Juan: Pre, bakit hindi ka pala lumaki?',
'Unano: Baby pa lang ako ng mamatay ang mga magulang ko.',
'Juan: Huh!!! Anong koneksyon non?',
'Unano: Pre naman, wag ka naman tanga. Natural walang '
'nagpalaki sa akin.',
' ']},
418: {'dialect': 'tagalog',
'joke': ['Balut Box',
'Juan: Tay, ano po ba ang balut box?',
'Tatay: Nak naman! Simple simple hindi mo Alam? Eh di lagayan '
'ng balut.',
' ']},
419: {'dialect': 'tagalog',
'joke': ['Love na Love ako ni Tatay',
'Pedro: Tay sino ang mas mahal niyo? Ako o si nanay?',
'Tatay: Syempre, ikaw anak!',
'Pedro: Sabi ko na nga ba ako eh!',
'Tatay: Bakit mo naman nahulaan anak?',
'Pedro: Tuwing madaling araw nilalagyan moa ko ng kumot. '
'Samantala si inay hinuhubaran niyo.',
' ']},
420: {'dialect': 'tagalog',
'joke': ['Bintana',
'Tanong: Bakit binubuksan ang bintana tuwing umaga?',
'Sagot: Natural na buksan kasi sarado. Magulat ka kung bukas '
'na tapos buksan pa!',
' ']},
421: {'dialect': 'tagalog',
'joke': ['Marka sa Grado',
'Tatay: Nak, ano ang ibig sabihin ng “F” sa card mo?',
'Anak: fasado yan tay!',
'Tatay: Ah! Akala ko Ferpect eh!!',
' ']},
422: {'dialect': 'tagalog',
'joke': ['Sino si <NAME>',
'Teacher: Juan, kilala mo ba si j<NAME>al?',
'Juan: Hindi po mam.',
'Teacher: Ikaw! Pedro kilala mob a si <NAME>?',
'Hindi din po mam eh.',
'Teacher: Simon, siguro naman kilala mo si <NAME>.',
'Simon: Naku! Mam hindi din po eh. Baka nasa kabilang seksyon '
'po siya mam.',
' ']},
423: {'dialect': 'tagalog',
'joke': ['Ano ang Gagawin kapag Milyonaryo',
'Mam: Class, isipin niyo isa kayong milyonaryo. Isulat niyo '
'ang mga gagawin niyo kapag sobrang yaman kayo.',
'Nagsimula ng magsulat ang mga estudyante, maliban kay Juan na '
'nakatunganga pa din.',
'Mam: Juan, bakit hindi ka nagsusulat?',
'Juan: Inaantay ko pa kasi mam ang secretary ko.',
' ']},
424: {'dialect': 'tagalog',
'joke': ['Nanay Pinatawag sa Skul',
'Simon: Nay, punta ka daw sa skul bukas.',
'Nanay: Huh! Ano na naman bang kalokohan ang ginawa mo doon?',
'Simon: Ano??? Bakit ako tanungin mo? Baka ikaw, kasi ikaw ang '
'pinatawag eh!',
' ']},
425: {'dialect': 'tagalog',
'joke': ['Pasahero sa Loob ng Dyip',
'Juan: Kuya magkano po ang pamasahe?',
'Tsuper: Otso pesos.',
'Juan: Naku patay limang piso lang ang pera ko dito. Ano kaya '
'dapat ko gawin? ( Sa loob loob ni Juan, halatang kinakabahan '
'na).',
'Maya-maya napansin ni Juan na duling pala ang drayber.',
'Juan: Aha! Alam ko. Ibibigay ko sa kanya ang apat nap iso '
'kasi sigurado doble ang bilang niya. ( Tuwang-tuwa saad ni '
'Juan sa sarili).',
'Ibinayad n ani Juan ang apat nap iso.',
'Drayber: Boos, kulang ang pamasahe mo.',
'Juan: Ha? Paano kulang? Ots pesos naman yan ah.',
'Drayber: Otso pesos nga. Dalawa kayo eh!!!',
' ']},
426: {'dialect': 'tagalog',
'joke': ['Late na naman si Pedro',
'Titser: Aba! Pedro anong oras na. Lagi ka na lang huli sa '
'klase.',
'Pedro: Pasensya na po mam, trapik po kasi.',
'Titser: Feeling mo kasi ang tali-talino mo. Sino ang '
'Pambansang Bayani natin, sige nga?',
'Pedro: Si Jose Rizal po.',
'Titser: Himala! Nakatsamba ka ah.',
'Pedro: Ikaw mam, kilala mo ba si '
'Tanya? ',
'Titser: Hindi, sino yon?',
'Pedro: Yan puro ka kasi turo at aral. Kabit yon ng asawa mo!',
' ']},
427: {'dialect': 'tagalog',
'joke': ['Unang Makasagot, Unang Makauwi',
'Titser: Kung sinuman sa inyo ang unang makasagot sa tanong '
'ko. Maaari ng umuwi.',
'Bigla tinapon ni Juan ang kanyang bag sa labas ng pinto.',
'Titser: Kaninong bag yon?',
'Juan: Sa akin po mam. Bye mam, bye klasmeyt. See you '
'tomorrow.',
' ']},
428: {'dialect': 'tagalog',
'joke': ['Teacher at ang Nanay',
'Titser: Misis, pinatawag ko kayo kasi salbahe ang anak niyo.',
'Misis: Alam niyo po, salbahe din yan sa bahay. Pero kahit '
'kelan hindi ko naman kayo pinatawag.',
' ']},
429: {'dialect': 'tagalog',
'joke': ['Natinik si Pedro',
'Pedro: Nay natinik po ako.',
'Nanay: Ano?? Itong saging kainin mo.',
'Pedro: Nay, ganun pa din po eh. Ayaw matanggal po.',
'Nanay: Oh! Eto pa ang saging, ubusin mo yan.',
'Pedro: Andyan pa rin nay, kumakapit pa din.',
'Nanay: Halika nga dito. Ngumanga ka para masilip ko.',
'Pedro: Dito po nay sa paa ko!',
' ']},
430: {'dialect': 'tagalog',
'joke': ['Ang Lamok',
'Ben: Tay! Ang daming lamok',
'Tatay: Patayin mo ang ilaw para hindi ka makita.',
'Pinatay ni Ben ang ilaw. Bigla naman dumating ang mga '
'alitaptap.',
'Ben: Tay!!! Andyan na naman ang mga lamok may dalang '
'flashlight.',
' ']},
431: {'dialect': 'tagalog',
'joke': ['Huli sa Balita',
'Misis: Walanghiya ka! May kabit ka pala na 18 years old? Ang '
'kapal din ng mukha mo.',
'Mister: Naku! Huli kana sa balita, 25 years old na siya '
'ngayon.',
' ']},
432: {'dialect': 'tagalog',
'joke': ['Pari o Judge',
'Pedro: Ana, oras na ikinasal tayo. Saan mo gusto sa pari o sa '
'judge.',
'Ana: Ay ang ingot mo naman Pedro. Syempre sayo, bakit irereto '
'mo pa ako sa iba???',
' ']},
433: {'dialect': 'tagalog',
'joke': ['Sa work..',
'<NAME> ako',
'Tamarind',
'Gusto mo ba ng tamarind?',
'Ayoko baka tamarind ako !']},
434: {'dialect': 'tagalog',
'joke': ['saan nakakabili ng lip stick joke',
'boy sablay:Saan ba nakakabili ng lipstick?',
'girl:Sa HBC try mo',
'boy sablay:Ano HBC, di ba bangko yun? yung may mga credit '
'cards',
'girl: HBC pre, HBC hinde HSBC. HSBC bangko nga yun',
'boy sablay: Ahhh.. nagtitinda na din pala ng lipstick dun. '
'Akala ko nagdedeliver lang sila sa LBC',
'girl: grrr']},
435: {'dialect': 'tagalog',
'joke': ['Saan lumiliko ang spacehip?', 'Sa universe ']},
436: {'dialect': 'tagalog',
'joke': ['Magkano ang Kilo',
'Lalaki: Ate magakano po sa lansones',
'Tindera: Trenta ang kalahating kilo',
'Lalaki: Pwede bang 30 na lang ang isang kilo',
'Tindera: kuya siguro makasalanan kayo',
'Lalaki: huh, bakit naman? (medyo galit)',
'Tindera: Eh ang laki nyo kasi humingi ng tawad eh. ']},
437: {'dialect': 'tagalog',
'joke': ['[Baliw tumawag sa mental]',
': Hello, may tao pa po ba sa ROOM 1043?',
': Wala na, bakit?',
': Wala naman, chineck ko lang kung nakatakas talaga ko. ',
': sh*t']},
438: {'dialect': 'tagalog',
'joke': ['*Naharangan yung Blackboard* ',
'America: Excuse me.',
'Philippines: Tanginang ulo yan.']},
439: {'dialect': 'tagalog',
'joke': ['Chechekan daw yung test paper pero pagbalik sakin puro ekis '
'parang tanga lang e.']},
440: {'dialect': 'tagalog',
'joke': ['Mama: Nak, may facebook na ako',
'Me:HAHAHA edi congrats ma',
'Mama: Iaccept moko para friends tayo at makita ko mga post mo',
'Me: WTF',
'Mama: Anong WTF?',
'Me: Welcome To Facebook ma.']},
441: {'dialect': 'tagalog',
'joke': ['Nakita ni pedro si juan sa gilid ng kalsada na may '
'streetlight.',
'Pedro: juan anong ginagawa mo jan?',
'Juan: hinahanap ko kasi yung nahulog kong pera.',
'Pedro: saan ba nahulog?',
'Juan: dun sa madilim.',
'Pedro: e bat ka jan naghahanap?',
'Juan: e kasi dto maliwanag! Kung dun ako maghahanap madilim e '
'kaysa dto may ilaw.']},
442: {'dialect': 'tagalog',
'joke': ['Pedro: Pare kamusta na kayo ng gf mo?',
'Juan: Yun Last month pinakilala ko sya sa lolo kong '
'milyonaryo',
'Pedro: Oh boto ba nman lolo mo?',
'Juan: Oo, Ayun Lola kona sya ngayon ']},
443: {'dialect': 'tagalog',
'joke': ['PEDRO: Kapag natulog ba ako sa tabi ng misis mo, mag kumpare '
'pa rin tayo?',
'JUAN: Hmmm...Hindi.',
'PEDRO: Mag kaaway na?',
'JUAN: Hindi din.',
'PEDRO: Eh, ano na?',
'JUAN: Quits na tayo!']},
444: {'dialect': 'tagalog',
'joke': ['Nasa restaurant',
'Waiter: Are you done, sir?',
"Ako: No, i'm a singer."]},
445: {'dialect': 'tagalog',
'joke': ['BARBER SHOP',
'Barbero : Sir, anong klase gupit po?',
'Lalaki : Yung uka-uka, masagwa at hindi pantay.',
'Barbero : Sir anu po yun? Hindo ko alam yun.',
'Lalaki : Anung hindi, ganun ang ginupit mo sa akin last '
'time!!!']},
446: {'dialect': 'tagalog',
'joke': ['Nasa restaurant',
'Waiter: Are you done, sir?',
"Ako: No, i'm a singer."]},
447: {'dialect': 'tagalog',
'joke': ['Girl: May sasabihin ako sayo babe.',
'Boy: ano yun babe ?',
'Girl: Christian ako dati.',
'Boy: okay lang babe , kahit anong relihiyon mo.',
'Girl : Ano ? hindi pangalan ko yun dati.']},
448: {'dialect': 'tagalog',
'joke': ['Juan: Mga kaibigan mag ingat kayo sa pag kain ng balot!',
'Pedro: Bakit naman?',
'Juan: kasi yung kaibigan ko nung kumain ng balot nabulag',
'Pedro: Nabulag?!',
'Juan: oo!',
'Pedro: paano nangyari yon? ikwento mo nga.',
'Juan: ganito kasi yon, bumili siya ng balot.',
'Pedro: o bumili siya.',
'Juan: binasag niya.',
'Pedro: binasag.',
'Juan: hinigop ung sabaw',
'Pedro: hinigop',
'Juan: sinilip,tinuka ayon bulag']},
449: {'dialect': 'tagalog',
'joke': ['HOLDAPAN',
'Parent: magkano ba ang kailangan nyo para maibalik lang ang '
'anak ko??',
'Holdaper: kahit ilan',
'Parent: 500,000 pesos',
'Holdaper: hindi pwede..dapat may million..',
'Parent: kalahating MILLION',
'Holdaper: sige pwede na.']},
450: {'dialect': 'tagalog',
'joke': ['Boy: password ka ba?',
'Girl: alam ko na yan kasi Hindi mo ako makakalimutan ?',
'Boy : Mali.. Kasi papalitan na kita']},
451: {'dialect': 'tagalog',
'joke': ['KAMATAYAN: hawakan mo ang kamay ko',
'BOY: ayoko, Alam kung ikaw si kamatayan, and i know that if i '
"touch your hand I'll die ",
'KAMATAYAN: wow ang talino mo maman!',
'BOY: ako pa !',
'KAMATAYAN: apir !',
'BOY: apir !']},
452: {'dialect': 'bisaya',
'joke': ['Sa sementeryo usa ka gabie…',
'Gard: sus! Maryusep..abi kog kalag ka. unsa imong gitiltil sa '
'lapida?',
'Nagtiltil: Ang amaw! wrong spelling ako pangalan.',
'Ang kalag sa gard nidagan sa kahadlok!']},
453: {'dialect': 'bisaya',
'joke': ['Unsay may eningles sa saging ma.',
'Mitubag ang inahan,” ardaba”',
'Mitubag ang Bata, ” dili uy ingon ni Mam.',
'Unsa man?',
'Banana tubag sa Bata',
'Ang tinuod Dong ang banana kanang hinog nga saging.']},
454: {'dialect': 'bisaya',
'joke': ['Boy: Bindesyoni ko padre kai ako nakasala',
'Padre: Unsa mai imong sala?',
'Boy: Nanglili ko padre',
'Padre: Hah!! unya pagkahuman??',
'Boy: Naghubo siya sa iyang blouse padre. puti kaayo padre. '
'unya naghukas syaa sa iyang b**.. uhhmm. dako kaayo og b***s '
'padre. unya iya n pod gihubo ang p***y……………..',
'Padre: Hah!!! (excited kaayo) unya naunsa man?????',
'Boy: Na ningkalit man og brown out padre..',
'Padre: Letse lage ning meralco…..']},
455: {'dialect': 'bisaya',
'joke': ['Babae: nong! Sakay ko!',
'Driver: cge! Asa man ka?',
'Babae: diha lang sa kanto! Naay bayad ang Bata?',
'Driver: ay libre lang kay duol man.',
'Babae: ah, ang mosabak naay bayad?',
'Driver: wala gihapon!',
'Babae: sige anak! Sabaka ko…']},
456: {'dialect': 'bisaya',
'joke': ['Asawa: Nganong gidala man nimo dre sa balay kanang trabaho '
'nimo',
'Bana: Rush man gud ni pangga mao nga gidala ko dre sa balay',
'Asawa: A pisti ba uy ! ikaw ray embalsamador nga nagdala ug '
'trabaho sa balay']},
457: {'dialect': 'bisaya',
'joke': ['Pasahero: n0y! dha ra [textspelling] ihun0ng tapad anang iro!',
'Driver: ok sir!',
'Pasahero: wla mn lage ka ninghun0ng n0y?',
'Driver: Unsa0n pghun0ng ngcge mn nglakaw ang iro!']},
458: {'dialect': 'bisaya',
'joke': ['Blood donation',
'(Mag-uyab ng-away)',
'BF: Wala kay utang kabubot-on, abi kay naulian nka human ka '
'naaksidente, ngdonate pa ko ug dugo nimo pra lng mabuhi ka.. '
'Unya kron kusog nka mangaway. Bawi-on nko tong akong dugo '
'gidonate beh..',
'GF: I-uli lagih nko pero installment lang.. O, dawata ning '
'Napkin, sakto ky gidugo ko kron….. Modess pa rba na.']},
459: {'dialect': 'bisaya',
'joke': ['Bana: Magbuwag ta?!Pastilan wala tay anak…',
'Asawa: O, ako pay hadlokon!',
'Bana: Sige tungaon nato ang APLIANCES',
'Imo ning NIGO ako ning KAGURAN…']},
460: {'dialect': 'bisaya',
'joke': ['Once there was a fr0g.',
'He jumped into the lake.',
'nlang0y',
'nilang0y palau..',
'layo na kaau ang baki..',
'ma0 2..',
'babay baki.. !',
'hehe']},
461: {'dialect': 'bisaya',
'joke': ['Mag-asawa nag-away…',
'(suko kaayo ang bana)',
'Bana: Wla nako kasabot aning kahimtanga, pag-uli nako gikan '
'sa trabaho walay linung-ag, walay hinlo ang balay, sige lang '
'ka panirang sa silingan.',
'Asawa: Sori na gud luv.',
'Bana: Ah! Di na matabang ug sorisori. Di na nako kaya! Mypa '
'mgbulag ta, dad-on nako ning duha nato ka anak.',
'(nakagawas na sa balay ang bana ug duha kaanak nila, '
'hingkalit ug singgit ang asawa…)',
'Asawa: Hoy! Imo man ng gihurot ug dala ng duha kaBata, nga '
'isa ra may imo ana.']},
462: {'dialect': 'bisaya',
'joke': ['Anak: Ma, 18th b-day na nko ugma, December 15. !, himo-e baya '
'ko ug message ha, kanang makahilak ko.',
'(Pagka-ugma)',
'Mama: Nak! adopted ra bya ka..hapi brthday!',
'ning-tuwad ug hilak uy!']},
463: {'dialect': 'bisaya',
'joke': ['Bata: Nay, unsay atong sud-an?',
'Inahan: Christmas tree ug lansang, Dong.',
'Bata: Ha, christmas tree ug lansang?',
'Inahan: Kamunggay ba, nga gisubakan ug buwad bulinaw. Meri '
'krismas nak!']},
464: {'dialect': 'bisaya',
'joke': ['Mama: (knock knock)',
'Anak: (excited kaayo) Basi si Santa Klaus na ni! Who’s there?',
'Mama: Mama nimo!',
'Anak: Mama nimo who?',
'Mama: Leche! Ablihi ko uy! Pa-who who pa ka diha!! Bunalan '
'tika ron!']},
465: {'dialect': 'bisaya',
'joke': ['Inday1: day, kuyugi ko be kay mangita kog ka chat karon sa '
'internetan. Hapit na human 2011 wa pako uyab nga merkano.',
'Inday2: manglaba paman ko unya day oie..',
'Inday1: koyugi ko kay mas kamao man ka mo english kay sa '
'nako.']},
466: {'dialect': 'bisaya',
'joke': ['Sa internetan…',
'Inday1: Kani, dai makadagit na jud kog amerikano karon',
'Inday2: Cegeg tabi oi, ingni na kuno na ug hi..',
'Inday1:(ni chat sa amerikano) Hi',
'Inday2: Nawa mo baws mana ug hello',
'Amerikano: Hello beautiful, can i see your tits?',
'Inday1: Biliba gyud nimo day oi, kabalo man ka mo ingon sya '
'ug hello, nya ingon pa jud gwapa ko, pero usa maning tits '
'day?',
'Inday2: Ai mo tan. Aw daw sya sa imong ngipon, bogo.a sad '
'anang amerikanoha oie..di kabalo mo spelling. Plural na gani '
'nang tit butangan pa jud og s..']},
467: {'dialect': 'bisaya',
'joke': ['Sa usa ka krismas party sa kanto…',
'Juan: Se, tikasan ka da, sige lang ka imoha. Ako na poy '
'tagayi.',
'Jose: O sige, para patas ta. Imoha ning baso. Akoa ning '
'pitser!']},
468: {'dialect': 'bisaya',
'joke': ['Pinoy sakay sa eroplano para mag krismas tour sa Europe may '
'kaabay siya nga duha ka Asian, Japon ug Insik. Sa ilang '
'biyahe nagkaila silang tulo, naghisgot sila sa mga bag-ong '
'teknolohiya nga gikan sa ilang nasod. bisaya, bisdak, '
'binisaya, bisaya jokes',
'Matod pas Japon – “kining akong ball pen camera ni”,',
'Ingon sab ang Insik – “kini sab ako sing-sing alarm watch ni,',
'Sus, kay ang Pinoy na-atol man sab nga nagda-ut iyang tiyan '
'ug nangutot ug may turbo sound pa gyod [purrrrorrrrrot].',
'Nakurat Japon ug Insik ug nagdungan pag-ingon – “unsa man '
'ron?” Tubag pas Pinoy – “mao toy bag-ong teknolohiya nga '
'bag-ong produkto sa Pilipinas ang paks masin na-a sa sulod sa '
'human body.”']},
469: {'dialect': 'bisaya',
'joke': ['Exam sadto nga adlaw sa isa ke pre-school…',
'Teacher: Leofil, what are the different parts of a tree?',
'Pupill: Ma’am leaf, uhm… fruit…trunk, roots…',
'Teacher: One last part? What is it?',
'Pupil: (daw indi makadumdum) uhmm… Ano gani man???',
'Teacher: Ok.gaan ta ka clue ha sang beginning sound.It begins '
'with [b]…The teacher pronounces the beginning letter b but '
'still the pupil couldn’t get it. So the teacher continued by '
'saying the consonant cluster ” br”. But still there was no '
'answer from the child. So the teacher proceeded …” [brrr…a..] '
'(the sound of a is slightly pronounced as in short vowel/shwa '
'sound.',
'Pupil: (Laughs maliciously covering his mouth)He he he… Miss '
'ang kahoy may bra?',
'Teacher: (Amazed and confused but disclosing her laughter). '
'No… it’s not that. The other part of the tree is branch.']},
470: {'dialect': 'bisaya',
'joke': ['Gikarga ni Juan ang baboy likod sa jeep dipsayroan ug dayon '
'lingkod sa front sit..',
'Pag abot sa iyang destinasyon, dayon kining plete sa '
'conduktor..',
'Conduktor; Juan, kulang man ning imong plete?',
'Juan: (tingala ug dala kasuko mood)… unsay kulang, pagtarong '
'dha ha?',
'Conduktor: Kulang lagi Juan, apil bya ang baBoy pletehan…',
'Juan: (tingala ug dala kasuko mood napud)… ha? unsay apil '
'pletehan ang baBoy?.. sus, kung kabalo pa lang ko nga apil '
'pletehan ang baBoy, mypa ako nalang nang gipaFRONT SIT..',
'hahahahahahaha…']},
471: {'dialect': 'bisaya',
'joke': ['Usa ka adlaw ni reklamo ang iyang amahan sa iyang '
'anak..Tungod sa tanum:',
'Amahan: Anak pag ka daghan ba nimong gi tanum sa atubangan sa '
'atong balay..dili man ta farmville nak..',
'Anak: Eh..di mani farmville tay..gud..',
'Amahan: Unya..unsa man diay ni??…',
'Anak: ehehehe…Tay..” A HUGE WAVE OF ZOMBIES ARE '
'APPROACHING..”']},
472: {'dialect': 'bisaya',
'joke': ['Genie: Ihatag nako usa sa imong mga wishes!',
'Aling Dionisia: Ay Diay! Sige, gusto nako ma byutipol ko.',
'Genie: Abrehi palihug ang botilya?',
'Aling Dionisia: ug ma byutipol nako? Aaaaaaaaaaaaaaay!',
'Genie: Di. Mubalik na lang ko sulod sa botilya.']},
473: {'dialect': 'bisaya',
'joke': ['Manny: ngutana titser, ngano daw ang eggplant walay egg?',
'Aling Dionisia: Ingna imo titser ha nga kung dunay egg, turta '
'na. TURTA!']},
474: {'dialect': 'bisaya',
'joke': ['Reporter: Manny ngayon panalo ka na naman, anong pasalubong '
'mo kay jinky?',
'Manny: syempre ibon. Mahilig sya doon.',
'Reporter: Ibon? Anong klaseng ibon?',
'Manny: Yung mga lipstek, pangmik up ba. Mga ibon prodaks yo '
'know.']},
475: {'dialect': 'bisaya',
'joke': ['Dionisia: Doktor gusto magpabutang ug brest.',
'Doctor: (nakuratan) Magpaseksi ka na?',
'Dionisia: Brest sa ngipon ba. Para magnindot akong ngipon! De '
'ba uso na karon?']},
476: {'dialect': 'bisaya',
'joke': ['Pacquiao: Wala, pildi ka na maskin unsa pay imong buhaton!',
'Hatton: Panindutay tag inahan!',
'Pacquiao: aaaaaaaaaaaaaah way ing-anaay! I mean yo now…']},
477: {'dialect': 'bisaya',
'joke': ['Jinky: Kung manganak napud ko, unsay ipangan nato sa atong '
'anak?',
'Manny: Aaaaaah eh di ikumbayn nato atong ngalan! '
'….”MANKY”…..']},
478: {'dialect': 'bisaya',
'joke': ['Si Kurdapya ug ang iyang mga amiga nangadto sa bar…',
'dayon kita siya sa sign na nakabutang:',
'below 18 not allowed',
'ingun si kurdapya ngehhhh……uli nalang ta oiiiii,,,,,,,,,,,',
'10 raman ta kabook,,,,,,,,,,,,,,,']},
479: {'dialect': 'bisaya',
'joke': ['Katabang: padre, ge texan ko sa akong amo nga naa ron sa '
'abroad, nga pamisahan kuno ang ilang iro nga namatay,',
'Pari: inday, tawo ra intawon ang misahan, walay labot ang '
'iro! naboang na sila?',
'Katabang: na! unya unsaon ta man ni rong gipadala nga $10,000 '
'para sa misa?',
'Pari: aw! wala manka moingon nga katoliko diay ning iro!,, '
'dad a dire!']},
480: {'dialect': 'bisaya',
'joke': ['Different color of banana',
'Teacher: Class what are the different colors of bananas?',
'Boknoy: Green, yellow, pula og brown mam!',
'Teacher: Boang! naa ba diay brown ba saging?',
'Boknoy: Bogok man ka mam? ang nilung ag diay peke na?']},
481: {'dialect': 'bisaya',
'joke': ['Toknoy: Noy, unsa manang sulat na imong gi-basa,',
'Nonoy: Gikan ni sa akong tatay Boknoy,',
'Toknoy: Basaha kuno,',
'Nonoy: Dear Nonoy, gi sulat ko kini ug lunes nahuman pagka '
'martes, gipadala ko ni pagka myerkules arun madawat nimo inig '
'ka huwebes, maski pagka byernes arun mabasa ko nimu inig ka '
'sabado, imong amahan dominggo.']},
482: {'dialect': 'bisaya',
'joke': ['Mama: Anak, mao nang ako kang gi hampak kay mahal ka naku,',
'Anak: Ana diay na nay?(gi sagpa ang mama)',
'Mama: Way Batasan ning Bataa, ngano imo man kong gi sagpa?',
'Anak: I love you to nay! hehehhehehe']},
483: {'dialect': 'bisaya',
'joke': ['Interviewer: unsay imong buhaton pag mata nimu na naay usa ka '
'million dollar?',
'Kano: go on a world tour………..',
'Hapon: put up a business……….',
'Pinoy: tulog napud para mahimong $2M!! 🙂']},
484: {'dialect': 'bisaya',
'joke': ['Amerkano: Is this house for rent?',
'Guard: Tinuod gyud ka mao ni ang balay ni poren,',
'Amerkano: What did you say?',
'Guard: kana si jose igsoon na ni poren pero una siyang '
'namatay,',
'Amerkano: Are you foolish?',
'Guard: nah! nayabag na ni ron, security ra ta himoon tang '
'pulis,',
'Amekano: tommorow i’ll be back,',
'Guard: tinood jud ka ang gikamatyan ni poren tumor og hubak! '
'ehehehehe']},
485: {'dialect': 'bisaya',
'joke': ['Boy: Miss pwd mangutana?.unsa ng orasa?',
'Girl: Mangutana ka unsa nang orasa? Unya mangutana ka sa '
'ak0ng ngalan, unya mangayo ka sa ak0ng #, unya manguyab ka, '
'unya after 1 m0nth sugt0n taka, unya manghagad ka og date, '
'unya dad-on day0n ko nim0 bsan asa, unya naay mahitabo nato '
'mabuntis day0n ko!.unya pakasal ta, unya kulatah0n ko '
'nim0!.di ko m0hatag ui!',
'Boy: Ahhhhh!.ka advance ba anang RELOHA!']},
486: {'dialect': 'bisaya',
'joke': ['Boy: Nganong di man ka nko? bati ba ko ug dagway?',
'Girl: D man. gwapo man ka, GKAN LAPALAPA HANGTOD LIOG!!']},
487: {'dialect': 'bisaya',
'joke': ['Boy: Maypa ID na lang ka Miss!',
'Girl: (natingala mode) Ngano man?',
'Boy: Para kung mawala ko, kahibaw sila nga aku ka.']},
488: {'dialect': 'bisaya',
'joke': ['Boy: Mura man ka ug kabayo Miss?!!!',
'Girl: (angry mode) Ha?!!! Kay ngano man?!!',
'Boy: Kada makakita ko nimo, mu-tigidig ang heart nako!']},
489: {'dialect': 'bisaya',
'joke': ['Boy: Mura man kag kaldero bayota ka!!!',
'Bayot: Ngano man?!!!',
'Boy: Lagum kaayo ka ug lubot!!!']},
490: {'dialect': 'bisaya',
'joke': ['Boy: Mura man ka ug ampalaya Miss…',
'Girl: Ngano man?!!!',
'Boy: Bisag pait kaayo ka, ikaw gihapon gahatag ug sustansiya '
'sa akong kinabuhi!']},
491: {'dialect': 'bisaya',
'joke': ['Tatay: Pangayo ug asin sa pikas nga balay',
'Anak: Oo tay…..',
'Tatay: Kadugay man lageh nimo.',
'Anak: Amaw man diay ka tay..nalibot najud nko tibuok baryo '
'wala man ko kita ug pikas nga balay pulos man jud tibuok…']},
492: {'dialect': 'bisaya',
'joke': ['nak: Tay, herbal ang makahiya?',
'Tatay: Oo, herbal na nak.',
'Anak: Sa asa man na nga tambal tay?',
'Tatay: Tambal na sa mga Baga’g nawong…. 😉']},
493: {'dialect': 'bisaya',
'joke': ['In a job interview the employer ask the applicant…',
'Employer: What can you contribute to our company?',
'Applicant: Aw naa diay amutan sir?']},
494: {'dialect': 'bisaya',
'joke': ['Tatay: Dong, sak-a ang mangga ug hikapa kung hinog na ba.',
'Anak: (nisaka) Tay, hinog na!',
'Tatay: Na hala, kanaog na diha kay atong sunkiton.']},
495: {'dialect': 'bisaya',
'joke': ['Girl: I nid space!',
'Boy: Cge, irog ko gamay aron naay space.',
'Girl: I mean, magbuwag ta!',
'Boy: Cge, diha ka agi sa left, dri ko sa right.',
'Girl: Si mama nako di ganahan nimo.',
'Boy: Labaw pakong di ganahan niya, di ko mupatol ug tiguwang '
'oy.',
'Girl: Hahay, BREAK nata ba!',
'Boy: Ah maypa kay gutom nasad ko. 🙂',
'Nah, wa juy buwag mahitabo. 😀']},
496: {'dialect': 'bisaya',
'joke': ['Duha ka irong buang nag-istoryahanay:',
'Iro 1: Brad, tinuod ba nga ang laway nato naa’y RABIES ug '
'makamatay?',
'Iro 2: O, ngano diay? Unsa’y problema?',
'Iro 1: Natulon man gud nako. Nakulbaan ko. (^_^)']},
497: {'dialect': 'bisaya',
'joke': ['Pupil 1: Bay, nakahimo kag assignment?',
'Pupil 2: Wala lage. Papel ra ako gi-pass kay lisod. Ikaw?',
'Pupil 1: Nah! Papel ra pud akong gi-pass.',
'Pupil 2: Hala! Ingnon na pud ta ani nga nagkinupyahanay! '
'Tsk!']},
498: {'dialect': 'bisaya',
'joke': ['Teacher: Class if basura ko asa man ko ninyo ilabay, '
'nabubulok o di nabubulok?',
'Dingdong: Ay sa di nabubulok ma’am.',
'Teacher: Ngano man DD?',
'Dingdong: Ay, alangan kay plastic man ka ma’am.']},
499: {'dialect': 'bisaya',
'joke': ['Mama: Nak, nganung ngkabulingit man ka?',
'Anak: kabantay ka anang kanal dri ma?',
'Mama: Oo nak, kbantay ko. Nganu man?',
'Anak: ako wla.']},
500: {'dialect': 'bisaya',
'joke': ['Boy: Nganu mura man kag nhadlok?',
'Girl: Hapit kako ma rape diha sa unahan, may gani koy kwarta.',
'Boy: Unya emu na lang gihatag imung kwarta?',
'Girl: Wala ui ng hotel mi. lain sad kau diha rmmi sa '
'daplin.']},
501: {'dialect': 'bisaya',
'joke': ['Judge: Pedro, unsa man jud ang nahitabo?',
'Pedro: (wala ni tingog)',
'Judge: Tubaga ang question!',
'Pedro: Ingon hearing lang ni! Ngano naay Speaking?']}}
| 2.15625
| 2
|
raids/urls.py
|
SvenStahlmann/Early-Bird
| 0
|
12775819
|
from django.urls import path
from raids import views
urlpatterns = [
path('encounter', views.encounter, name='raids_encounter'),
path('dispatch', views.dispatch_loot_system, name='raids_dispatch'),
path('search', views.search, name='raids_search'),
# Ajax
path('ajax/autocomplete', views.ajax_autocomplete_search, name='raids_autocomplete_search'),
]
| 1.53125
| 2
|
scopes/assignment_operator.py
|
padmacho/pythontutorial
| 0
|
12775820
|
a = 0
def fun1():
print("fun1: a=", a)
def fun2():
a = 10 # By default, the assignment statement creates variables in the local scope
print("fun2: a=", a)
def fun3():
    global a  # rebind the module-level (global) variable
a = 5
print("fun3: a=", a)
fun1()
fun2()
fun1()
fun3()
fun1()
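# For reference, a sketch of the output this script prints when run top to
# bottom; the values follow from the scope rules the comments above describe:
#
#   fun1: a= 0    <- reads the global a
#   fun2: a= 10   <- the assignment created a local a that shadows the global
#   fun1: a= 0    <- the global a was not changed by fun2
#   fun3: a= 5    <- the global statement rebinds the module-level a
#   fun1: a= 5    <- fun1 now sees the updated global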
| 4.15625
| 4
|
doc/source/user/plots/matplotlib3.py
|
leonarduschen/numpy
| 5
|
12775821
|
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
fig = plt.figure()
ax = Axes3D(fig)
X = np.arange(-5, 5, 0.15)
Y = np.arange(-5, 5, 0.15)
X, Y = np.meshgrid(X, Y)
R = np.sqrt(X**2 + Y**2)
Z = np.sin(R)
ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap='viridis')
plt.show()
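# Note: instantiating Axes3D directly is deprecated on recent Matplotlib
# releases. A minimal sketch of the equivalent modern setup (fig2/ax2 are
# illustrative names; projection='3d' is available here because
# mpl_toolkits.mplot3d is imported above):
fig2 = plt.figure()
ax2 = fig2.add_subplot(projection='3d')  # preferred over Axes3D(fig) today
# The rest of the script (meshgrid, plot_surface, show) works unchanged.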
| 2.640625
| 3
|
osg_configure/configure_modules/siteinformation.py
|
mmascher/osg-configure
| 0
|
12775822
|
""" Module to handle attributes related to the site location and details """
import re
import logging
from osg_configure.modules import utilities
from osg_configure.modules import configfile
from osg_configure.modules import validation
from osg_configure.modules.baseconfiguration import BaseConfiguration
__all__ = ['SiteInformation']
# convenience
MANDATORY = configfile.Option.MANDATORY
MANDATORY_ON_CE = configfile.Option.MANDATORY_ON_CE
OPTIONAL = configfile.Option.OPTIONAL
class SiteInformation(BaseConfiguration):
"""Class to handle attributes related to site information such as location and
contact information
"""
# The wlcg_* options are read by GIP directly
IGNORE_OPTIONS = ['wlcg_tier', 'wlcg_parent', 'wlcg_name', 'wlcg_grid']
def __init__(self, *args, **kwargs):
# pylint: disable-msg=W0142
super(SiteInformation, self).__init__(*args, **kwargs)
self.logger = logging.getLogger(__name__)
self.log('SiteInformation.__init__ started')
self.options = {'group':
configfile.Option(name='group',
required=MANDATORY,
default_value='OSG',
mapping='OSG_GROUP'),
'host_name':
configfile.Option(name='host_name',
required=MANDATORY_ON_CE,
default_value='',
mapping='OSG_HOSTNAME'),
'site_name':
configfile.Option(name='site_name',
required=OPTIONAL,
default_value='',
mapping='OSG_SITE_NAME'),
'sponsor':
configfile.Option(name='sponsor',
required=MANDATORY_ON_CE,
mapping='OSG_SPONSOR'),
'site_policy':
configfile.Option(name='site_policy',
required=OPTIONAL,
default_value='',
mapping='OSG_SITE_INFO'),
'contact':
configfile.Option(name='contact',
required=MANDATORY_ON_CE,
mapping='OSG_CONTACT_NAME'),
'email':
configfile.Option(name='email',
required=MANDATORY_ON_CE,
mapping='OSG_CONTACT_EMAIL'),
'city':
configfile.Option(name='city',
required=MANDATORY_ON_CE,
mapping='OSG_SITE_CITY'),
'country':
configfile.Option(name='country',
required=MANDATORY_ON_CE,
mapping='OSG_SITE_COUNTRY'),
'longitude':
configfile.Option(name='longitude',
opt_type=float,
required=MANDATORY_ON_CE,
mapping='OSG_SITE_LONGITUDE'),
'latitude':
configfile.Option(name='latitude',
opt_type=float,
required=MANDATORY_ON_CE,
mapping='OSG_SITE_LATITUDE'),
'resource':
configfile.Option(name='resource',
required=OPTIONAL,
default_value='',
mapping='OSG_SITE_NAME'),
'resource_group':
configfile.Option(name='resource_group',
default_value='',
required=OPTIONAL)}
self.config_section = "Site Information"
self.enabled = True
self.log('SiteInformation.__init__ completed')
def parse_configuration(self, configuration):
"""Try to get configuration information from ConfigParser or SafeConfigParser object given
by configuration and write recognized settings to attributes dict
"""
self.log('SiteInformation.parse_configuration started')
self.check_config(configuration)
if not configuration.has_section(self.config_section):
self.enabled = False
self.log("%s section not in config file" % self.config_section)
self.log('SiteInformation.parse_configuration completed')
return
self.get_options(configuration, ignore_options=self.IGNORE_OPTIONS)
self.log('SiteInformation.parse_configuration completed')
# pylint: disable-msg=W0613
def check_attributes(self, attributes):
"""Check attributes currently stored and make sure that they are consistent"""
self.log('SiteInformation.check_attributes started')
attributes_ok = True
if not self.enabled:
self.log('Not enabled, returning True')
self.log('SiteInformation.check_attributes completed')
return attributes_ok
# OSG_GROUP must be either OSG or OSG-ITB
group = self.opt_val("group")
if group not in ('OSG', 'OSG-ITB'):
self.log("The group setting must be either OSG or OSG-ITB, got: %s" %
group,
option='group',
section=self.config_section,
level=logging.ERROR)
attributes_ok = False
host_name = self.opt_val("host_name")
        # host_name must be a valid DNS name; check this by resolving its IP address
if not utilities.blank(host_name) and not validation.valid_domain(host_name, True):
self.log("hostname %s can't be resolved" % host_name,
option='host_name',
section=self.config_section,
level=logging.ERROR)
attributes_ok = False
if not utilities.blank(self.opt_val("site_name")):
self.log("The site_name setting has been deprecated in favor of the"
" resource and resource_group settings and will be removed",
section=self.config_section,
option="site_name",
level=logging.WARNING)
latitude = self.opt_val("latitude")
if not utilities.blank(latitude) and not -90 < latitude < 90:
self.log("Latitude must be between -90 and 90, got %s" %
latitude,
section=self.config_section,
option='latitude',
level=logging.ERROR)
attributes_ok = False
longitude = self.opt_val("longitude")
if not utilities.blank(longitude) and not -180 < longitude < 180:
self.log("Longitude must be between -180 and 180, got %s" %
longitude,
section=self.config_section,
option='longitude',
level=logging.ERROR)
attributes_ok = False
email = self.opt_val("email")
# make sure the email address has the correct format
if not utilities.blank(email) and not validation.valid_email(email):
self.log("Invalid email address in site information: %s" %
email,
section=self.config_section,
option='email',
level=logging.ERROR)
attributes_ok = False
sponsor = self.opt_val("sponsor")
if not utilities.blank(sponsor):
attributes_ok &= self.check_sponsor(sponsor)
self.log('SiteInformation.check_attributes completed')
return attributes_ok
def check_sponsor(self, sponsor):
attributes_ok = True
percentage = 0
vo_names = utilities.get_vos(None)
        map_file_present = (vo_names != [])
vo_names.append('usatlas') # usatlas is a valid vo name
vo_names.append('uscms') # uscms is a valid vo name
vo_names.append('local') # local is a valid vo name
cap_vo_names = [vo.upper() for vo in vo_names]
for vo in re.split(r'\s*,?\s*', sponsor):
vo_name = vo.split(':')[0]
if vo_name not in vo_names:
if vo_name.upper() in cap_vo_names:
self.log("VO name %s has the wrong capitialization" % vo_name,
section=self.config_section,
option='sponsor',
level=logging.WARNING)
vo_mesg = "Valid VO names are as follows:\n"
for name in vo_names:
vo_mesg += name + "\n"
self.log(vo_mesg, level=logging.WARNING)
else:
if map_file_present:
self.log("In %s section, problem with sponsor setting" % \
self.config_section)
self.log("VO name %s not found" % vo_name,
section=self.config_section,
option='sponsor',
level=logging.ERROR)
vo_mesg = "Valid VO names are as follows:\n"
for name in vo_names:
vo_mesg += name + "\n"
self.log(vo_mesg, level=logging.ERROR)
attributes_ok = False
else:
self.log("Can't currently check VOs in sponsor setting because " +
"the /var/lib/osg/user-vo-map is empty. If you are " +
"configuring osg components, this may be resolved when " +
"osg-configure runs the appropriate script to generate " +
"this file later in the configuration process",
section=self.config_section,
option='sponsor',
level=logging.WARNING)
if len(vo.split(':')) == 1:
percentage += 100
elif len(vo.split(':')) == 2:
vo_percentage = vo.split(':')[1]
try:
percentage += int(vo_percentage)
except ValueError:
self.log("VO percentage (%s) in sponsor field (%s) not an integer" \
% (vo_percentage, vo),
section=self.config_section,
option='sponsor',
level=logging.ERROR,
exception=True)
attributes_ok = False
else:
self.log("VO sponsor field is not formated correctly: %s" % vo,
section=self.config_section,
option='sponsor',
level=logging.ERROR)
self.log("Sponsors should be given as sponsor:percentage "
"separated by a space or comma")
if percentage != 100:
self.log("VO percentages in sponsor field do not add up to 100, got %s" \
% percentage,
section=self.config_section,
option='sponsor',
level=logging.ERROR)
attributes_ok = False
return attributes_ok
def module_name(self):
"""Return a string with the name of the module"""
return "SiteInformation"
def separately_configurable(self):
"""Return a boolean that indicates whether this module can be configured separately"""
return True
def get_attributes(self, converter=str):
"""
Get attributes for the osg attributes file using the dict in self.options
Returns a dictionary of ATTRIBUTE => value mappings
Need to override parent class method since two options may map to OSG_SITE_NAME
"""
self.log("%s.get_attributes started" % self.__class__)
attributes = BaseConfiguration.get_attributes(self, converter)
if attributes == {}:
self.log("%s.get_attributes completed" % self.__class__)
return attributes
if ('OSG_SITE_NAME' in attributes and
self.options['resource'].value is not None and
not utilities.blank(self.options['resource'].value)):
attributes['OSG_SITE_NAME'] = self.options['resource'].value
self.log("%s.get_attributes completed" % self.__class__)
return attributes
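# For illustration, a minimal sketch of a config file this module could parse,
# exercised through configparser directly. The section and option names come
# from the class above; every concrete value below is invented for the example.
if __name__ == '__main__':
    import configparser
    _sample = """
[Site Information]
group = OSG
host_name = ce.example.edu
sponsor = osg:50, local:50
contact = Jane Admin
email = admin@example.edu
city = Chicago
country = US
longitude = -87.6
latitude = 41.8
resource = EXAMPLE_CE
"""
    _parser = configparser.ConfigParser()
    _parser.read_string(_sample)
    print(_parser.get('Site Information', 'sponsor'))  # -> osg:50, local:50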
| 2.21875
| 2
|
covid_berlin_scraper/utils/parse_utils.py
|
jakubvalenta/covid-berlin-scraper
| 1
|
12775823
|
import datetime
from typing import Dict, Optional
import dateparser
import regex
def get_element_text(el) -> str:
return ''.join(el.strings).strip()
def parse_int(
s: str, numbers_map: Dict[str, int], thousands_separator: str
) -> int:
m = regex.search(r'\d+', s.strip().replace(thousands_separator, ''))
if not m:
s_clean = s.lower()
for substr, value in numbers_map.items():
if substr == s_clean:
return value
raise Exception(f'Failed to parse number "{s}"')
return int(m[0])
def parse_int_or_none(
s: str, regex_none: regex.Regex, *args, **kwargs
) -> Optional[int]:
if regex_none.search(s):
return None
return parse_int(s, *args, **kwargs)
def parse_datetime(s: str, default_tz: datetime.tzinfo) -> datetime.datetime:
dt = dateparser.parse(s)
if not dt:
raise Exception(f'Failed to parse datetime "{s}"')
if not dt.tzinfo:
return dt.replace(tzinfo=default_tz)
return dt
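# A short usage sketch for the helpers above; the inputs are invented, and the
# numbers_map here maps German number words purely to illustrate the fallback
# path in parse_int.
if __name__ == '__main__':
    _numbers_map = {'keine': 0, 'ein': 1}
    print(parse_int('1.234 Fälle', _numbers_map, thousands_separator='.'))  # -> 1234
    print(parse_int('keine', _numbers_map, thousands_separator='.'))        # -> 0 (fallback)
    print(parse_int_or_none('entfällt', regex.compile(r'entf'), _numbers_map, '.'))  # -> None
    _tz = datetime.timezone(datetime.timedelta(hours=1))
    print(parse_datetime('1. März 2020', _tz))  # dateparser handles the German date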
| 3.515625
| 4
|
sroka/api/athena/athena_api.py
|
jacekbj/sroka
| 66
|
12775824
|
from configparser import NoOptionError
from urllib.parse import urlparse
import boto3
from botocore.exceptions import ClientError, EndpointConnectionError
import sroka.config.config as config
from sroka.api.athena.athena_api_helpers import poll_status, download_file, return_on_exception, \
input_check
def query_athena(query, filename=None):
if not input_check(query, [str]):
return return_on_exception(filename)
if not input_check(filename, [str, type(None)]):
return return_on_exception(filename)
if filename == '':
print('Filename cannot be empty')
return return_on_exception(filename)
try:
s3_bucket = config.get_value('aws', 's3bucket_name')
key_id = config.get_value('aws', 'aws_access_key_id')
access_key = config.get_value('aws', 'aws_secret_access_key')
region = config.get_value('aws', 'aws_region')
except (KeyError, NoOptionError) as e:
print('No credentials were provided. Error message:')
print(e)
return return_on_exception(filename)
session = boto3.Session(
aws_access_key_id=key_id,
aws_secret_access_key=access_key
)
athena = session.client('athena',
region_name=region)
s3 = session.resource('s3')
if not s3_bucket.startswith('s3://'):
output_s3_bucket = 's3://' + s3_bucket
else:
output_s3_bucket = s3_bucket
s3_bucket = s3_bucket.replace('s3://', '')
try:
result = athena.start_query_execution(
QueryString=query,
ResultConfiguration={
'OutputLocation': output_s3_bucket,
}
)
except ClientError as e:
if e.response['Error']['Code'] == 'InvalidRequestException':
print("Please check your query. Error message:")
else:
print('Please check your credentials including s3_bucket in config.ini file. Error message:')
print(e)
return return_on_exception(filename)
except EndpointConnectionError as e:
print('Please check your credentials including aws_region in config.ini file and Internet connection.',
'Error message:')
print(e)
return return_on_exception(filename)
query_id = result['QueryExecutionId']
result = poll_status(athena, query_id)
if result is None:
return return_on_exception(filename)
elif result['QueryExecution']['Status']['State'] == 'SUCCEEDED':
s3_key = query_id + '.csv'
return download_file(s3, s3_bucket, s3_key, filename)
else:
print('Query did not succeed. Reason:')
print(result['QueryExecution']['Status']['StateChangeReason'])
return return_on_exception(filename)
def done_athena(query_id, filename=None):
if not input_check(query_id, [str]):
return return_on_exception(filename)
if not input_check(filename, [str, type(None)]):
return return_on_exception(filename)
try:
s3_bucket = config.get_value('aws', 's3bucket_name')
key_id = config.get_value('aws', 'aws_access_key_id')
access_key = config.get_value('aws', 'aws_secret_access_key')
region = config.get_value('aws', 'aws_region')
except (KeyError, NoOptionError) as e:
print('All or part of credentials were not provided. Please verify config.ini file. Error message:')
print(e)
return return_on_exception(filename)
if s3_bucket.startswith('s3://'):
s3_bucket = s3_bucket.replace('s3://', '')
session = boto3.Session(
aws_access_key_id=key_id,
aws_secret_access_key=access_key
)
s3 = session.resource('s3')
athena = session.client('athena',
region_name=region)
result = poll_status(athena, query_id)
if result is None:
return return_on_exception(filename)
if result['QueryExecution']['Status']['State'] == 'SUCCEEDED':
s3_key = urlparse(result['QueryExecution']['ResultConfiguration']['OutputLocation']).path[1:]
return download_file(s3, s3_bucket, s3_key, filename)
else:
print('Query did not succeed. Reason:')
print(result['QueryExecution']['Status']['StateChangeReason'])
return return_on_exception(filename)
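# Usage sketch (hedged: the query, query id and filename are placeholders,
# and the [aws] credentials must already exist in config.ini):
#
#   df = query_athena("SELECT * FROM my_table LIMIT 10")
#   df = done_athena("12345678-aaaa-bbbb-cccc-1234567890ab",
#                    filename="results.csv")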
| 2.15625
| 2
|
django_project/blog/migrations/0007_add_snippet_to_post_model.py
|
jsolly/shower-thought-blog
| 1
|
12775825
|
# Generated by Django 3.2.7 on 2022-03-07 15:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0006_change_content_field_to_richtextuploading'),
]
operations = [
migrations.AddField(
model_name='post',
name='snippet',
field=models.CharField(blank=True, max_length=500, null=True),
),
]
| 1.578125
| 2
|
django_formset_vuejs/threads.py
|
fatse/django-formsets-vuejs
| 11
|
12775826
|
import threading
import time
from django_formset_vuejs.models import Book
def start_cleanup_job():
def cleanup_db():
while True:
            time.sleep(60 * 60)  # wake once per hour
            print('hello')  # debug heartbeat left in place
            Book.objects.all().delete()  # purge all Book rows
thread1 = threading.Thread(target=cleanup_db)
thread1.start()
| 1.867188
| 2
|
tools/DeployTool/python/generatesdkmodule.py
|
stepanp/luna2d
| 30
|
12775827
|
#-----------------------------------------------------------------------------
# luna2d DeployTool
# This is part of luna2d engine
# Copyright 2014-2017 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#-----------------------------------------------------------------------------
import argparse
import shutil
import os
import utils
import sdkmodule_android
def main(args):
if args.debug_clear_project == "true":
shutil.rmtree(args.project_path, ignore_errors=True)
elif os.path.exists(args.project_path):
print("Cannot create project in \"" + args.project_path + "\". Directory already exists.")
exit(1)
luna2d_path = utils.get_luna2d_path()
template_path = luna2d_path + "/templates/" + args.template
constants = {
"LUNA_SDKMODULE_TYPE" : args.module_type,
"LUNA_SDKMODULE_NAME" : args.name,
"LUNA_PACKAGE_NAME" : args.package_name,
"LUNA_CLASS_NAME" : args.class_name,
"LUNA2D_PATH" : luna2d_path,
}
ignored_files = []
if args.platform == "android":
sdkmodule_android.apply_constants(args, constants)
ignored_files = sdkmodule_android.get_ignored_files(args, template_path)
utils.make_from_template(template_path, args.project_path, constants, ignored_files)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("--project_path", required=True)
parser.add_argument("--module_type", required=True)
parser.add_argument("--template", required=True)
parser.add_argument("--name", required=True)
parser.add_argument("--platform", required=True)
parser.add_argument("--package_name", default="")
parser.add_argument("--class_name", default="")
parser.add_argument("--strip_git", default=False)
parser.add_argument("--debug_clear_project", default=False)
return parser.parse_args()
main(parse_args())
| 1.648438
| 2
|
question_bank/binary-tree-zigzag-level-order-traversal/binary-tree-zigzag-level-order-traversal.py
|
yatengLG/leetcode-python
| 9
|
12775828
|
<filename>question_bank/binary-tree-zigzag-level-order-traversal/binary-tree-zigzag-level-order-traversal.py
# -*- coding: utf-8 -*-
# @Author : LG
"""
Runtime: 36 ms, beats 91.91% of Python3 submissions
Memory usage: 13.8 MB, beats 5.59% of Python3 submissions
Approach:
Depth-first search to record each level's node values,
then reverse every other level.
"""
class Solution:
def zigzagLevelOrder(self, root: TreeNode) -> List[List[int]]:
record = {}
        def dfs(root, d):  # DFS: record each level's node values in a dict keyed by depth
if root:
if d in record:
record[d].append(root.val)
else:
record[d] = [root.val]
dfs(root.left, d + 1)
dfs(root.right, d + 1)
dfs(root, 0)
result = []
reverse = False
        for d in range(len(record)):  # reverse every other level
if reverse:
result.append(record[d][::-1])
else:
result.append(record[d])
reverse = not reverse
return result
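

if __name__ == '__main__':
    # Quick sanity check on the tree [3, 9, 20, null, null, 15, 7]:
    root = TreeNode(3, TreeNode(9), TreeNode(20, TreeNode(15), TreeNode(7)))
    assert Solution().zigzagLevelOrder(root) == [[3], [20, 9], [15, 7]]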
| 3.875
| 4
|
tests/test_522.py
|
sungho-joo/leetcode2github
| 0
|
12775829
|
<filename>tests/test_522.py
#!/usr/bin/env python
import pytest
"""
Test 522. Longest Uncommon Subsequence II
"""
@pytest.fixture(scope="session")
def init_variables_522():
from src.leetcode_522_longest_uncommon_subsequence_ii import Solution
solution = Solution()
def _init_variables_522():
return solution
yield _init_variables_522
class TestClass522:
def test_solution_0(self, init_variables_522):
assert init_variables_522().findLUSlength(["aba", "cdc", "eae"]) == 3
def test_solution_1(self, init_variables_522):
assert init_variables_522().findLUSlength(["aaa", "aaa", "aa"]) == -1
| 2.6875
| 3
|
Code/utils.py
|
sirebellum/catz_contest
| 4
|
12775830
|
<filename>Code/utils.py<gh_stars>1-10
import tensorflow as tf
import numpy as np
from PIL import Image
from scipy.ndimage import imread
from glob import glob
import os
import random
import cv2
import constants as c
from tfutils import log10
##
# Data
##
def normalize_frames(frames):
"""
Convert frames from int8 [0, 255] to float32 [-1, 1].
@param frames: A numpy array. The frames to be converted.
@return: The normalized frames.
"""
new_frames = frames.astype(np.float32)
new_frames /= (255 / 2)
new_frames -= 1
return new_frames
def denormalize_frames(frames):
"""
Performs the inverse operation of normalize_frames.
@param frames: A numpy array. The frames to be converted.
@return: The denormalized frames.
"""
new_frames = frames + 1
new_frames *= (255 / 2)
# noinspection PyUnresolvedReferences
new_frames = new_frames.astype(np.uint8)
return new_frames
def clip_l2_diff(clip):
"""
@param clip: A numpy array of shape [c.TRAIN_HEIGHT, c.TRAIN_WIDTH, (3 * (c.HIST_LEN + 1))].
@return: The sum of l2 differences between the frame pixels of each sequential pair of frames.
"""
diff = 0
for i in range(c.HIST_LEN):
frame = clip[:, :, 3 * i:3 * (i + 1)]
next_frame = clip[:, :, 3 * (i + 1):3 * (i + 2)]
# noinspection PyTypeChecker
diff += np.sum(np.square(next_frame - frame))
return diff
class data():
"""
Loads all train data into numpy arrays in memory.
"""
def __init__(self, path):
# Set up image dirs
cat_dirs = glob(path + "*")
random.shuffle(cat_dirs)
# load all images
self.images = np.zeros(
(len(cat_dirs), c.FULL_HEIGHT, c.FULL_WIDTH, 3 * (c.HIST_LEN + 1)))
for i in range(0, len(cat_dirs)):
input_imgs = glob(cat_dirs[i] + "/cat_*")
imgs = [imread(img, mode='RGB') for img in sorted(input_imgs)]
self.images[i] = normalize_frames(np.concatenate(imgs, axis=2))
self.instances = len(self.images)
        # dataset
self.mode = 'test'
if 'train' in path:
self.mode = 'train'
self.i = 0
def get_batch(self, batch_size=None):
# Get all images if no batch_size supplied
if batch_size is None:
batch_size = self.instances
# Shuffle if we've gone through the database once
if self.i >= self.instances:
self.i = 0
np.random.shuffle(self.images)
i = self.i
self.i += batch_size
batch = np.take(self.images, range(i, i+batch_size), axis=0, mode='wrap')
# perform random data alterations
if self.mode == 'train':
# horizontal flip
indices = np.random.randint(0, batch_size, (batch_size//3))
batch[indices] = np.fliplr(batch[indices])
# crop and resize
indices = np.random.randint(0, batch_size, (batch_size//3))
lcrops = np.random.randint(
0,
int(c.FULL_HEIGHT*0.2),
(batch_size//2))
hcrops = np.random.randint(
int(c.FULL_HEIGHT*0.8),
c.FULL_HEIGHT,
(batch_size//2))
for x, i in enumerate(indices):
new = batch[i,
lcrops[x]:hcrops[x],
lcrops[x]:hcrops[x],
:]
new = cv2.resize(new, (c.FULL_HEIGHT, c.FULL_WIDTH)).copy()
if np.amax(new) > 1 or np.amin(new) < -1: # If we get a bad image, discard
continue
batch[i] = new
return batch
##
# Error calculation
##
# TODO: Add SSIM error http://www.cns.nyu.edu/pub/eero/wang03-reprint.pdf
# TODO: Unit test error functions.
def perceptual_distance(gen_frames, gt_frames):
# Preprocess back to normal images
y_pred = gen_frames + 1
y_true = gt_frames + 1
y_pred *= (255 / 2)
y_true *= (255 / 2)
rmean = (y_true[:, :, :, 0] + y_pred[:, :, :, 0]) / 2
r = y_true[:, :, :, 0] - y_pred[:, :, :, 0]
g = y_true[:, :, :, 1] - y_pred[:, :, :, 1]
b = y_true[:, :, :, 2] - y_pred[:, :, :, 2]
return tf.reduce_mean(tf.sqrt((((512+rmean)*r*r)/256) + 4*g*g + (((767-rmean)*b*b)/256)))
def psnr_error(gen_frames, gt_frames):
"""
Computes the Peak Signal to Noise Ratio error between the generated images and the ground
truth images.
@param gen_frames: A tensor of shape [batch_size, height, width, 3]. The frames generated by the
generator model.
@param gt_frames: A tensor of shape [batch_size, height, width, 3]. The ground-truth frames for
each frame in gen_frames.
@return: A scalar tensor. The mean Peak Signal to Noise Ratio error over each frame in the
batch.
"""
shape = tf.shape(gen_frames)
num_pixels = tf.to_float(shape[1] * shape[2] * shape[3])
square_diff = tf.square(gt_frames - gen_frames)
batch_errors = 10 * log10(1 / ((1 / num_pixels) * tf.reduce_sum(square_diff, [1, 2, 3])))
return tf.reduce_mean(batch_errors)
def sharp_diff_error(gen_frames, gt_frames):
"""
Computes the Sharpness Difference error between the generated images and the ground truth
images.
@param gen_frames: A tensor of shape [batch_size, height, width, 3]. The frames generated by the
generator model.
@param gt_frames: A tensor of shape [batch_size, height, width, 3]. The ground-truth frames for
each frame in gen_frames.
@return: A scalar tensor. The Sharpness Difference error over each frame in the batch.
"""
shape = tf.shape(gen_frames)
num_pixels = tf.to_float(shape[1] * shape[2] * shape[3])
# gradient difference
# create filters [-1, 1] and [[1],[-1]] for diffing to the left and down respectively.
# TODO: Could this be simplified with one filter [[-1, 2], [0, -1]]?
pos = tf.constant(np.identity(3), dtype=tf.float32)
neg = -1 * pos
filter_x = tf.expand_dims(tf.stack([neg, pos]), 0) # [-1, 1]
filter_y = tf.stack([tf.expand_dims(pos, 0), tf.expand_dims(neg, 0)]) # [[1],[-1]]
strides = [1, 1, 1, 1] # stride of (1, 1)
padding = 'SAME'
gen_dx = tf.abs(tf.nn.conv2d(gen_frames, filter_x, strides, padding=padding))
gen_dy = tf.abs(tf.nn.conv2d(gen_frames, filter_y, strides, padding=padding))
gt_dx = tf.abs(tf.nn.conv2d(gt_frames, filter_x, strides, padding=padding))
gt_dy = tf.abs(tf.nn.conv2d(gt_frames, filter_y, strides, padding=padding))
gen_grad_sum = gen_dx + gen_dy
gt_grad_sum = gt_dx + gt_dy
grad_diff = tf.abs(gt_grad_sum - gen_grad_sum)
batch_errors = 10 * log10(1 / ((1 / num_pixels) * tf.reduce_sum(grad_diff, [1, 2, 3])))
return tf.reduce_mean(batch_errors)
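# Minimal usage sketch (hedged: shapes are illustrative; this file targets
# TensorFlow 1.x, as the tf.to_float calls above imply):
#
#   gen = tf.placeholder(tf.float32, [None, 96, 96, 3])
#   gt = tf.placeholder(tf.float32, [None, 96, 96, 3])
#   psnr = psnr_error(gen, gt)
#   sharp = sharp_diff_error(gen, gt)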
| 2.59375
| 3
|
Lib/glyphNameFormatter/rangeProcessors/latin_extended_b.py
|
peterennis/glyphNameFormatter
| 69
|
12775831
|
def process(self):
self.edit("LATIN")
self.replace("CAPITAL LETTER D WITH SMALL LETTER Z", "Dz")
self.replace("CAPITAL LETTER DZ", "DZ")
self.edit("AFRICAN", "african")
self.edit("WITH LONG RIGHT LEG", "long", "right", "leg")
self.edit('LETTER YR', "yr")
self.edit("CAPITAL LETTER O WITH MIDDLE TILDE", "Obar")
self.edit("CAPITAL LETTER SMALL Q WITH HOOK TAIL", "Qsmallhooktail")
self.edit("LETTER REVERSED ESH LOOP", "eshreversedloop")
self.edit("CAPITAL LETTER L WITH SMALL LETTER J", "Lj")
self.edit("CAPITAL LETTER N WITH SMALL LETTER J", "Nj")
self.edit("LETTER INVERTED GLOTTAL STOP WITH STROKE", "glottalinvertedstroke")
self.edit("LETTER TWO WITH STROKE", "twostroke")
self.edit("CAPITAL LETTER LJ", "LJ")
self.edit("CAPITAL LETTER NJ", "NJ")
self.edit("CAPITAL LETTER AE WITH", "AE")
self.edit("CAPITAL LETTER WYNN", "Wynn")
self.edit("LETTER WYNN", "wynn")
self.edit("WITH PALATAL", "palatal")
self.edit("DENTAL", "dental")
self.edit("LATERAL", "lateral")
self.edit("ALVEOLAR", "alveolar")
self.edit("RETROFLEX", "retroflex")
self.replace("LETTER CLICK", "click")
self.forceScriptPrefix("latin", "CAPITAL LETTER GAMMA", "Gamma")
self.forceScriptPrefix("latin", "CAPITAL LETTER IOTA", "Iota")
self.forceScriptPrefix("latin", "CAPITAL LETTER UPSILON", "Upsilon")
self.processAs("Helper Diacritics")
self.processAs("Helper Shapes")
self.handleCase()
self.compress()
if __name__ == "__main__":
from glyphNameFormatter.exporters import printRange
printRange("Latin Extended-B")
| 3.078125
| 3
|
location_parsers/kern.py
|
rajbot/vaccinebot
| 2
|
12775832
|
<gh_stars>1-10
# Parse vaccination site locations for Kern County
# Run manually: python3 -m location_parsers.kern
import csv
import os
import re
import tempfile
import time
from . import County, Location
from . import driver_start, driver_stop
from . import debug_print, validate
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
county = County(
name="Kern",
url="https://phweb.kerncounty.com/Html5Viewer/index.html?viewer=COVID19Vaccination#",
)
def address_fixup(a):
""" Some Kern Co. addresses have typos. """
d = {
"2901 Silent Ave Suite 201, Bakersfield, CA 93308": "2901 Sillect Ave Suite 201, Bakersfield, CA 93308",
"3300 BUENA VISTA RD A, Bakersfield, CA 93311": "3300 Buena Vista Rd Bldg A, Bakersfield, CA 93311",
"8000 WHITE LANE, Bakersfield, CA 93301": "8000 WHITE LANE, BAKERSFIELD, CA 93309",
"Rite Aid Store 06303, Bakersfield, CA 93313": "3225 PANAMA LANE, BAKERSFIELD, CA 93313",
"3500 Stine Rd Bakersfield, Bakersfield, CA 93309": "3500 Stine Rd, Bakersfield, CA 93309",
}
return d.get(a, a)
# Returns a list of Location objects
def run():
dir = tempfile.TemporaryDirectory()
driver, display = driver_start(download_dir=dir.name)
driver.get(county.url)
WebDriverWait(driver, 10).until(
EC.presence_of_element_located((By.XPATH, "//button[contains(.,'OK')]"))
)
time.sleep(2)
# We wait for an OK button to get past the splash screen, but we actually
    # need to click the offscreen submit input instead.
driver.execute_script(
"(function() {var i = document.getElementsByTagName('input'); i.item(i.length-1).click();})();"
)
time.sleep(1)
# Open the toolbar
driver.execute_script("$('.flyout-menu-active-tool').click();")
time.sleep(1)
# Click the button to open the table view
driver.execute_script("$('button.toolbar-item')[0].click();")
WebDriverWait(driver, 10).until(
EC.presence_of_element_located(
(By.XPATH, "//strong[contains(.,'Vaccination Locations')]")
)
)
# Open the options menu
driver.execute_script("$('button[data-tab-context-menu-button]')[0].click()")
time.sleep(1)
# Click the export to CSV button
WebDriverWait(driver, 10).until(
EC.presence_of_element_located(
(By.XPATH, "//strong[contains(., 'Export to CSV')]")
)
)
e = driver.find_element_by_xpath("//strong[contains(., 'Export to CSV')]")
button = e.find_element_by_xpath("..")
driver.execute_script("arguments[0].click();", button)
time.sleep(1)
# Click the OK button to download CSV to `dir/Export.csv`
WebDriverWait(driver, 10).until(
EC.presence_of_element_located((By.XPATH, "//p[contains(., 'Confirm?')]"))
)
p = driver.find_element_by_xpath("//p[contains(., 'Confirm?')]")
driver.execute_script("console.log(arguments[0]);", p)
div = p.find_element_by_xpath(".//following-sibling::div")
driver.execute_script("console.log(arguments[0]);", div)
button = div.find_element_by_xpath(".//button[contains(.,'OK')]")
driver.execute_script("console.log(arguments[0]);", button)
driver.execute_script("arguments[0].click();", button)
time.sleep(5) # How do we know if the download is done?
csv_path = os.path.join(dir.name, "Export.csv")
locations = []
with open(csv_path, newline="") as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
url = None
if row["Online Registration"] != "":
url = row["Online Registration"].strip()
address = f'{row["Address"].strip()}, {row["City"].strip()}, CA {row["Zip Code"].strip()}'
address = address_fixup(address)
locations.append(
Location(
name=row["Facility Name"].strip(),
address=address,
url=url,
phone=row["Phone Number"].strip(),
hours=row["Hours"].strip(),
county=county.name,
)
)
validate(locations)
return locations
if __name__ == "__main__":
locations = run()
debug_print(locations)
| 3.09375
| 3
|
results/gafqmc_info.py
|
wirawan0/pyqmc
| 0
|
12775833
|
# $Id: gafqmc_info.py,v 1.3 2011-03-09 15:44:47 wirawan Exp $
#
# gafqmc_info.py
# Tools to parse GAFQMC INFO file
#
# <NAME>
# Created: 20101025
#
# IMPORTANT: Try to make field names consistent with those in pwqmc_info.
# Python standard modules
import math
import os
import os.path
import re
import sys
import time
import numpy
from wpylib.iofmt.text_input import text_input, head, tail
from wpylib.db.result_base import result_base
from wpylib.sugar import ifelse
from wpylib.regexps import regex
from wpylib.text_tools import str_grep
class gafqmc_info(result_base):
'''Structure to represent the metadata contained in INFO file
(GAFQMC version).
Available information:
* info_file
* start_time
* info_mtime
* calc_time (defined as info_mtime - start_time in seconds)
* nbasis
* Evar_noconst Evar H0
* Etrial_noconst Etrial
* deltau betablk nblkstep
* nwlk nwlkmax nwlkmin
* itv_Em itv_pc itv_pc_eq
'''
meas_dtype = numpy.dtype([('beta',float), ('overlap',float),
('Etotal',float), ('Eproj',float)])
runtype_map = {
# fields: constraint, projector
# DO NOT EDIT strings below (and they are case sensitive);
# other codes may depend on these exact names,
# so any edit can screw up those user codes.
0: ('none', 'hybrid'),
1: ('phaseless cosine', 'Elocal'),
}
def parse_INFO(self, INFO):
'''Gets all the necessary info (calculation parameters) from a
GAFQMC INFO file.
This is a very old routine.
    We use this as a temporary starting point.'''
from pyqmc import PyqmcParseError
info_file = text_input(INFO)
self.clear()
rslt = self
rslt['info_file'] = INFO
rslt['info_mtime'] = time.localtime(os.path.getmtime(INFO))
rx_iflg_constraint = regex(r'^\s*iflg_constraint\s*=\s*([0-9]+)')
for L in info_file:
Ls = L.strip()
flds = Ls.split()
if len(flds) == 0:
continue
elif Ls.startswith("Number of particles:"):
# Add special exception for silly Cray fortran output:
if flds[3].startswith('2*'): # Cray
rslt["nup"] = rslt["ndn"] = int(flds[3][2:])
elif flds[3].endswith(','): # Cray
rslt["nup"] = int(flds[3].rstrip(','))
rslt["ndn"] = int(flds[4].rstrip(','))
else:
rslt["nup"] = int(flds[3])
rslt["ndn"] = int(flds[4])
elif Ls.startswith("Majority and minority det are independent"):
rslt["udet"] = True
elif Ls.startswith("Majority and minority det are coupled"):
rslt["udet"] = False
elif flds[0] == "Variational":
if flds[1] == "energy":
rslt["Evar"] = float(flds[3])
elif flds[1] == "energy=":
rslt["Evar"] = float(flds[2])
elif flds[0] == "nbasis":
rslt["nbasis"] = int(flds[2])
elif flds[0] == "Energy_N_QMC":
rslt["H0"] = float(flds[1])
elif flds[0] == "deltau=":
rslt["deltau"] = float(flds[1])
elif flds[0] == "beta=":
rslt["betablk"] = float(flds[1])
elif Ls.startswith("input etrial="):
rslt["Etrial_noconst"] = float(flds[2]) # no H0 yet
# anorm is also available on the same line:
rslt["anorm"] = float(flds[5])
elif Ls.startswith("New etrial to be used in El_bound:"):
rslt["Etrial_noconst"] = float(flds[8]) # H0 specified below
elif Ls.startswith("nblk="):
rslt["nblk"] = int(flds[1])
elif Ls.startswith("neq="):
rslt["neq"] = int(flds[1])
elif Ls.startswith("ngrth="):
rslt["ngrth"] = int(flds[1])
elif Ls.startswith("No Growth phase:"):
rslt["ngrth"] = 0
elif Ls.startswith("itv_em="):
rslt["itv_em"] = int(flds[1])
elif Ls.startswith("itv_pc="):
rslt["itv_pc"] = int(flds[1])
elif Ls.startswith("itv_pc_eq="):
rslt["itv_pc_eq"] = int(flds[1])
elif Ls.startswith("nblkstep="):
rslt["nblkstep"] = int(flds[1])
elif Ls.startswith("nwlk="):
rslt["nwlk"] = int(flds[1])
elif Ls.startswith("nwlkmax="):
rslt["nwlkmax"] = int(flds[1])
elif Ls.startswith("nwlkmin="):
rslt["nwlkmin"] = int(flds[1])
elif rx_iflg_constraint % Ls:
runtype = int(rx_iflg_constraint[1])
rslt["iflg_constraint"] = runtype
rslt["runtype"] = runtype # keyword uniformity with PWQMC (recommended)
runtype_rec = self.runtype_map[runtype]
rslt["constraint"], rslt["projector"] \
= runtype_rec[:2]
# ---runtime info below---
elif Ls.startswith("Using OpenMP with number of threads = "):
rslt["num_threads"] = int(flds[7])
elif Ls.startswith("Parallel version of GAFQMC, using NProc = "):
rslt["code_name"] = "gafqmc"
rslt["code_branch"] = "mpi"
rslt["num_tasks"] = int(flds[7])
elif Ls.startswith("Host:"):
rslt["run_host"] = flds[1]
elif Ls.startswith("Program was run on"):
# CAVEAT: start_time can be off by several hour if the local time
# zone is different from the time zone where the calculation
# was done.
# FIXME this!
rslt['start_time'] = \
time.strptime(flds[4] + " " + flds[6], "%Y/%m/%d %H:%M:%S")
elif Ls.startswith("Program was ended on"):
rslt['end_time'] = \
time.strptime(flds[4] + " " + flds[6], "%Y/%m/%d %H:%M:%S")
# measurement and other complex data capture
elif Ls.startswith("Measurement") and flds[1].startswith("phase...."):
#print "found meas!"
self.locate_text_marker(info_file,
(lambda S : S.startswith("Output:")),
max_try=30,
errmsg="Cannot locate the beginning of measurement data")
self.parse_measurement0(info_file, rslt)
rslt.setdefault("nwlkmax", rslt.nwlk * 2)
rslt.setdefault("nwlkmin", max(rslt.nwlk / 2, 1))
# fall back to original defaults:
rslt.setdefault("iflg_constraint", 1)
rslt.setdefault("runtype", 1)
rslt.setdefault("constraint", "phaseless cosine")
rslt.setdefault("projector", "Elocal")
rslt["Evar_noconst"] = rslt["Evar"] - rslt["H0"]
rslt["Etrial"] = rslt["Etrial_noconst"] + rslt["H0"]
rslt["calc_time"] = time.mktime(rslt[ifelse("end_time" in rslt, "end_time", "info_mtime")]) \
- time.mktime(rslt["start_time"])
rslt["run_mpi"] = ("num_tasks" in rslt)
rslt["run_openmp"] = ("num_threads" in rslt)
return rslt
def locate_text_marker(self, info_file, match_func, max_try, errmsg):
"""Seeks the text lines until a given marker is found.
An exception is raised if after max_try read attempts,
the marker is not found."""
    from pyqmc import PyqmcParseError  # imported locally, mirroring parse_INFO
    for i in xrange(max_try):
      Ls = info_file.next().strip()
      if match_func(Ls):
        return True
    raise PyqmcParseError, errmsg
def parse_measurement0(self, info_file, rslt):
"""Internal routine to parse only the measurement results of the
file.
info_file is an open file-like object.
The last line read must have been 'Measurement phase...'
TODO:
- add stand-alone parse_measurement routine?
"""
from pyqmc import PyqmcParseError
# FIXME: Add beginning marker detection (previous text line must be
# "Output:")
for_D2E = lambda s : s.replace("D","E").replace("d","e")
EOS = re.compile(r"^\s*Final Results:\s*$") # end-of-stream marker
RS = re.compile(r"^\s*-+\s*$") # record separator
# Special handling in case the parsing was stalled by "BugStop" output
BUGSTOP = regex(r"^\s*BugStop\s*:\s*(?P<msg1>.*)")
meas = []
for L in info_file:
Ls_orig = L.strip()
Ls = for_D2E(Ls_orig)
flds = Ls.split()
if EOS.search(Ls):
break # end-of-stream detected
elif len(flds) == 3:
# special case to handle wrapped Fortran output
Ls2 = for_D2E(info_file.next().strip())
flds2 = Ls2.split()
if len(flds2) == 0:
raise PyqmcParseError, \
"Invalid format in GAFQMC measurement text (INFO)"
flds.append(flds2[0])
elif len(flds) < 4:
if BUGSTOP % Ls_orig:
self.store_bug_info0(rslt, info_file, BUGSTOP['msg1'])
break
else:
raise PyqmcParseError, \
"Invalid format in GAFQMC measurement text (INFO)"
try:
rec = tuple(map((lambda x: float(x.rstrip(','))), flds[:4]))
except:
if BUGSTOP % Ls_orig:
self.store_bug_info0(rslt, info_file, BUGSTOP['msg1'])
break
else:
raise PyqmcParseError, \
"Error parsing GAFQMC measurement text (INFO)"+str(Ls)
meas.append(rec)
try:
self.locate_text_marker(info_file,
(lambda S : RS.search(S)), max_try=20,
errmsg="Cannot locate a valid record separator in GAFQMC measurement text (INFO)")
except StopIteration:
from warnings import warn
info = self['info_file']
warn("StopIteration caught in file %s; stop scanning file." % (info,))
break
dtype = self.meas_dtype
rslt["meas_energy"] = numpy.array(meas, dtype=dtype)
def store_bug_info0(self, rslt, info_file, msg1):
"""Mark the run as buggy (by the existence of BUGSTOP field).
msg1 is the text that follows the `BugStop:' printout.
Caveat: the second line message may or may not be right, but hopefully
it can give us a clue on what's happening."""
rslt['BUGSTOP'] = True
try:
msg2 = info_file.next().strip()
msgs = (msg1, msg2)
except:
msgs = (msg1,)
rslt['BUGSTOP_msgs'] = msgs
parse_file_ = parse_INFO
def is_gafqmc_info(filename):
"""Detects whether a file is a GAFQMC info file.
"""
# TODO: This is a placeholder routine (API) for what could be more advanced
# in the future.
# Copied from gafqmc_quick_dirty.
snippet = head(filename, 400)
if str_grep("GAFQMC - Generic auxiliary-field quantum Monte Carlo", snippet):
return True
elif str_grep("Generic Auxiliary field Quantum Monte Carlo (GAFQMC)", snippet):
# gen76 and gen79 has this
return True
else:
return False
def is_gafqmc_info_finished(filename):
# TODO: This is a placeholder routine (API) for what could be more advanced
# in the future.
# Copied from gafqmc_quick_dirty.
if is_gafqmc_info(filename):
snippet = tail(filename, 400)
if str_grep("Summary of energies:", snippet):
return True
return False
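# Usage sketch (hedged: 'INFO' is a placeholder path, and construction
# semantics are inherited from wpylib's result_base):
#
#   if is_gafqmc_info('INFO') and is_gafqmc_info_finished('INFO'):
#       info = gafqmc_info('INFO')
#       print info['Etrial'], info['deltau']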
| 2.28125
| 2
|
bus_monitor/plotter/start_live_streaming.py
|
mxl00474/Yokohama_bus_navi
| 0
|
12775834
|
import os
from PlotterBokeh import PlotterBokeh
from BusInfo import BusInfo
def start_live_streaming(doc):
if doc is None:
raise NotImplementedError()
# Set the initial location as Yokohama station
lat=35.46591430126525
lng=139.62125644093177
apiKey = os.getenv('GMAP_TOKEN')
plotter = PlotterBokeh(lat, lng, apiKey, doc)
bus_list = BusInfo.update()
plotter.init_buslocation(bus_list)
plotter.loop()
| 2.734375
| 3
|
invertendo_sequencia.py
|
isaberamos/Programinhas
| 1
|
12775835
|
<reponame>isaberamos/Programinhas<filename>invertendo_sequencia.py<gh_stars>1-10
seq = []
n = 1
while n:
    n = int(input("Digite um número: "))  # pass the prompt to input(); print() returns None
if n != 0:
seq.append(n)
print(seq)
print(seq[::-1])  # reversed sequence; seq[:-1] would only drop the last element
| 3.5625
| 4
|
tpp/models/encoders/base/recurrent.py
|
dqmis/neuralTPPs-1
| 17
|
12775836
|
import torch as th
import torch.nn as nn
import torch.nn.functional as F
from typing import Dict, List, Optional, Tuple
from tpp.models.encoders.base.variable_history import VariableHistoryEncoder
from tpp.pytorch.models import MLP
from tpp.utils.events import Events
class RecurrentEncoder(VariableHistoryEncoder):
"""Abstract classes for recurrent encoders. The encoding has a
variable history size.
Args:
name: The name of the encoder class.
rnn: RNN encoder function.
units_mlp: List of hidden layers sizes for MLP.
activations: MLP activation functions. Either a list or a string.
emb_dim: Size of the embeddings. Defaults to 1.
embedding_constraint: Constraint on the weights. Either `None`,
'nonneg' or 'softplus'. Defaults to `None`.
temporal_scaling: Scaling parameter for temporal encoding
padding_id: Id of the padding. Defaults to -1.
encoding: Way to encode the events: either times_only, marks_only,
concatenate or temporal_encoding. Defaults to times_only
marks: The distinct number of marks (classes) for the process. Defaults
to 1.
"""
def __init__(
self,
name: str,
rnn: nn.Module,
# MLP args
units_mlp: List[int],
activation_mlp: Optional[str] = "relu",
dropout_mlp: Optional[float] = 0.,
constraint_mlp: Optional[str] = None,
activation_final_mlp: Optional[str] = None,
# Other args
emb_dim: Optional[int] = 1,
embedding_constraint: Optional[str] = None,
temporal_scaling: Optional[float] = 1.,
encoding: Optional[str] = "times_only",
time_encoding: Optional[str] = "relative",
marks: Optional[int] = 1,
**kwargs):
super(RecurrentEncoder, self).__init__(
name=name,
output_size=units_mlp[-1],
emb_dim=emb_dim,
embedding_constraint=embedding_constraint,
temporal_scaling=temporal_scaling,
encoding=encoding,
time_encoding=time_encoding,
marks=marks,
**kwargs)
self.rnn = rnn
self.mlp = MLP(
units=units_mlp,
activations=activation_mlp,
constraint=constraint_mlp,
dropout_rates=dropout_mlp,
input_shape=self.rnn.hidden_size,
activation_final=activation_final_mlp)
def forward(self, events: Events) -> Tuple[th.Tensor, th.Tensor, Dict]:
"""Compute the (query time independent) event representations.
Args:
events: [B,L] Times and labels of events.
Returns:
representations: [B,L+1,M+1] Representations of each event.
representations_mask: [B,L+1] Mask indicating which representations
are well-defined.
"""
histories, histories_mask = self.get_events_representations(
events=events) # [B,L+1,D] [B,L+1]
representations, _ = self.rnn(histories)
representations = F.normalize(representations, dim=-1, p=2)
representations = self.mlp(representations) # [B,L+1,M+1]
return (representations, histories_mask,
dict()) # [B,L+1,M+1], [B,L+1], Dict
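# Construction sketch (hedged: all sizes are illustrative, and the remaining
# keyword arguments are forwarded to VariableHistoryEncoder):
#
#   encoder = RecurrentEncoder(
#       name="gru-encoder",
#       rnn=nn.GRU(input_size=8, hidden_size=32, batch_first=True),
#       units_mlp=[64, 5],
#       emb_dim=8,
#       marks=4)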
| 2.6875
| 3
|
vision/datasets/utils.py
|
tamnguyenvan/pytorch-ssd
| 0
|
12775837
|
import os
import glob
import numpy as np
from datetime import datetime
from scipy.io import loadmat
from PIL import Image
np.random.seed(42)
def calc_age(taken, dob):
birth = datetime.fromordinal(max(int(dob) - 366, 1))
# assume the photo was taken in the middle of the year
if birth.month < 7:
return taken - birth.year
else:
return taken - birth.year - 1
def get_meta(mat_path, db):
meta = loadmat(mat_path)
full_path = meta[db][0, 0]["full_path"][0]
dob = meta[db][0, 0]["dob"][0] # Matlab serial date number
gender = meta[db][0, 0]["gender"][0]
photo_taken = meta[db][0, 0]["photo_taken"][0] # year
face_score = meta[db][0, 0]["face_score"][0]
second_face_score = meta[db][0, 0]["second_face_score"][0]
age = [calc_age(photo_taken[i], dob[i]) for i in range(len(dob))]
return full_path, dob, gender, photo_taken, face_score, second_face_score, age
def load_data(data_dir, db='imdb', split=0.1):
out_paths = []
out_ages = []
out_genders = []
db_names = db.split(',')
    # Load utkface if needed.
if 'utk' in db_names:
utk_dir = os.path.join(data_dir, 'utkface-new')
utk_paths, utk_ages, utk_genders = load_utk(utk_dir)
out_paths += utk_paths
out_ages += utk_ages
out_genders += utk_genders
for d in db_names:
image_dir = os.path.join(data_dir, '{}_crop'.format(d))
mat_path = os.path.join(image_dir, '{}.mat'.format(d))
full_path, dob, gender, photo_taken, face_score, second_face_score, age = get_meta(mat_path, d)
sample_num = len(face_score)
min_score = 1.
for i in range(sample_num):
if face_score[i] < min_score:
continue
if (~np.isnan(second_face_score[i])) and second_face_score[i] > 0.0:
continue
if ~(0 <= age[i] <= 100):
continue
if np.isnan(gender[i]):
continue
out_genders.append(int(gender[i]))
out_ages.append(age[i])
out_paths.append(os.path.join(image_dir, str(full_path[i][0])))
indices = np.arange(len(out_paths))
np.random.shuffle(indices)
out_paths = list(np.asarray(out_paths)[indices])
out_ages = list(np.asarray(out_ages)[indices])
out_genders = list(np.asarray(out_genders)[indices])
num_train = int(len(out_paths) * (1 - split))
train_paths, train_ages, train_genders = out_paths[:num_train], out_ages[:num_train], out_genders[:num_train]
val_paths, val_ages, val_genders = out_paths[num_train:], out_ages[num_train:], out_genders[num_train:]
return (train_paths, train_ages, train_genders), (val_paths, val_ages, val_genders)
def load_utk(data_dir):
"""Load UTKFace dataset."""
out_paths = []
out_ages = []
out_genders = []
paths = glob.glob(os.path.join(data_dir, 'crop_part1', '*'))
for path in paths:
filename = os.path.basename(path)
out_paths.append(path)
age, gender = filename.split('_')[:2]
age = int(age)
gender = 1 if int(gender) == 0 else 0
out_ages.append(age)
out_genders.append(gender)
return out_paths, out_ages, out_genders
def load_appa(data_dir, ignore_list_filename=None):
"""Load APPA-real dataset."""
out_paths = []
out_ages = []
ignore_filenames = set()
if ignore_list_filename is not None:
ignore_list_path = os.path.join(data_dir, ignore_list_filename)
ignore_filenames = set(x.strip() for x in open(ignore_list_path))
data_file = os.path.join(data_dir, 'gt_avg_train.csv')
image_dir = os.path.join(data_dir, 'train')
with open(data_file) as f:
lines = [x.strip() for x in f]
for line in lines[1:]:
filename, _, _, _, age = line.strip().split(',')
if filename in ignore_filenames:
continue
image_path = os.path.join(image_dir, filename + '_face.jpg')
age = int(age)
out_paths.append(image_path)
out_ages.append(age)
return out_paths, out_ages
def load_aligned_data(data_dir, split=0.1):
out_paths = []
out_ages = []
out_genders = []
paths = glob.glob(os.path.join(data_dir, '*'))
for path in paths:
filename = os.path.basename(path)
age, gender = filename.split('_')[-2:]
gender = gender.split('.')[0]
age = int(age)
gender = int(gender)
out_paths.append(path)
out_ages.append(age)
out_genders.append(gender)
indices = np.arange(len(out_paths))
np.random.shuffle(indices)
out_paths = np.asarray(out_paths)[indices]
out_ages = np.asarray(out_ages)[indices]
out_genders = np.asarray(out_genders)[indices]
num_train = int(len(out_paths) * (1 - split))
train_paths, train_ages, train_genders = out_paths[:num_train], out_ages[:num_train], out_genders[:num_train]
val_paths, val_ages, val_genders = out_paths[num_train:], out_ages[num_train:], out_genders[num_train:]
return (train_paths, train_ages, train_genders), (val_paths, val_ages, val_genders)
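# Usage sketch (hedged: the data directory is a placeholder; each '<db>_crop'
# folder is expected to contain the matching '<db>.mat' metadata file):
#
#   (train, val) = load_data('/data', db='imdb', split=0.1)
#   train_paths, train_ages, train_genders = train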
| 2.453125
| 2
|
signup/migrations/0001_initial.py
|
harry-7/django-trial
| 0
|
12775838
|
<gh_stars>0
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-06-12 21:35
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Signup',
fields=[
('email', models.EmailField(max_length=254, primary_key=True, serialize=False)),
('full_name', models.CharField(max_length=80)),
('date_joined', models.DateTimeField(auto_now_add=True)),
('password', models.CharField(max_length=20)),
],
),
]
| 1.59375
| 2
|
settings.py
|
fastent/fastent
| 8
|
12775839
|
import couchdb
def init(username = None, password = <PASSWORD>):
"""
    Global initialization of settings: creates the module-level couchDB
    server handle, optionally using basic-auth credentials.
    Args:
        username (str, optional): CouchDB username; connects anonymously if None
        password (str, optional): CouchDB password
    Returns:
        void: as it is a global settings init, nothing is returned
"""
global couchDB
if username is None:
couchDB = couchdb.Server("http://127.0.0.1:5984/")
else:
couchDB = couchdb.Server("http://%s:%s@127.0.0.1:5984/" % (username, password))
def db_initialize(dbname):
"""
    Returns a single db from the couchDB server if present, or creates it if absent
Args:
dbname (str): The designated database name
Returns:
db (Database object): the created or retrieved database
"""
try:
if dbname in couchDB:
db = couchDB[dbname]
else:
db = couchDB.create(dbname)
except Exception as e:
print(e)
return None
return db
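# Usage sketch (credentials and db name are placeholders; assumes a CouchDB
# server listening on 127.0.0.1:5984):
#
#   init('admin', 'secret')
#   db = db_initialize('fastent')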
| 3.015625
| 3
|
Mediante_Kid_day5_act2.py
|
RomartM/PythonSeminarActivities
| 0
|
12775840
|
<gh_stars>0
def reverse_string(string):
reversed_letters = list()
index = 1
for letter in string:
reversed_letters.append(string[ len(string) - index ])
index += 1
return "".join(reversed_letters)
word_input = str(input("Input a word: "))
print(f"INPUT: {word_input}")
print("OUTPUT: %s (%d characters)" % (reverse_string(word_input).upper(), len(word_input)))
| 4.03125
| 4
|
simulation/dm_control_cur/ddpg/ddpg_classes/simulator_residual.py
|
Cambridge-University-Robotics/cur_policy_learning_research
| 0
|
12775841
|
from simulation.dm_control_cur.utility_classes.simulator import Simulation
class ResidualSimulation(Simulation):
def __init__(
self,
controller_load_model=True,
controller_num_episodes=50,
**kwargs
):
super().__init__(**kwargs)
self.controller = Simulation(
load_model=controller_load_model,
label='controller',
name_model=self.NAME_MODEL,
task=self.TASK,
num_episodes=controller_num_episodes,
batch_size=self.BATCH_SIZE,
duration=self.DURATION,
)
def train_controller(self):
self.controller.train()
def show_controller_simulation(self):
self.controller.show_simulation()
def modify_action(self, action, state, t):
return self.controller.get_action(state, t)
| 2.671875
| 3
|
phy_sx127x/phy_sx127x_ahsm.py
|
dwhall/phy_sx127x
| 1
|
12775842
|
"""
Copyright 2020 <NAME>. See LICENSE for details.
"""
import logging
import time
import farc
from . import phy_sx127x
class PhySX127xAhsm(farc.Ahsm):
"""The physical layer (PHY) state machine for a Semtech SX127x device.
Automates the behavior of the Semtech SX127x family of radio transceivers.
For now, all behavior and operations are for LoRa mode.
"""
# Special time values to use when posting an action
TM_NOW = 0 # Use normally for "do it now"
TM_IMMEDIATE = -1 # Use sparingly to jump the queue
def __init__(self, lstn_by_dflt):
"""Class intialization
Listen by default means the radio enters
continuous-rx mode when it is not doing anything else.
If lstn_by_dflt is False, the radio enters sleep mode
when it is not doing anything else.
"""
super().__init__()
self.sx127x = phy_sx127x.PhySX127x()
self._lstn_by_dflt = lstn_by_dflt
self._dflt_stngs = ()
self._dflt_rx_stngs = ()
def get_stngs(self,):
"""Returns the current settings"""
return self._dflt_stngs
def post_rx_action(self, rx_time, rx_stngs, rx_durxn, rx_clbk):
"""Posts the _PHY_RQST event to this state machine
with the container-ized arguments as the value.
"""
assert not self._lstn_by_dflt, \
"""post_rx_action() should not be used when the PHY is
listen-by-default. Use set_dflt_rx_clbk() once, instead."""
# Convert NOW to an actual time
if rx_time == PhySX127xAhsm.TM_NOW:
rx_time = farc.Framework._event_loop.time()
# The order MUST begin: (action, stngs, ...)
rx_action = ("rx", rx_stngs, rx_durxn, rx_clbk)
self.post_fifo(farc.Event(farc.Signal._PHY_RQST, (rx_time, rx_action)))
def post_tx_action(self, tx_time, tx_stngs, tx_bytes):
"""Posts the _PHY_RQST event to this state machine
with the container-ized arguments as the value.
"""
assert type(tx_bytes) is bytes
# Convert NOW to an actual time
if tx_time == PhySX127xAhsm.TM_NOW:
tx_time = farc.Framework._event_loop.time()
# The order MUST begin: (action, stngs, ...)
tx_action = ("tx", tx_stngs, tx_bytes)
self.post_fifo(farc.Event(farc.Signal._PHY_RQST, (tx_time, tx_action)))
def set_dflt_rx_clbk(self, rx_clbk):
"""Stores the default RX callback for the PHY.
The default RX callback is used when this state machine is
initialized with listen-by-default set to True.
This state machine calls the default RX callback
when a frame is received and there are no reception errors.
"""
assert self._lstn_by_dflt, \
"""set_dflt_rx_clbk() should not be used when the PHY is
sleep-by-default. Pass a callback in post_rx_action() instead.
"""
self._dflt_rx_clbk = rx_clbk
def set_dflt_stngs(self, dflt_stngs):
"""Stores the default settings for the PHY.
This must be called before start() so they
        can be written to the device during initialization.
"""
self._dflt_stngs = dflt_stngs
def start_stack(self, ahsm_prio):
"""PHY is the bottom of the protocol stack, so just start this Ahsm"""
self.start(ahsm_prio)
# State machine
@farc.Hsm.state
def _initial(self, event):
"""Pseudostate: _initial
State machine framework initialization
"""
# Self-signaling
farc.Signal.register("_ALWAYS")
farc.Signal.register("_PHY_RQST")
# DIO Signal table (DO NOT CHANGE ORDER)
# This table is dual maintenance with phy_sx127x.PhySX127x.DIO_*
self._dio_sig_lut = (
farc.Signal.register("_DIO_MODE_RDY"),
farc.Signal.register("_DIO_CAD_DETECTED"),
farc.Signal.register("_DIO_CAD_DONE"),
farc.Signal.register("_DIO_FHSS_CHG_CHNL"),
farc.Signal.register("_DIO_RX_TMOUT"),
farc.Signal.register("_DIO_RX_DONE"),
farc.Signal.register("_DIO_CLK_OUT"),
farc.Signal.register("_DIO_PLL_LOCK"),
farc.Signal.register("_DIO_VALID_HDR"),
farc.Signal.register("_DIO_TX_DONE"),
farc.Signal.register("_DIO_PAYLD_CRC_ERR"),
)
# Self-signaling events
self._evt_always = farc.Event(farc.Signal._ALWAYS, None)
# Time events
self.tmout_evt = farc.TimeEvent("_PHY_TMOUT")
self.prdc_evt = farc.TimeEvent("_PHY_PRDC")
return self.tran(self._initializing)
@farc.Hsm.state
def _initializing(self, event):
""""State: _initializing
Application initialization.
Opens and verifies the SPI driver.
Sets default application values.
Transitions to the _scheduling state if the SPI Comms
and SX127x are good; otherwise, remains in this state
and periodically retries opening the SX127x.
"""
sig = event.signal
if sig == farc.Signal.ENTRY:
logging.debug("PHY._initializing")
self.tmout_evt.post_in(self, 0.0)
# Init data
# We use two queues for a hybrid time-sorted queue:
# One for frames that sort by time.
# It's actually a dict object where the keys are the time value.
self._tm_queue = {}
# Another for frames that need to be sent immediately.
# This should be used sparingly.
self._im_queue = []
return self.handled(event)
elif sig == farc.Signal._PHY_TMOUT:
if self.sx127x.open(self._dio_isr_clbk):
assert len(self._dflt_stngs) > 0, \
"Default settings must be set before initializing"
self.sx127x.set_flds(self._dflt_stngs)
self.sx127x.write_stngs(False)
return self.tran(self._scheduling)
logging.warning("_initializing: no SX127x or SPI")
self.tmout_evt.post_in(self, 1.0)
return self.handled(event)
elif sig == farc.Signal.EXIT:
self.tmout_evt.disarm()
return self.handled(event)
return self.super(self.top)
@farc.Hsm.state
def _scheduling(self, event):
""""State: _scheduling
Writes any outstanding settings and always
transitions to _txing, _sleeping or _listening
"""
sig = event.signal
if sig == farc.Signal.ENTRY:
logging.debug("PHY._scheduling")
            # TODO: remove unnecessary read once sm design is proven
assert self.sx127x.OPMODE_STBY == self.sx127x.read_opmode()
self.post_fifo(farc.Event(farc.Signal._ALWAYS, None))
return self.handled(event)
elif sig == farc.Signal._ALWAYS:
# If the next action is soon, go to its state
next_action = self._top_soon_action()
self._default_action = not bool(next_action)
if next_action:
_, action = next_action
if action[0] == "rx":
st = self._listening
elif action[0] == "tx":
st = self._txing
else:
# Placeholder for CAD, sleep
assert True, "Got here by accident"
# Otherwise, go to the default
elif self._lstn_by_dflt:
st = self._listening
else:
st = self._sleeping
return self.tran(st)
elif sig == farc.Signal._PHY_RQST:
tm, action = event.value
self._enqueue_action(tm, action)
return self.handled(event)
return self.super(self.top)
@farc.Hsm.state
def _lingering(self, event):
""""State: _scheduling
This state is for shared behavior
between the _listening and _sleeping states.
On entry, optionally starts a timer for when
to exit to go handle the next action.
"""
sig = event.signal
if sig == farc.Signal.ENTRY:
logging.debug("PHY._lingering")
return self.handled(event)
elif sig == farc.Signal._PHY_RQST:
tm, action = event.value
self._enqueue_action(tm, action)
# If lingering because of default action
# transition to scheduling
if self._default_action:
return self.tran(self._scheduling)
# If lingering because of intentional action
# remain in current state
return self.handled(event)
elif sig == farc.Signal._PHY_TMOUT:
return self.tran(self._scheduling)
elif sig == farc.Signal.EXIT:
self.tmout_evt.disarm()
# Changing modes from rx or sleep to STBY is
# "near instantaneous" per SX127x datasheet
# so don't bother awaiting a _DIO_MODE_RDY
self.sx127x.write_opmode(self.sx127x.OPMODE_STBY, False)
return self.handled(event)
return self.super(self.top)
@farc.Hsm.state
def _listening(self, event):
""""State: _lingering:_listening
Puts the device into receive mode
either because of a receive action or listen-by-default.
Transitions to _rxing if a valid header is received.
"""
sig = event.signal
if sig == farc.Signal.ENTRY:
logging.debug("PHY._lingering._listening")
action = self._pop_soon_action()
if action:
rx_time, rx_action = action
(action_str, rx_stngs, rx_durxn, rx_clbk) = rx_action
assert action_str == "rx"
self._rx_clbk = rx_clbk
else:
rx_stngs = self._dflt_rx_stngs
self._rx_clbk = self._dflt_rx_clbk
# Convert given settings to a mutable list
if rx_stngs:
stngs = list(rx_stngs)
else:
# Accept "None" as an argument for rx_stngs
stngs = []
# Combine and write RX settings
stngs.extend((("FLD_RDO_DIO0", 0), # _DIO_RX_DONE
("FLD_RDO_DIO1", 0), # _DIO_RX_TMOUT
("FLD_RDO_DIO3", 1))) # _DIO_VALID_HDR
self.sx127x.set_flds(stngs)
self.sx127x.write_stngs(True)
# Prep interrupts for RX
self.sx127x.write_lora_irq_mask(
self.sx127x.IRQ_FLAGS_ALL,
self.sx127x.IRQ_FLAGS_RXDONE |
self.sx127x.IRQ_FLAGS_PAYLDCRCERROR |
self.sx127x.IRQ_FLAGS_VALIDHEADER
)
self.sx127x.write_lora_irq_flags(
self.sx127x.IRQ_FLAGS_RXDONE |
self.sx127x.IRQ_FLAGS_PAYLDCRCERROR |
self.sx127x.IRQ_FLAGS_VALIDHEADER
)
self.sx127x.write_fifo_ptr(0x00)
# Start periodic event for update_rng()
self.prdc_evt.post_every(self, 0.100) # 100ms
            # No action means listen-by-default; receive-continuously
if not action:
self.sx127x.write_opmode(self.sx127x.OPMODE_RXCONT, False)
# An explicit action means do a receive-once
else:
# Perform a short blocking sleep until rx_time
# to obtain more accurate rx execution time on Linux.
now = farc.Framework._event_loop.time()
tiny_sleep = rx_time - now
assert tiny_sleep > 0.0, \
"didn't beat action time, need to increase _TM_SVC_MARGIN"
if tiny_sleep > PhySX127xAhsm._TM_BLOCKING_MAX:
tiny_sleep = PhySX127xAhsm._TM_BLOCKING_MAX
if tiny_sleep > PhySX127xAhsm._TM_BLOCKING_MIN:
time.sleep(tiny_sleep)
self.sx127x.write_opmode(self.sx127x.OPMODE_RXONCE, False)
# Start the rx duration timer
if rx_durxn > 0:
self.tmout_evt.post_in(self, rx_durxn)
return self.handled(event)
elif sig == farc.Signal._PHY_PRDC:
self.sx127x.updt_rng()
return self.handled(event)
elif sig == farc.Signal._DIO_VALID_HDR:
self._rxd_hdr_time = event.value
return self.tran(self._rxing)
elif sig == farc.Signal._DIO_PAYLD_CRC_ERR:
logging.info("PHY:_listening@_DIO_PAYLD_CRC_ERR")
# TODO: incr phy_data stats crc err cnt
return self.tran(self._scheduling)
elif sig == farc.Signal._DIO_RX_TMOUT:
logging.info("PHY:_listening@_DIO_RX_TMOUT")
# TODO: incr phy_data stats rx tmout
return self.tran(self._scheduling)
elif sig == farc.Signal.EXIT:
self.prdc_evt.disarm()
return self.handled(event)
return self.super(self._lingering)
@farc.Hsm.state
def _rxing(self, event):
""""State: _lingering:_listening:_rxing
Continues a reception in progress.
Protects reception by NOT transitioning upon a _PHY_RQST event.
Transitions to _scheduling after reception ends.
"""
sig = event.signal
if sig == farc.Signal.ENTRY:
logging.debug("PHY._rxing")
return self.handled(event)
elif sig == farc.Signal._DIO_RX_DONE:
self._on_lora_rx_done()
return self.tran(self._scheduling)
elif sig == farc.Signal._PHY_RQST:
# Overrides _lingering's _PHY_RQST handler because we want to
# remain in this state even if we were listening-by-default
tm, action = event.value
self._enqueue_action(tm, action)
return self.handled(event)
return self.super(self._listening)
@farc.Hsm.state
def _sleeping(self, event):
""""State: _lingering:_sleeping
Puts the device into sleep mode.
Timer and timeout handling is performed
by the parent state, _lingering()
"""
sig = event.signal
if sig == farc.Signal.ENTRY:
logging.debug("PHY._lingering._sleeping")
self.sx127x.write_opmode(self.sx127x.OPMODE_SLEEP, False)
return self.handled(event)
return self.super(self._lingering)
@farc.Hsm.state
def _txing(self, event):
""""State: _txing
Prepares for transmission, transmits,
awaits DIO_TX_DONE event from radio,
then transitions to the _scheduling state.
"""
sig = event.signal
if sig == farc.Signal.ENTRY:
logging.debug("PHY._txing")
action = self._pop_soon_action()
assert action is not None, "Mutation between top() and pop()"
(tx_time, tx_action) = action
assert tx_action[0] == "tx", "Mutation between top() and pop()"
(_, tx_stngs, tx_bytes) = tx_action
# Convert given settings to a mutable list
if tx_stngs:
stngs = list(tx_stngs)
else:
# Accept "None" as an argument for tx_stngs
stngs = []
# Write TX settings from higher layer and
# one setting needed for this PHY operation
stngs.append(("FLD_RDO_DIO0", 1)) # _DIO_TX_DONE
self.sx127x.set_flds(stngs)
self.sx127x.write_stngs(False)
# Prep interrupts for TX
self.sx127x.write_lora_irq_mask(
self.sx127x.IRQ_FLAGS_ALL, # disable these
self.sx127x.IRQ_FLAGS_TXDONE # enable these
)
# Write payload into radio's FIFO
self.sx127x.write_fifo_ptr(0x00)
self.sx127x.write_fifo(tx_bytes)
self.sx127x.write_lora_payld_len(len(tx_bytes))
# Blocking sleep until tx_time (assuming a short amount)
now = farc.Framework._event_loop.time()
tiny_sleep = tx_time - now
if tiny_sleep > PhySX127xAhsm._TM_BLOCKING_MAX:
tiny_sleep = PhySX127xAhsm._TM_BLOCKING_MAX
if tiny_sleep > 0.001:
time.sleep(tiny_sleep)
# Start software timer for backstop
self.tmout_evt.post_in(self, 1.0) # TODO: calc soft timeout delta
# Start transmission and await DIO_TX_DONE
self.sx127x.write_opmode(self.sx127x.OPMODE_TX, False)
return self.handled(event)
elif sig == farc.Signal._DIO_TX_DONE:
# TODO: phy stats TX_DONE
return self.tran(self._scheduling)
elif sig == farc.Signal._PHY_RQST:
tm, action = event.value
self._enqueue_action(tm, action)
return self.handled(event)
elif sig == farc.Signal._PHY_TMOUT:
logging.warning("PHY._txing@_PHY_TMOUT")
if self.sx127x.in_sim_mode():
# Sim-radio will never emit DIO events
# so go straight to _scheduling
return self.tran(self._scheduling)
else:
# SX127x takes time to change modes from TX to STBY.
# Use DIO5/ModeReady here so we don't transition
# to _scheduling and try to do stuff before the
# chip is in STBY mode. Await _DIO_MODE_RDY.
self.sx127x.write_opmode(self.sx127x.OPMODE_STBY, True)
return self.handled(event)
elif sig == farc.Signal._DIO_MODE_RDY:
return self.tran(self._scheduling)
elif sig == farc.Signal.EXIT:
self.tmout_evt.disarm()
return self.handled(event)
return self.super(self.top)
# Private
# The margin within which the Ahsm will transition to
# the action's state if there is an entry in the action queue;
# otherwise, transitions to the default state, listening or sleeping.
_TM_SOON = 0.040
# The amount of time it takes to get from the _lingering state
# through _scheduling and to the next action's state.
# This is used so we can set a timer to exit _lingering
    # and make it to the desired state before the designated time.
_TM_SVC_MARGIN = 0.020
# assert _TM_SVC_MARGIN < _TM_SOON
# Blocking times are used around the time.sleep() operation
# to obtain more accurate tx/rx execution times on Linux.
_TM_BLOCKING_MAX = 0.100
_TM_BLOCKING_MIN = 0.001
def _dio_isr_clbk(self, dio):
"""A callback given to the PHY for when a DIO pin event occurs.
The Rpi.GPIO's thread calls this procedure (like an interrupt).
This procedure posts an Event to this state machine
corresponding to the DIO pin that transitioned.
The pin edge's arrival time is the value of the Event.
"""
now = farc.Framework._event_loop.time()
self.post_fifo(farc.Event(self._dio_sig_lut[dio], now))
def _enqueue_action(self, tm, action_args):
"""Enqueues the action at the given time"""
IOTA = 0.000_000_1 # a small amount of time
# IMMEDIATELY means this frame jumps to the front of the line
# put it in the immediate queue (which is serviced before the tm_queue)
if tm == PhySX127xAhsm.TM_IMMEDIATE:
self._im_queue.append(action_args)
else:
# Ensure this tx time doesn't overwrite an existing one
# by adding an iota of time if there is a duplicate.
# This results in FIFO for frames scheduled at the same time.
tm_orig = tm
while tm in self._tm_queue:
tm += IOTA
# Protect against infinite while-loop
if tm == tm_orig:
IOTA *= 10.0
self._tm_queue[tm] = action_args
def _on_lora_rx_done(self,):
"""Reads received bytes and meta data from the radio.
Checks and logs any errors.
Passes the rx_data to the next layer higher via callback.
"""
frame_bytes, rssi, snr, flags = self.sx127x.read_lora_rxd()
if flags == 0:
# TODO: incr phy_data stats rx done
self._rx_clbk(self._rxd_hdr_time, frame_bytes, rssi, snr)
elif flags & self.sx127x.IRQ_FLAGS_RXTIMEOUT:
logging.info("PHY._rxing@RXTMOUT")
# TODO: incr phy_data stats rx tmout
elif flags & self.sx127x.IRQ_FLAGS_PAYLDCRCERROR:
logging.info("PHY._rxing@CRCERR")
# TODO: incr phy_data stats rx payld crc err
def _pop_soon_action(self,):
"""Returns the next (time, action) pair from the queue and removes it.
Returns None if the queue is empty.
"""
if self._im_queue:
tm = farc.Framework._event_loop.time()
action = self._im_queue.pop()
return (tm, action)
elif self._tm_queue:
tm = min(self._tm_queue.keys())
now = farc.Framework._event_loop.time()
if tm < now + PhySX127xAhsm._TM_SOON:
action = self._tm_queue[tm]
del self._tm_queue[tm]
return (tm, action)
return None
def _top_soon_action(self,):
"""Returns the next (time, action) pair from the queue without removal.
Returns None if the queue is empty.
"""
if self._im_queue:
tm = PhySX127xAhsm.TM_IMMEDIATE
action = self._im_queue[-1]
return (tm, action)
elif self._tm_queue:
tm = min(self._tm_queue.keys())
now = farc.Framework._event_loop.time()
if tm < now + PhySX127xAhsm._TM_SOON:
action = self._tm_queue[tm]
return (tm, action)
return None
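# Wiring sketch (hedged: the priority and callback are illustrative, and the
# default settings tuple must use field names from phy_sx127x.PhySX127x):
#
#   phy = PhySX127xAhsm(lstn_by_dflt=True)
#   phy.set_dflt_stngs(my_dflt_stngs)     # must precede start_stack()
#   phy.set_dflt_rx_clbk(my_rx_callback)  # called as (hdr_time, frame_bytes, rssi, snr)
#   phy.start_stack(ahsm_prio=10)
#   # ...then run farc's event loop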
| 2.671875
| 3
|
data_tools/__init__.py
|
erik-grabljevec/Tennis-Modelling
| 4
|
12775843
|
__author__ = 'riko'
from handle_data import *
| 1.078125
| 1
|
search_operation/icde/views.py
|
youyinnn/COEN6311_super
| 0
|
12775844
|
from common.http_response import json_response_builder as response
from common.jwt import get_user_id as get_id_from_request
from common.jwt import auth_require
from common.project_const import const
from icde.capture import icde_capture
from . import access as icde_access
@auth_require
@icde_capture(const.PAPER_SHARE)
def share_paper(request):
return response(0)
@icde_capture(const.PAPER_SEARCH)
def search_paper(request):
return response(0)
@icde_capture(const.PAPER_ORIGIN_CLICK)
def go_paper_origin(request):
return response(0)
@icde_capture(const.PAPER_DETAIL_CLICK)
def go_paper_detail_page(request):
return response(0)
def get_paper_share_count(request):
getParams = request.GET.dict()
paper_id = getParams.get('paper_id')
return response(0, body=icde_access.access_paper_share_count(paper_id))
@auth_require
def get_paper_team_share_records(request):
getParams = request.GET.dict()
user_id = get_id_from_request(request)
paper_id = getParams.get('paper_id')
return response(0, body=icde_access.access_paper_team_share_records(user_id, paper_id))
@auth_require
def get_user_activities(request):
user_id = get_id_from_request(request)
return response(0, body=icde_access.access_user_activities(user_id))
@auth_require
def get_team_member_activities(request):
getParams = request.GET.dict()
team_id = getParams.get('team_id')
return response(0, body=icde_access.access_team_member_activities(team_id))
def get_all_trending_list(request):
search_term_trending = icde_access.access_search_term_trending_list()
click_rate_trending = icde_access.access_click_rate_trending_list()
like_trending = icde_access.access_like_trending_list()
dislike_trending = icde_access.access_dislike_trending_list()
share_trending = icde_access.access_share_trending_list()
return response(0, body={
'trending_list': [
search_term_trending,
click_rate_trending,
like_trending,
dislike_trending,
share_trending
]
})
| 2.25
| 2
|
build/android/xwalkcore_library_template/prepare_r_java.py
|
shawngao5/crosswalk
| 1
|
12775845
|
#!/usr/bin/env python
#
# Copyright (c) 2013 Intel Corporation. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Copy the generated R.java to additional packages under gen.
Besides copying, the script will change the fixed values in the
newly created R.java files to references to the generated R.java.
The generated R.java looks like:
package app_package;
public final class R {
public static final class attr {
public static int value=0x70000001;
}
}
After modification, it will be:
package additional_package;
public final class R {
public static final class attr {
public static int value=app_package.R.attr.value;
}
}
"""
import optparse
import os
import re
import sys
# To match line "package app_package;".
RE_PACKAGE = re.compile('^package ([a-zA-Z_]+(?:|\.[a-zA-Z_]+)*);$')
# To match line " public static final class attr {".
RE_CLASS = re.compile('^(?:|[ ]*)public static final class ([a-zA-Z_]+) {$')
# To match line " public static int value=0x70000001;".
RE_VALUE = re.compile('^([ ]*)public static int ([a-zA-Z_]+)=(0x[0-9a-f]{8});$')
def PlaceRJavaInPackage(gen_path, app_package, target_package):
r_java = os.path.join(gen_path, app_package.replace('.', os.sep), 'R.java')
if not os.path.isfile(r_java):
print '%s does not exist' % r_java
sys.exit(1)
target_folder = os.path.join(gen_path, target_package.replace('.', os.sep))
if not os.path.isdir(target_folder):
os.makedirs(target_folder)
target_java_file = open(os.path.join(target_folder, 'R.java'), 'w')
current_class = None
got_package = False
for line in open(r_java, 'r').readlines():
if not got_package:
# Looking for package declaration.
match_package = RE_PACKAGE.match(line)
if match_package and match_package.groups()[0] == app_package:
got_package = True
target_java_file.write('package %s;\n' % target_package)
else:
target_java_file.write(line)
continue
# Trying to match class pattern first.
match_class = RE_CLASS.match(line)
if match_class:
current_class = match_class.groups()[0]
target_java_file.write(line)
continue
if current_class:
match_value = RE_VALUE.match(line)
if match_value:
target_java_file.write(
'%spublic static int %s=%s.R.%s.%s;\n' % (match_value.groups()[0],
match_value.groups()[1],
app_package,
current_class,
match_value.groups()[1]))
continue
target_java_file.write(line)
target_java_file.close()
def main():
option_parser = optparse.OptionParser()
option_parser.add_option('--app-package', default=None,
help='The package which provides R.java')
option_parser.add_option('--packages', default=None,
help='The additional packages in which R.java is to be placed, '
'delimited by semicolon')
option_parser.add_option('--gen-path', default=None,
help='Path of the gen folder')
opts, _ = option_parser.parse_args()
if opts.packages == None or opts.packages.strip() == '':
return 0
if opts.gen_path == None:
print 'gen path not specified'
return 1
if opts.app_package == None:
print 'app package not specified'
return 1
for package in opts.packages.strip().split(';'):
PlaceRJavaInPackage(opts.gen_path, opts.app_package, package)
return 0
if '__main__' == __name__:
sys.exit(main())
| 2.25
| 2
|
trial_of_the_stones/the_trial_of_the_stones.py
|
ikostan/ElegantBrowserAutomationWithPythonAndSelenium
| 3
|
12775846
|
from trial_of_the_stones.models.page_model import PageModel
from selenium import webdriver
import unittest
import time
def trial_of_the_stones_automation():
'''
Source web page: https://techstepacademy.com/trial-of-the-stones
:return:
'''
# open web page
page = PageModel(webdriver.Chrome())
page.open_page()
password = solve_riddle_of_stone(page)
solve_riddle_of_secrets(password, page)
solve_the_two_merchants(page)
final_check(page)
# close web page
time.sleep(2)
page.close()
def solve_riddle_of_stone(page):
# type answer and click on answer button
page.riddle_of_stone_field.send_keys('rock')
unittest.TestCase().assertFalse(page.password.is_displayed())
page.riddle_of_stone_button.click()
# verify the password is displayed
unittest.TestCase().assertTrue(page.password.is_displayed())
password = page.password.text
unittest.TestCase().assertEqual('<PASSWORD>', password)
return password
def solve_riddle_of_secrets(password, page):
# type password and click on Answer button
page.password_field.send_keys(password)
unittest.TestCase().assertFalse(page.password_success.is_displayed())
page.password_answer_button.click()
unittest.TestCase().assertEqual('Success!', page.password_success.text)
unittest.TestCase().assertTrue(page.password_success.is_displayed())
def solve_the_two_merchants(page):
# compare wealth and type the richest merchant name
page.richest_merchant_field.send_keys(page.bernard_name
if int(page.bernard_wealth) > int(page.jessica_wealth)
else page.jessica_name)
unittest.TestCase().assertFalse(page.merchant_success.is_displayed())
page.richest_merchant_button.click()
unittest.TestCase().assertTrue(page.merchant_success.is_displayed())
unittest.TestCase().assertEqual('Success!', page.merchant_success.text)
def final_check(page):
# final check
unittest.TestCase().assertFalse(page.trial_complete.is_displayed())
page.check_answers_button.click()
unittest.TestCase().assertTrue(page.trial_complete.is_displayed())
unittest.TestCase().assertEqual('Trial Complete', page.trial_complete.text)
if __name__ == '__main__':
trial_of_the_stones_automation()
| 3.046875
| 3
|
data-structures/sorting/quicksort/quicksort-randomized.py
|
andrenbrandao/algorithm-problems
| 0
|
12775847
|
from random import randint
"""
For an array of size 10^6, the randomized version ran about 10x faster.
I used an already sorted array, which is an example of a worst case scenario:
by always selecting the smallest element as the pivot, the algorithm makes
n recursive calls, and because the partition step is O(n), it takes O(n^2) to execute.
Avg Quicksort: 47.597230195999146
Avg Quicksort Randomized: 4.145071268081665
"""
def quicksort_randomized(arr):
def swap(arr, j, i):
arr[i], arr[j] = arr[j], arr[i]
def partition(arr, left, right):
pivot = left
j = left
for i in range(left + 1, right + 1):
if arr[i] <= arr[pivot]:
j += 1
swap(arr, j, i)
new_pivot_pos = j
swap(arr, pivot, new_pivot_pos)
return new_pivot_pos
def random_partition(arr, left, right):
pivot = randint(left, right)
swap(arr, left, pivot)
return partition(arr, left, right)
def sort(arr, left, right):
if left < right:
m = random_partition(arr, left, right)
sort(arr, left, m - 1)
sort(arr, m + 1, right)
sort(arr, 0, len(arr) - 1)
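# A minimal timing sketch of the benchmark described in the docstring
# (hypothetical harness; not the one that produced the numbers above).
# It times the sort on an already-sorted input, the worst case for a
# fixed-pivot quicksort.
def benchmark(n=10**5):
    import sys
    import time
    sys.setrecursionlimit(10**6)  # recursion can be deep on adversarial inputs
    arr = list(range(n))  # already sorted input
    start = time.perf_counter()
    quicksort_randomized(arr)
    return time.perf_counter() - start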
if __name__ == "__main__":
arr = [int(i) for i in input().split()]
quicksort_randomized(arr)
print(arr)
| 4.09375
| 4
|
src/evaluation/krippendorffalpha.py
|
anbasile/arxiv2018-bayesian-ensembles
| 21
|
12775848
|
'''
Created on 10 May 2016
@author: simpson
'''
import numpy as np
def alpha(U, C, L):
'''
Computes Krippendorff's alpha, using absolute difference as the
disagreement metric.
U - units of analysis, i.e. the data points being labelled (numpy array)
C - the classification labels, aligned with U (numpy array)
L - the labeller IDs, aligned with U (numpy array)
'''
N = float(np.unique(U).shape[0])
Uids = np.unique(U)
Dobs = 0.0
Dexpec = 0.0
for i, u in enumerate(Uids):
uidxs = U==u
Lu = L[uidxs]
m_u = Lu.shape[0]
if m_u < 2:
continue
Cu = C[uidxs]
#for cuj in Cu:
# Dobs += 1.0 / (m_u - 1.0) * np.sum(np.abs(cuj - Cu))
Dobs += 1.0 / (m_u - 1.0) * np.sum(np.abs(Cu[:, np.newaxis] - Cu[np.newaxis, :]))
# too much memory required
# Dexpec = np.sum(np.abs(C.flatten()[:, np.newaxis] - C.flatten()[np.newaxis, :]))
for i in range(len(U)):
if np.sum(U==U[i]) < 2:
continue
Dexpec += np.sum(np.abs(C[i] - C)) # sum up all differences regardless of user and data unit
Dobs = 1 / N * Dobs
Dexpec = Dexpec / (N * (N-1))
alpha = 1 - Dobs / Dexpec
return alpha
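if __name__ == '__main__':
    # A minimal usage sketch on hypothetical data: three units, each
    # labelled by two annotators. Perfect agreement should yield alpha = 1.
    U = np.array([0, 0, 1, 1, 2, 2])  # unit IDs (two labels per unit)
    C = np.array([1, 1, 0, 0, 1, 1])  # labels: annotators always agree
    L = np.array([0, 1, 0, 1, 0, 1])  # labeller IDs
    print(alpha(U, C, L))  # -> 1.0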
| 2.296875
| 2
|
bioimageit_core/dataset.py
|
bioimageit/bioimageit_core
| 2
|
12775849
|
# -*- coding: utf-8 -*-
"""BioImagePy dataset metadata definitions.
This module contains classes that describe the
metadata of scientific datasets
Classes
-------
DataSet
RawDataSet
ProcessedDataSet
"""
import re
from bioimageit_core.config import ConfigAccess
from bioimageit_core.data import RawData, ProcessedData
from bioimageit_core.metadata.run import Run
from bioimageit_core.metadata.factory import metadataServices
from bioimageit_core.metadata.query import query_list_single
class RawDataSet:
"""Class that store a dataset metadata for RawDataSet
Parameters
----------
md_uri
URI of the metadata in the database or file system
depending on backend
Attributes
----------
md_uri
URI of the dataset metadata
"""
def __init__(self, md_uri: str = ''):
self.md_uri = md_uri
self.metadata = None # DataSetContainer()
config = ConfigAccess.instance().config['metadata']
self.service = metadataServices.get(config["service"], **config)
self.read()
def read(self):
"""Read the metadata from database
The database connection is managed by the configuration
object
"""
self.metadata = self.service.read_rawdataset(self.md_uri)
def write(self):
"""Write the metadata to database
The database connection is managed by the configuration
object
"""
self.service.write_rawdataset(self.metadata, self.md_uri)
def size(self):
"""get the size of the dataser
Returns
-------
The number of data in the dataset
"""
return len(self.metadata.uris)
def get(self, i: int) -> RawData:
"""get one data information
Parameters
----------
i
Index of the data in the dataset
Returns
----------
RawData
The data common information
"""
return RawData(self.metadata.uris[i])
def to_search_containers(self):
"""Convert RawDataSet into a list of SearchContainer
Returns
-------
list
List of data as list of SearchContainer
"""
search_list = []
for i in range(self.size()):
data = RawData(self.metadata.uris[i])
search_list.append(data.to_search_container())
return search_list
def get_data(self, query: str) -> list:
"""query on tags
In this verion only AND queries are supported
(ex: tag1=value1 AND tag2=value2)
and performed on the RawData set
Parameters
----------
query
String query with the key=value format.
Returns
-------
list
List of selected data (md.json file URLs are returned)
"""
queries = re.split(' AND ', query)
# initially all the raw data are selected
selected_list = self.to_search_containers()
if query == '':
return [d.uri() for d in selected_list]  # return URIs, as documented
# run all the AND queries on the preselected dataset
for q in queries:
selected_list = query_list_single(selected_list, q)
# convert SearchContainer list to uri list
out = []
for d in selected_list:
out.append(d.uri())
return out
def add_data(self, data: RawData):
"""Add one data to the dataset
Parameters
----------
data
data to add
"""
data.write()
self.metadata.uris.append(data.md_uri)
self.service.write_rawdataset(self.metadata, self.md_uri)
def get_data_list(self) -> list:
"""Get the metadata information as a list
Returns
-------
list
List of the data metadata stored in RawData objects
"""
data_list = []
for i in range(self.size()):
data_list.append(RawData(self.metadata.uris[i]))
return data_list
class ProcessedDataSet:
"""Class that store a dataset metadata for ProcessedDataSet
Parameters
----------
md_uri
URI of the metadata in the database or file system
depending on backend
Attributes
----------
md_uri
URI of the dataset metadata
"""
def __init__(self, md_uri: str = ''):
self.md_uri = md_uri
self.metadata = None # DataSetContainer()
config = ConfigAccess.instance().config['metadata']
self.service = metadataServices.get(config["service"], **config)
self.read()
def read(self):
"""Read the metadata from database
The database connection is managed by the configuration
object
"""
self.metadata = self.service.read_processeddataset(self.md_uri)
def write(self):
"""Write the metadata to database
The database connection is managed by the configuration
object
"""
self.service.write_processeddataset(self.metadata, self.md_uri)
def add_run(self, run: Run):
"""Add Run to the dataset
The input Run URI is created by this method
Parameters
----------
run
Run to add
"""
run.md_uri = self.service.add_run_processeddataset(run.metadata,
self.md_uri)
def create_data(self, data: ProcessedData):
"""create a new data metadata in the dataset
The input data object must contain only the metadata (ie no
uri and no md_uri).
This method generate the uri and the md_uri and save all the
metadata
Parameters
----------
data
metadata of the processed data to create
"""
self.service.create_data_processeddataset(data.metadata, self.md_uri)
def size(self):
"""get the size of the dataser
Returns
-------
The number of data in the dataset
"""
return len(self.metadata.uris)
def get(self, i: int) -> ProcessedData:
"""get one data information
Parameters
----------
i
Index of the data in the dataset
Returns
----------
ProcessedData
The data common information
"""
return ProcessedData(self.metadata.uris[i])
def to_search_containers(self):
"""Convert RawDataSet into a list of SearchContainer
Returns
-------
list
List of data as list of SearchContainer
"""
search_list = []
for i in range(self.size()):
data = ProcessedData(self.metadata.uris[i])
search_list.append(data.to_search_container())
return search_list
def get_data(self, query: str, origin_output_name: str = '') -> list:
"""Run a query on a BiProcessedDataSet
Parameters
----------
query
Query on tags (ex: 'Population'='population1')
origin_output_name
Filter only the process output with the given name
if origin_output_name is empty, it gets all the processed
data
Returns
-------
list
List of the data URIs
"""
# get all the tags per data
pre_list = self.to_search_containers()
# remove the data where output origin is not the asked one
selected_list = []
if origin_output_name != '':
for pdata in pre_list:
data = ProcessedData(pdata.uri())
if data.metadata.output["name"] == origin_output_name:
selected_list.append(pdata)
else:
selected_list = pre_list
if query == '':
return [d.uri() for d in selected_list]  # return URIs, as documented
# query on tags
queries = re.split(' AND ', query)
# run all the AND queries on the preselected dataset
for q in queries:
selected_list = query_list_single(selected_list, q)
# convert SearchContainer list to uri list
out = []
for d in selected_list:
out.append(d.uri())
return out
def add_data(self, data: ProcessedData):
"""Add one data to the dataset
Parameters
----------
data
data to add
"""
data.write()
self.metadata.uris.append(data.md_uri)
self.service.write_processeddataset(self.metadata, self.md_uri)
def get_data_list(self) -> list:
"""Get the metadata information as a list
Returns
-------
list
List of the data metadata stored in ProcessedData objects
"""
data_list = []
for i in range(self.size()):
data_list.append(ProcessedData(self.metadata.uris[i]))
return data_list
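# A minimal usage sketch of the tag-query API (hypothetical URI and tags;
# assumes ConfigAccess has been initialised with a metadata backend).
if __name__ == '__main__':
    dataset = RawDataSet('experiment/rawdataset.md.json')  # hypothetical URI
    # Only AND queries are supported, using the key=value format
    for uri in dataset.get_data('Population=population1 AND Day=day1'):
        print(uri)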
| 2.640625
| 3
|
Handout/09.Strings e Fatiamento/02ex.py
|
pedroivoal/Dessoft
| 0
|
12775850
|
# Counts occurrences of the letter 'a' in a word.
def conta_a(palavra):
c = 0
for letra in palavra:
if letra == 'a':
c += 1
return c
s = 'Insper'
r = s[::-2]  # walk backwards, taking every other character -> 'rpn'
print(r)
| 3.5625
| 4
|