hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
36a9b87e26910ffc305f742982f6fa1a3f99417f | 5,673 | py | Python | tests/cli_test.py | de-code/layered-vision | 5cb34ed2fb787fb1e3a8dd7ee7f4f932fe81c038 | [
"MIT"
] | 5 | 2021-01-03T11:38:40.000Z | 2021-08-31T19:33:02.000Z | tests/cli_test.py | de-code/layered-vision | 5cb34ed2fb787fb1e3a8dd7ee7f4f932fe81c038 | [
"MIT"
] | 41 | 2020-12-01T06:59:24.000Z | 2022-03-31T13:23:23.000Z | tests/cli_test.py | de-code/layered-vision | 5cb34ed2fb787fb1e3a8dd7ee7f4f932fe81c038 | [
"MIT"
] | 2 | 2021-02-02T07:48:34.000Z | 2021-05-29T21:19:34.000Z | from pathlib import Path
from typing import Union
import cv2
import pytest
from layered_vision.cli import (
parse_value_expression,
parse_set_value,
get_merged_set_values,
main
)
# Remote test fixture: the NumPy logo PNG, pinned to the v1.20.1 release tag
# so the URL stays stable over time.
EXAMPLE_IMAGE_URL = (
    r'https://raw.githubusercontent.com/numpy/numpy'
    r'/v1.20.1/branding/logo/logomark/numpylogoicon.png'
)
| 29.701571 | 83 | 0.559845 |
36aa46d45cf3ea3334546c1c84c23f98e38d06f3 | 5,958 | py | Python | src/discordbot/writeToken.py | mavjav-edu/discordpy | c3da0903bd7772d089536f935a381b301efb8fd5 | [
"MIT"
] | 1 | 2020-06-22T01:15:49.000Z | 2020-06-22T01:15:49.000Z | src/discordbot/writeToken.py | mavjav-edu/discordpy | c3da0903bd7772d089536f935a381b301efb8fd5 | [
"MIT"
] | 2 | 2020-09-24T20:34:37.000Z | 2021-06-25T15:38:45.000Z | src/discordbot/writeToken.py | mavjav-edu/discordpy | c3da0903bd7772d089536f935a381b301efb8fd5 | [
"MIT"
] | null | null | null | import os
import re
import base64
import keyring
from cryptography.fernet import Fernet
# --- Fernet key setup --------------------------------------------------------
# Load the encryption key from the local 'key' file, generating and saving a
# fresh key on first run.  (The original pre-declared `frn`/`key` with dummy
# values "to keep them in scope"; Python does not need that, and the dummy
# Fernet built from a predictable 0..31 byte pattern was misleading, so both
# dummy assignments are removed.  `with` blocks replace the manual close()
# calls so the file handle is released even on error.)
if os.path.isfile('./key'):  # the 'key' file already exists
    # 'rb' = Read Binary: the key is stored as raw bytes
    with open("key", 'rb') as keyf:
        key = keyf.read()
else:
    # First run, or the 'key' file was deleted
    print("Key did not exist. Creating...")
    key = Fernet.generate_key()
    # 'wb+' = Write Binary with read access, creating the file if needed
    with open("key", 'wb+') as keyf:
        keyf.write(key)
# Fernet object used below to encrypt the Discord token before storing it
frn = Fernet(key)
# --- Gather user choices and the bot token -----------------------------------
# Ask whether the token should live in the OS keyring (1) or on disk (2).
print("[1] Store bot token in key ring", "[2] Store bot token to disk")
question = "Should we keep token in keyring or store to disk? [1-2]>\n"
howToStoreToken = int(input(question))
while not(howToStoreToken == 1 or howToStoreToken == 2):
    howToStoreToken = int(input(question))  # Keep asking for a 1 or 2
# Basic regex sanity checks on the presumed token.  The patterns are compiled
# once, outside the loop (the original recompiled them every iteration), and
# use raw strings so '\d' is a regex escape rather than a deprecated Python
# string escape.
clientSecretPat = re.compile(r"^.{32}$")   # Discord client secrets: 32 chars
clientIDPat = re.compile(r"^\d{18}$")      # Discord client IDs: 18 digits
tokenPat = re.compile(r"^.{59}$")          # Discord bot tokens: 59 chars
# BUG FIX: the original built this message with a backslash line continuation
# that embedded source indentation into the user-facing text.
wrong = ("The string you've entered looks like the %s. "
         "Are you sure you copied the correct field?")
correctToken = False
while not(correctToken):
    token = input("What's the bot token? > ")
    if tokenPat.match(token):
        print("Token pattern matches! ")
        correctToken = True
    elif clientSecretPat.match(token):
        print(wrong % "client secret")
    elif clientIDPat.match(token):
        print(wrong % "client ID")
# --- Persist the token -------------------------------------------------------
if howToStoreToken == 1:
    # Store the already-validated token in the OS keyring, using the decoded
    # Fernet key as the "username" under service "system".
    # BUG FIX: the original prompted for the secret a second time (discarding
    # the token validated above) and passed a bytes username to set_password()
    # while get_password() used the decoded str -- the two lookups could never
    # agree.  Both calls now use the same decoded key.
    keyring.set_password("system", key.decode('utf-8'), token)
    if keyring.get_password("system", key.decode('utf-8')) is not None:
        print("Your token has been stored in the file system keyring!")
    else:
        print("Could not store token in the file system keyring!")
elif howToStoreToken == 2:
    # Ask for a file name that does not already exist in the working dir.
    tokenFilename = input("What should be the token filename?> ")
    while(os.path.isfile('./' + tokenFilename)):
        print(tokenFilename, "already exists.\nChoose another name.")
        tokenFilename = input("What should be the token filename?> ")
    try:
        # Encrypt the token with the Fernet key and write it out as binary.
        # BUG FIX: the original's `finally: tokenf.close()` raised NameError
        # whenever open() itself failed (tokenf was never bound); `with`
        # guarantees the handle is closed in every case.
        with open(tokenFilename, 'wb') as tokenf:
            tokenf.write(frn.encrypt(str.encode(token)))
    except PermissionError as error:
        print(error, "\nCould not write token file. Check permissions.")
    if(os.path.isfile('./' + tokenFilename)):
        print("Your token has been stored in a file!")
# --- Keep secrets out of version control -------------------------------------
# Make sure `.gitignore` lists the key file (and the token file, when the
# token was written to disk) so neither is accidentally committed.
try:
    # Read the current ignore list, then reopen in append mode.  The original
    # leaked the read-mode handle by rebinding `gitignoref` without closing
    # it; `with` blocks close both handles deterministically.
    with open(".gitignore", 'r') as gitignore_readf:
        gitignore = gitignore_readf.read()
    with open(".gitignore", 'a') as gitignoref:
        # NOTE(review): this matches "key" anywhere in the file (even inside
        # another word), mirroring the original behaviour.
        keyRE = re.compile("key", re.MULTILINE)
        if re.search(keyRE, gitignore) is None:
            gitignoref.write("\nkey")
        # BUG FIX: the original compared the int `howToStoreToken` against
        # the string "2", which is always False, so the token file was never
        # added to .gitignore.
        if howToStoreToken == 2:
            # re.escape so a filename containing regex metacharacters (e.g.
            # a dot) is matched literally rather than as a pattern.
            tokenRE = re.compile(re.escape(tokenFilename), re.MULTILINE)
            if re.search(tokenRE, gitignore) is None:
                gitignoref.write("\n" + tokenFilename)
except PermissionError as error:
    print(error, "\nCould not write gitignore file. Check permissions.")
# --- Patch the mod-logs channel ID into `discordbot.py` ----------------------
question = "What is your `mod-logs` channel ID?"
modlogsID = input(question)
channelIDRe = r'\d{18}'  # Discord channel IDs are 18 digits
channelIDPat = re.compile("^" + channelIDRe + "$")
while not(channelIDPat.match(modlogsID)):
    print("Input ID incorrect. See https://bit.ly/31q1Qlh for instructions.")
    modlogsID = input(question)
if os.path.isfile("discordbot.py"):
    with open("discordbot.py", 'r') as discordbotf:
        discordbot = discordbotf.readlines()
    # Matches lines of the form "    modlogs = 123456789012345678 ..."
    modlogsReComp = re.compile(r"(\s+modlogs = )(" + channelIDRe + r")(.*)")
    for lineNum in range(len(discordbot)):
        if re.search(modlogsReComp, discordbot[lineNum]):
            # BUG FIX: the original substituted a hard-coded placeholder
            # ("012345678901234567", with stray surrounding spaces) instead
            # of the ID the user just entered.  \g<1>/\g<3> preserve the
            # surrounding text; \g<> syntax avoids ambiguity with the digits
            # that follow the group reference.  The per-line debug print is
            # also removed.
            discordbot[lineNum] = re.sub(
                modlogsReComp, r"\g<1>" + modlogsID + r"\g<3>",
                discordbot[lineNum]
            )
            break
    with open("discordbot.py", 'w') as discordbotf:
        discordbotf.writelines(discordbot)
| 36.777778 | 79 | 0.659785 |
36abaa99b236edf5ae7e28366041af627d5c697a | 38,462 | py | Python | Deprecated/three_stmts.py | FrankVolpe/SIMFIN | 63631d8cc7a7f19570b21aa1f7c49995fa0765d7 | [
"BSD-3-Clause"
] | 1 | 2019-07-29T04:35:25.000Z | 2019-07-29T04:35:25.000Z | Deprecated/three_stmts.py | FrankVolpe/SIMFIN | 63631d8cc7a7f19570b21aa1f7c49995fa0765d7 | [
"BSD-3-Clause"
] | null | null | null | Deprecated/three_stmts.py | FrankVolpe/SIMFIN | 63631d8cc7a7f19570b21aa1f7c49995fa0765d7 | [
"BSD-3-Clause"
] | 1 | 2020-12-23T23:26:17.000Z | 2020-12-23T23:26:17.000Z | from base_classes import *
| 49.500644 | 89 | 0.371094 |
36abdd1471f5a742fa98e77ecb26e8e8f6f70696 | 4,127 | py | Python | agents/ag_useHisExplorDecayedP.py | a-pedram/kaggle-mab | 5d9d6d47541f6b71a5a886146928aa57a5c77591 | [
"MIT"
] | null | null | null | agents/ag_useHisExplorDecayedP.py | a-pedram/kaggle-mab | 5d9d6d47541f6b71a5a886146928aa57a5c77591 | [
"MIT"
] | null | null | null | agents/ag_useHisExplorDecayedP.py | a-pedram/kaggle-mab | 5d9d6d47541f6b71a5a886146928aa57a5c77591 | [
"MIT"
] | null | null | null | import numpy as np
from collections import Counter
# Hyper-parameters and module-level state for a Kaggle multi-armed-bandit
# agent.  NOTE(review): the rest of this file is not visible in this chunk;
# the None placeholders below are presumably (re)initialised by the agent
# callback before first use -- confirm against the full source.
decay_rate = 0.97        # presumably a per-step decay factor -- TODO confirm
n_rounds = 2000          # total rounds per episode
bandit_count = 100       # number of bandit arms
total_reward = None
last_bandit = None       # arm chosen on the previous step (by name)
last_reward = None       # reward observed on the previous step (by name)
his_hits = None          # "his"/"my" prefixes suggest opponent vs own stats
his_record = None
my_record = None
my_hits = None
wins = None
losses = None
bandits_record = None
record_index = None
x1 = None
x2 = None
# 1000-point discretisation of [0, 1] and its complement.
sp = np.linspace(0,1,1000)
spp = 1 - sp
# Initial per-arm estimates: ~0.5 plus a tiny random jitter (breaks ties).
myProbabs = np.random.rand(bandit_count)* 0.001 + 0.5
n_lookback = 4           # window of recent steps considered -- TODO confirm
36ac48e2ab27df3c0677dca73c8d8951f0e9ae52 | 1,156 | py | Python | principles-of-computing/Practice Exercises/Solitaire Mancala/Solitaire Mancala/poc_simpletest.py | kingwatam/misc-python | 8a10f14eb79b9d93bbe889175fe5ab532da73c70 | [
"MIT"
] | 1 | 2019-09-03T03:47:39.000Z | 2019-09-03T03:47:39.000Z | principles-of-computing/Practice Exercises/Solitaire Mancala/Solitaire Mancala/poc_simpletest.py | kingwatam/misc-python | 8a10f14eb79b9d93bbe889175fe5ab532da73c70 | [
"MIT"
] | null | null | null | principles-of-computing/Practice Exercises/Solitaire Mancala/Solitaire Mancala/poc_simpletest.py | kingwatam/misc-python | 8a10f14eb79b9d93bbe889175fe5ab532da73c70 | [
"MIT"
] | null | null | null | """
Lightweight testing class inspired by unittest from Pyunit
https://docs.python.org/2/library/unittest.html
Note that code is designed to be much simpler than unittest
and does NOT replicate uinittest functionality
"""
| 31.243243 | 62 | 0.554498 |
36ad7dd9946b30b8edbad769fd9fe67f2dcb1c2d | 2,014 | py | Python | jwc_core/jwc_sender.py | Inetgeek/Notice-Pusher | 052e4ecbf7520ae93e16af6ae89f560d6a6d888a | [
"MIT"
] | 2 | 2021-09-16T04:19:52.000Z | 2022-03-28T03:48:29.000Z | jwc_core/jwc_sender.py | Inetgeek/Notice-Pusher | 052e4ecbf7520ae93e16af6ae89f560d6a6d888a | [
"MIT"
] | null | null | null | jwc_core/jwc_sender.py | Inetgeek/Notice-Pusher | 052e4ecbf7520ae93e16af6ae89f560d6a6d888a | [
"MIT"
] | 1 | 2021-09-16T04:21:08.000Z | 2021-09-16T04:21:08.000Z | #!/usr/bin/python3
# coding: utf-8
import sys
import os, time, datetime
import smtplib
from email import (header)
from email.mime import (text, multipart)
# Read the notice text that becomes the body of the outgoing message.
with open(r'/home/jwc_notice.txt', "r+", encoding="utf-8") as file: #
    a = file.read()
# HTML fragments assembled into the e-mail: subject, header styling, and the
# notice content wrapped in styled paragraphs.
send_title = ""
send_head = '<p style="color:#507383"></p>'
send_content = '<p style="font-size:34px;color:#ca1b0f;"><span style="border-bottom: 1px dashed #ccc; z-index: 1; position: static;"></span></p>'+'<hr><p style="color:#FC5531">:<p>\n\n'+a
# Timestamp of this run, e.g. "2021-09-16 12:00:00" (local time).
Nowtime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
# Disabled APScheduler cron hook (daily at 22:50 when enabled).
# @scheduler.scheduled_job('cron', hour = 22,minute = 50)
if __name__ == '__main__':
    # NOTE(review): _init_ is not defined in this chunk -- presumably defined
    # elsewhere in the file; confirm before running.
    _init_()
36adec35d9afaf6063824fc88cc7373dff86a943 | 6,328 | py | Python | distributed_systems/ftp/frontend.py | JRhodes95/net-sys-cw | 926ea3b133416c4a6f8065be5caa34a5e5b49878 | [
"MIT"
] | null | null | null | distributed_systems/ftp/frontend.py | JRhodes95/net-sys-cw | 926ea3b133416c4a6f8065be5caa34a5e5b49878 | [
"MIT"
] | null | null | null | distributed_systems/ftp/frontend.py | JRhodes95/net-sys-cw | 926ea3b133416c4a6f8065be5caa34a5e5b49878 | [
"MIT"
] | null | null | null | import os
os.environ["PYRO_LOGFILE"] = "pyro.log"
os.environ["PYRO_LOGLEVEL"] = "DEBUG"
import Pyro4
import Pyro4.util
import Pyro4.naming
import sys
import pprint
"""
Front end controller for the 2017/18 Networks and Distributed Systems
Summative Assignment.
Author: Z0954757
"""
# Install Pyro4's excepthook so remote tracebacks raised on the server are
# rendered in full on the client side.
sys.excepthook = Pyro4.util.excepthook
pp = pprint.PrettyPrinter()  # pretty-printer; presumably used elsewhere in the file
def main():
    """Initialise the front end, expose it on the network and register it
    with the Pyro4 name server, then serve requests forever.
    """
    front_end = FrontEnd()
    front_end.find_servers()
    # The daemon must stay alive for the whole request loop; the name-server
    # proxy is only needed long enough to register the URI.
    with Pyro4.Daemon() as daemon:
        uri = daemon.register(front_end)
        with Pyro4.locateNS() as name_server:
            name_server.register("filesystem.frontend", uri)
        print("Frontend available.")
        daemon.requestLoop()
if __name__ == "__main__":
main()
| 34.769231 | 109 | 0.62658 |
36ae9333eaabebfa5f7eb2cc25d299b4c6d41d73 | 4,501 | py | Python | tests/test_hexamer/test_search_hexamer.py | zyxue/kleat3 | 861b02797937eea51e99f9c29d195fb3e7dea376 | [
"MIT"
] | null | null | null | tests/test_hexamer/test_search_hexamer.py | zyxue/kleat3 | 861b02797937eea51e99f9c29d195fb3e7dea376 | [
"MIT"
] | null | null | null | tests/test_hexamer/test_search_hexamer.py | zyxue/kleat3 | 861b02797937eea51e99f9c29d195fb3e7dea376 | [
"MIT"
] | null | null | null | import unittest
from kleat.hexamer.search import plus_search, minus_search, search
from kleat.hexamer.hexamer import extract_seq
# Good drawing example, utilize them later
# def test_extract_seq_where_for_plus_strand_clv_supported_by_suffix():
# """
# AATAAA AA <-tail of suffix contig
# ACGG||||CGGCC <-suffix contig
# 0123456789012345 <-contig coord
# 1 |
# ...7890123456789012... <-genome coord
# 1 2|
# ^ref_clv
# """
# clv = 11
# strand = '+'
# contig = MagicMock()
# contig.query_sequence = 'ACGGAATAAACGGCCAA'
# contig.cigartuples = ((S.BAM_CMATCH, 15), (S.BAM_CSOFT_CLIP, 2))
# ref_fa = MagicMock()
# assert extract_seq(contig, strand, clv, ref_fa) == 'ACGGAATAAACGGCC'
# def test_extract_seq_where_for_minus_strand_clv_supported_by_suffix():
# """
# TTT TTTATT <-tail of suffix contig
# AC||||CGGC <-suffix contig
# 012345678901 <-contig coord
# | 1
# ...890123456789... <-genome coord
# | 1
# ^ref_clv
# """
# clv = 11
# strand = '+'
# contig = MagicMock()
# contig.query_sequence = 'TTACTTTATTCGC'
# contig.cigartuples = ((S.BAM_CMATCH, 15), (S.BAM_CSOFT_CLIP, 2))
# ref_fa = MagicMock()
# assert extract_seq(contig, strand, clv, ref_fa) == 'ACTTTATTCGC'
| 36.008 | 91 | 0.608309 |
36afd304529f60846fb23519859a8bcc5c007db7 | 3,825 | py | Python | _mod_Community/LineDrawer/Lines_Callbacks.py | tianlunjiang/_NukeStudio_v2 | 5ed9b9217aff16d903bdcda5c2f1e1cd3bebe367 | [
"CNRI-Python"
] | 6 | 2019-08-27T01:30:15.000Z | 2020-11-17T00:40:01.000Z | _mod_Community/LineDrawer/Lines_Callbacks.py | tianlunjiang/_NukeMods | 47861bfc273262abba55b9f9a61782a5d89479b1 | [
"CNRI-Python"
] | 2 | 2019-01-22T04:09:28.000Z | 2019-01-23T15:11:39.000Z | _mod_Community/LineDrawer/Lines_Callbacks.py | tianlunjiang/_NukeMods | 47861bfc273262abba55b9f9a61782a5d89479b1 | [
"CNRI-Python"
] | 1 | 2020-08-03T22:43:23.000Z | 2020-08-03T22:43:23.000Z | import nuke
| 30.11811 | 136 | 0.614641 |
36b0624ad538450600494ca8e1dbfc5af431fa64 | 907 | py | Python | 臺灣言語平臺/management/commands/加sheet的json.py | sih4sing5hong5/tai5-uan5_gian5-gi2_phing5-tai5 | 26f93e06176d8637556938d635a2e101ae7eb1ab | [
"MIT"
] | 14 | 2016-03-22T15:02:06.000Z | 2018-10-10T02:08:25.000Z | 臺灣言語平臺/management/commands/加sheet的json.py | sih4sing5hong5/tai5-uan5_gian5-gi2_gi2-liau7_phing5-thai5 | 26f93e06176d8637556938d635a2e101ae7eb1ab | [
"MIT"
] | 160 | 2015-10-15T10:34:39.000Z | 2019-03-03T11:54:02.000Z | 臺灣言語平臺/management/commands/加sheet的json.py | sih4sing5hong5/tai5-uan5_gian5-gi2_gi2-liau7_phing5-thai5 | 26f93e06176d8637556938d635a2e101ae7eb1ab | [
"MIT"
] | 5 | 2016-06-19T13:41:08.000Z | 2020-12-15T06:58:06.000Z | import json
from django.core.management.base import BaseCommand
from . import sheet
from django.conf import settings
| 23.868421 | 74 | 0.55237 |
36b163e4e896ecc59896f84da3ea4de1f6c5f0dd | 6,851 | py | Python | flask_googlelogin.py | leakim34/flask-googlelogin | 67346d232414fdba7283f516cb7540d41134d175 | [
"MIT"
] | 35 | 2015-01-28T16:13:55.000Z | 2022-02-12T20:53:32.000Z | flask_googlelogin.py | fnokeke/flask-googlelogin | 67346d232414fdba7283f516cb7540d41134d175 | [
"MIT"
] | 4 | 2015-08-14T13:33:47.000Z | 2018-12-04T10:33:17.000Z | flask_googlelogin.py | fnokeke/flask-googlelogin | 67346d232414fdba7283f516cb7540d41134d175 | [
"MIT"
] | 29 | 2015-01-28T10:23:47.000Z | 2022-02-12T20:53:34.000Z | """
Flask-GoogleLogin
"""
from base64 import (urlsafe_b64encode as b64encode,
urlsafe_b64decode as b64decode)
from urllib import urlencode
from urlparse import parse_qsl
from functools import wraps
from flask import request, redirect, abort, current_app, url_for
from flask_login import LoginManager, make_secure_token
import requests
# Google OAuth2 endpoint URLs (authorization, token exchange, user info).
GOOGLE_OAUTH2_AUTH_URL = 'https://accounts.google.com/o/oauth2/auth'
GOOGLE_OAUTH2_TOKEN_URL = 'https://accounts.google.com/o/oauth2/token'
GOOGLE_OAUTH2_USERINFO_URL = 'https://www.googleapis.com/oauth2/v2/userinfo'
# OAuth scopes for the user's basic e-mail address and profile information.
USERINFO_EMAIL_SCOPE = 'https://www.googleapis.com/auth/userinfo.email'
USERINFO_PROFILE_SCOPE = 'https://www.googleapis.com/auth/userinfo.profile'
def login_url(self, params=None, **kwargs):
    """Build the Google OAuth2 authorization URL, encoding *params* into
    the signed ``state`` query value.

    Recognised Google auth-server keyword arguments:
        response_type: code, token
        prompt: none, select_account, consent
        approval_prompt: force, auto
        access_type: online, offline
        scopes: string (separated with commas) or list
        redirect_uri: string
        login_hint: string
    """
    # Default to the standard server-side flow with online access.
    kwargs.setdefault('response_type', 'code')
    kwargs.setdefault('access_type', 'online')
    # 'prompt' supersedes the legacy 'approval_prompt'; only default the
    # latter when the caller did not use the newer parameter.
    if 'prompt' not in kwargs:
        kwargs.setdefault('approval_prompt', 'auto')
    # Always request the basic profile scope in addition to what was asked.
    requested_scopes = kwargs.pop('scopes', self.scopes.split(','))
    if USERINFO_PROFILE_SCOPE not in requested_scopes:
        requested_scopes.append(USERINFO_PROFILE_SCOPE)
    target_redirect = kwargs.pop('redirect_uri', self.redirect_uri)
    signed_state = self.sign_params(params or {})
    query = dict(client_id=self.client_id,
                 scope=' '.join(requested_scopes),
                 redirect_uri=target_redirect,
                 state=signed_state,
                 **kwargs)
    return GOOGLE_OAUTH2_AUTH_URL + '?' + urlencode(query)
def unauthorized_callback(self):
    """Redirect unauthenticated requests to the login URL, carrying the
    originally requested page in the ``next`` parameter."""
    target = self.login_url(params=dict(next=request.url))
    return redirect(target)
def exchange_code(self, code, redirect_uri):
    """Exchange an authorization *code* for token(s) at Google's token
    endpoint.

    Aborts the request with HTTP 400 when Google returns an empty or
    error response.
    """
    payload = dict(
        code=code,
        redirect_uri=redirect_uri,
        grant_type='authorization_code',
        client_id=self.client_id,
        client_secret=self.client_secret,
    )
    token = requests.post(GOOGLE_OAUTH2_TOKEN_URL, data=payload).json()
    if not token or token.get('error'):
        abort(400)
    return token
def get_access_token(self, refresh_token):
    """Use a *refresh_token* to obtain a fresh access token.

    Returns the token response dict, or ``None`` when Google returns an
    empty or error response.
    """
    payload = dict(
        refresh_token=refresh_token,
        grant_type='refresh_token',
        client_id=self.client_id,
        client_secret=self.client_secret,
    )
    token = requests.post(GOOGLE_OAUTH2_TOKEN_URL, data=payload).json()
    if not token or token.get('error'):
        return
    return token
def oauth2callback(self, view_func):
    """
    Decorator for OAuth2 callback. Calls `GoogleLogin.login` then
    passes results to `view_func`.
    """
    # NOTE(review): `decorated` is not defined anywhere in this chunk -- the
    # inner wrapper function appears to have been lost when this file was
    # extracted.  Calling this method as-is raises NameError; restore the
    # wrapper from the upstream flask_googlelogin source before use.
    return decorated
def user_loader(self, func):
    """Register *func* as the user loader -- a shortcut for
    `flask_login.LoginManager.user_loader` on the wrapped manager."""
    manager = self.login_manager
    manager.user_loader(func)
| 31 | 77 | 0.591301 |
36b22bde35972eb29f3533959fb0afa7c884f64c | 11,729 | py | Python | datajoint_utilities/dj_search/search.py | iamamutt/datajoint-utilities | e5c87cf968d4a50f6819fd6ab743f264641947cc | [
"MIT"
] | 1 | 2022-02-03T18:19:50.000Z | 2022-02-03T18:19:50.000Z | datajoint_utilities/dj_search/search.py | iamamutt/datajoint-utilities | e5c87cf968d4a50f6819fd6ab743f264641947cc | [
"MIT"
] | 4 | 2021-12-07T01:42:24.000Z | 2022-02-21T17:36:56.000Z | datajoint_utilities/dj_search/search.py | iamamutt/datajoint-utilities | e5c87cf968d4a50f6819fd6ab743f264641947cc | [
"MIT"
] | 2 | 2021-11-08T14:47:41.000Z | 2022-01-20T19:44:32.000Z | import datajoint as dj
import re
import inspect
from termcolor import colored
| 50.339056 | 103 | 0.499446 |
36b29aea512c076457ac717e34101b418e8451d8 | 1,838 | py | Python | acvrct.py | lyzcoote/VRChat-Py-Launcher | 307fc26d2a464ba97e93293f757e7fdaa29098de | [
"MIT"
] | null | null | null | acvrct.py | lyzcoote/VRChat-Py-Launcher | 307fc26d2a464ba97e93293f757e7fdaa29098de | [
"MIT"
] | null | null | null | acvrct.py | lyzcoote/VRChat-Py-Launcher | 307fc26d2a464ba97e93293f757e7fdaa29098de | [
"MIT"
] | null | null | null | ################################################################################
# #
# Modules #
# #
################################################################################
import otherUtils as otherUtils
import sys
import os
################################################################################
# #
# Functions #
# #
################################################################################
def override_where():
    """Replacement for certifi.core.where: return the absolute path of the
    cacert.pem bundled alongside the (frozen) executable."""
    cert_path = os.path.abspath("cacert.pem")
    return cert_path
if hasattr(sys, "frozen"):
import certifi.core
os.environ["REQUESTS_CA_BUNDLE"] = override_where()
certifi.core.where = override_where
import requests.utils
import requests.adapters
requests.utils.DEFAULT_CA_BUNDLE_PATH = override_where()
requests.adapters.DEFAULT_CA_BUNDLE_PATH = override_where()
################################################################################
# #
# Main App #
# #
################################################################################
if __name__ == "__main__":
sys.tracebacklimit = 0
otherUtils.launcherMenu()
| 40.844444 | 80 | 0.259521 |
36b2e2e2a3deb4780a06fa5d022548e328c4c7a6 | 16,002 | py | Python | keepercommander/vault.py | Keeper-Security/commander | 93fee5d2ba56f2288e00ab33003597d00a302b5c | [
"MIT"
] | null | null | null | keepercommander/vault.py | Keeper-Security/commander | 93fee5d2ba56f2288e00ab33003597d00a302b5c | [
"MIT"
] | null | null | null | keepercommander/vault.py | Keeper-Security/commander | 93fee5d2ba56f2288e00ab33003597d00a302b5c | [
"MIT"
] | null | null | null | # _ __
# | |/ /___ ___ _ __ ___ _ _
# | ' </ -_) -_) '_ \/ -_) '_|
# |_|\_\___\___| .__/\___|_|
# |_|
#
# Keeper Commander
# Contact: ops@keepersecurity.com
#
import abc
import datetime
import json
from typing import Optional, List, Tuple, Iterable, Type, Union, Dict, Any
import itertools
from .params import KeeperParams
from . import record_types
| 37.213953 | 118 | 0.541057 |
36b37042d59ea92a31e186729793c7340e94c845 | 773 | py | Python | tests/test_quaternionic.py | mhostetter/quaternionic | 159ba7caa41afe33172373ebe4119209577812c1 | [
"MIT"
] | 40 | 2020-08-11T22:13:49.000Z | 2022-03-25T08:10:19.000Z | tests/test_quaternionic.py | mhostetter/quaternionic | 159ba7caa41afe33172373ebe4119209577812c1 | [
"MIT"
] | 28 | 2020-08-27T17:09:34.000Z | 2022-03-27T19:56:57.000Z | tests/test_quaternionic.py | mhostetter/quaternionic | 159ba7caa41afe33172373ebe4119209577812c1 | [
"MIT"
] | 6 | 2020-09-05T13:12:51.000Z | 2022-03-10T02:59:05.000Z | import warnings
import numpy as np
import quaternionic
import pytest
| 38.65 | 69 | 0.659767 |
36b4217be63fc502a7a8b608b61caf14733e4c6e | 1,477 | py | Python | carla_ros_bridge/src/carla_ros_bridge/coordinate_converter.py | OlafOrangi/ros-bridge | 732d5f99e5e1f4d0ea7e4873ccc34f0a40f1203c | [
"MIT"
] | null | null | null | carla_ros_bridge/src/carla_ros_bridge/coordinate_converter.py | OlafOrangi/ros-bridge | 732d5f99e5e1f4d0ea7e4873ccc34f0a40f1203c | [
"MIT"
] | null | null | null | carla_ros_bridge/src/carla_ros_bridge/coordinate_converter.py | OlafOrangi/ros-bridge | 732d5f99e5e1f4d0ea7e4873ccc34f0a40f1203c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from geometry_msgs.msg import Pose, Point, Quaternion, Vector3
import numpy as np
import tf
def convert_pose(pose):
    """Convert a Pose between left- and right-handed coordinate systems.

    :param pose: pose to be converted
    :return: converted pose
    """
    converted = Pose()
    # FIX: Pose.position is a geometry_msgs Point, so mirror it with
    # convert_point (the original used convert_vector3, producing a Vector3
    # in a Point-typed field).  Both types expose x/y/z, so callers are
    # unaffected, but serialization now gets the correct message type.
    converted.position = convert_point(pose.position)
    converted.orientation = convert_quaternion(pose.orientation)
    return converted
def convert_vector3(pt):
    """Mirror a Vector3 across the XZ plane (flip the Y axis) to switch
    between left- and right-handed coordinate systems.

    :param pt: vector to be converted
    :return: converted vector
    """
    flipped_y = -pt.y
    return Vector3(pt.x, flipped_y, pt.z)
def convert_point(pt):
    """Mirror a Point across the XZ plane (flip the Y axis) to switch
    between left- and right-handed coordinate systems.

    :param pt: point to be converted
    :return: converted point
    """
    flipped_y = -pt.y
    return Point(pt.x, flipped_y, pt.z)
def convert_quaternion(q):
    """Convert a quaternion between left- and right-handed coordinate
    systems by negating the yaw (rotation about Z) component.

    :param q: quaternion to be converted
    :return: converted quaternion
    """
    roll, pitch, yaw = tf.transformations.euler_from_quaternion(
        [q.x, q.y, q.z, q.w])
    # Handedness flip corresponds to reversing the rotation about Z.
    components = tf.transformations.quaternion_from_euler(roll, pitch, -yaw)
    return Quaternion(*components)
def convert_euler(euler):
    """Convert Euler angles between left- and right-handed coordinate
    systems by negating the rotation about Z.

    :param euler: euler angles to be converted
    :return: converted euler angles
    """
    negated_z = -euler.z
    return Vector3(euler.x, euler.y, negated_z)
| 26.375 | 74 | 0.688558 |
36b50824ddb6f2e96f0d94699793a7e9265c44f3 | 518 | py | Python | models/IFR_generalized_SB.py | rileymcmorrow/C-SFRAT | c696942940118172dfb2c3b8cc27b8d2fd5a5a17 | [
"MIT"
] | null | null | null | models/IFR_generalized_SB.py | rileymcmorrow/C-SFRAT | c696942940118172dfb2c3b8cc27b8d2fd5a5a17 | [
"MIT"
] | 3 | 2021-03-09T16:13:59.000Z | 2021-09-20T16:50:07.000Z | models/IFR_generalized_SB.py | rileymcmorrow/C-SFRAT | c696942940118172dfb2c3b8cc27b8d2fd5a5a17 | [
"MIT"
] | 4 | 2021-07-20T18:01:12.000Z | 2021-11-22T10:13:35.000Z | from core.model import Model
| 24.666667 | 50 | 0.525097 |
36b666e75f8d2123fc2f466527229d2f55e94174 | 1,263 | py | Python | TrendTrading/ProbModel/CheckScripts/updated market indicator.py | benjabee10/WKUResearch | 5cc384c0e0c1afc82c38a9e6eb63b80c85af7c97 | [
"MIT"
] | null | null | null | TrendTrading/ProbModel/CheckScripts/updated market indicator.py | benjabee10/WKUResearch | 5cc384c0e0c1afc82c38a9e6eb63b80c85af7c97 | [
"MIT"
] | null | null | null | TrendTrading/ProbModel/CheckScripts/updated market indicator.py | benjabee10/WKUResearch | 5cc384c0e0c1afc82c38a9e6eb63b80c85af7c97 | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
import talib
# Window lengths (in bars) -- presumably the long/short moving-average
# periods used by the trend indicator below; TODO confirm against the rest
# of this (truncated) file.
big= 200
small= 50
# Threshold on the relative difference used to call a trend -- TODO confirm.
threshold=0.02
#context.market (shortperiod, longperiod):
#Market Values= 0-negative, 1-no trend, 2-positive
| 25.26 | 61 | 0.639747 |
36b8262c6d34969be77ba59f989410637bf778e2 | 6,097 | py | Python | google_drive_online_decompression.py | xunyixiangchao/Google-Drive-Online-Decompression | 02121e3c25ad0ef3ceb0652a4a4e16f803e8463a | [
"Apache-2.0"
] | null | null | null | google_drive_online_decompression.py | xunyixiangchao/Google-Drive-Online-Decompression | 02121e3c25ad0ef3ceb0652a4a4e16f803e8463a | [
"Apache-2.0"
] | null | null | null | google_drive_online_decompression.py | xunyixiangchao/Google-Drive-Online-Decompression | 02121e3c25ad0ef3ceb0652a4a4e16f803e8463a | [
"Apache-2.0"
] | 1 | 2021-06-04T16:08:35.000Z | 2021-06-04T16:08:35.000Z | # -*- coding: utf-8 -*-
"""Google_Drive_Online_Decompression.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/16e0tv3LEkAFaYHmKH2H63Cg6rpCNWFky
# ** GoogleDrive**
"""
#@markdown GoogleDrive
from google.colab import drive
drive.mount('/content/drive')
"""# **RAR**
# RAR
"""
#@markdown RAR
#@markdown <font size="4" color=red><b>destination</b></font> RAR.rar
destination = "" #@param {type:"string"}
!unrar v "$destination"
"""# RAR"""
#@markdown RAR
#@markdown <font size="4" color=red><b>destination</b></font> .rar
destination = "" #@param {type:"string"}
!unrar v "$destination/*.rar"
"""## RAR ********"""
#@markdown RAR
#@markdown <font size="4" color=red><b>destination</b></font> .rar
destination = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>files</b></font> ()
files = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>password</b></font>
password = "" #@param {type:"string"}
print("")
!unrar x -p"$password" -o+ "$destination" "$files"
"""## RAR"""
#@markdown RAR
#@markdown <font size="4" color=red><b>destination</b></font> .rar
destination = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>files</b></font> ()
files = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>password</b></font>
password = "" #@param {type:"string"}
print("")
!unrar x -p"$password" -o+ "$destination/*.rar" "$files"
"""# **ZIP**
# ZIP
"""
#@markdown ZIP
#@markdown <font size="4" color=red><b>destination</b></font> .zip
destination = "" #@param {type:"string"}
!unzip -l "$destination"
"""# ZIP"""
#@markdown ZIP
#@markdown <font size="4" color=red><b>destination</b></font> .zip
destination = "" #@param {type:"string"}
!unzip -l "$destination/*.zip"
"""### ZIP ********"""
#@markdown ZIP
#@markdown <font size="4" color=red><b>destination</b></font> .zip
destination = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>files</b></font> ()
files = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>password</b></font>
password = "" #@param {type:"string"}
print("")
!7z x -aoa "$destination" -P"$password" -o"$files"
"""## ZIP"""
#@markdown ZIP
#@markdown <font size="4" color=red><b>destination</b></font> .zip
destination = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>files</b></font> ()
files = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>password</b></font>
password = "" #@param {type:"string"}
print("")
!7z x -aoa "$destination/*.zip" -P"$password" -o"$files"
"""# **7Z**
# 7Z
"""
#@markdown 7Z
#@markdown <font size="4" color=red><b>destination</b></font> .7z
destination = "" #@param {type:"string"}
!7z l "$destination"
"""# 7Z"""
#@markdown 7Z
#@markdown <font size="4" color=red><b>destination</b></font> .7z
destination = "" #@param {type:"string"}
!7z l "$destination/*.7z.*"
"""## 7Z ********"""
#@markdown 7Z
#@markdown <font size="4" color=red><b>destination</b></font> 7Z.7z
destination = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>files</b></font> ()
files = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>password</b></font>
password = "" #@param {type:"string"}
print("")
!7z x -aoa "$destination" -P"$password" -r -o"$files"
"""## 7z"""
#@markdown 7Z
#@markdown <font size="4" color=red><b>destination</b></font> .7z
destination = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>files</b></font> ()
files = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>password</b></font>
password = "" #@param {type:"string"}
print("")
!7z x -aoa "$destination/*.7z" -P"$password" -o"$files"
"""# <font color=red><b>****</b></font>
#
"""
#@markdown
#@markdown <font size="4" color=red><b>destination</b></font> .xxx
destination = "" #@param {type:"string"}
!7z l "$destination"
"""# """
#@markdown
#@markdown <font size="4" color=red><b>destination</b></font> .xxx
destination = "" #@param {type:"string"}
!7z l "$destination/*.*"
"""## ********"""
#@markdown
#@markdown <font size="4" color=red><b>destination</b></font> 7Z.xxx
destination = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>files</b></font> ()
files = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>password</b></font>
password = "" #@param {type:"string"}
!7z x -aoa "$destination" -P"$password" -r -o"$files"
"""## """
#@markdown
#@markdown <font size="4" color=red><b>destination</b></font> .xxx
destination = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>files</b></font> ()
files = "" #@param {type:"string"}
#@markdown <font size="4" color=red><b>password</b></font>
password = "" #@param {type:"string"}
!7z x -aoa "$destination/*.*" -P"$password" -o"$files" | 23.360153 | 102 | 0.657865 |
36b8b92109f8c9655104ce9dade2ed763cbf2735 | 678 | py | Python | hackerearth/Algorithms/A plane journey/solution.py | ATrain951/01.python-com_Qproject | c164dd093954d006538020bdf2e59e716b24d67c | [
"MIT"
] | 4 | 2020-07-24T01:59:50.000Z | 2021-07-24T15:14:08.000Z | hackerearth/Algorithms/A plane journey/solution.py | ATrain951/01.python-com_Qproject | c164dd093954d006538020bdf2e59e716b24d67c | [
"MIT"
] | null | null | null | hackerearth/Algorithms/A plane journey/solution.py | ATrain951/01.python-com_Qproject | c164dd093954d006538020bdf2e59e716b24d67c | [
"MIT"
] | null | null | null | """
# Sample code to perform I/O:
name = input() # Reading input from STDIN
print('Hi, %s.' % name) # Writing output to STDOUT
# Warning: Printing unwanted or ill-formatted data to output will cause the test cases to fail
"""
# Write your code here
n, m = map(int, input().strip().split())
a = sorted(map(int, input().strip().split()), reverse=True)
b = sorted(map(int, input().strip().split()), reverse=True)
if a[0] > b[0]:
print(-1)
else:
min_time = 1
i = j = 0
while i < len(a):
if j < len(b) and a[i] <= b[j]:
j += 1
elif a[i] <= b[j - 1]:
min_time += 2
i += 1
print(min_time)
| 26.076923 | 94 | 0.538348 |
36b8bfd65b80b877d57938c5b868d8f66abde496 | 65 | py | Python | ml/av/io/__init__.py | necla-ml/ml | 7ebd29382326e3958297607da7182c211865e7ff | [
"BSD-3-Clause"
] | 1 | 2022-02-21T21:06:29.000Z | 2022-02-21T21:06:29.000Z | ml/av/io/__init__.py | necla-ml/ml | 7ebd29382326e3958297607da7182c211865e7ff | [
"BSD-3-Clause"
] | null | null | null | ml/av/io/__init__.py | necla-ml/ml | 7ebd29382326e3958297607da7182c211865e7ff | [
"BSD-3-Clause"
] | null | null | null | """APIs from ml.vision.io and ml.audio.io
"""
from .api import * | 16.25 | 41 | 0.661538 |
36b8ccb8c50334dfa92a74050719c2548bf9dec4 | 738 | py | Python | addon.py | codingPF/plugin.video.newsApp | 64f7c3e2e742cef5cd7c3303e2ffb3ec07771476 | [
"MIT"
] | null | null | null | addon.py | codingPF/plugin.video.newsApp | 64f7c3e2e742cef5cd7c3303e2ffb3ec07771476 | [
"MIT"
] | null | null | null | addon.py | codingPF/plugin.video.newsApp | 64f7c3e2e742cef5cd7c3303e2ffb3ec07771476 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
The main addon module
SPDX-License-Identifier: MIT
"""
# -- Imports ------------------------------------------------
import xbmcaddon
import resources.lib.appContext as appContext
import resources.lib.settings as Settings
import resources.lib.logger as Logger
import resources.lib.main as Main
# -- Main Code ----------------------------------------------
# Entry point: this block runs only when the file is executed as a script
# (e.g. when the media center launches the add-on), not on import.
if __name__ == '__main__':
    # Initialize the shared application-context singleton.
    appContext.init()
    # Hand the add-on handle (xbmcaddon.Addon()) to the context.
    appContext.initAddon(xbmcaddon.Addon())
    # Logger is tagged with the add-on id and version read from the add-on
    # info; ADDONCLASS is presumably set by initAddon above -- TODO confirm.
    appContext.initLogger(Logger.Logger(appContext.ADDONCLASS.getAddonInfo('id'), appContext.ADDONCLASS.getAddonInfo('version')))
    appContext.initSettings(Settings.Settings(appContext.ADDONCLASS))
    # Run the plugin once, then release the instance explicitly.
    PLUGIN = Main.Main()
    PLUGIN.run()
    del PLUGIN
| 29.52 | 129 | 0.647696 |
36ba21d593e601f39648ce3de11ea90f9d215efd | 6,226 | py | Python | bfgame/components/equipment.py | ChrisLR/BasicDungeonRL | b293d40bd9a0d3b7aec41b5e1d58441165997ff1 | [
"MIT"
] | 3 | 2017-10-28T11:28:38.000Z | 2018-09-12T09:47:00.000Z | bfgame/components/equipment.py | ChrisLR/BasicDungeonRL | b293d40bd9a0d3b7aec41b5e1d58441165997ff1 | [
"MIT"
] | null | null | null | bfgame/components/equipment.py | ChrisLR/BasicDungeonRL | b293d40bd9a0d3b7aec41b5e1d58441165997ff1 | [
"MIT"
] | null | null | null | from bflib import units
from core import contexts
from core.components import Component, listing
from core.messaging import StringBuilder, Actor, Target, Verb
| 37.506024 | 115 | 0.625442 |
36ba65041a866ce133db66a746c7905283d02484 | 544 | py | Python | students/K33402/Shuginin_Yurii/LR2/homework_board/board_app/urls.py | emina13/ITMO_ICT_WebDevelopment_2021-2022 | 498a6138e352e7e0ca40d1eb301bc29416158f51 | [
"MIT"
] | null | null | null | students/K33402/Shuginin_Yurii/LR2/homework_board/board_app/urls.py | emina13/ITMO_ICT_WebDevelopment_2021-2022 | 498a6138e352e7e0ca40d1eb301bc29416158f51 | [
"MIT"
] | null | null | null | students/K33402/Shuginin_Yurii/LR2/homework_board/board_app/urls.py | emina13/ITMO_ICT_WebDevelopment_2021-2022 | 498a6138e352e7e0ca40d1eb301bc29416158f51 | [
"MIT"
] | 1 | 2022-03-19T09:24:42.000Z | 2022-03-19T09:24:42.000Z | from django.urls import path
from . import views
urlpatterns = [
path('', views.StartPageView.as_view()),
path('accounts/created/', views.NotificationView.as_view()),
path('accounts/<int:pk>/update/', views.StudentUpdate.as_view()),
path('profile/', views.ProfilePageView.as_view()),
path('profile/all_tasks/', views.AllTasks.as_view()),
path('profile/all_tasks/answer', views.solution_create),
path('profile/class_marks/subject_select', views.subject_select),
path('profile/class_marks', views.class_marks),
]
| 38.857143 | 69 | 0.71875 |
36bbde81383cafa0b00f9d5defddc4acebc151af | 4,478 | py | Python | tests/enviroments_test/test_environments.py | DKE-Data/agrirouter-sdk-python | 6d6b26606f7d424c62289af56da55acf412772fc | [
"Apache-2.0"
] | null | null | null | tests/enviroments_test/test_environments.py | DKE-Data/agrirouter-sdk-python | 6d6b26606f7d424c62289af56da55acf412772fc | [
"Apache-2.0"
] | null | null | null | tests/enviroments_test/test_environments.py | DKE-Data/agrirouter-sdk-python | 6d6b26606f7d424c62289af56da55acf412772fc | [
"Apache-2.0"
] | null | null | null | """Test agrirouter/environments/environments.py"""
from agrirouter.environments.environments import ProductionEnvironment as PE
from agrirouter.environments.environments import QAEnvironment as QAE
from tests.constants import application_id
| 42.245283 | 143 | 0.712818 |
36bbe5261935347fbb62f2ff569d538d41679679 | 556 | py | Python | foursquare/tests/test_lang.py | milind-shakya-sp/foursquare | 1df90777f9b86d8247e8d79e7fbe8e88c8cdd467 | [
"MIT"
] | 1 | 2019-06-10T21:12:01.000Z | 2019-06-10T21:12:01.000Z | foursquare/tests/test_lang.py | milind-shakya-sp/foursquare | 1df90777f9b86d8247e8d79e7fbe8e88c8cdd467 | [
"MIT"
] | null | null | null | foursquare/tests/test_lang.py | milind-shakya-sp/foursquare | 1df90777f9b86d8247e8d79e7fbe8e88c8cdd467 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
# (c) 2016 Mike Lewis
import logging; log = logging.getLogger(__name__)
from . import MultilangEndpointTestCase
| 27.8 | 85 | 0.652878 |
36bc7e0436f464b768c92e41f855171401f6f554 | 4,923 | py | Python | src/tests/model_deployment_tests.py | vravisrpi/mlops-vertex | 0944b22996a5405f64d7ae162bd2427ffd81884d | [
"Apache-2.0"
] | null | null | null | src/tests/model_deployment_tests.py | vravisrpi/mlops-vertex | 0944b22996a5405f64d7ae162bd2427ffd81884d | [
"Apache-2.0"
] | null | null | null | src/tests/model_deployment_tests.py | vravisrpi/mlops-vertex | 0944b22996a5405f64d7ae162bd2427ffd81884d | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test an uploaded model to Vertex AI."""
import os
import logging
import tensorflow as tf
# One sample prediction instance (per the module docstring, used to test a
# model uploaded to Vertex AI).  Every feature value is wrapped in a
# single-element list, i.e. a batch of size one.
# NOTE(review): feature names look like a taxi-trip schema -- confirm they
# match the training pipeline's serving signature.
test_instance = {
    "dropoff_grid": ["POINT(-87.6 41.9)"],
    "euclidean": [2064.2696],
    "loc_cross": [""],
    "payment_type": ["Credit Card"],
    "pickup_grid": ["POINT(-87.6 41.9)"],
    "trip_miles": [1.37],
    "trip_day": [12],
    "trip_hour": [16],
    "trip_month": [2],
    "trip_day_of_week": [4],
    "trip_seconds": [555],
}
# Name of the saved-model signature used for prediction requests.
SERVING_DEFAULT_SIGNATURE_NAME = "serving_default"
from google.cloud import aiplatform as vertex_ai
| 31.557692 | 95 | 0.672354 |
36bd2fc4cc690280e24a0e546825f3792edd1b9b | 266 | py | Python | noxfile.py | aodag/asbool | 1c5d74c9b2f641a3452c1e7118a4a83ffe665ab5 | [
"MIT"
] | 8 | 2015-11-20T01:20:13.000Z | 2021-02-20T04:24:08.000Z | noxfile.py | aodag/asbool | 1c5d74c9b2f641a3452c1e7118a4a83ffe665ab5 | [
"MIT"
] | 2 | 2020-12-08T05:16:48.000Z | 2021-02-16T11:12:06.000Z | noxfile.py | aodag/asbool | 1c5d74c9b2f641a3452c1e7118a4a83ffe665ab5 | [
"MIT"
] | null | null | null | import nox
nox.options.sessions = ["test"]
| 17.733333 | 45 | 0.593985 |
36bdb06f6f3497fa1d06a8cb17f94061f6766f18 | 9,085 | py | Python | selectGoodFeatures.py | TimSC/PyFeatureTrack | 11668181e56fb9472a0c8db291c88546e7fae0cf | [
"BSD-2-Clause"
] | 33 | 2015-02-24T18:23:11.000Z | 2022-03-08T09:55:02.000Z | selectGoodFeatures.py | Nestart/PyFeatureTrack | 11668181e56fb9472a0c8db291c88546e7fae0cf | [
"BSD-2-Clause"
] | 1 | 2017-03-08T21:07:33.000Z | 2017-06-04T21:58:01.000Z | selectGoodFeatures.py | Nestart/PyFeatureTrack | 11668181e56fb9472a0c8db291c88546e7fae0cf | [
"BSD-2-Clause"
] | 15 | 2016-06-29T08:55:58.000Z | 2020-06-04T03:01:39.000Z |
from __future__ import print_function
import math, numpy as np
from PIL import Image
from klt import *
from error import *
from convolve import *
from klt_util import *
import goodFeaturesUtils
KLT_verbose = 1
#*********************************************************************
#*********************************************************************
#* _enforceMinimumDistance
#*
#* Removes features that are within close proximity to better features.
#*
#* INPUTS
#* featurelist: A list of features. The nFeatures property
#* is used.
#*
#* OUTPUTS
#* featurelist: Is overwritten. Nearby "redundant" features are removed.
#* Writes -1's into the remaining elements.
#*
#* RETURNS
#* The number of remaining features.
#*
#*********************************************************************
#*********************************************************************
#* KLTSelectGoodFeatures
#*
#* Main routine, visible to the outside. Finds the good features in
#* an image.
#*
#* INPUTS
#* tc: Contains parameters used in computation (size of image,
#* size of window, min distance b/w features, sigma to compute
#* image gradients, # of features desired).
#* img: Pointer to the data of an image (probably unsigned chars).
#*
#* OUTPUTS
#* features: List of features. The member nFeatures is computed.
#*
| 30.692568 | 128 | 0.681233 |
36be052ecd5aed78815486dfc598f4e2ff2a749d | 3,345 | py | Python | pysparsdr/pySparSDR.py | ucsdwcsng/pySparSDR | 6622fce9c75b180b8601d9deecafff401e6a4d9f | [
"Apache-2.0"
] | null | null | null | pysparsdr/pySparSDR.py | ucsdwcsng/pySparSDR | 6622fce9c75b180b8601d9deecafff401e6a4d9f | [
"Apache-2.0"
] | null | null | null | pysparsdr/pySparSDR.py | ucsdwcsng/pySparSDR | 6622fce9c75b180b8601d9deecafff401e6a4d9f | [
"Apache-2.0"
] | null | null | null | #/bin/python3
import numpy as np
from scipy import signal as sig | 37.166667 | 295 | 0.635277 |
36bf9270f81abe8f83096f56129e26e2554011cc | 803 | py | Python | dirtyclean/tests/test_dirtyclean.py | paultopia/dirtyclean | 1b93b29e070b53afede22ff28497fd68f28d0326 | [
"MIT"
] | 2 | 2017-12-04T16:58:57.000Z | 2021-03-02T04:59:54.000Z | dirtyclean/tests/test_dirtyclean.py | paultopia/dirtyclean | 1b93b29e070b53afede22ff28497fd68f28d0326 | [
"MIT"
] | null | null | null | dirtyclean/tests/test_dirtyclean.py | paultopia/dirtyclean | 1b93b29e070b53afede22ff28497fd68f28d0326 | [
"MIT"
] | null | null | null | from dirtyclean import clean
import unittest
| 33.458333 | 124 | 0.617684 |
36bfb2d78d16ac861521aa10b4dcdbc76d656637 | 1,321 | py | Python | findNearestControl.py | petrarch1603/SurveyApplications | 129a4e24123bf81687c0a60cccbe3d0a83f63e40 | [
"MIT"
] | 1 | 2019-08-24T20:29:05.000Z | 2019-08-24T20:29:05.000Z | findNearestControl.py | petrarch1603/SurveyApplications | 129a4e24123bf81687c0a60cccbe3d0a83f63e40 | [
"MIT"
] | null | null | null | findNearestControl.py | petrarch1603/SurveyApplications | 129a4e24123bf81687c0a60cccbe3d0a83f63e40 | [
"MIT"
] | null | null | null | import csv
control = "/Users/patrickmcgranaghan1/Documents/Python/python_work/SurveyApplications/source_data/control.csv"
set_points = "/Users/patrickmcgranaghan1/Documents/Python/python_work/SurveyApplications/source_data/setPoints.csv"
max_hypotenuse = 200  # Integer in feet
# Note in the State Plane Coordinate System the coordinates are written Northing(Y), Easting(X)
# This is the opposite of the normal (X, Y) coordinate system.


def nearby_controls(set_coord, ctrl_coords, max_dist):
    """Return (name, distance) tuples for every control point within max_dist.

    set_coord   : row (name, northing, easting) with numeric strings.
    ctrl_coords : iterable of control rows in the same layout.
    max_dist    : inclusive distance cutoff (same units as the coordinates).
    """
    candidates = []
    for ctrl_coord in ctrl_coords:
        # Index 2 = easting (X), index 1 = northing (Y) -- see note above.
        x_delta = int(set_coord[2]) - int(ctrl_coord[2])
        y_delta = int(set_coord[1]) - int(ctrl_coord[1])
        hypotenuse = (x_delta ** 2 + y_delta ** 2) ** 0.5
        if hypotenuse <= max_dist:
            candidates.append((ctrl_coord[0], hypotenuse))
    return candidates


def main():
    """Report, for each set point, the nearest control point within range."""
    # Read the control file once instead of re-reading it per set point.
    with open(control, 'r') as ctrl:
        ctrl_coords = list(csv.reader(ctrl))
    with open(set_points, 'r') as set_pts:
        for set_coord in csv.reader(set_pts):
            candidates = nearby_controls(set_coord, ctrl_coords, max_hypotenuse)
            if not candidates:
                # The original crashed (min() on empty list) in this case.
                print(set_coord[0] + " has no control point within " + str(max_hypotenuse) + " feet")
                continue
            closest_base = min(candidates, key=lambda t: t[1])
            # Below write code to insert the closest control points into the spreadsheet in a selected column
            print(set_coord[0] + " is closest to " + (closest_base[0]) + ". A distance of " + str(closest_base[1]))


if __name__ == "__main__":
    main()
36c07f8de0ab1e4bb4abec5686212164de45b5a1 | 2,118 | py | Python | stac_compose/collections/controller.py | dgi-catalog/stac-compose | 1cae4a58fcfb36082c203db3c99e2779fc207400 | [
"MIT"
] | null | null | null | stac_compose/collections/controller.py | dgi-catalog/stac-compose | 1cae4a58fcfb36082c203db3c99e2779fc207400 | [
"MIT"
] | 14 | 2021-03-01T20:59:20.000Z | 2021-11-24T19:14:49.000Z | stac_compose/collections/controller.py | dgi-catalog/stac-compose | 1cae4a58fcfb36082c203db3c99e2779fc207400 | [
"MIT"
] | null | null | null | from json import dumps
from pprint import PrettyPrinter
from cerberus.validator import Validator
from flask import request
from flask_restx import Resource
from werkzeug.exceptions import BadRequest
from stac_compose.collections import ns as api
from stac_compose.collections.business import CollectionsBusiness
from stac_compose.collections.parsers import validate, COLLECTIONS_CONTROLLER_VALIDATION
from stac_compose.decorator import catch_generic_exceptions
from stac_compose.environment import SC_LOGGING_LEVEL
from stac_compose.logger import create_logger
# create logger object
logger = create_logger(__name__, level=SC_LOGGING_LEVEL)
pp = PrettyPrinter(indent=4)
| 29.830986 | 92 | 0.715297 |
36c20378107325500044b16060b5655f3ad7826c | 6,070 | py | Python | python/tvm/auto_scheduler/workload_registry.py | jiangzoi/incubator-tvm | 144c6f45f7217b9df2f5605e06f0903e470ac11c | [
"Apache-2.0"
] | 2 | 2020-07-07T07:38:45.000Z | 2021-06-02T07:08:09.000Z | python/tvm/auto_scheduler/workload_registry.py | jiangzoi/incubator-tvm | 144c6f45f7217b9df2f5605e06f0903e470ac11c | [
"Apache-2.0"
] | 1 | 2020-07-29T07:29:17.000Z | 2020-07-29T07:29:17.000Z | python/tvm/auto_scheduler/workload_registry.py | jiangzoi/incubator-tvm | 144c6f45f7217b9df2f5605e06f0903e470ac11c | [
"Apache-2.0"
] | 1 | 2021-07-03T08:09:32.000Z | 2021-07-03T08:09:32.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Workload registration and serialization.
We use a json string to represent a workload (a computation graph).
The format of the string is `[func_name, [args...]]`.
The dag should be the return value of this `func_name(*args)`.
Rationale: The workload is actually a compute dag defined by tvm dsl. But serializing compute dags
and matching them efficiently is not easy. Therefore, we use the above string to encode a compute
dag.
These strings are efficient for serialization/matching and won't be too long.
When we need the dag, we decode the string and call the function, which will return the dag.
"""
import pickle
import json
import tvm._ffi
from .utils import serialize_args, deserialize_args, get_func_name
WORKLOAD_FUNC_REGISTRY = {}
def register_workload(func_name, f=None, override=False):
    """ Register a function that generates a certain workload.

    The registered function must take hashable and jsonable arguments
    (int, float, tuple of int, tvm.tensor.Tensor, ...) and return a list
    of tvm.tensor.Tensor.  Usable both as a plain call and as a decorator.

    Parameters
    ----------
    func_name : Union[Function, str]
        The generation function that returns the compute declaration Tensors
        or its function name.
    f : Optional[Function]
        The generation function to be registered.
    override : boolean = False
        Whether override existing entry.
    """
    global WORKLOAD_FUNC_REGISTRY

    # Decorator form: the function itself was passed as the first argument.
    if callable(func_name):
        f, func_name = func_name, get_func_name(func_name)
    if not isinstance(func_name, str):
        raise ValueError("expect string function name")

    def _register(workload_func):
        # Refuse to silently replace an existing entry unless asked to.
        if func_name in WORKLOAD_FUNC_REGISTRY and not override:
            raise RuntimeError('%s has been registered already' % func_name)
        WORKLOAD_FUNC_REGISTRY[func_name] = workload_func
        return workload_func

    return _register(f) if f else _register
def make_workload_key(func, args):
    """ Make a workload key by function and arguments.

    Parameters
    ----------
    func : Union[Function, str]
        The function that returns the compute declaration Tensors.
        Can be a function or the function name.
    args : Args
        The args of the function.

    Returns
    -------
    workload_key : Str
        The workload key of the function.
    """
    global WORKLOAD_FUNC_REGISTRY

    if callable(func):
        func_name = get_func_name(func)
    elif isinstance(func, str):
        func_name = func
    else:
        raise ValueError("Invalid function: " + str(func) +
                         " . `make_workload_key` expects a callable function or its function name")

    if func_name not in WORKLOAD_FUNC_REGISTRY:
        # Bug fix: the original passed two arguments to ValueError (a comma
        # instead of `+`), so the exception carried a tuple rather than one
        # readable message.  Concatenate, matching
        # decode_workload_key_to_func_args.
        raise ValueError("%s is not registered. " % func +
                         "Please register it with @auto_scheduler.register_workload")

    args = serialize_args(args)
    return json.dumps((func_name,) + args)
def decode_workload_key_to_func_args(workload_key):
    """ Decode a workload key into the registered function name and its args.

    Parameters
    ----------
    workload_key : str
        The input workload key.

    Returns
    -------
    name : str
        The function name of this workload key.
    args : List[Tensor]
        The args of the generation function.
    """
    global WORKLOAD_FUNC_REGISTRY

    decoded = json.loads(workload_key)
    func_name = decoded[0]
    if func_name not in WORKLOAD_FUNC_REGISTRY:
        raise ValueError("%s is not registered. " % func_name +
                         "Please register it with @auto_scheduler.register_workload")
    return func_name, deserialize_args(decoded[1:])
def save_workload_func_registry(filename):
    """ Dump workload function registry to a pickle binary file.

    Parameters
    ----------
    filename : str
        The filename to dump workload function registry to.
    """
    global WORKLOAD_FUNC_REGISTRY
    # Use a context manager so the file handle is flushed and closed
    # deterministically; the original passed a bare open() to pickle.dump
    # and relied on garbage collection to close the file.
    with open(filename, 'wb') as fout:
        pickle.dump(WORKLOAD_FUNC_REGISTRY, fout)
def load_workload_func_registry(filename):
    """ Load workload function registry from a pickle binary file.

    Parameters
    ----------
    filename : str
        The filename to load workload function registry from.
    """
    global WORKLOAD_FUNC_REGISTRY
    # Use a context manager so the file handle is closed deterministically;
    # the original passed a bare open() to pickle.load and leaked the handle.
    with open(filename, 'rb') as fin:
        WORKLOAD_FUNC_REGISTRY = pickle.load(fin)
| 31.614583 | 99 | 0.682208 |
36c26ea8b70af852028240a4c83a673def2fbdd3 | 485 | py | Python | main/xrandr/template.py | RoastVeg/cports | 803c7f07af341eb32f791b6ec1f237edb2764bd5 | [
"BSD-2-Clause"
] | null | null | null | main/xrandr/template.py | RoastVeg/cports | 803c7f07af341eb32f791b6ec1f237edb2764bd5 | [
"BSD-2-Clause"
] | null | null | null | main/xrandr/template.py | RoastVeg/cports | 803c7f07af341eb32f791b6ec1f237edb2764bd5 | [
"BSD-2-Clause"
] | null | null | null | pkgname = "xrandr"
pkgver = "1.5.1"
pkgrel = 0
build_style = "gnu_configure"
hostmakedepends = ["pkgconf"]
makedepends = ["libxrandr-devel"]
pkgdesc = "Command line interface to X RandR extension"
maintainer = "q66 <q66@chimera-linux.org>"
license = "MIT"
url = "https://xorg.freedesktop.org"
source = f"$(XORG_SITE)/app/{pkgname}-{pkgver}.tar.xz"
sha256 = "7bc76daf9d72f8aff885efad04ce06b90488a1a169d118dea8a2b661832e8762"
| 30.3125 | 75 | 0.748454 |
36c29207131a5d0aabb533544ef1349cab67ea61 | 2,477 | py | Python | src/arch/riscv/RiscvCPU.py | yclin99/CS251A_final_gem5 | 391ca1d7c9484f4d58fce9a4424821dcbb2463ac | [
"BSD-3-Clause"
] | 1 | 2022-03-25T13:18:26.000Z | 2022-03-25T13:18:26.000Z | src/arch/riscv/RiscvCPU.py | yclin99/CS251A_final_gem5 | 391ca1d7c9484f4d58fce9a4424821dcbb2463ac | [
"BSD-3-Clause"
] | 1 | 2022-03-25T14:15:30.000Z | 2022-03-25T14:15:30.000Z | src/arch/riscv/RiscvCPU.py | ksco/gem5-xiangshan | 0baf1b5229885d81d689a677102f0665aaac5514 | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2021 Google, Inc.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from m5.objects.BaseAtomicSimpleCPU import BaseAtomicSimpleCPU
from m5.objects.BaseNonCachingSimpleCPU import BaseNonCachingSimpleCPU
from m5.objects.BaseTimingSimpleCPU import BaseTimingSimpleCPU
from m5.objects.BaseO3CPU import BaseO3CPU
from m5.objects.BaseMinorCPU import BaseMinorCPU
from m5.objects.RiscvDecoder import RiscvDecoder
from m5.objects.RiscvMMU import RiscvMMU
from m5.objects.RiscvInterrupts import RiscvInterrupts
from m5.objects.RiscvISA import RiscvISA
| 44.232143 | 72 | 0.805006 |
36c4f0d8dd30675016f1cde8a4e0b430d5e215ed | 164 | py | Python | misc/validateInput.py | viju4you/Python | 3c4a3a46265e71fc21da62d2cb204d20dcd9ec62 | [
"CC0-1.0"
] | 110 | 2017-03-11T23:37:46.000Z | 2021-07-12T11:51:32.000Z | misc/validateInput.py | viju4you/Python | 3c4a3a46265e71fc21da62d2cb204d20dcd9ec62 | [
"CC0-1.0"
] | null | null | null | misc/validateInput.py | viju4you/Python | 3c4a3a46265e71fc21da62d2cb204d20dcd9ec62 | [
"CC0-1.0"
] | 52 | 2016-11-27T19:50:40.000Z | 2022-02-09T06:37:24.000Z | # Validate input
# Keep prompting until the user enters a purely numeric age.
while True:
    print('Enter your age:')
    age = input()
    if age.isdecimal():
        break
    # Fixed typo in the user-facing message ("Pleas" -> "Please").
    print('Please enter a number for your age.')
| 16.4 | 47 | 0.597561 |
36c5772cb7b021a7fd6965ba28a4663832c436d3 | 1,003 | py | Python | ckan/migration/versions/041_resource_new_fields.py | florianm/ckan | 1cfd98d591ac70b4eb81048bcd227b6c1354b1bf | [
"Apache-2.0"
] | 12 | 2015-08-28T16:59:07.000Z | 2020-03-08T01:39:30.000Z | ckan/migration/versions/041_resource_new_fields.py | florianm/ckan | 1cfd98d591ac70b4eb81048bcd227b6c1354b1bf | [
"Apache-2.0"
] | 13 | 2019-05-02T21:01:28.000Z | 2020-10-20T23:34:48.000Z | ckan/migration/versions/041_resource_new_fields.py | florianm/ckan | 1cfd98d591ac70b4eb81048bcd227b6c1354b1bf | [
"Apache-2.0"
] | 10 | 2015-05-08T04:33:20.000Z | 2020-03-03T15:17:58.000Z | from migrate import *
| 30.393939 | 65 | 0.731805 |
36c6dd4f5d4854726c666ad63dd36dff26b82159 | 1,153 | py | Python | src/train_model.py | hzdr/dvc_tutorial_series | f53eee599cc05e2c2ea31f6e2fd567a4ac3061a3 | [
"BSD-3-Clause"
] | 2 | 2021-06-24T13:39:39.000Z | 2022-02-27T13:35:02.000Z | src/train_model.py | hzdr/dvc_tutorial_series | f53eee599cc05e2c2ea31f6e2fd567a4ac3061a3 | [
"BSD-3-Clause"
] | null | null | null | src/train_model.py | hzdr/dvc_tutorial_series | f53eee599cc05e2c2ea31f6e2fd567a4ac3061a3 | [
"BSD-3-Clause"
] | null | null | null | import pickle
import pandas as pd
import yaml
from sklearn.linear_model import ElasticNet, LogisticRegression
from sklearn.ensemble import RandomForestRegressor
from config import Config

# Make sure the output directory exists before training starts.
Config.MODELS_PATH.mkdir(parents=True, exist_ok=True)

# Hyper-parameters are tracked in params.yaml (DVC-style pipeline config).
with open("params.yaml", "r") as fd:
    params = yaml.safe_load(fd)

model_type = params['model_type']
lr = params['lr']
random_state = params['random_state']
alpha = params['train']['alpha']
l1_rate = params['train']['l1_rate']

X_train = pd.read_csv(str(Config.FEATURES_PATH / "train_features.csv"))
y_train = pd.read_csv(str(Config.FEATURES_PATH / "train_labels.csv"))

if model_type == "LogisticRegression":
    model = LogisticRegression(l1_ratio=l1_rate, random_state=random_state)
elif model_type == "RandomForestRegressor":
    model = RandomForestRegressor(
        n_estimators=150, max_depth=6, random_state=random_state
    )
elif model_type == "ElasticNet":
    model = ElasticNet(
        alpha=alpha, l1_ratio=l1_rate, random_state=random_state
    )
else:
    # Fail fast with a clear message; the original fell through and raised
    # NameError at model.fit() for an unrecognized model_type.
    raise ValueError("Unknown model_type: %r" % model_type)

model.fit(X_train, y_train)

# Close the output file deterministically instead of relying on GC.
with open(str(Config.MODELS_PATH / "model.pickle"), "wb") as fout:
    pickle.dump(model, fout)
36c6fc43e8d2fdc269e708e857550cc5862aa1c5 | 8,226 | py | Python | opentracing/harness/api_check.py | autocracy/opentracing-python | ac45df0c39b4cce8e6e6ca40dedc2b9f6c388328 | [
"MIT"
] | null | null | null | opentracing/harness/api_check.py | autocracy/opentracing-python | ac45df0c39b4cce8e6e6ca40dedc2b9f6c388328 | [
"MIT"
] | null | null | null | opentracing/harness/api_check.py | autocracy/opentracing-python | ac45df0c39b4cce8e6e6ca40dedc2b9f6c388328 | [
"MIT"
] | null | null | null | # Copyright (c) 2016 The OpenTracing Authors.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
import time
import pytest
import opentracing
from opentracing import Format
| 39.358852 | 79 | 0.612205 |
36c93b1ef9b9eeb9b865aada75df7cf42d64021f | 29,950 | py | Python | Colab/vision_transformer_dogs_and_cats_python_generator.py | Thanusan19/Vision_Transformer | 80179d57e617ef6cd9599de93c7c7633f891f9a9 | [
"Apache-2.0"
] | 1 | 2021-07-02T13:55:11.000Z | 2021-07-02T13:55:11.000Z | Colab/vision_transformer_dogs_and_cats_python_generator.py | Thanusan19/Vision_Transformer | 80179d57e617ef6cd9599de93c7c7633f891f9a9 | [
"Apache-2.0"
] | null | null | null | Colab/vision_transformer_dogs_and_cats_python_generator.py | Thanusan19/Vision_Transformer | 80179d57e617ef6cd9599de93c7c7633f891f9a9 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""Vision Transformer Dogs and Cats Python Generator
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/12u7r2OMkt_rFmOQq2g5FtX7Z0EbyPYFN
See code at https://github.com/google-research/vision_transformer/
See paper at https://arxiv.org/abs/2010.11929
This Colab allows you to run the [JAX](https://jax.readthedocs.org) implementation of the Vision Transformer.
## 1) Using generator
### 1.1) Download the dataset and unpack it on the colab machine
"""
!pwd
!mkdir dataset
!ls
!wget -c "https://download.microsoft.com/download/3/E/1/3E1C3F21-ECDB-4869-8368-6DEBA77B919F/kagglecatsanddogs_3367a.zip" -P dataset/
!ls dataset/
# Quiet and overwrite, will create folder and unpack in CatsAndDogs
!unzip -qo dataset/kagglecatsanddogs_3367a.zip -d dataset/CatsAndDogs
# Print the number of cats and dogs images in the set
!ls -l dataset/CatsAndDogs/PetImages/Cat/*.jpg | wc -l
!ls -l dataset/CatsAndDogs/PetImages/Dog/*.jpg | wc -l
# Sanity check for later
!ls dataset/CatsAndDogs/PetImages/Cat/*.jpg | sed -E 's#.*/##' | sort > /tmp/Cats.txt
!ls dataset/CatsAndDogs/PetImages/Dog/*.jpg | sed -E 's#.*/##' | sort > /tmp/Dogs.txt
!diff /tmp/Cats.txt /tmp/Dogs.txt
"""### 1.2) Find the corrupted files
#### Find the corrupted files
"""
# Will be quiet, except for errors
# see [https://peteris.rocks/blog/quiet-and-unattended-installation-with-apt-get/]
!apt-get install imagemagick -qq > /dev/null
# Examples that are corrupted : Cat/1418.jpg, Cat/4293.jpg, Cat/666.jpg
# Can take a bit of time to check all 25000 images
!mogrify -set comment 'Image rewritten with ImageMagick' dataset/CatsAndDogs/PetImages/*/*.jpg |& tee dataset/CatsAndDogs/mogrify_output
#!cat dataset/CatsAndDogs/mogrify_output
"""#### Fix some problems with a certain picture in Cats (handmade)"""
# Sanity check for later
!ls dataset/CatsAndDogs/PetImages/Cat/*.jpg | sed -E 's#.*/##' | sort > /tmp/Cats.txt
!ls dataset/CatsAndDogs/PetImages/Dog/*.jpg | sed -E 's#.*/##' | sort > /tmp/Dogs.txt
!diff /tmp/Cats.txt /tmp/Dogs.txt
# Cat 10404 has three versions...
from google.colab import files
import time
files.view('dataset/CatsAndDogs/PetImages/Cat/10404-0.jpg')
time.sleep(0.5)
files.view('dataset/CatsAndDogs/PetImages/Cat/10404-1.jpg')
time.sleep(0.5)
files.view('dataset/CatsAndDogs/PetImages/Cat/10404-2.jpg')
!rm dataset/CatsAndDogs/PetImages/Cat/10404-1.jpg dataset/CatsAndDogs/PetImages/Cat/10404-2.jpg
!mv dataset/CatsAndDogs/PetImages/Cat/10404-0.jpg dataset/CatsAndDogs/PetImages/Cat/10404.jpg
# Sanity check for later
!ls dataset/CatsAndDogs/PetImages/Cat/*.jpg | sed -E 's#.*/##' | sort > /tmp/Cats.txt
!ls dataset/CatsAndDogs/PetImages/Dog/*.jpg | sed -E 's#.*/##' | sort > /tmp/Dogs.txt
!diff /tmp/Cats.txt /tmp/Dogs.txt
"""### 1.3) Create the exclusion and description files
#### Functions to create the exclusion list and the global description
"""
from pathlib import Path
import re
import time
def checkExistanceAndEmptiness(output_file_path:str, doOverwrite:bool):
  """Decide how (and whether) an output file may be written.

  Returns a tuple ``(mode, okayToOverwrite)`` where ``mode`` is the mode
  string for ``open()``/``Path.open()`` and ``okayToOverwrite`` tells the
  caller whether writing is allowed at all.

  Bug fix: the original left ``mode`` unassigned when the file existed,
  was non-empty, and ``doOverwrite`` was False, so the final ``return``
  raised UnboundLocalError.  ``mode`` now always has a value; callers must
  still check ``okayToOverwrite`` before opening the file.
  """
  okayToOverwrite = True
  mode = 'w+'  # default for an existing file; overridden below as needed
  output_path = Path(output_file_path)
  if output_path.exists():
    print('File exists')
    if output_path.stat().st_size != 0:
      print('File is not empty')
      if not doOverwrite:
        okayToOverwrite = False
        print('not over-writing')
      else:
        print('over-writing')
    else:
      print('File is empty')
  else:
    print('File don\'t exist')
    mode = 'w'
  return mode, okayToOverwrite
def createExclusionFile(dataset_dir_path:str, mogrify_output_file_path:str,
                        output_file_path:str, doOverwrite:bool=False):
  """Write the exclusion list of corrupted images, one relative path per line.

  Parses the log produced by the mogrify run and records, for each reported
  file, its path relative to ``dataset_dir_path`` into ``output_file_path``.
  Duplicate entries are written only once.

  dataset_dir_path: root folder of the dataset.
  mogrify_output_file_path: log file captured from the mogrify run.
  output_file_path: exclusion file to create.
  doOverwrite: overwrite ``output_file_path`` if it already exists
    (default False).
  """
  mode, okayToOverwrite = checkExistanceAndEmptiness(output_file_path, doOverwrite)
  dataset_path = Path(dataset_dir_path)
  output_path = Path(output_file_path)
  print(dataset_path)
  if not okayToOverwrite:
    return
  jpg_pattern = re.compile('dataset/.*/[0-9]*.jpg')
  already_written = []
  with output_path.open(mode) as outfile:
    with Path(mogrify_output_file_path).open('r') as logfile:
      for raw_line in logfile:
        text = raw_line[:-1] if raw_line.endswith("\n") else raw_line
        # Keep only the first file path mentioned on the line.
        hit = jpg_pattern.findall(text)[0]
        relative = str(Path(hit).relative_to(dataset_path))
        if relative not in already_written:
          outfile.write(relative + "\n")
          already_written.append(relative)
def createGlobalDescription(dataset_dir_path:str, exclude_img_file_path:str,
                            output_file_path:str, doOverwrite:bool=False):
  """Generate the description file for the whole dataset.

  Each output line is a TAB-separated pair: the image path relative to the
  dataset folder, then its integer class index.

  dataset_dir_path       path to the dataset folder (must contain 'PetImages/').
  exclude_img_file_path  path to a file listing corrupted images to skip,
                         one relative path per line, e.g.:
                             path/to/file1.jpg
                             path/to/file2.jpg
  output_file_path       path of the description file to create.
  doOverwrite            overwrite output_file_path if it already exists
                         (False by default).
  """
  # Read the exclusion file into a list of normalised relative paths.
  exclude_path = Path(exclude_img_file_path)
  exclude_img_list = []
  with exclude_path.open('r') as file:
    for line in file.readlines():
      if line.endswith("\n"):
        line = line[:-1]
      line = str(Path(line)) # To be able to compare it to other file path
      exclude_img_list.append(line)
  print("exclude_img_list", exclude_img_list)
  # Count which class has the most exclusions, so the same number of images
  # can be skipped on each side (keeps the two classes balanced).
  count_cat = 0; count_dog = 0
  for exclude_file in exclude_img_list:
    # NOTE(review): splitting on "/" assumes POSIX-style separators; on
    # Windows str(Path(...)) uses "\\" and this would always count as dog
    # -- confirm the target platform.
    if exclude_file.split("/")[-2] == 'Cat':
      count_cat += 1
    else:
      count_dog += 1
  print("count_cat", count_cat, "count_dog", count_dog)
  # Extra images to drop from each class so both end up equally reduced.
  left_to_exclude_dogs = count_cat-count_dog if count_cat >= count_dog else 0
  left_to_exclude_cats = count_dog-count_cat if count_dog >= count_cat else 0
  # Check if file exists or not and gives the write or write and read depending,
  # as well as the bolean to overwrite or not the file
  mode, okayToOverwrite = checkExistanceAndEmptiness(output_file_path, doOverwrite)
  output_path = Path(output_file_path)
  # Write the description file.
  if okayToOverwrite:
    with output_path.open(mode) as file:
      #writing in the file
      ds_dir_path = Path(dataset_dir_path)
      # Class index: 0 for the first class directory encountered, 1 for the
      # next -- depends on the iteration order of iterdir().
      class_num = -1
      for class_dir in ds_dir_path.joinpath("PetImages").iterdir():
        if class_dir.is_dir():
          class_num += 1
          print("  class_dir", class_dir)
          print("  class_num", class_num)
          if str(class_dir).endswith('Cat'):
            left_to_exclude_count = left_to_exclude_cats
            print("  left_to_exclude_count for Cats is :", left_to_exclude_count)
          else:
            left_to_exclude_count = left_to_exclude_dogs
            print("  left_to_exclude_count for Dogs is :", left_to_exclude_count)
          added_count = 0
          for class_img in class_dir.iterdir():
            # Only numeric-named .jpg files belong to the dataset.
            if class_img.match('[0-9]*.jpg'):
              local_image_path = class_img.relative_to(ds_dir_path)
              # Check for exclusion
              if str(local_image_path) not in exclude_img_list:
                # NOTE(review): balancing drops the *first* images seen in
                # the directory, not a random sample -- confirm intended.
                if left_to_exclude_count > 0:
                  left_to_exclude_count -= 1
                  print("  > that was a left to exclude", local_image_path)
                else:
                  file.write(str(local_image_path) + "\t" + str(class_num) + "\n")
                  added_count += 1
              else:
                print("  > excluded from the exclusion list", local_image_path)
          if str(class_dir).endswith('Cat'):
            print("Added", added_count, "cats to the description file")
          else:
            print("Added", added_count, "dogs to the description file")
"""#### Create the exclusion list and the global description"""
createExclusionFile(dataset_dir_path='./dataset/CatsAndDogs',
mogrify_output_file_path='./dataset/CatsAndDogs/mogrify_output',
output_file_path='./dataset/CatsAndDogs/exclude.txt',
doOverwrite=True)
createGlobalDescription(dataset_dir_path='./dataset/CatsAndDogs',
exclude_img_file_path='./dataset/CatsAndDogs/exclude.txt',
output_file_path='./dataset/CatsAndDogs/description.txt',
doOverwrite=True)
"""### 1.4) Create a training and a test set
##### The python generator for the dataset
"""
from pathlib import Path
import tensorflow as tf
import numpy as np
import cv2
import random
import math
"""## 2) ViT Colab
##### Copyright 2020 Google LLC.
"""
#@title Licensed under the Apache License, Version 2.0 (the "License");
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""<a href="https://colab.research.google.com/github/google-research/vision_transformer/blob/master/vit_jax.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>
### Setup
Needs to be executed once in every VM.
The cell below downloads the code from Github and install necessary dependencies.
"""
#@markdown Select whether you would like to store data in your personal drive.
#@markdown
#@markdown If you select **yes**, you will need to authorize Colab to access
#@markdown your personal drive
#@markdown
#@markdown If you select **no**, then any changes you make will diappear when
#@markdown this Colab's VM restarts after some time of inactivity...
use_gdrive = 'yes' #@param ["yes", "no"]
if use_gdrive == 'yes':
from google.colab import drive
drive.mount('/gdrive')
root = '/gdrive/My Drive/vision_transformer_colab'
import os
if not os.path.isdir(root):
os.mkdir(root)
os.chdir(root)
print(f'\nChanged CWD to "{root}"')
else:
from IPython import display
display.display(display.HTML(
'<h1 style="color:red">CHANGES NOT PERSISTED</h1>'))
# Clone repository and pull latest changes.
![ -d vision_transformer ] || git clone --depth=1 https://github.com/google-research/vision_transformer
!cd vision_transformer && git pull
!pip install -qr vision_transformer/vit_jax/requirements.txt
#!pip install -r vision_transformer/vit_jax/requirements.txt
"""### Imports"""
# Shows all available pre-trained models.
!gsutil ls -lh gs://vit_models/*
"""For now let's try with `ViT-B_16` (pre-trained on imagenet21k, no fine tunning)."""
# Download a pre-trained model.
model = 'ViT-B_16'
![ -e "$model".npz ] || gsutil cp gs://vit_models/imagenet21k/"$model".npz .
#@markdown TPU setup : Boilerplate for connecting JAX to TPU.
import os
if 'google.colab' in str(get_ipython()) and 'COLAB_TPU_ADDR' in os.environ:
# Make sure the Colab Runtime is set to Accelerator: TPU.
import requests
if 'TPU_DRIVER_MODE' not in globals():
url = 'http://' + os.environ['COLAB_TPU_ADDR'].split(':')[0] + ':8475/requestversion/tpu_driver0.1-dev20191206'
resp = requests.post(url)
TPU_DRIVER_MODE = 1
# The following is required to use TPU Driver as JAX's backend.
from jax.config import config
config.FLAGS.jax_xla_backend = "tpu_driver"
config.FLAGS.jax_backend_target = "grpc://" + os.environ['COLAB_TPU_ADDR']
print('Registered TPU:', config.FLAGS.jax_backend_target)
else:
print('No TPU detected. Can be changed under "Runtime/Change runtime type".')
import flax
import jax
from matplotlib import pyplot as plt
import numpy as np
import tqdm
# Shows the number of available devices.
# In a CPU/GPU runtime this will be a single device.
# In a TPU runtime this will be 8 cores.
jax.local_devices()
# Open some code files in a split editor on the right.
# You can open more files in the file tab on the left.
from google.colab import files
files.view('vision_transformer/vit_jax/checkpoint.py')
files.view('vision_transformer/vit_jax/input_pipeline.py')
files.view('vision_transformer/vit_jax/models.py')
files.view('vision_transformer/vit_jax/momentum_clip.py')
files.view('vision_transformer/vit_jax/train.py')
files.view('vision_transformer/vit_jax/hyper.py')
# Commented out IPython magic to ensure Python compatibility.
# Import files from repository.
# Updating the files in the editor on the right will immediately update the
# modules by re-importing them.
import sys
if './vision_transformer' not in sys.path:
sys.path.append('./vision_transformer')
# From https://ipython.org/ipython-doc/3/config/extensions/autoreload.html
# Reload all modules (except those excluded by %aimport) every time before
# executing the Python code typed.
# %load_ext autoreload
# %autoreload 2
from vit_jax import checkpoint
from vit_jax import hyper
from vit_jax import input_pipeline
from vit_jax import logging
from vit_jax import models
from vit_jax import momentum_clip
from vit_jax import train
logger = logging.setup_logger('./logs')
# Helper functions for images.
labelnames = dict(
# https://www.cs.toronto.edu/~kriz/cifar.html
cifar10=('airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck'),
# https://www.cs.toronto.edu/~kriz/cifar.html
cifar100=('apple', 'aquarium_fish', 'baby', 'bear', 'beaver', 'bed', 'bee', 'beetle', 'bicycle', 'bottle', 'bowl', 'boy', 'bridge', 'bus', 'butterfly', 'camel', 'can', 'castle', 'caterpillar', 'cattle', 'chair', 'chimpanzee', 'clock', 'cloud', 'cockroach', 'couch', 'crab', 'crocodile', 'cup', 'dinosaur', 'dolphin', 'elephant', 'flatfish', 'forest', 'fox', 'girl', 'hamster', 'house', 'kangaroo', 'computer_keyboard', 'lamp', 'lawn_mower', 'leopard', 'lion', 'lizard', 'lobster', 'man', 'maple_tree', 'motorcycle', 'mountain', 'mouse', 'mushroom', 'oak_tree', 'orange', 'orchid', 'otter', 'palm_tree', 'pear', 'pickup_truck', 'pine_tree', 'plain', 'plate', 'poppy', 'porcupine', 'possum', 'rabbit', 'raccoon', 'ray', 'road', 'rocket', 'rose', 'sea', 'seal', 'shark', 'shrew', 'skunk', 'skyscraper', 'snail', 'snake', 'spider', 'squirrel', 'streetcar', 'sunflower', 'sweet_pepper', 'table', 'tank', 'telephone', 'television', 'tiger', 'tractor', 'train', 'trout', 'tulip', 'turtle', 'wardrobe', 'whale', 'willow_tree', 'wolf', 'woman', 'worm'),
# Addition for Dogs and Cats
dogscats=('dog', 'cat')
)
def make_label_getter(dataset):
  """Returns a function converting label indices to names.

  *dataset* must be a key of the module-level ``labelnames`` dict (e.g.
  'cifar10', 'dogscats'); the returned callable maps an integer class index
  to its human-readable name.
  """
  # Fix: the original returned `getter` without ever defining it, so every
  # call raised NameError. Reconstructed from the call sites
  # (map(make_label_getter(dataset), labels.argmax(...))) and `labelnames`.
  def getter(label):
    return labelnames[dataset][label]
  return getter
def show_img(img, ax=None, title=None):
  """Display a single image on *ax*, or on the current pyplot axes."""
  axes = plt.gca() if ax is None else ax
  axes.imshow(img[...])
  # Hide both tick axes: these are pictures, not plots.
  axes.set_xticks([])
  axes.set_yticks([])
  if title:
    axes.set_title(title)
def show_img_grid(imgs, titles):
  """Lay the images out on a square grid of subplots, one per image."""
  side = int(np.ceil(len(imgs) ** 0.5))
  _, axes = plt.subplots(side, side, figsize=(3 * side, 3 * side))
  for idx, (image, caption) in enumerate(zip(imgs, titles)):
    row, col = divmod(idx, side)
    # Images arrive normalized to [-1, 1]; map back to [0, 1] for display.
    show_img((image + 1) / 2, axes[row][col], caption)
"""### Load the Python Generator"""
num_devices = len(jax.local_devices())
# The bypass
batch_size = 64
num_classes = 2
dataset = 'dogscats'
dgscts_train = MyDogsCats(ds_description_path='/content/dataset/CatsAndDogs/description.txt',
dataset_path='/content/dataset/CatsAndDogs',
set_type='train',
train_prop=0.8)
dgscts_test = MyDogsCats(ds_description_path='/content/dataset/CatsAndDogs/description.txt',
dataset_path='/content/dataset/CatsAndDogs',
set_type='test',
train_prop=0.8)
ds_train = dgscts_train.getDataset().batch(batch_size, drop_remainder=True)
ds_test = dgscts_test.getDataset().batch(batch_size, drop_remainder=True)
if num_devices is not None:
ds_train = ds_train.map(_shard, tf.data.experimental.AUTOTUNE)
ds_test = ds_test.map(_shard, tf.data.experimental.AUTOTUNE)
ds_test = ds_test.prefetch(buffer_size=tf.data.experimental.AUTOTUNE)
ds_train = ds_train.prefetch(buffer_size=tf.data.experimental.AUTOTUNE)
"""### Load dataset"""
# Fetch a batch of test images for illustration purposes.
batch = next(iter(ds_test.as_numpy_iterator()))
# Note the shape : [num_local_devices, local_batch_size, h, w, c]
# print(batch)
print(batch['image'].shape)
print(batch['label'].shape)
# Show some imags with their labels.
images, labels = batch['image'][1][:9], batch['label'][1][:9]
titles = map(make_label_getter(dataset), labels.argmax(axis=1))
show_img_grid(images, titles)
# Same as above, but with train images.
# Do you spot a difference?
# Check out input_pipeline.get_data() in the editor at your right to see how the
# images are preprocessed differently.
batch = next(iter(ds_train.as_numpy_iterator()))
images, labels = batch['image'][1][:9], batch['label'][1][:9]
titles = map(make_label_getter(dataset), labels.argmax(axis=1))
show_img_grid(images, titles)
[print(i.shape) for i in images]
"""### Load pre-trained"""
# Load model definition & initialize random parameters.
VisionTransformer = models.KNOWN_MODELS[model].partial(num_classes=num_classes)
_, params = VisionTransformer.init_by_shape(
jax.random.PRNGKey(0),
# Discard the "num_local_devices" dimension of the batch for initialization.
[(batch['image'].shape[1:], batch['image'].dtype.name)])
# Load and convert pretrained checkpoint.
# This involves loading the actual pre-trained model results, but then also also
# modifying the parameters a bit, e.g. changing the final layers, and resizing
# the positional embeddings.
# For details, refer to the code and to the methods of the paper.
params = checkpoint.load_pretrained(
pretrained_path=f'{model}.npz',
init_params=params,
model_config=models.CONFIGS[model],
logger=logger,
)
"""### Evaluate"""
# So far, all our data is in the host memory. Let's now replicate the arrays
# into the devices.
# This will make every array in the pytree params become a ShardedDeviceArray
# that has the same data replicated across all local devices.
# For TPU it replicates the params in every core.
# For a single GPU this simply moves the data onto the device.
# For CPU it simply creates a copy.
params_repl = flax.jax_utils.replicate(params)
print('params.cls:', type(params['cls']).__name__, params['cls'].shape)
print('params_repl.cls:', type(params_repl['cls']).__name__, params_repl['cls'].shape)
# Then map the call to our model's forward pass onto all available devices.
vit_apply_repl = jax.pmap(VisionTransformer.call)
def get_accuracy(params_repl):
  """Evaluate *params_repl* on the test set and return the top-1 accuracy."""
  correct = 0
  seen = 0
  # One pass over the test set; trange only bounds/visualises progress.
  num_steps = dgscts_test.num_samples // batch_size
  batches = ds_test.as_numpy_iterator()
  for _, batch in zip(tqdm.notebook.trange(num_steps), batches):
    logits = vit_apply_repl(params_repl, batch['image'])
    # Labels are one-hot, so compare the argmax of both sides.
    hits = logits.argmax(axis=-1) == batch['label'].argmax(axis=-1)
    correct += hits.sum()
    seen += len(hits.flatten())
  return correct / seen
# Random performance without fine-tuning.
get_accuracy(params_repl)
"""### Fine-tune"""
# 100 Steps take approximately 15 minutes in the TPU runtime.
total_steps = 10 ## 100
warmup_steps = 5
decay_type = 'cosine'
grad_norm_clip = 1
# This controls in how many forward passes the batch is split. 8 works well with
# a TPU runtime that has 8 devices. 64 should work on a GPU. You can of course
# also adjust the batch_size above, but that would require you to adjust the
# learning rate accordingly.
accum_steps = 8
base_lr = 0.03
# Check out train.make_update_fn in the editor on the right side for details.
update_fn_repl = train.make_update_fn(VisionTransformer.call, accum_steps)
# We use a momentum optimizer that uses half precision for state to save
# memory. It als implements the gradient clipping.
opt = momentum_clip.Optimizer(grad_norm_clip=grad_norm_clip).create(params)
opt_repl = flax.jax_utils.replicate(opt)
lr_fn = hyper.create_learning_rate_schedule(total_steps, base_lr, decay_type, warmup_steps)
# Prefetch entire learning rate schedule onto devices. Otherwise we would have
# a slow transfer from host to devices in every step.
lr_iter = hyper.lr_prefetch_iter(lr_fn, 0, total_steps)
# Initialize PRNGs for dropout.
update_rngs = jax.random.split(jax.random.PRNGKey(0), jax.local_device_count())
# The world's simplest training loop.
# Completes in ~20 min on the TPU runtime.
for step, batch, lr_repl in zip(
tqdm.notebook.trange(1, total_steps + 1),
ds_train.as_numpy_iterator(),
lr_iter
):
print("loop", step, batch['image'].shape, batch['label'].shape)
opt_repl, loss_repl, update_rngs = update_fn_repl(
opt_repl, lr_repl, batch, update_rngs)
print("fini la loop", type(opt_repl), type(loss_repl), type(update_rngs))
# Should be ~97.2% for CIFAR10
# Should be ~71.2% for CIFAR100
get_accuracy(opt_repl.target)
"""### Inference"""
# Download model pre-trained on imagenet21k and fine-tuned on imagenet2012.
![ -e "$model"_imagenet2012.npz ] || gsutil cp gs://vit_models/imagenet21k+imagenet2012/"$model".npz "$model"_imagenet2012.npz
VisionTransformer = models.KNOWN_MODELS[model].partial(num_classes=1000)
# Load and convert pretrained checkpoint.
params = checkpoint.load(f'{model}_imagenet2012.npz')
params['pre_logits'] = {} # Need to restore empty leaf for Flax.
# Get imagenet labels.
!wget https://storage.googleapis.com/bit_models/ilsvrc2012_wordnet_lemmas.txt
imagenet_labels = dict(enumerate(open('ilsvrc2012_wordnet_lemmas.txt')))
# Get a random picture with the correct dimensions.
!wget https://picsum.photos/384 -O picsum.jpg
import PIL
img = PIL.Image.open('picsum.jpg')
img
# Predict on a batch with a single item (note very efficient TPU usage...)
logits, = VisionTransformer.call(params, (np.array(img) / 128 - 1)[None, ...])
preds = flax.nn.softmax(logits)
for idx in preds.argsort()[:-11:-1]:
print(f'{preds[idx]:.5f} : {imagenet_labels[idx]}', end='')
"""## 3) Nos test
### Resize sans garder les proportions
"""
# Get a random picture with the correct dimensions.
!wget https://lorraine.gatech.edu/sites/default/files/uploads/images/superblock_images/metz-campus.jpeg -O pic_gatech.jpg
import PIL
img = PIL.Image.open('pic_gatech.jpg')
#img
img = img.resize((384,384))
img
# Predict on a batch with a single item (note very efficient TPU usage...)
logits, = VisionTransformer.call(params, (np.array(img) / 128 - 1)[None, ...])
preds = flax.nn.softmax(logits)
for idx in preds.argsort()[:-11:-1]:
print(f'{preds[idx]:.5f} : {imagenet_labels[idx]}', end='')
"""### Resize en gardant une propostion carr"""
# Get a random picture with the correct dimensions.
!wget https://lorraine.gatech.edu/sites/default/files/uploads/images/superblock_images/metz-campus.jpeg -O pic_gatech.jpg
import PIL
img = PIL.Image.open('pic_gatech.jpg')
(w, h) = (img.width, img.height)
if w>=h:
crop_box = ((w/2)-(h/2), 0, (w/2)+(h/2), h)
else:
crop_box = ((h/2)-(w/2), 0, (h/2)+(w/2), w)
img = img.resize((384,384), box=crop_box)
img
# Predict on a batch with a single item (note very efficient TPU usage...)
logits, = VisionTransformer.call(params, (np.array(img) / 128 - 1)[None, ...])
preds = flax.nn.softmax(logits)
for idx in preds.argsort()[:-11:-1]:
print(f'{preds[idx]:.5f} : {imagenet_labels[idx]}', end='') | 38.007614 | 1,046 | 0.67576 |
36c9545921e82accc771994b4028870845e16cb0 | 19,349 | py | Python | tests/test_cli.py | jameswilkerson/elex | 27733e3c473fef48676f8bdd56247bee49ad32ea | [
"Apache-2.0"
] | 183 | 2015-11-25T15:13:47.000Z | 2022-01-07T23:02:36.000Z | tests/test_cli.py | jameswilkerson/elex | 27733e3c473fef48676f8bdd56247bee49ad32ea | [
"Apache-2.0"
] | 198 | 2015-11-24T16:48:48.000Z | 2020-10-26T10:38:56.000Z | tests/test_cli.py | jameswilkerson/elex | 27733e3c473fef48676f8bdd56247bee49ad32ea | [
"Apache-2.0"
] | 65 | 2015-12-03T21:29:38.000Z | 2021-08-10T20:03:49.000Z | import csv
import sys
import json
import tests
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
from six import with_metaclass
from elex.cli.app import ElexApp
from collections import OrderedDict
DATA_FILE = 'tests/data/20151103_national.json'
DATA_ELECTION_DATE = '2015-11-03'
DELSUM_DATA_FILE = 'tests/data/20160118_delsum.json'
DELSUPER_DATA_FILE = 'tests/data/20160118_delsuper.json'
ELECTIONS_DATA_FILE = 'tests/data/00000000_elections.json'
DISTRICT_DATA_FILE = 'tests/data/20160201_district_results.json'
TEST_COMMANDS = [
'races',
'candidates',
'reporting-units',
'candidate-reporting-units',
'results',
]
| 33.826923 | 80 | 0.567213 |
36cad5c25faaf8cf1d768a98197ce4f6fa877fa3 | 4,321 | py | Python | unipipeline/worker/uni_worker_consumer.py | aliaksandr-master/unipipeline | d8eac38534172aee59ab5777321cabe67f3779ef | [
"MIT"
] | null | null | null | unipipeline/worker/uni_worker_consumer.py | aliaksandr-master/unipipeline | d8eac38534172aee59ab5777321cabe67f3779ef | [
"MIT"
] | 1 | 2021-09-14T13:08:13.000Z | 2021-09-14T13:08:13.000Z | unipipeline/worker/uni_worker_consumer.py | aliaksandr-master/unipipeline | d8eac38534172aee59ab5777321cabe67f3779ef | [
"MIT"
] | null | null | null | from typing import TypeVar, Generic, Optional, Type, Any, Union, Dict, TYPE_CHECKING
from unipipeline.errors.uni_payload_error import UniPayloadParsingError, UniAnswerPayloadParsingError
from unipipeline.errors.uni_sending_to_worker_error import UniSendingToWorkerError
from unipipeline.answer.uni_answer_message import UniAnswerMessage
from unipipeline.brokers.uni_broker_message_manager import UniBrokerMessageManager
from unipipeline.errors.uni_work_flow_error import UniWorkFlowError
from unipipeline.message.uni_message import UniMessage
from unipipeline.message_meta.uni_message_meta import UniMessageMeta, UniMessageMetaErrTopic, UniAnswerParams
from unipipeline.worker.uni_worker import UniWorker
from unipipeline.worker.uni_worker_consumer_manager import UniWorkerConsumerManager
from unipipeline.worker.uni_worker_consumer_message import UniWorkerConsumerMessage
from unipipeline.definitions.uni_worker_definition import UniWorkerDefinition
if TYPE_CHECKING:
from unipipeline.modules.uni_mediator import UniMediator
TInputMsgPayload = TypeVar('TInputMsgPayload', bound=UniMessage)
TAnswerMsgPayload = TypeVar('TAnswerMsgPayload', bound=Optional[UniMessage])
| 61.728571 | 207 | 0.765564 |
36cae8cc11223214274fe92b0ac8c6515461f9fe | 1,825 | py | Python | funing/_ui/about.py | larryw3i/Funing | 8ef88af8766f0246614517fa00f3b322ba722d6b | [
"MIT"
] | 1 | 2021-08-22T05:56:09.000Z | 2021-08-22T05:56:09.000Z | funing/_ui/about.py | larryw3i/Funing | 8ef88af8766f0246614517fa00f3b322ba722d6b | [
"MIT"
] | null | null | null | funing/_ui/about.py | larryw3i/Funing | 8ef88af8766f0246614517fa00f3b322ba722d6b | [
"MIT"
] | null | null | null |
import gettext
import os
import re
import subprocess
import sys
import time
import tkinter as tk
import tkinter.filedialog as tkf
import uuid
import webbrowser
from datetime import date, datetime
from enum import Enum
from tkinter import *
from tkinter import messagebox
from tkinter.ttk import *
import cv2
import numpy as np
import pygubu
import yaml
from PIL import Image, ImageTk
from funing import *
from funing.locale import _
from funing.settings import *
translator = _
| 23.397436 | 73 | 0.656438 |
36cd33528502d61cfd130bce552b6359665140f3 | 8,039 | py | Python | pysnmp-with-texts/Fore-Common-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/Fore-Common-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/Fore-Common-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module Fore-Common-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Fore-Common-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:14:34 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection", "ValueSizeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Bits, MibIdentifier, enterprises, Counter64, Unsigned32, ModuleIdentity, Counter32, TimeTicks, NotificationType, ObjectIdentity, IpAddress, Gauge32, Integer32, iso, MibScalar, MibTable, MibTableRow, MibTableColumn = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "MibIdentifier", "enterprises", "Counter64", "Unsigned32", "ModuleIdentity", "Counter32", "TimeTicks", "NotificationType", "ObjectIdentity", "IpAddress", "Gauge32", "Integer32", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
fore = ModuleIdentity((1, 3, 6, 1, 4, 1, 326))
if mibBuilder.loadTexts: fore.setLastUpdated('9911050000Z')
if mibBuilder.loadTexts: fore.setOrganization('Marconi Communications')
if mibBuilder.loadTexts: fore.setContactInfo(' Postal: Marconi Communications, Inc. 5000 Marconi Drive Warrendale, PA 15086-7502 Tel: +1 724 742 6999 Email: bbrs-mibs@marconi.com Web: http://www.marconi.com')
if mibBuilder.loadTexts: fore.setDescription('Definitions common to all FORE private MIBS.')
admin = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1))
systems = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2))
foreExperiment = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 3))
operations = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1, 1))
snmpErrors = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1, 2))
snmpTrapDest = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1, 3))
snmpAccess = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1, 4))
assembly = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1, 5))
fileXfr = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1, 6))
rmonExtensions = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1, 7))
preDot1qVlanMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1, 8))
snmpTrapLog = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1, 9))
ilmisnmp = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1, 10))
entityExtensionMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1, 11))
ilmiRegistry = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1, 14))
foreIfExtension = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1, 15))
frameInternetworking = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1, 16))
ifExtensions = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 1, 17))
atmAdapter = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 1))
atmSwitch = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2))
etherSwitch = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 3))
atmAccess = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 5))
hubSwitchRouter = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 6))
ipoa = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 7))
stackSwitch = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 10))
switchRouter = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 15))
software = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 2))
asxd = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 2, 1))
hardware = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 1))
asx = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 1, 1))
asx200wg = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 4))
asx200bx = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 5))
asx200bxe = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 6))
cabletron9A000 = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 7))
asx1000 = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 8))
le155 = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 9))
sfcs200wg = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 10))
sfcs200bx = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 11))
sfcs1000 = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 12))
tnx210 = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 15))
tnx1100 = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 16))
asx1200 = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 17))
asx4000 = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 18))
le25 = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 19))
esx3000 = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 20))
tnx1100b = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 21))
asx150 = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 22))
bxr48000 = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 24))
asx4000m = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 25))
axhIp = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 26))
axhSig = MibIdentifier((1, 3, 6, 1, 4, 1, 326, 2, 2, 27))
mibBuilder.exportSymbols("Fore-Common-MIB", ilmiRegistry=ilmiRegistry, fore=fore, ilmisnmp=ilmisnmp, NsapPrefix=NsapPrefix, atmAccess=atmAccess, snmpTrapDest=snmpTrapDest, rmonExtensions=rmonExtensions, preDot1qVlanMIB=preDot1qVlanMIB, operations=operations, ipoa=ipoa, software=software, tnx1100=tnx1100, snmpErrors=snmpErrors, sfcs200bx=sfcs200bx, snmpAccess=snmpAccess, sfcs200wg=sfcs200wg, le25=le25, sfcs1000=sfcs1000, esx3000=esx3000, frameInternetworking=frameInternetworking, asx4000m=asx4000m, AtmAddress=AtmAddress, assembly=assembly, ConnectionType=ConnectionType, axhIp=axhIp, bxr48000=bxr48000, ifExtensions=ifExtensions, asx=asx, asxd=asxd, asx4000=asx4000, TransitNetwork=TransitNetwork, fileXfr=fileXfr, EntryStatus=EntryStatus, foreIfExtension=foreIfExtension, asx1000=asx1000, asx200bxe=asx200bxe, axhSig=axhSig, TrapNumber=TrapNumber, SpansAddress=SpansAddress, IntegerBitString=IntegerBitString, atmSwitch=atmSwitch, cabletron9A000=cabletron9A000, AtmSigProtocol=AtmSigProtocol, tnx1100b=tnx1100b, asx200bx=asx200bx, etherSwitch=etherSwitch, asx1200=asx1200, hubSwitchRouter=hubSwitchRouter, entityExtensionMIB=entityExtensionMIB, switchRouter=switchRouter, NsapAddr=NsapAddr, asx200wg=asx200wg, systems=systems, atmAdapter=atmAdapter, foreExperiment=foreExperiment, PYSNMP_MODULE_ID=fore, admin=admin, le155=le155, GeneralState=GeneralState, hardware=hardware, stackSwitch=stackSwitch, asx150=asx150, tnx210=tnx210, snmpTrapLog=snmpTrapLog)
| 73.752294 | 1,461 | 0.707302 |
36d0e1753fba4845d6f1c53b001fd0c1077f6cbc | 2,753 | py | Python | lib/logger.py | YahiaKandeel/ironport-correlator | cb426f412dba403f056c40eef631f0c252eada08 | [
"Apache-2.0"
] | 6 | 2019-10-28T01:18:51.000Z | 2022-01-26T11:43:14.000Z | lib/logger.py | YahiaKandeel/ironport-correlator | cb426f412dba403f056c40eef631f0c252eada08 | [
"Apache-2.0"
] | null | null | null | lib/logger.py | YahiaKandeel/ironport-correlator | cb426f412dba403f056c40eef631f0c252eada08 | [
"Apache-2.0"
] | 2 | 2020-04-30T11:17:27.000Z | 2021-11-17T02:26:48.000Z | ################################################################################
# Styler & Logger
################################################################################
from logging.handlers import SysLogHandler
import logging
import json
import pprint
import time
from .decoder import decode
import collections
# Log Keys Order
# Ordered list of the fields emitted for every correlated mail message:
# the JSON log line is built by reading these keys, in this order, from
# the message dict (see style() below).
keys = [
    'ICID', 'MID', "MessageID", 'Related_MID',
    'OutbreakFilters', 'CASE', 'GRAYMAIL', 'Antivirus', 'LDAP_Drop',
    'SPF', 'DKIM', 'DKIM_Detail', 'DMARK', 'DMARK_Detail',
    "Subject", "Attachments", "From", "To",
    "SenderReputation", "ThreatCategory", "SuspectedDomains", "DomainAge",
    'Action', 'Action_Desc', 'Content_Filter', "IP", "Other"
]
# Syslog
def syslog(siemContext):
    '''
    Build and return a syslog-backed logger for the given SIEM context.

    siemContext is a mapping with the keys "server", "port" and "ident";
    messages are sent over syslog to server:port under the LOCAL5 facility,
    using "ident" as the logger name.
    '''
    syslog_handler = SysLogHandler(
        address=(siemContext["server"], siemContext["port"]),
        facility=SysLogHandler.LOG_LOCAL5,
    )
    # The name/message format mirrors a classic syslog "tag: payload" line.
    syslog_handler.setFormatter(logging.Formatter('%(name)s: %(message)r'))
    siem_logger = logging.getLogger(siemContext["ident"])
    siem_logger.setLevel(logging.INFO)
    siem_logger.addHandler(syslog_handler)
    return siem_logger
def style(message, msgexpand):
    '''
    Render one correlated mail *message* dict into syslog-ready JSON strings.

    message    dict mapping field names (see the module-level `keys` list)
               to lists of collected values.
    msgexpand  when true, emit one JSON document per recipient in
               message['To']; otherwise a single document with all
               recipients joined.

    Returns a list of JSON strings.
    '''
    message_log = collections.OrderedDict()
    result = []
    for key in keys:
        values = filter(None, message.get(key, []))
        # Fix: deduplicate while preserving first-seen order. The original
        # ' || '.join(list(set(values))) made the joined field order
        # nondeterministic from one run to the next, which hurts log
        # correlation and diffing.
        message_log[key] = ' || '.join(dict.fromkeys(values))
    # Decode MIME encoded-words in the subject for readability.
    message_log["Subject"] = decode(message_log["Subject"])
    # message_log["Attachments"] = decode(message_log["Attachments"])
    if msgexpand:
        # One JSON document per recipient: overwrite 'To' each iteration;
        # json.dumps snapshots the dict, so sharing message_log is safe.
        for recipient in message.get('To', []):
            message_log['To'] = recipient
            result.append(
                json.dumps(message_log, ensure_ascii=False))
    else:
        result.append(
            json.dumps(message_log, ensure_ascii=False))
    return result
def syslogger(logger_queue, siemContext, options):
    '''
    Logger Process

    Long-running worker: pulls correlated message dicts off *logger_queue*,
    renders them with style() and ships each JSON line to the syslog server
    described by *siemContext*.  Never returns; intended to run as a
    dedicated process.

    logger_queue -- queue of single-entry mappings {MID: message-dict}
    siemContext  -- dict with "server"/"port"/"ident", passed to syslog()
    options      -- dict; options["expand"] toggles one-line-per-recipient
    '''
    print("\t[+]Starting Logger Process")
    # Logger
    logger = syslog(siemContext)
    while True:
        # Get Data from Logger Queue
        # NOTE(review): the falsy branch below implies .get() can return an
        # empty/None result (non-blocking queue or a None sentinel) --
        # confirm the queue's semantics; a standard blocking Queue.get()
        # would make the sleep branch unreachable in normal operation.
        data = logger_queue.get()
        # If there is a message
        if data:
            # Exactly one (MID, message) pair is expected per queue item.
            [(mid, message)] = data.items()
            # Style It
            messages = style(message, options["expand"])
            # Log: ship to syslog, then echo a pretty-printed copy to stdout
            for message in messages:
                logger.info(message)
                print('\r\n'+'#' * 100)
                pprint.pprint(json.loads(message))
        else:
            # sleep briefly to avoid a hot spin when the queue is empty
            time.sleep(0.05)
| 27.808081 | 81 | 0.564475 |
36d148c1ce0bdea8709582045309f0f2acad2b33 | 954 | py | Python | services/web/apps/inv/inv/plugins/log.py | prorevizor/noc | 37e44b8afc64318b10699c06a1138eee9e7d6a4e | [
"BSD-3-Clause"
] | 84 | 2017-10-22T11:01:39.000Z | 2022-02-27T03:43:48.000Z | services/web/apps/inv/inv/plugins/log.py | prorevizor/noc | 37e44b8afc64318b10699c06a1138eee9e7d6a4e | [
"BSD-3-Clause"
] | 22 | 2017-12-11T07:21:56.000Z | 2021-09-23T02:53:50.000Z | services/web/apps/inv/inv/plugins/log.py | prorevizor/noc | 37e44b8afc64318b10699c06a1138eee9e7d6a4e | [
"BSD-3-Clause"
] | 23 | 2017-12-06T06:59:52.000Z | 2022-02-24T00:02:25.000Z | # ---------------------------------------------------------------------
# inv.inv log plugin
# ---------------------------------------------------------------------
# Copyright (C) 2007-2018 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# NOC modules
from .base import InvPlugin
| 28.909091 | 71 | 0.336478 |
36d22a39c7974086f08155ff6bf52d3cb2267f62 | 574 | py | Python | blender/arm/logicnode/transform/LN_separate_quaternion.py | niacdoial/armory | 3f9b633fbf772017c576a3f77695a6c28d9956e1 | [
"Zlib"
] | null | null | null | blender/arm/logicnode/transform/LN_separate_quaternion.py | niacdoial/armory | 3f9b633fbf772017c576a3f77695a6c28d9956e1 | [
"Zlib"
] | null | null | null | blender/arm/logicnode/transform/LN_separate_quaternion.py | niacdoial/armory | 3f9b633fbf772017c576a3f77695a6c28d9956e1 | [
"Zlib"
] | null | null | null | from arm.logicnode.arm_nodes import *
| 31.888889 | 57 | 0.679443 |
36d3212ee65298917f85198d847d449f780e78c7 | 3,110 | py | Python | tools/exporter_python/exporter.py | moriyalb/hades | ea2743a23022f65b3931eb482b6ec18804410ba3 | [
"MIT"
] | 5 | 2018-05-18T10:01:46.000Z | 2021-08-18T13:59:47.000Z | tools/exporter_python/exporter.py | moriyalb/hades | ea2743a23022f65b3931eb482b6ec18804410ba3 | [
"MIT"
] | null | null | null | tools/exporter_python/exporter.py | moriyalb/hades | ea2743a23022f65b3931eb482b6ec18804410ba3 | [
"MIT"
] | null | null | null | import getopt
import sys
import os
import schema
import server
import orm
# Maps each long command-line option selecting a client output path to the
# language shorthand used when generating that client's code.
CLIENT_TYPE = {
    '--client_lua_path' : "lua",
    '--client_cs_path' : "cs",
    '--client_cpp_path' : "cpp",
    '--client_js_path' : "js",
    '--client_python_path' : "python",
}
if __name__ == "__main__":
#try:
export()
#except Exception as e:
# print("Error in exporter -> ", e) | 31.734694 | 104 | 0.684566 |
36d456418e0f32038550bac5f2b5a0f1d2148fc5 | 707 | py | Python | python/python project/te330.py | WhitePhosphorus4/xh-learning-code | 025e31500d9f46d97ea634d7fd311c65052fd78e | [
"Apache-2.0"
] | null | null | null | python/python project/te330.py | WhitePhosphorus4/xh-learning-code | 025e31500d9f46d97ea634d7fd311c65052fd78e | [
"Apache-2.0"
] | null | null | null | python/python project/te330.py | WhitePhosphorus4/xh-learning-code | 025e31500d9f46d97ea634d7fd311c65052fd78e | [
"Apache-2.0"
] | null | null | null | import wx
app = App()
f = wx.Frame(None, -1, "Hello", [700, 500])
wx.Button(f, size = [0, 0])
#s = wx.Image("uu.png", wx.BITMAP_TYPE_ANY).ConvertToBitmap()
b = wx.Button(f, -1,'Hello', size = [80, 30], style = wx.BORDER_NONE)
#bb= wx.StaticBitmap(b, -1, wx.Image("uu.png", wx.BITMAP_TYPE_ANY).ConvertToBitmap())
b.SetBackgroundColour("#FEFEFE")
b.Bind(wx.EVT_BUTTON, A)
b.Bind(wx.EVT_ENTER_WINDOW, B)
b.Bind(wx.EVT_LEAVE_WINDOW, C)
f.Show()
app.MainLoop() | 27.192308 | 85 | 0.666195 |
36d4d897387b020fc8db80ecfcfac7847d28fc17 | 2,048 | py | Python | examples/sneswii2gamepad/code.py | dglaude/CircuitPython_Joystic_Controller | a9ef8855b9be457b25c9a436fcbbf6aebe39b4e9 | [
"Unlicense",
"MIT-0",
"MIT"
] | null | null | null | examples/sneswii2gamepad/code.py | dglaude/CircuitPython_Joystic_Controller | a9ef8855b9be457b25c9a436fcbbf6aebe39b4e9 | [
"Unlicense",
"MIT-0",
"MIT"
] | null | null | null | examples/sneswii2gamepad/code.py | dglaude/CircuitPython_Joystic_Controller | a9ef8855b9be457b25c9a436fcbbf6aebe39b4e9 | [
"Unlicense",
"MIT-0",
"MIT"
] | null | null | null | # SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries
# SPDX-License-Identifier: MIT
# You must add a gamepad HID device inside your boot.py file
# in order to use this example.
# See this Learn Guide for details:
# https://learn.adafruit.com/customizing-usb-devices-in-circuitpython/hid-devices#custom-hid-devices-3096614-9
import time
import board
import neopixel
led = neopixel.NeoPixel(board.NEOPIXEL, 1)
led.brightness = 0.3
led[0] = (0, 0, 0)
# SPDX-FileCopyrightText: 2021 John Furcean
# SPDX-License-Identifier: MIT
# Classic Controller also work with CLV-202.
# But the "Super Nintendo SNES Classic Mini Controller" has less button and not stick.
from wiichuck.classic_controller import ClassicController
controller = ClassicController(board.I2C())
# SPDX-FileCopyrightText: Copyright (c) 2021 Dan Halbert for Adafruit Industries
#
# SPDX-License-Identifier: Unlicense
import usb_hid
from hid_gamepad import Gamepad
gp = Gamepad(usb_hid.devices)
x=0
y=0
oldx=0
oldy=0
# Main poll loop: mirror controller buttons onto the NeoPixel status LED and
# translate the D-pad into USB HID joystick axes.
while True:
    # NOTE(review): assumes ClassicController.values unpacks as
    # (joystick, buttons, dpad, triggers) -- confirm against wiichuck docs.
    _, buttons, dpad, _ = controller.values
    # Face buttons only change the status LED colour.
    if buttons.A:
        led[0] = (255, 0, 0)
    if buttons.B:
        led[0] = (255, 255, 0)
    if buttons.X:
        led[0] = (0, 0, 255)
    if buttons.Y:
        led[0] = (0, 255, 0)
    # Shoulder and menu buttons switch the LED off and log the press.
    if buttons.R:
        led[0] = (0, 0, 0)
        print("button R")
    if buttons.L:
        led[0] = (0, 0, 0)
        print("button L")
    if buttons.start:
        led[0] = (0, 0, 0)
        print("button start")
    if buttons.select:
        led[0] = (0, 0, 0)
        print("button select")
    # Re-centre the Y axis once both vertical D-pad directions are released.
    if (y!=0) and not (dpad.up or dpad.down):
        y=0
    if dpad.up:
        y = 127
        led[0] = (0, 0, 0)
        print("dpad up")
    if dpad.down:
        y = -127
        led[0] = (0, 0, 0)
        print("dpad down")
    # Same re-centring for the X axis.
    if (x!=0) and not (dpad.right or dpad.left):
        x=0
    if dpad.right:
        x = 127
        led[0] = (0, 0, 0)
        print("dpad right")
    if dpad.left:
        x = -127
        led[0] = (0, 0, 0)
        print("dpad left")
    # Push the current axis state to the USB HID gamepad on every pass.
    gp.move_joysticks(x, y)
| 23.54023 | 110 | 0.605957 |
36d54bbaca57e4631b154f3ca77d029d7fd103ad | 3,345 | py | Python | spleeter/util.py | ashirviskas/spleeter-pytorch | 853d4bb6048fae879543342a8278c298854637f3 | [
"MIT"
] | 28 | 2019-11-29T10:23:16.000Z | 2022-03-28T13:02:29.000Z | spleeter/util.py | ashirviskas/spleeter-pytorch | 853d4bb6048fae879543342a8278c298854637f3 | [
"MIT"
] | 2 | 2020-05-07T04:07:50.000Z | 2020-07-13T02:40:41.000Z | spleeter/util.py | ashirviskas/spleeter-pytorch | 853d4bb6048fae879543342a8278c298854637f3 | [
"MIT"
] | 6 | 2020-05-31T08:23:56.000Z | 2022-02-22T16:38:19.000Z | import numpy as np
import tensorflow as tf
from .unet import UNet
| 36.358696 | 88 | 0.529746 |
36d54c415ce82d548b3b02e02ceb85813202d7ef | 1,920 | py | Python | SDEprocesses/explicit.py | PyPaperParrot/pystoch | 14d1dbeefaeb3696378e0db6e565347df87a02bc | [
"MIT"
] | 1 | 2019-06-06T13:53:51.000Z | 2019-06-06T13:53:51.000Z | SDEprocesses/explicit.py | PyPaperParrot/pystoch | 14d1dbeefaeb3696378e0db6e565347df87a02bc | [
"MIT"
] | null | null | null | SDEprocesses/explicit.py | PyPaperParrot/pystoch | 14d1dbeefaeb3696378e0db6e565347df87a02bc | [
"MIT"
] | null | null | null | import numpy as np
import exceptions as ex
# 4. -
# 6.
| 34.285714 | 207 | 0.611458 |
36d5db401ea4ab6c6ef162a66fa84ae2937cecdb | 3,847 | py | Python | oscontainer/cgroup_v2_subsystem.py | Lothiraldan/oscontainer | 140504711372494f919b4de7bf84e80d11013fa0 | [
"MIT"
] | null | null | null | oscontainer/cgroup_v2_subsystem.py | Lothiraldan/oscontainer | 140504711372494f919b4de7bf84e80d11013fa0 | [
"MIT"
] | null | null | null | oscontainer/cgroup_v2_subsystem.py | Lothiraldan/oscontainer | 140504711372494f919b4de7bf84e80d11013fa0 | [
"MIT"
] | null | null | null | import math
from oscontainer.constants import CGROUP_TYPE_V2, PER_CPU_SHARES, NO_LIMIT
from oscontainer.cgroup_subsystem import CgroupController, CgroupSubsystem
from oscontainer.utils import limit_from_str
# Names of the cgroup v2 interface files read by this subsystem.
CPU_WEIGHT = "cpu.weight"
CPU_MAX = "cpu.max"
CPU_CPUSET_CPUS = "cpuset.cpus"
CPU_CPUSET_CPUS_EFFECTIVE = "cpuset.cpus.effective"
MEMORY_CURRENT = "memory.current"
MEMORY_MAX = "memory.max"
| 34.044248 | 102 | 0.641279 |
36d6b30d341d10b3fc5496de476fb8b78f692188 | 460 | py | Python | openapi/tests/matchers.py | suihanki/openapi | c67ee4ec0284bc1da5bda2c6b8497d6a33bb69a0 | [
"Apache-2.0"
] | 25 | 2016-08-09T18:40:42.000Z | 2019-07-15T20:37:13.000Z | openapi/tests/matchers.py | suihanki/openapi | c67ee4ec0284bc1da5bda2c6b8497d6a33bb69a0 | [
"Apache-2.0"
] | 5 | 2016-08-16T18:34:44.000Z | 2020-03-24T21:01:26.000Z | openapi/tests/matchers.py | suihanki/openapi | c67ee4ec0284bc1da5bda2c6b8497d6a33bb69a0 | [
"Apache-2.0"
] | 11 | 2016-09-14T09:12:49.000Z | 2020-01-31T19:27:07.000Z | """
Custom hamcrest matchers.
"""
from hamcrest.core.base_matcher import BaseMatcher
from json import dumps, loads
equal_to_json = JSONMatcher
| 18.4 | 70 | 0.671739 |
36d743457c72e522cd69762028d8c4a8aaf9d131 | 2,741 | py | Python | build-container/docxify3.py | signaux-faibles/datapi | 296ee922dc47eea4176f5c7bdde35c218bf9c817 | [
"MIT"
] | null | null | null | build-container/docxify3.py | signaux-faibles/datapi | 296ee922dc47eea4176f5c7bdde35c218bf9c817 | [
"MIT"
] | 31 | 2020-04-23T11:29:16.000Z | 2021-06-23T05:45:08.000Z | build-container/docxify3.py | signaux-faibles/datapi | 296ee922dc47eea4176f5c7bdde35c218bf9c817 | [
"MIT"
] | null | null | null | # coding: utf-8
import sys
import json
from mailmerge import MailMerge
# Le template contient ce jour les champs :
# auteur l'auteur du document
# date_edition la date d'dition du document
# confidentialite le destinataire du document
# raison_sociale la raison sociale de l'entreprise
# siret le numro de SIRET de l'tablissement
# type_etablissement le type d'tablissement sige social ou tablissement secondaire
# tete_de_groupe la tte de groupe si l'entreprise fait partie d'un groupe
# departement le departement de l'tablissement
# commune la commune de l'tablissement
# territoire_industrie le Territoire d'industrie
# secteur_activite le secteur d'activit
# activite le libell et le code activit
# secteurs_covid appartenance aux secteurs dits COVID-19 S1, S1 bis ou S2
# statut_juridique le statut juridique comme SAS ou SARL
# date_ouverture_etablissement la date d'ouverture de l'tablissement
# date_creation_entreprise la date de cration de l'entreprise
# effectif le dernier effectif
# activite_partielle demande d'activit partielle sur les 12 derniers mois ou non
# dette_sociale dette sociale en hausse sur les 3 derniers mois ou non
# part_salariale dette salariale restante ou non
# annee_exercice anne du dernier exercice comptable
# ca chiffre d'affaires
# ebe excdent brut d'exploitation
# rex rsultat d'exploitation
# procol dernire procdure collective
# detection_sf risque identifi par l'algorithme de dtection Signaux Faibles
# date_debut_suivi date de dbut de suivi par l'auteur
# description_wekan description dans l'outil de suivi Kanban Wekan
template = 'template.docx'
# Lecture des donnes JSON depuis l'entre standard
# Remplissage du modle DOCX contenant des champs de fusion (MERGEFIELD) et criture dans la sortie standard
data = get_json_input_data()
fill_template_with_data(data)
sys.exit(0) | 38.069444 | 108 | 0.785845 |
36d88c360c0960445e0699b390c5bc46416d33e6 | 406 | py | Python | super32assembler/super32assembler/preprocessor/asmdirectives.py | Projektstudium-Mikroprozessor/Super32 | d502d2d5885ac0408d06e57e0f5a67fe2a2fee15 | [
"BSD-3-Clause"
] | 1 | 2019-12-07T01:56:31.000Z | 2019-12-07T01:56:31.000Z | super32assembler/super32assembler/preprocessor/asmdirectives.py | Projektstudium-Mikroprozessor/Super32 | d502d2d5885ac0408d06e57e0f5a67fe2a2fee15 | [
"BSD-3-Clause"
] | 42 | 2020-05-15T10:39:30.000Z | 2020-08-30T10:59:43.000Z | super32assembler/preprocessor/asmdirectives.py | xsjad0/Super32 | 75cf5828b17cdbce144447a69ff3d1be7ad601f2 | [
"BSD-3-Clause"
] | 4 | 2019-11-27T15:05:33.000Z | 2020-05-13T06:51:21.000Z | """
Enum Assembler-Directives
"""
from enum import Enum, auto
| 18.454545 | 53 | 0.549261 |
36d90f9194a3f4a5adea321bf4cf9176ed0ded59 | 250 | py | Python | Wyklad/OOP/Sheep.py | tborzyszkowski/PythonWorld | dc776d3ab4569297b6b6681e4390aeedf1262c78 | [
"MIT"
] | 3 | 2019-02-24T07:49:33.000Z | 2022-01-09T11:27:21.000Z | Wyklad/OOP/Sheep.py | tborzyszkowski/PythonWorld | dc776d3ab4569297b6b6681e4390aeedf1262c78 | [
"MIT"
] | null | null | null | Wyklad/OOP/Sheep.py | tborzyszkowski/PythonWorld | dc776d3ab4569297b6b6681e4390aeedf1262c78 | [
"MIT"
] | 17 | 2018-12-09T08:44:52.000Z | 2022-01-25T15:08:04.000Z | from Animal import Animal
| 16.666667 | 47 | 0.704 |
36da2e9adf116505c11742d74e8d8a7c885d7c7d | 1,034 | py | Python | python/python-algorithm-intervew/8-linked-list/16-add-two-numbers-2.py | bum12ark/algorithm | b6e262b0c29a8b5fb551db5a177a40feebc411b4 | [
"MIT"
] | 1 | 2022-03-06T03:49:31.000Z | 2022-03-06T03:49:31.000Z | python/python-algorithm-intervew/8-linked-list/16-add-two-numbers-2.py | bum12ark/algorithm | b6e262b0c29a8b5fb551db5a177a40feebc411b4 | [
"MIT"
] | null | null | null | python/python-algorithm-intervew/8-linked-list/16-add-two-numbers-2.py | bum12ark/algorithm | b6e262b0c29a8b5fb551db5a177a40feebc411b4 | [
"MIT"
] | null | null | null |
if __name__ == '__main__':
solution = Solution()
param1 = ListNode(2, ListNode(4, ListNode(5)))
param2 = ListNode(5, ListNode(6, ListNode(4)))
print(solution.addTwoNumbers(param1, param2).print_list())
| 24.619048 | 72 | 0.500967 |
36dbe66f53ea99cba7463f1defbdf1646e602362 | 15,516 | py | Python | pyjokes/jokes_pl.py | r0d0dendr0n/pyjokes | 382065cba91007302be7fd04c5c35a9957e173b2 | [
"BSD-3-Clause"
] | null | null | null | pyjokes/jokes_pl.py | r0d0dendr0n/pyjokes | 382065cba91007302be7fd04c5c35a9957e173b2 | [
"BSD-3-Clause"
] | null | null | null | pyjokes/jokes_pl.py | r0d0dendr0n/pyjokes | 382065cba91007302be7fd04c5c35a9957e173b2 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Jokes below come from the "jokes_en.py" file.
Translation to Polish: Tomasz Rozynek - provided under CC BY-SA 3.0
"""
neutral = [
"W 2030 roku Beata z ulg usuna Python'a 2.7 ze swoich maszyn. 'No!' westchna, by za chwil przeczyta ogoszenia na temat Python'a 4.4.",
"Zapytanie SQL wchodzi do baru, podchodzi do pierwszej osoby i pyta, 'Czy moemy utworzy relacj?'",
"Kiedy uywasz C++ jak motka, wszystko bdzie Twoim kciukiem.",
"Jak posadzisz milion map przy milionie klawiatur, ktra z nich w kocu napisze dziaajcy program w Javie. Pozostae bd pisa w Perlu.",
"Aby zrozumie rekurencj, musisz najpierw zrozumie rekurencj.",
"'Puk, puk.' 'Kto tam?' ... bardzo duga pauza ... 'Java.'",
"'Puk, puk.' 'Kto tam?' 'C++.'",
"'Puk, p... Asembler.'",
"Ilu programistw potrzeba, eby wymieni arwk? adnego, bo to problem sprztowy.",
"Jak nazywa si obiektowa metoda bogacenia si? Dziedziczenie.",
"Dlaczego dowcipy nie dziaaj w systemie semkowym? Poniewa 7, 10, 11.",
"Ilu programistw potrzeba, aby wymieni arwk? adnego, po prostu ogaszaj ciemno standardem.",
"Dwa wtki wchodz do baru. Barman patrzy na nie i woa, 'Hej! Nie chcemy tu hazardu!'",
"Programici uwielbiaj rozwizywanie problemw. Jeli akurat nie maj adnego do rozwizania, z pewnoci jaki stworz.",
".NET nazywa si .NET, eby przypadkiem nie wywietli si w uniksowym listingu plikw.",
"Sprzt: cz komputera, ktr moesz kopn.",
"Optymista: Szklanka do poowy pena. Pesymista: Szklanka do poowy pusta. Programista: Rozmiar szklanki jest dwa razy wikszy, ni wymagany.",
"W C sami musielimy kodowa bdy. W C++ moemy je po prostu odziedziczy.",
"Dlaczego nie ma konkursw na najmniej czytelny kod w Perlu? Bo nikt nie umiaby wyoni zwycizcy.",
"Odtwarzajc dysk instalacyjny Windowsa od tyu, usyszysz czarn msz. Gorzej, jeli odtworzysz j od przodu, wtedy zainstaluje Windowsa.",
"Ilu programistw potrzeba, aby zabi karalucha? Dwch: jeden go trzyma, a drugi instaluje na nim Windowsa.",
"Do jakiej grupy nale programici z Finlandii? Nerdyckiej.",
"Co mwi kod w Javie do kodu w C? Brakuje Ci klasy.",
"Dlaczego Microsoft nazwa swoj wyszukiwark BING? Bo Indolentnie Naladuje Google.",
"Piraci woaj 'arg!', komputerowi piraci woaj 'argv!'",
"Dziecko: Mamo, dlaczego Soce wschodzi na wschodzie i zachodzi na zachodzie? Ojciec: jeli dziaa, nie dotykaj.",
"Dlaczego programistom myli si Halloween z Boym Narodzeniem? Poniewa OCT 31 == DEC 25.",
"Ilu programistw Prologa potrzeba, eby wymieni arwk? Fasz.",
"Kelner: Poda kaw, lub herbat? Programistka: Tak.",
"Programistka wchodzi do foo...",
"Jak brzmi drugie imi Benoit'a B. Mandelbrot'a? Benoit B. Mandelbrot.",
"Dlaczego zawsze si umiechasz? To moje regularne wyraenie twarzy.",
"Programistka miaa problem. Pomylaa sobie, 'Wiem, rozwi to wtkami!'. ma Teraz problemy. ona dwa",
"Opowiedziabym dowcip o UDP, ale nie wiem, czy by do Ciebie dotar.",
"Testerka wchodzi do baru. Wbiega do baru. Wczoguje si do baru. Taczy wchodzc do baru. Wchodzi tip-topami do baru. Szaruje do baru.",
"Miaem problem, wic pomylaem, e uyj Javy. Teraz mam FabrykaProblemow.",
"Tester wchodzi do baru. Zamawia piwo. Zamawia 0 piw. Zamawia 999999999 piw. Zamawia jaszczurk. Zamawia -1 piw. Zamawia sfdeljknesv.",
"Kierowniczka projektu wchodzi do baru, zamawia drinka. Barman odmawia, ale pomyli nad dodaniem go pniej.",
"Jak wygenerowa prawdziwie losowy cig znakw? Posad studenta pierwszego roku przed Vim'em i powiedz, eby zapisa plik i wyczy edytor.",
"Od duszego czasu uywam Vim'a. Gwnie dlatego, e nadal prbuj go wyczy.",
"Jak pozna, e kto uywa Vim'a? Nie przejmuj si, sam Ci powie.",
"Kelner: On si krztusi! Czy jest na sali doktor? Programista: Jestem uytkownikiem Vim'a.",
"Trjka adminw baz danych wchodzi do NoSQL'owego baru. Po krtkim czasie rozeszli si, poniewa nie mogli utworzy relacji.",
"Jak opisa fabu Incepcji programicie? Uruchamiasz maszyn wirtualn w wirtualce, wewntrz innej wirtualki... wszystko dziaa wolno!",
"W informatyce s tylko dwa trudne problemy: uniewanianie pamici podrcznej, nazewnictwo i pomyki o 1.",
"Istnieje 10 rodzajw ludzi: Ci, ktrzy rozumiej kod binarny oraz Ci, ktrzy go nie rozumiej.",
"Istniej 2 rodzaje ludzi: Ci, ktrzy potrafi ekstrapolowa niekompletne zbiory danych...",
"Istniej II rodzaje ludzi: Ci, ktrzy rozumiej liczby rzymskie i Ci, ktrzy ich nie rozumiej.",
"Istnieje 10 typw ludzi: Ci, ktrzy rozumiej system szesnastkowy oraz 15 pozostaych.",
"Istnieje 10 rodzajw ludzi: Ci, ktrzy rozumiej kod binarny, Ci ktrzy go nie rozumiej oraz Ci, co wiedzieli, e to o systemie trjkowym.",
"Istnieje 10 rodzajw ludzi: Ci, ktrzy rozumiej kod trjkowy, Ci, ktrzy go nie rozumiej oraz Ci, ktrzy nigdy o nim nie syszeli.",
"Jak nazywa si semka hobbitw? Hobbajt.",
"Najlepsze w wartociach logicznych jest to, e nawet jeli si pomylisz, to tylko o 1.",
"Dobry programista zawsze patrzy w obie strony przed przejciem przez ulic jednokierunkow.",
"S dwa sposoby pisania programw bez bdw. Tylko ten trzeci dziaa.",
"Zarzdzanie jakoci skada si w 55% z wody, 30% krwi i 15% ticketw z bugtrackera",
"Sympatyzowanie z Diabem to tak naprawd bycie uprzejmym dla Testerw.",
"Ilu Testerw potrzeba do zmiany arwki? Oni zauwayli, e pokj jest ciemny. Nie rozwizuj problemw, tylko ich szukaj.",
"Programista rozbi auto zjedajc z gry. Przechodzie spyta co si stao. \"Nie wiem. Wniemy go na gr i sprbujmy ponownie.\".",
"Pisanie w PHP jest jak sikanie do basenu. Wszyscy to robili, ale niekoniecznie trzeba si tym chwali publicznie.",
"Dlaczego Tester przeszed przez ulic? eby zepsu dzie wszystkim innym.",
"Ilo dni od ostatniego bdu indeksowania tablicy: -1.",
"Ilo dni od ostatniej pomyki o 1: 0.",
"Szybkie randki s bez sensu. 5 minut to zbyt mao czasu, aby prawidowo wyjani filozofi Unix'a.",
"Microsoft co dwa miesice organizuje \"tydzie produktywnoci\", podczas ktrego uywaj Google zamiast Bing'a",
"Podejcie Schroedinger'a do budowy stron www: Jeli nie ogldasz tego w Internet Explorerze, jest szansa, e bdzie wygldao dobrze.",
"Szukanie dobrego programisty PHP jest jak szukanie igy w stogu siana. Czy raczej stogu siana w igle?",
"Unix jest bardzo przyjazny uytkownikom. Po prostu jest rwnie bardzo wybredny przy wyborze przyjaci.",
"Programistka COBOL'a zarabia miliony naprawiajc problem roku 2000. Decyduje si zamrozi siebie. \"Mamy rok 9999. Znasz COBOL'a, prawda?\"",
"Jzyk C czy w sobie potg asemblera z prostot uycia asemblera.",
"Ekspert SEO wchodzi do baru, bar, pub, miesce spotka, browar, Irlandzki pub, tawerna, barman, piwo, gorzaa, wino, alkohol, spirytus...",
"Co maj wsplnego pyjokes oraz Adobe Flash? Wci otrzymuj aktualizacje, ale nigdy nie staj si lepsze.",
"Dlaczego Waldo nosi tylko paski? Bo nie chce si znale w kropce.",
"Szedem raz ulic, przy ktrej domy byy ponumerowane 8k, 16k, 32k, 64k, 128k, 256k i 512k. To bya podr Alej Pamici.",
"!false, (To zabawne, bo to prawda)",
]
"""
Jokes below come from the "jokes_en.py" file.
Translation to Polish: Tomasz Rozynek - provided under CC BY-SA 3.0
"""
chuck = [
"Kiedy Chuck Norris rzuca wyjtek, to leci on przez cay pokj.",
"Wszystkie tablice, ktre deklaruje Chuck Norris s nieskoczonego rozmiaru, poniewa Chuck Norris nie zna granic.",
"Chuck Norris nie ma opnie w dysku twardym, poniewa dysk twardy wie, e musi si spieszy, eby nie wkurzy Chucka Norrisa.",
"Chuck Norris pisze kod, ktry sam si optymalizuje.",
"Chuck Norris nie porwnuje, poniewa nie ma sobie rwnych.",
"Chuck Norris nie potrzebuje garbage collector'a, poniewa nie wywouje .Dispose(), tylko .DropKick().",
"Pierwszym programem Chucka Norrisa byo kill -9.",
"Chuck Norris przebi bak dot com'w.",
"Wszystkie przegldarki wspieraj kolory #chuck oraz #norris, oznaczajce czarny i niebieski.",
"MySpace tak naprawd nie jest Twj, tylko Chuck'a. Po prostu pozwala Ci go uywa.",
"Chuck Norris moe pisa funkcje rekurencyjne bez warunku stopu, ktre zawsze wracaj.",
"Chuck Norris moe rozwiza wiee Hanoi w jednym ruchu.",
"Chuck Norris zna tylko jeden wzorzec projektowy: Boski obiekt.",
"Chuck Norris ukoczy World of Warcraft.",
"Kierownicy projektu nigdy nie pytaj Chucka Norrisa o oszacowania.",
"Chuck Norris nie dostosowuje si do standardw webowych, poniewa to one dostosowuj si do niego.",
"'U mnie to dziaa' jest zawsze prawd w przypadku Chucka Norrisa.",
"Chuck Norris nie uywa diagramw wyarzania, tylko uderzania.",
"Chuck Norris moe usun Kosz.",
"Broda Chucka Norrisa moe pisa 140 sw na minut.",
"Chuck Norris moe przetestowa ca aplikacj jedn asercj: 'dziaa'.",
"Chuck Norris nie szuka bdw, poniewa to sugeruje, e moe ich nie znale. On likwiduje bdy.",
"Klawiatura Chucka Norris'a nie ma klawisza Ctrl, poniewa nic nie kontroluje Chucka Norrisa.",
"Chuck Norris moe przepeni Twj stos samym spojrzeniem.",
"Dla Chucka Norrisa wszystko zawiera podatnoci.",
"Chuck Norris nie uywa sudo. Powoka wie, e to on i po prostu robi co jej kae.",
"Chuck Norris nie uywa debuggera. Patrzy na kod tak dugo, a sam wyzna bdy.",
"Chuck Norris ma dostp do prywatnych metod.",
"Chuck Norris moe utworzy obiekt klasy abstrakcyjnej.",
"Chuck Norris nie potrzebuje fabryki klas. On instancjonuje interfejsy.",
"Klasa Object dziedziczy po Chucku Norrisie.",
"Dla Chucka Norrisa problemy NP-trudne maj zoono O(1).",
"Chuck Norris zna ostatni cyfr rozwinicia dziesitnego Pi.",
"cze internetowe Chucka Norrisa szybciej wysya, ni pobiera, poniewa nawet dane si go boj.",
"Chuck Norris rozwiza problem komiwojaera w czasie staym: rozbij komiwojaera na N kawakw, po czym wykop kady do innego miasta.",
"adne wyraenie nie moe obsuy ChuckNorrisException.",
"Chuck Norris nie programuje w parach. Pracuje sam.",
"Chuck Norris potrafi pisa aplikacje wielowtkowe przy uyciu jednego wtku.",
"Chuck Norris nie musi uywa AJAX'a, poniewa strony i tak s przeraone jego zwykymi daniami.",
"Chuck Norris nie uywa refleksji. To refleksje uprzejmie prosz go o pomoc.",
"Klawiatura Chucka Norrisa nie ma klawisza Escape, poniewa nikt nie ucieknie przed Chuckiem Norrisem.",
"Chuck Norris moe uy wyszukiwania binarnego na nieposortowanym kontenerze.",
"Chuck Norris nie musi apa wyjtkw. S zbyt przeraone, by si pokaza.",
"Chuck Norris wyszed z nieskoczonej ptli.",
"Jeli Chuck Norris napisze kod z bdami, to one same si poprawi.",
"Hosting Chucka Norrisa ma SLA na poziomie 101%.",
"Klawiatura Chucka Norrisa ma klawisz 'Dowolny'.",
"Chuck Norris moe dosta si do bazy danych bezporednio przez interfejs uytkownika.",
"Programy Chucka Norrisa si nie kocz, tylko gin.",
"Chuck Norris nalega na uywanie jzykw silnie typowanych.",
"Chuck Norris projektuje protokoy bez statusw, da, czy odpowiedzi. Definiuje tylko polecenia.",
"Programy Chucka Norrisa zajmuj 150% procesora, nawet gdy nie s uruchomione.",
"Chuck Norris uruchamia wtki, ktre kocz swoje zadanie, zanim si poprawnie uruchomi.",
"Programy Chucka Norrisa nie akceptuj wejcia.",
"Chuck Norris moe zainstalowa iTunes bez QuickTime'a.",
"Chuck Norris nie potrzebuje systemu operacyjnego.",
"Model OSI Chucka Norrisa ma tylko jedn warstw - fizyczn.",
"Chuck Norris moe poprawnie kompilowa kod z bdami skadniowymi.",
"Kade zapytanie SQL Chucka Norrisa zawiera implikowany 'COMMIT'.",
"Chuck Norris nie potrzebuje rzutowania. Kompilator Chucka Norrisa (KCN) dostrzega wszystko. Do samego koca. Zawsze.",
"Chuck Norris nie wykonuje kodu w cyklach, tylko w uderzeniach.",
"Chuck Norris kompresuje pliki przez kopnicie dysku twardego z pobrotu.",
"Chuck Norris rozwiza problem stopu.",
"Dla Chucka Norrisa P = NP. Jego decyzje s zawsze deterministyczne.",
"Chuck Norris moe pobra wszystko z /dev/null.",
"Nikomu nie udao si programowa z Chuckiem Norrisem i wyj z tego ywym.",
"Nikomu nie udao si odezwa podczas przegldu kodu Chucka Norrisa i wyj z tego ywym.",
"Chuck Norris nie uywa interfejsw graficznych. On rozkazuje z wiersza polece.",
"Chuck Norris nie uywa Oracle'a. On JEST Wyroczni.",
"Chuck Norris moe dokona dereferencji NULL'a.",
"Lista rnic pomidzy Twoim kodem oraz kodem Chucka Norrisa jest nieskoczona.",
"Chuck Norris napisa wtyczk do Eclipsa, ktra dokonaa pierwszego kontaktu z obc cywilizacj.",
"Chuck Norris jest ostatecznym semaforem. Wszystkie wtki si go boj.",
"Nie przejmuj si testami. Przypadki testowe Chucka Norrisa pokrywaj rwnie Twj kod.",
"Kady wos z brody Chucka Norrisa ma swj wkad w najwikszy na wiecie atak DDOS.",
"Komunikaty w loggerze Chucka Norrisa zawsze maj poziom FATAL.",
"Jeli Chuck Norris zepsuje build'a, nie uda Ci si go naprawi, poniewa nie zostaa ani jedna linijka kodu.",
"Chuck Norris pisze jednym palcem. Wskazuje nim na klawiatur, a ona robi reszt roboty.",
"Programy Chucka Norrisa przechodz test Turinga po prostu patrzc si na sdziego.",
"Jeli sprbujesz zabi program Chucka Norrisa, to on zabije Ciebie.",
"Chuck Norris wykonuje nieskoczone ptle w mniej ni 4 sekundy.",
"Chuck Norris moe nadpisa zmienn zablokowan semaforem.",
"Chuck Norris zna warto NULL. Moe te po niej sortowa.",
"Chuck Norris moe zainstalowa 64-bitowy system operacyjny na 32-bitowych maszynach.",
"Chuck Norris moe pisa do strumieni wyjciowych.",
"Chuck Norris moe czyta ze strumieni wejciowych.",
"Chuck Norris nie musi kompilowa swojego kodu. Maszyny nauczyy si interpretowa kod Chuck Norrisa.",
"Chuck Norris jest powodem Niebieskiego Ekranu mierci.",
"Chuck Norris moe utworzy klas, ktre jest jednoczenie abstrakcyjna i finalna.",
"Chuck Norris moe uy czegokolwiek z java.util.*, eby Ci zabi. Nawet javadocs'w.",
"Kod dziaa szybciej, gdy obserwuje go Chuck Norris.",
"Wszyscy lubi profil Chucka Norrisa na Facebook'u, czy im si to podoba, czy nie.",
"Nie moesz ledzi Chucka Norrisa na Twitterze, poniewa to on ledzi Ciebie.",
"Kalkulator Chucka Norrisa ma tylko 3 klawisze: 0, 1 i NAND.",
"Chuck Norris uywa tylko zmiennych globalnych. Nie ma nic do ukrycia.",
"Chuck Norris raz zaimplementowa cay serwer HTTP, uywajc tylko jednego printf'a. Projekt wci si rozwija i jest znany pod nazw Apache.",
"Chuck Norris pisze bezporednio w kodzie binarnym. Potem pisze kod rdowy, jako dokumentacj dla innych programistw.",
"Chuck Norris raz przesun bit tak mocno, e wyldowa w innym komputerze.",
"Jak nazywa si ulubiony framework Chucka Norrisa? Knockout.js.",
]
# Category lookup used by the joke selector: category name -> joke list.
# 'all' is the concatenation of both categories.
jokes_pl = {
    'neutral': neutral,
    'chuck': chuck,
    'all': neutral + chuck,
}
| 77.969849 | 147 | 0.743942 |
36dc5f7d17dd68b5094396174551645ca5e9fe1c | 2,335 | py | Python | pele_platform/Utilities/Helpers/launcher.py | gabrii/pele_platform | 64ef9affdd1740fc2e0545c706f30eb2723aa300 | [
"Apache-2.0"
] | null | null | null | pele_platform/Utilities/Helpers/launcher.py | gabrii/pele_platform | 64ef9affdd1740fc2e0545c706f30eb2723aa300 | [
"Apache-2.0"
] | null | null | null | pele_platform/Utilities/Helpers/launcher.py | gabrii/pele_platform | 64ef9affdd1740fc2e0545c706f30eb2723aa300 | [
"Apache-2.0"
] | null | null | null | from dataclasses import dataclass
import pele_platform.Checker.main as ck
import pele_platform.Frag.simulation as fr
import pele_platform.Adaptive.simulation as ad
from pele_platform.Allosteric.main import run_allosteric
import pele_platform.gpcr.main as gpcr
import pele_platform.out_in.main as outin
from pele_platform.PPI.main import run_ppi
import pele_platform.Utilities.Parameters.pele_env as pv
import argparse
| 37.063492 | 93 | 0.68137 |
36dc9e14a8dd2c4fe9c4599b6173dd0c635c5f35 | 1,607 | py | Python | Project/AuditReport/Aw/audit_aw.py | StarryHome/MultiToolsPlatform | 2bd2b7e0700dbf542f0272ece3590a4afde328a4 | [
"MIT"
] | null | null | null | Project/AuditReport/Aw/audit_aw.py | StarryHome/MultiToolsPlatform | 2bd2b7e0700dbf542f0272ece3590a4afde328a4 | [
"MIT"
] | null | null | null | Project/AuditReport/Aw/audit_aw.py | StarryHome/MultiToolsPlatform | 2bd2b7e0700dbf542f0272ece3590a4afde328a4 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from .pandas_aw import PandasAw
if __name__ == '__main__':
audit = AuditAw()
visit_data_path = r'C:\Users\Administrator\Desktop\test\.xlsx'
visit_demand_path = r'C:\Users\Administrator\Desktop\test\.xlsx'
outliers_path = r'C:\Users\Administrator\Desktop\test\.xlsx'
audit.audit_report(visit_data_path, visit_demand_path, outliers_path) | 30.903846 | 109 | 0.655881 |
36de3480ccf6ebc0ee5035bf6d2e1a0522de31d5 | 812 | py | Python | libs/subsets_of_subset.py | nishio/atcoder | 8db36537b5d8580745d5f98312162506ad7d7ab4 | [
"MIT"
] | 1 | 2021-03-09T04:28:13.000Z | 2021-03-09T04:28:13.000Z | libs/subsets_of_subset.py | nishio/atcoder | 8db36537b5d8580745d5f98312162506ad7d7ab4 | [
"MIT"
] | null | null | null | libs/subsets_of_subset.py | nishio/atcoder | 8db36537b5d8580745d5f98312162506ad7d7ab4 | [
"MIT"
] | null | null | null | """
all subsets of given subset
"""
# --- end of library ---
def debugprint(g):
    """Print every integer in *g* as a zero-padded 6-digit binary string."""
    for value in g:
        print(format(value, "06b"))
TEST_1 = """
>>> debugprint(subsets_of_subset(0b010101))
010101
010100
010001
010000
000101
000100
000001
000000
"""
if __name__ == "__main__":
import sys
input = sys.stdin.buffer.readline
read = sys.stdin.buffer.read
if sys.argv[-1] == "-t":
_test()
sys.exit()
| 15.615385 | 59 | 0.566502 |
36df1c98bfbc556da9445e6a4a358b0bfd225e8a | 9,312 | py | Python | models/backbone.py | kakaobrain/sparse-detr | 1ea7a062ca6d1dd57768d65b14352cfd1a65ab52 | [
"Apache-2.0"
] | 83 | 2021-11-29T04:45:39.000Z | 2022-03-30T13:39:46.000Z | models/backbone.py | kakaobrain/sparse-detr | 1ea7a062ca6d1dd57768d65b14352cfd1a65ab52 | [
"Apache-2.0"
] | 4 | 2021-12-18T21:24:50.000Z | 2022-03-18T07:22:04.000Z | models/backbone.py | kakaobrain/sparse-detr | 1ea7a062ca6d1dd57768d65b14352cfd1a65ab52 | [
"Apache-2.0"
] | 3 | 2021-12-29T12:07:20.000Z | 2022-02-11T08:26:16.000Z | # ------------------------------------------------------------------------------------
# Sparse DETR
# Copyright (c) 2021 KakaoBrain. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 [see LICENSE for details]
# ------------------------------------------------------------------------------------
# Modified from Deformable DETR (https://github.com/fundamentalvision/Deformable-DETR)
# Copyright (c) 2020 SenseTime. All Rights Reserved.
# ------------------------------------------------------------------------------------
# Modified from DETR (https://github.com/facebookresearch/detr)
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
# ------------------------------------------------------------------------------------
"""
Backbone modules.
"""
from collections import OrderedDict
import torch
import torch.nn.functional as F
import torchvision
from torch import nn
from torchvision.models._utils import IntermediateLayerGetter
from typing import Dict, List
from models import swin_transformer
from util.misc import NestedTensor, is_main_process
from .position_encoding import build_position_encoding
def test_backbone(backbone):
imgs = [
torch.randn(2, 3, 633, 122),
torch.randn(2, 3, 322, 532),
torch.randn(2, 3, 236, 42),
]
return [backbone(img).shape for img in imgs]
def build_backbone(args):
    """Assemble the backbone + positional-encoding joiner from CLI args."""
    position_embedding = build_position_encoding(args)
    # Backbone weights are trainable only when it gets a positive LR.
    train_backbone = args.lr_backbone > 0
    # Intermediate feature maps are needed for masks or multi-scale heads.
    return_interm_layers = args.masks or (args.num_feature_levels > 1)
    backbone = Backbone(args.backbone, train_backbone, return_interm_layers,
                        args.dilation, args)
    return Joiner(backbone, position_embedding)
| 38.962343 | 107 | 0.578393 |
36dfbf17404b4b4327586ca76fafeed5dd3aea90 | 496 | py | Python | resonate-carla/risk_calculation/function_test.py | scope-lab-vu/Resonate-Dynamic-Risk | 46972bdb0a2b6b08cc188a9f1f6567971c9d263d | [
"MIT"
] | 3 | 2021-08-15T05:02:17.000Z | 2022-03-16T11:25:45.000Z | resonate-carla/risk_calculation/function_test.py | scope-lab-vu/Resonate-Dynamic-Risk | 46972bdb0a2b6b08cc188a9f1f6567971c9d263d | [
"MIT"
] | null | null | null | resonate-carla/risk_calculation/function_test.py | scope-lab-vu/Resonate-Dynamic-Risk | 46972bdb0a2b6b08cc188a9f1f6567971c9d263d | [
"MIT"
] | 2 | 2021-03-21T02:35:17.000Z | 2021-06-02T22:40:07.000Z | from bowtie_diagram import BowTie
import matplotlib.pyplot as plt
# Sweep the LEC martingale monitor value over a range and plot the
# resulting bowtie barrier probability P(B1 | S).
EXAMPLE_MONITOR_VALUES = list(range(-5, 21))  # was a redundant comprehension
bowtie = BowTie()
state = {"monitor_values": {"lec_martingale": None}}
true_y_vals = []
true_x_vals = []
for x_val in EXAMPLE_MONITOR_VALUES:
    true_x_vals.append(x_val)
    # Evaluate the barrier probability at this martingale value.
    state["monitor_values"]["lec_martingale"] = x_val
    true_y_vals.append(bowtie.prob_b1(state))
plt.scatter(true_x_vals, true_y_vals)
plt.xlabel("Log Martingale")
plt.ylabel("P(B1 | S)")
plt.show()
36dfdaf21f66ae9305bc8e42cb69c1de214c4d13 | 3,760 | py | Python | hw_asr/model/dsmodel.py | ivan-gorin/asr_project_template | 6a9f908d7f287ac2a66d5740fa6c73133825845a | [
"MIT"
] | null | null | null | hw_asr/model/dsmodel.py | ivan-gorin/asr_project_template | 6a9f908d7f287ac2a66d5740fa6c73133825845a | [
"MIT"
] | null | null | null | hw_asr/model/dsmodel.py | ivan-gorin/asr_project_template | 6a9f908d7f287ac2a66d5740fa6c73133825845a | [
"MIT"
] | null | null | null | #from https://www.assemblyai.com/blog/end-to-end-speech-recognition-pytorch/
from torch import nn
import torch.nn.functional as F
from hw_asr.base import BaseModel
| 35.471698 | 116 | 0.614894 |
36e0a5cff93ebca1eb7f6ddcf88fb764dd56d580 | 9,198 | py | Python | reminders/menu.py | elanorstark/reminder_pi | e6419f9bce29a1a06e0fee1b9e79156779a08c8b | [
"MIT"
] | null | null | null | reminders/menu.py | elanorstark/reminder_pi | e6419f9bce29a1a06e0fee1b9e79156779a08c8b | [
"MIT"
] | null | null | null | reminders/menu.py | elanorstark/reminder_pi | e6419f9bce29a1a06e0fee1b9e79156779a08c8b | [
"MIT"
] | null | null | null | import datetime
from typing import List
from reminders.events import Buttons, Alerts
from reminders.screen import Screen
# highest level, things that can be in a list menu
# an item in a menu that does something other than going to another menu
# an action item that is displayed on a menu with a checkbox
# parent for menus that can be displayed as their own screen
# menu for the home screen
# no back button available
# menu that stores and displays a list of ListMenuItem
# menu for reaching the task time editing menu, and to edit on and complete
# menu for editing a task's time
# menu which is put at top of stack when backlight is turned off
# menu to display alert and delay or mark complete
| 31.717241 | 112 | 0.607741 |
36e0bc6b9fd730df1ea36e866d1ae6f2849b3e37 | 2,127 | py | Python | custom_components/goldair_climate/configuration.py | jwhite/homeassistant-goldair-climate | cca1831a1d257c507f3831ca053478e1e7643c75 | [
"MIT"
] | 8 | 2019-05-31T00:17:13.000Z | 2021-01-12T21:43:21.000Z | custom_components/goldair_climate/configuration.py | jwhite/homeassistant-goldair-climate | cca1831a1d257c507f3831ca053478e1e7643c75 | [
"MIT"
] | 25 | 2019-07-04T06:46:30.000Z | 2021-07-15T03:13:46.000Z | custom_components/goldair_climate/configuration.py | nicole-ashley/homeassistant-goldair-climate | df5f895db20d826b673142e785477944497d9402 | [
"MIT"
] | 9 | 2019-11-22T02:46:55.000Z | 2021-07-04T21:57:41.000Z | import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_NAME
from .const import (
CONF_CHILD_LOCK,
CONF_CLIMATE,
CONF_DEVICE_ID,
CONF_DISPLAY_LIGHT,
CONF_LOCAL_KEY,
CONF_TYPE,
CONF_TYPE_AUTO,
CONF_TYPE_DEHUMIDIFIER,
CONF_TYPE_FAN,
CONF_TYPE_GECO_HEATER,
CONF_TYPE_GPCV_HEATER,
CONF_TYPE_GPPH_HEATER,
)
# Template describing every per-device configuration key for this
# integration.  Each entry carries:
#   key      -- the configuration constant
#   type     -- a plain Python type or voluptuous validator
#   required -- whether the key must be supplied
#   default  -- value used when an optional key is omitted
#   option   -- presumably whether the key is editable via the options
#               flow (TODO(review): confirm against the config-flow code)
INDIVIDUAL_CONFIG_SCHEMA_TEMPLATE = [
    {"key": CONF_NAME, "type": str, "required": True, "option": False},
    {"key": CONF_HOST, "type": str, "required": True, "option": True},
    {"key": CONF_DEVICE_ID, "type": str, "required": True, "option": False},
    {"key": CONF_LOCAL_KEY, "type": str, "required": True, "option": True},
    {
        "key": CONF_TYPE,
        # Device type defaults to auto-detection but may be forced.
        "type": vol.In(
            [
                CONF_TYPE_AUTO,
                CONF_TYPE_GPPH_HEATER,
                CONF_TYPE_DEHUMIDIFIER,
                CONF_TYPE_FAN,
                CONF_TYPE_GECO_HEATER,
                CONF_TYPE_GPCV_HEATER,
            ]
        ),
        "required": False,
        "default": CONF_TYPE_AUTO,
        "option": True,
    },
    {
        "key": CONF_CLIMATE,
        "type": bool,
        "required": False,
        "default": True,
        "option": True,
    },
    {
        "key": CONF_DISPLAY_LIGHT,
        "type": bool,
        "required": False,
        "default": False,
        "option": True,
    },
    {
        "key": CONF_CHILD_LOCK,
        "type": bool,
        "required": False,
        "default": False,
        "option": True,
    },
]
36e0e9a9d4e28937589b02ccb2d38ccef3931ed6 | 255 | py | Python | 20-29/nlp23.py | emergent/nlp100.python | 636546ce1c4368faa6685eec315773c5c9fb424a | [
"Apache-2.0"
] | null | null | null | 20-29/nlp23.py | emergent/nlp100.python | 636546ce1c4368faa6685eec315773c5c9fb424a | [
"Apache-2.0"
] | null | null | null | 20-29/nlp23.py | emergent/nlp100.python | 636546ce1c4368faa6685eec315773c5c9fb424a | [
"Apache-2.0"
] | null | null | null | from nlp20 import get_england
import re

# Heading lines look like "== Title ==": capture the leading '=' run and
# the title text between the markers.
heading_re = re.compile(r'^(=+)\s*(.+?)\s*=+')
text = get_england()  # renamed from `str`, which shadowed the builtin
for line in text.split('\n'):
    m = heading_re.search(line)
    if m is not None:
        # Section level = number of '=' minus 1 (e.g. "==" -> level 1).
        print(m.group(2), len(m.group(1)) - 1)
| 19.615385 | 37 | 0.556863 |
36e117d0d57d57188bd69bce4d500df94875ceb8 | 4,913 | py | Python | platform_reports/prometheus_grammars.py | neuro-inc/platform-reports | 161c18733370235af0b63a772de49343e956c35c | [
"Apache-2.0"
] | null | null | null | platform_reports/prometheus_grammars.py | neuro-inc/platform-reports | 161c18733370235af0b63a772de49343e956c35c | [
"Apache-2.0"
] | 9 | 2021-12-23T03:10:40.000Z | 2022-03-31T03:15:52.000Z | platform_reports/prometheus_grammars.py | neuro-inc/platform-reports | 161c18733370235af0b63a772de49343e956c35c | [
"Apache-2.0"
] | null | null | null | PROMQL = """
start: query
// Binary operations are defined separately in order to support precedence
?query\
: or_match
| matrix
| subquery
| offset
?or_match\
: and_unless_match
| or_match OR grouping? and_unless_match
?and_unless_match\
: comparison_match
| and_unless_match (AND | UNLESS) grouping? comparison_match
?comparison_match\
: sum_match
| comparison_match /==|!=|>=|<=|>|</ BOOL? grouping? sum_match
?sum_match\
: product_match
| sum_match /\\+|-/ grouping? product_match
?product_match\
: unary
| product_match /\\*|\\/|%/ grouping? unary
?unary\
: power_match
| /\\+|-/ power_match
?power_match\
: atom
| atom /\\^/ grouping? power_match
?atom\
: function
| aggregation
| instant_query
| NUMBER
| STRING
| "(" query ")"
// Selectors
instant_query\
: METRIC_NAME ("{" label_matcher_list? "}")? -> instant_query_with_metric
| "{" label_matcher_list "}" -> instant_query_without_metric
label_matcher_list: label_matcher ("," label_matcher)*
label_matcher: label_name /=~|=|!=|!~/ STRING
matrix: query "[" DURATION "]"
subquery: query "[" DURATION ":" DURATION? "]"
offset: query OFFSET DURATION
// Function
function: function_name parameter_list
parameter_list: "(" (query ("," query)*)? ")"
?function_name\
: ABS
| ABSENT
| ABSENT_OVER_TIME
| CEIL
| CHANGES
| CLAMP_MAX
| CLAMP_MIN
| DAY_OF_MONTH
| DAY_OF_WEEK
| DAYS_IN_MONTH
| DELTA
| DERIV
| EXP
| FLOOR
| HISTOGRAM_QUANTILE
| HOLT_WINTERS
| HOUR
| IDELTA
| INCREASE
| IRATE
| LABEL_JOIN
| LABEL_REPLACE
| LN
| LOG2
| LOG10
| MINUTE
| MONTH
| PREDICT_LINEAR
| RATE
| RESETS
| ROUND
| SCALAR
| SORT
| SORT_DESC
| SQRT
| TIME
| TIMESTAMP
| VECTOR
| YEAR
| AVG_OVER_TIME
| MIN_OVER_TIME
| MAX_OVER_TIME
| SUM_OVER_TIME
| COUNT_OVER_TIME
| QUANTILE_OVER_TIME
| STDDEV_OVER_TIME
| STDVAR_OVER_TIME
// Aggregations
aggregation\
: aggregation_operator parameter_list
| aggregation_operator (by | without) parameter_list
| aggregation_operator parameter_list (by | without)
by: BY label_name_list
without: WITHOUT label_name_list
?aggregation_operator\
: SUM
| MIN
| MAX
| AVG
| GROUP
| STDDEV
| STDVAR
| COUNT
| COUNT_VALUES
| BOTTOMK
| TOPK
| QUANTILE
// Vector one-to-one/one-to-many joins
grouping: (on | ignoring) (group_left | group_right)?
on: ON label_name_list
ignoring: IGNORING label_name_list
group_left: GROUP_LEFT label_name_list
group_right: GROUP_RIGHT label_name_list
// Label names
label_name_list: "(" (label_name ("," label_name)*)? ")"
?label_name: keyword | LABEL_NAME
?keyword\
: AND
| OR
| UNLESS
| BY
| WITHOUT
| ON
| IGNORING
| GROUP_LEFT
| GROUP_RIGHT
| OFFSET
| BOOL
| aggregation_operator
| function_name
// Keywords
// Function names
ABS: "abs"
ABSENT: "absent"
ABSENT_OVER_TIME: "absent_over_time"
CEIL: "ceil"
CHANGES: "changes"
CLAMP_MAX: "clamp_max"
CLAMP_MIN: "clamp_min"
DAY_OF_MONTH: "day_of_month"
DAY_OF_WEEK: "day_of_week"
DAYS_IN_MONTH: "days_in_month"
DELTA: "delta"
DERIV: "deriv"
EXP: "exp"
FLOOR: "floor"
HISTOGRAM_QUANTILE: "histogram_quantile"
HOLT_WINTERS: "holt_winters"
HOUR: "hour"
IDELTA: "idelta"
INCREASE: "increase"
IRATE: "irate"
LABEL_JOIN: "label_join"
LABEL_REPLACE: "label_replace"
LN: "ln"
LOG2: "log2"
LOG10: "log10"
MINUTE: "minute"
MONTH: "month"
PREDICT_LINEAR: "predict_linear"
RATE: "rate"
RESETS: "resets"
ROUND: "round"
SCALAR: "scalar"
SORT: "sort"
SORT_DESC: "sort_desc"
SQRT: "sqrt"
TIME: "time"
TIMESTAMP: "timestamp"
VECTOR: "vector"
YEAR: "year"
AVG_OVER_TIME: "avg_over_time"
MIN_OVER_TIME: "min_over_time"
MAX_OVER_TIME: "max_over_time"
SUM_OVER_TIME: "sum_over_time"
COUNT_OVER_TIME: "count_over_time"
QUANTILE_OVER_TIME: "quantile_over_time"
STDDEV_OVER_TIME: "stddev_over_time"
STDVAR_OVER_TIME: "stdvar_over_time"
// Aggregation operators
SUM: "sum"
MIN: "min"
MAX: "max"
AVG: "avg"
GROUP: "group"
STDDEV: "stddev"
STDVAR: "stdvar"
COUNT: "count"
COUNT_VALUES: "count_values"
BOTTOMK: "bottomk"
TOPK: "topk"
QUANTILE: "quantile"
// Aggregation modifiers
BY: "by"
WITHOUT: "without"
// Join modifiers
ON: "on"
IGNORING: "ignoring"
GROUP_LEFT: "group_left"
GROUP_RIGHT: "group_right"
// Logical operators
AND: "and"
OR: "or"
UNLESS: "unless"
OFFSET: "offset"
BOOL: "bool"
NUMBER: /[0-9]+(\\.[0-9]+)?/
STRING\
: "'" /([^'\\\\]|\\\\.)*/ "'"
| "\\"" /([^\\"\\\\]|\\\\.)*/ "\\""
DURATION: DIGIT+ ("s" | "m" | "h" | "d" | "w" | "y")
METRIC_NAME: (LETTER | "_" | ":") (DIGIT | LETTER | "_" | ":")*
LABEL_NAME: (LETTER | "_") (DIGIT | LETTER | "_")*
%import common.DIGIT
%import common.LETTER
%import common.WS
%ignore WS
"""
| 17.996337 | 77 | 0.65113 |
36e1fd31cd58507e88abf55b7c02a2da45a269b3 | 2,750 | py | Python | usersmanagement/models.py | Open-CMMS/openCMMS_backend | 56511ebac83a5dc1fb8768a98bc675e88530a447 | [
"BSD-3-Clause"
] | 3 | 2021-03-08T19:14:38.000Z | 2022-02-01T17:57:31.000Z | usersmanagement/models.py | Open-CMMS/openCMMS_backend | 56511ebac83a5dc1fb8768a98bc675e88530a447 | [
"BSD-3-Clause"
] | null | null | null | usersmanagement/models.py | Open-CMMS/openCMMS_backend | 56511ebac83a5dc1fb8768a98bc675e88530a447 | [
"BSD-3-Clause"
] | null | null | null | """This file contain the model for the usermanagement app."""
from django.contrib.auth.models import AbstractUser, Group, Permission
from django.db import models
| 28.350515 | 92 | 0.627273 |
36e3612bbbacdd9cd6a33c5bc043ceb7c94b8118 | 572 | py | Python | resrc/utils/templatetags/gravatar.py | theWhiteFox/resrc | d62bcf3ba2a55f50ae38a1e606072ee3d6025da5 | [
"MIT"
] | 274 | 2015-01-02T08:57:58.000Z | 2022-03-11T11:44:44.000Z | resrc/utils/templatetags/gravatar.py | ninjaCheery/resrc | 8af3a1a3617fd305a2c8aecffb609ed3e9c1addc | [
"MIT"
] | 8 | 2015-05-19T02:54:49.000Z | 2016-07-07T18:10:40.000Z | resrc/utils/templatetags/gravatar.py | ninjaCheery/resrc | 8af3a1a3617fd305a2c8aecffb609ed3e9c1addc | [
"MIT"
] | 112 | 2015-01-03T18:59:23.000Z | 2019-10-08T11:49:18.000Z | # -*- coding: utf-8 -*-:
from django import template
import urllib
import hashlib
register = template.Library()
register.simple_tag(gravatar)
| 26 | 86 | 0.631119 |
36e397fd23e48d333c1f759c070a0a56a3fe0024 | 11,149 | py | Python | utils/torch_utils.py | misads/torch_image_template | db55be6fcebdb6b0c5c739e505b8a7a2eb81c3c1 | [
"MIT"
] | 5 | 2019-12-23T05:13:15.000Z | 2020-04-09T03:47:53.000Z | utils/torch_utils.py | misads/torch_image_template | db55be6fcebdb6b0c5c739e505b8a7a2eb81c3c1 | [
"MIT"
] | null | null | null | utils/torch_utils.py | misads/torch_image_template | db55be6fcebdb6b0c5c739e505b8a7a2eb81c3c1 | [
"MIT"
] | null | null | null | # encoding=utf-8
"""
Misc PyTorch utils
Author: xuhaoyu@tju.edu.cn
update 12.7
Usage:
`from torch_utils import *`
`func_name()` # to call functions in this file
"""
from datetime import datetime
import math
import os
import torch
import torch.nn as nn
from tensorboardX import SummaryWriter
##############################
# Functional utils
##############################
from utils.misc_utils import format_num
def clamp(x, min=0.01, max=0.99):
    """Clamp every element of ``x`` into the closed range [min, max].

    Elements above ``max`` become ``max``; elements below ``min`` become
    ``min``.

    :param x: input tensor
    :param min: lower bound
    :param max: upper bound
    :return: a new tensor with values limited to [min, max]
    """
    return x.clamp(min, max)
def repeat(x: torch.Tensor, *sizes):
    """Tile ``x`` along each dimension the given number of times.

    Example:
        >>> t = repeat(t, 1, 3, 1, 1)
        # same as t.repeat(1, 3, 1, 1) or torch.cat([t, t, t], dim=1)

    :param x: input tensor
    :param sizes: per-dimension repeat counts
    :return: the tiled tensor
    """
    tiled = x.repeat(*sizes)
    return tiled
def tensor2im(x: torch.Tensor, norm=False, dtype='float32'):
    """Convert a batched tensor to a single displayable image tensor.

    :param x: [n, c, h, w] float32 tensor; only the first sample of the
        batch is returned.
    :param norm: if True, assume ``x`` is in [-1, 1] and rescale it to
        [0, 1] before clamping.
    :param dtype: unused; kept for backward compatibility with existing
        callers.
    :return: the first image of the batch, detached and moved to CPU,
        with values clamped to [0, 1].
    """
    if norm:
        x = (x + 1) / 2
    # Clamp out-of-range values without mutating the caller's tensor
    # (the previous in-place masked assignment modified the input).
    x = torch.clamp(x, 0, 1)
    return x.detach().cpu().data[0]
##############################
# Network utils
##############################
##############################
# Abstract Meters class
##############################
##############################
# Checkpoint helper
##############################
def load_ckpt(model, ckpt_path):
    """Restore a model's parameters from a ``state_dict`` checkpoint file.

    Example:
        class Model(nn.Module):
            ....
        model = Model().cuda()
        load_ckpt(model, 'model.pt')

    :param model: object of a subclass of nn.Module
    :param ckpt_path: *.pt file to load
    :return:
    """
    state_dict = torch.load(ckpt_path)
    model.load_state_dict(state_dict)
def save_ckpt(model, ckpt_path):
    """Serialize a model's parameters (``state_dict``) to a checkpoint file.

    Example:
        class Model(nn.Module):
            ....
        model = Model().cuda()
        save_ckpt(model, 'model.pt')

    :param model: object of a subclass of nn.Module
    :param ckpt_path: *.pt file to save
    :return:
    """
    state_dict = model.state_dict()
    torch.save(state_dict, ckpt_path)
##############################
# LR_Scheduler
##############################
"""
TensorBoard
Example:
writer = create_summary_writer(os.path.join(self.basedir, 'logs'))
write_meters_loss(writer, 'train', avg_meters, iteration)
write_loss(writer, 'train', 'F1', 0.78, iteration)
write_image(writer, 'train', 'input', img, iteration)
# shell
tensorboard --logdir {base_path}/logs
"""
def write_loss(writer: "SummaryWriter", prefix, loss_name: str, value: float, iteration):
    """Log one scalar under ``<prefix>/<loss_name>`` at ``iteration``.

    Example:
        write_loss(writer, 'train', 'F1', 0.78, iteration)

    :param writer: writer created by create_summary_writer()
    :param prefix: tag namespace, e.g. for '/train/loss1' it is 'train'
    :param loss_name: scalar name
    :param value: scalar value
    :param iteration: global step
    """
    tag = os.path.join(prefix, loss_name)
    writer.add_scalar(tag, value, iteration)
def write_image(writer: "SummaryWriter", prefix, image_name: str, img, iteration, dataformats='CHW'):
    """Log one image under ``<prefix>/<image_name>`` at ``iteration``.

    Example:
        write_image(writer, 'train', 'input', img, iteration)

    :param writer: writer created by create_summary_writer()
    :param prefix: tag namespace
    :param image_name: image tag name
    :param img: channel-first image tensor of shape [C, H, W]
    :param iteration: global step
    :param dataformats: 'CHW' or 'HWC' or 'NCHW'
    """
    tag = os.path.join(prefix, image_name)
    writer.add_image(tag, img, iteration, dataformats=dataformats)
def write_meters_loss(writer: "SummaryWriter", prefix, avg_meters: "Meters", iteration):
    """Log every meter held by ``avg_meters`` as a scalar under ``prefix``.

    Example:
        writer = create_summary_writer(os.path.join(self.basedir, 'logs'))
        ema_meters = ExponentialMovingAverage(0.98)
        for i in range(100):
            ema_meters.update({'f1': i, 'f2': i*0.5})
            write_meters_loss(writer, 'train', ema_meters, i)

    :param writer: writer created by create_summary_writer()
    :param prefix: tag namespace
    :param avg_meters: a Meters subclass instance (mapping-like)
    :param iteration: global step
    """
    for name in avg_meters.keys():
        value = avg_meters[name]
        writer.add_scalar(os.path.join(prefix, name), value, iteration)
| 28.296954 | 101 | 0.544713 |
36e43c1fea8564dd6886b6925030fdbb9a39b677 | 19,421 | py | Python | library/route_vpn.py | sebbbastien/ansible-stonesoft | ebc0d1c0720f8d79224ae58a80d3e9155bda4385 | [
"Apache-2.0"
] | null | null | null | library/route_vpn.py | sebbbastien/ansible-stonesoft | ebc0d1c0720f8d79224ae58a80d3e9155bda4385 | [
"Apache-2.0"
] | null | null | null | library/route_vpn.py | sebbbastien/ansible-stonesoft | ebc0d1c0720f8d79224ae58a80d3e9155bda4385 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Copyright (c) 2017 David LePage
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: route_vpn
short_description: Create a route based VPN
description:
- Create a route based VPN. Route VPN's are typically created between a managed
Stonesoft FW and a 3rd party device (AWS, Azure, etc). You must pre-create
the internal FW prior to running this module. If doing an IPSEC wrapped VPN,
you must also specify a tunnel interface for which to bind (must be pre-created)
and specify an IP address/interface id to specify the ISAKMP listener.
version_added: '2.5'
options:
name:
description:
- The name for this route VPN.
required: true
type: str
type:
description:
- The type of IPSEC vpn to create
type: str
choices: ['ipsec', 'gre']
default: ipsec
enabled:
description:
- Whether the VPN is enabled or disabled
type: bool
local_gw:
description:
- Represents the locally managed Stonesoft FW gateway. If the remote_gw is also
a Stonesoft managed device, use the same parameters to define
type: str
suboptions:
name:
description:
- The name of the Stonesoft FW gateway
type: str
required: true
tunnel_interface:
description:
- The ID for the tunnel interface
type: str
required: true
interface_id:
description:
- The interface ID to enable IPSEC. If multiple IP addresses exist
on the interface, IPSEC will be enabled on all. Use I(interface_ip) as
an alternative.
type: str
required: true
address:
description:
- An interface IP addresses to enable IPSEC. If there are multiple IP addresses
on a single interface specified with I(interface_id) and you want to bind to
only that address
type: str
required: false
remote_gw:
description:
- The name of the remote GW. If the remote gateway is an Stonesoft FW, it must
pre-exist. Use the local_gw documentation for settings. If it is an External Gateway,
this module will create the gateway based on the gateway settings provided if it
doesn't already exist. This documents an External Gateway configuration. See also
the external_gateway module for additional external endpoint settings.
type: str
suboptions:
name:
description:
- The name of the External Gateway. If the gateway does not exist, it will be created
if you provide the I(address) and I(networks) parameters.
type: str
required: true
preshared_key:
description:
- If this is an External Gateway, you must provide a pre-shared key to be used between
the gateways. If the gateway is another Stonesoft FW, a key will be auto-generated.
type: str
type:
description:
- Set to external_gateway if this is an external gateway element type
type: str
vpn_site:
description:
- Defines the VPN site for the protected networks on other end of external gateway
type: dict
suboptions:
name:
description:
- Name of VPN site
type: str
required: true
network:
description:
- A valid element type from SMC. Typically this is network or host. List elements
should be valid names of the specified element
type: list
external_endpoint:
description:
- The external endpoint gateways where the RBVPN will terminate. Any options that are
supported by the smcpython ExternalEndpoint.create constructor are supported values
for this definition
type: list
required: true
suboptions:
name:
description:
- Name of the external endpoint
type: str
required: True
address:
description:
- A valid IP address of the external gateway
type: str
required: true
enabled:
description:
- Whether to enable the gateway.
type: bool
tags:
description:
- Provide an optional category tag to the engine. If the category does not
exist, it will be created
type: list
state:
description:
- Specify a create or delete operation
required: false
default: present
choices:
- present
- absent
extends_documentation_fragment: stonesoft
notes:
- Login credential information is either obtained by providing them directly
to the task/play, specifying an alt_filepath to read the credentials from to
the play, or from environment variables (in that order). See
U(http://smc-python.readthedocs.io/en/latest/pages/session.html) for more
information.
requirements:
- smc-python
author:
- David LePage (@gabstopper)
'''
EXAMPLES = '''
- name: Route VPN between internal engine and 3rd party external gateway
register: result
route_vpn:
smc_logging:
level: 10
path: ansible-smc.log
enabled: true
local_gw:
address: 50.50.50.1
name: newcluster
tunnel_interface: '1001'
name: myrbvpn
remote_gw:
external_endpoint:
- address: 33.33.33.41
enabled: true
name: extgw3 (33.33.33.41)
connection_type: 'Active 1'
- address: 34.34.34.34
enabled: true
name: endpoint2 (34.34.34.34)
connection_type: 'Active 1'
- address: 44.44.44.44
enabled: false
name: extgw4 (44.44.44.44)
connection_type: 'Active 1'
- address: 33.33.33.50
enabled: false
name: endpoint1 (33.33.33.50)
connection_type: 'Active 1'
name: extgw3
preshared_key: '********'
type: external_gateway
vpn_site:
name: extgw3-site
network:
- network-172.18.15.0/24
- network-172.18.1.0/24
- network-172.18.2.0/24
- name: Create a new Route VPN with internal gateways
route_vpn:
smc_logging:
level: 10
path: ansible-smc.log
name: myrbvpn
type: ipsec
local_gw:
name: newcluster
tunnel_interface: 1001
interface_id: 1
#address: 2.2.2.2
remote_gw:
name: myfw
tunnel_interface: 1000
interface_id: 0
tags:
- footag
'''
RETURN = '''
changed:
description: Whether or not the change succeeded
returned: always
type: bool
state:
description: The current state of the element
return: always
type: dict
'''
import traceback
from ansible.module_utils.stonesoft_util import (
StonesoftModuleBase, Cache)
try:
from smc.vpn.route import RouteVPN, TunnelEndpoint
from smc.vpn.elements import ExternalGateway
from smc.core.engine import Engine
from smc.api.exceptions import SMCException
except ImportError:
pass
if __name__ == '__main__':
main() | 37.204981 | 118 | 0.563771 |
36e64aadf7aac130d35406e0cf99b998faa79a22 | 6,407 | py | Python | tfx/experimental/pipeline_testing/pipeline_recorder_utils.py | ntakouris/tfx | deb618730dc7675c8e9dc75e03b8ca795d49653d | [
"Apache-2.0"
] | 1 | 2020-06-09T03:50:59.000Z | 2020-06-09T03:50:59.000Z | tfx/experimental/pipeline_testing/pipeline_recorder_utils.py | tommywei110/tfx | 2152534c81dbc06dc90de37c56e4d63bf810f150 | [
"Apache-2.0"
] | null | null | null | tfx/experimental/pipeline_testing/pipeline_recorder_utils.py | tommywei110/tfx | 2152534c81dbc06dc90de37c56e4d63bf810f150 | [
"Apache-2.0"
] | null | null | null | # Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Recording pipeline from MLMD metadata."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import os
from typing import Iterable, List, Mapping, Optional, Text, Tuple
from absl import logging
import tensorflow as tf
from tfx.orchestration import metadata
from tfx.utils import io_utils
from ml_metadata.proto import metadata_store_pb2
def _get_execution_dict(
    metadata_connection: 'metadata.Metadata'
) -> 'Mapping[Text, List[metadata_store_pb2.Execution]]':
  """Groups all MLMD executions by their 'run_id' property.

  Args:
    metadata_connection: Instance of metadata.Metadata for I/O to MLMD.

  Returns:
    A dictionary mapping each run_id to the list of executions recorded
    under it.
  """
  executions_by_run = collections.defaultdict(list)
  for execution in metadata_connection.store.get_executions():
    run_id = execution.properties['run_id'].string_value
    executions_by_run[run_id].append(execution)
  return executions_by_run
def _get_latest_executions(
    metadata_connection: 'metadata.Metadata',
    pipeline_name: 'Text') -> 'List[metadata_store_pb2.Execution]':
  """Fetches executions associated with the latest pipeline run context.

  Args:
    metadata_connection: Instance of metadata.Metadata for I/O to MLMD.
    pipeline_name: Name of the pipeline to retrieve the latest executions for.

  Returns:
    List of executions for the latest run of a pipeline with the given
    pipeline_name.

  Raises:
    ValueError: If no run context exists for the given pipeline_name.
  """
  pipeline_run_contexts = [
      c for c in metadata_connection.store.get_contexts_by_type(
          metadata._CONTEXT_TYPE_PIPELINE_RUN)  # pylint: disable=protected-access
      if c.properties['pipeline_name'].string_value == pipeline_name
  ]
  # Previously an empty match fell through to max() and raised a cryptic
  # "max() arg is an empty sequence"; fail with an actionable message instead.
  if not pipeline_run_contexts:
    raise ValueError(
        'No pipeline run found for pipeline_name {}'.format(pipeline_name))
  latest_context = max(
      pipeline_run_contexts, key=lambda c: c.last_update_time_since_epoch)
  return metadata_connection.store.get_executions_by_context(latest_context.id)
def record_pipeline(output_dir: Text, metadata_db_uri: Optional[Text],
                    host: Optional[Text], port: Optional[int],
                    pipeline_name: Optional[Text],
                    run_id: Optional[Text]) -> None:
  """Record pipeline run with run_id to output_dir.
  For the beam pipeline, metadata_db_uri is required. For KFP pipeline,
  host and port should be specified. If run_id is not specified, then
  pipeline_name ought to be specified in order to fetch the latest execution
  for the specified pipeline.
  Args:
    output_dir: Directory path where the pipeline outputs should be recorded.
    metadata_db_uri: Uri to metadata db.
    host: Hostname of the metadata grpc server
    port: Port number of the metadata grpc server.
    pipeline_name: Pipeline name, which is required if run_id isn't specified.
    run_id: Pipeline execution run_id.
  Raises:
    ValueError: In cases of invalid arguments:
      - metadata_db_uri is None or host and/or port is None.
      - run_id is None and pipeline_name is None.
    FileNotFoundError: if the source artifact uri does not already exist.
  """
  # Choose how to reach MLMD: a gRPC connection (host/port, the KFP case)
  # takes precedence over a local sqlite metadata db.
  if host is not None and port is not None:
    metadata_config = metadata_store_pb2.MetadataStoreClientConfig(
        host=host, port=port)
  elif metadata_db_uri is not None:
    metadata_config = metadata.sqlite_metadata_connection_config(
        metadata_db_uri)
  else:
    raise ValueError('For KFP, host and port are required. '
                     'For beam pipeline, metadata_db_uri is required.')
  # All MLMD reads happen inside a single connection context.
  with metadata.Metadata(metadata_config) as metadata_connection:
    if run_id is None:
      if pipeline_name is None:
        raise ValueError('If the run_id is not specified,'
                         ' pipeline_name should be specified')
      # fetch executions of the most recently updated execution context.
      executions = _get_latest_executions(metadata_connection, pipeline_name)
    else:
      execution_dict = _get_execution_dict(metadata_connection)
      if run_id in execution_dict:
        executions = execution_dict[run_id]
      else:
        raise ValueError(
            'run_id {} is not recorded in the MLMD metadata'.format(run_id))
    execution_ids = [e.id for e in executions]
    # NOTE(review): _get_paths is defined elsewhere in this module; it
    # appears to yield (source, destination) uri pairs for the executions'
    # output artifacts -- confirm against its definition.
    for src_uri, dest_uri in _get_paths(metadata_connection, execution_ids,
                                        output_dir):
      if not tf.io.gfile.exists(src_uri):
        raise FileNotFoundError('{} does not exist'.format(src_uri))
      io_utils.copy_dir(src_uri, dest_uri)
    logging.info('Pipeline Recorded at %s', output_dir)
| 39.795031 | 82 | 0.74044 |
36e67ff06717bc841187da318c7c341f30def84e | 16,034 | py | Python | src/third_party/wiredtiger/lang/python/setup_pip.py | SunguckLee/real-mongodb | fef0e44fafc6d3709a84101327e7d2f54dd18d88 | [
"Apache-2.0"
] | 4 | 2018-02-06T01:53:12.000Z | 2018-02-20T01:47:36.000Z | src/third_party/wiredtiger/lang/python/setup_pip.py | SunguckLee/real-mongodb | fef0e44fafc6d3709a84101327e7d2f54dd18d88 | [
"Apache-2.0"
] | null | null | null | src/third_party/wiredtiger/lang/python/setup_pip.py | SunguckLee/real-mongodb | fef0e44fafc6d3709a84101327e7d2f54dd18d88 | [
"Apache-2.0"
] | 3 | 2018-02-06T01:53:18.000Z | 2021-07-28T09:48:15.000Z | #!/usr/bin/env python
#
# Public Domain 2014-2016 MongoDB, Inc.
# Public Domain 2008-2014 WiredTiger, Inc.
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# This script builds a Python source distribution that can built be installed
# via pip install. This must be run in a git repository to determine the files
# to package. Also as a prerequisite, SWIG must be run as the generated files
# are part of the package. To create the distribution, in this directory, run
# "python setup_pip.py sdist", this creates a tar.gz file under ./dist .
from __future__ import print_function
import os, os.path, re, shutil, site, sys
from setuptools import setup, Distribution
from distutils.extension import Extension
import distutils.sysconfig
import distutils.ccompiler
from distutils.errors import CompileError, LinkError
import subprocess
from subprocess import call
import setuptools.command.install
import setuptools.command.build_ext
# msg --
# Print a message to stderr.
# die --
# For failures, show a message and exit.
# build_commands --
# Run a sequence of commands, and die if any fail.
# check_needed_dependencies --
# Make a quick check of any needed library dependencies, and
# add to the library path and include path as needed. If a library
# is not found, it is not definitive.
# find_executable --
# Locate an executable in the PATH.
# get_build_path --
# Create a PATH that can be used for installation. Apparently,
# installation commands are run with a restricted PATH, and
# autoreconf/aclocal will not normally be found.
# get_compile_flags --
# Get system specific compile flags. Return a triple: C preprocessor
# flags, C compilation flags and linker flags.
# get_sources_curdir --
# Get a list of sources from the current directory
# get_wiredtiger_versions --
# Read the version information from the RELEASE_INFO file.
# get_library_dirs
# Build a plausible set of library directories.
# source_filter
# Make any needed changes to the sources list. Any entry that
# needs to be moved is returned in a dictionary.
################################################################
# Do some initial setup and checks.
this_abs_script = os.path.abspath(__file__)
this_dir = os.path.dirname(this_abs_script)
# The first non-option command line argument is the pip/setup sub-command
# (e.g. 'sdist', 'install').
pip_command = None
for arg in sys.argv[1:]:
    if arg[0] != '-' and pip_command == None:
        pip_command = arg
        break
# Locate the WiredTiger source tree: either this script lives in
# <wt>/lang/python, or it is already at the top level (next to LICENSE).
if this_dir.endswith(os.sep + os.path.join('lang', 'python')):
    wt_dir = os.path.dirname(os.path.dirname(this_dir))
    os.chdir(wt_dir)
elif os.path.isfile(os.path.join(this_dir, 'LICENSE')):
    wt_dir = this_dir
else:
    # `die` is defined earlier in the original file (not visible in this chunk).
    die('running from an unknown directory')
python3 = (sys.version_info[0] > 2)
if python3:
    die('Python3 is not yet supported')
# Ensure that Extensions won't be built for 32 bit,
# that won't work with WiredTiger.
if sys.maxsize < 2**32:
    die('need to be running on a 64 bit system, and have a 64 bit Python')
# Paths used throughout the build, relative to the WiredTiger top level.
python_rel_dir = os.path.join('lang', 'python')
build_dir = os.path.join(wt_dir, 'build_posix')
makefile = os.path.join(build_dir, 'Makefile')
built_sentinal = os.path.join(build_dir, 'built.txt')
conf_make_dir = 'build_posix'
wt_swig_lib_name = os.path.join(python_rel_dir, '_wiredtiger.so')
################################################################
# Put together build options for the WiredTiger extension.
short_description = 'high performance, scalable, production quality, ' + \
    'NoSQL, Open Source extensible platform for data management'
long_description = 'WiredTiger is a ' + short_description + '.\n\n' + \
    open(os.path.join(wt_dir, 'README')).read()
# `get_wiredtiger_versions` / `get_build_path` are defined earlier in the
# original file (not visible in this chunk).
wt_ver, wt_full_ver = get_wiredtiger_versions(wt_dir)
build_path = get_build_path()
# We only need a small set of directories to build a WT library,
# we also include any files at the top level.
source_regex = r'^(?:(?:api|build_posix|ext|lang/python|src|dist)/|[^/]*$)'
# The builtins that we include in this distribution.
builtins = [
    # [ name, libname, instructions ]
    [ 'snappy', 'snappy',
      'Note: a suitable version of snappy can be found at\n' + \
      '  https://github.com/google/snappy/releases/download/' + \
      '1.1.3/snappy-1.1.3.tar.gz\n' + \
      # Fixed: a newline was missing here, so the message previously
      # rendered as "...snappy snappy-develor via: apt-get...".
      'It can be installed via: yum install snappy snappy-devel\n' + \
      'or via: apt-get install libsnappy-dev' ],
    [ 'zlib', 'z',
      'Need to install zlib\n' + \
      'It can be installed via: apt-get install zlib1g' ]
]
builtin_names = [b[0] for b in builtins]
builtin_libraries = [b[1] for b in builtins]
# Here's the configure/make operations we perform before the python extension
# is linked.
configure_cmds = [
    './makemake --clean-and-make',
    './reconf',
    # force building a position independent library; it will be linked
    # into a single shared library with the SWIG interface code.
    'CFLAGS="${CFLAGS:-} -fPIC -DPIC" ' + \
    '../configure --enable-python --with-builtins=' + ','.join(builtin_names)
]
# build all the builtins, at the moment they are all compressors.
make_cmds = []
for name in builtin_names:
    make_cmds.append('(cd ext/compressors/' + name + '/; make)')
make_cmds.append('make libwiredtiger.la')
inc_paths = [ os.path.join(build_dir, 'src', 'include'), build_dir, '.' ]
lib_paths = [ '.' ] # wiredtiger.so is moved into the top level directory
# `check_needed_dependencies` / `get_compile_flags` are defined earlier in
# the original file (not visible in this chunk).
check_needed_dependencies(builtins, inc_paths, lib_paths)
cppflags, cflags, ldflags = get_compile_flags(inc_paths, lib_paths)
# If we are creating a source distribution, create a staging directory
# with just the right sources. Put the result in the python dist directory.
if pip_command == 'sdist':
    # `source_filter` / `get_sources_curdir` are defined earlier in the
    # original file (not visible in this chunk).
    sources, movers = source_filter(get_sources_curdir())
    stage_dir = os.path.join(python_rel_dir, 'stage')
    shutil.rmtree(stage_dir, True)
    os.makedirs(stage_dir)
    shutil.copy2(this_abs_script, os.path.join(stage_dir, 'setup.py'))
    for f in sources:
        d = os.path.join(stage_dir, os.path.dirname(f))
        if not os.path.isdir(d):
            os.makedirs(d)
        if f in movers:
            src = movers[f]
        else:
            src = f
        # Symlinks are not followed in setup, we need to use real files.
        shutil.copy2(src, os.path.join(stage_dir, f))
    os.chdir(stage_dir)
    sys.argv.append('--dist-dir=' + os.path.join('..', 'dist'))
else:
    # Not building an sdist: compile only the SWIG-generated wrapper.
    sources = [ os.path.join(python_rel_dir, 'wiredtiger_wrap.c') ]
# Extension linking the SWIG wrapper against the static WiredTiger library
# and the builtin compressor libraries.
wt_ext = Extension('_wiredtiger',
    sources = sources,
    extra_compile_args = cflags + cppflags,
    extra_link_args = ldflags,
    libraries = builtin_libraries,
    extra_objects = [ os.path.join(build_dir, '.libs', 'libwiredtiger.a') ],
    include_dirs = inc_paths,
    library_dirs = lib_paths,
)
extensions = [ wt_ext ]
# Build environment; presumably consumed by the custom command classes
# defined earlier in the original file -- TODO confirm.
env = { "CFLAGS" : ' '.join(cflags),
        "CPPFLAGS" : ' '.join(cppflags),
        "LDFLAGS" : ' '.join(ldflags),
        "PATH" : build_path }
# Hand off to setuptools: package metadata plus the single binary extension.
setup(
    name = 'wiredtiger',
    version = wt_full_ver,
    author = 'The WiredTiger Development Team, part of MongoDB',
    author_email = 'info@wiredtiger.com',
    description = short_description,
    license='GPL2,GPL3,Commercial',
    long_description = long_description,
    url = 'http://source.wiredtiger.com/',
    keywords = 'scalable NoSQL database datastore engine open source',
    packages = ['wiredtiger'],
    ext_package = 'wiredtiger',
    ext_modules = extensions,
    include_package_data = True,
    # BinaryDistribution / WTInstall / WTBuildExt are custom classes defined
    # earlier in the original file (not visible in this chunk).
    distclass = BinaryDistribution,
    package_dir = { 'wiredtiger' : '.' },
    cmdclass = { 'install': WTInstall, 'build_ext': WTBuildExt },
    package_data = {
        'wiredtiger' : [ wt_swig_lib_name, '*.py' ]
    },
    classifiers=[
        'Intended Audience :: Developers',
        'Programming Language :: C',
        'Programming Language :: C++',
        'Programming Language :: Python',
        'Programming Language :: Java',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: POSIX',
        'Operating System :: POSIX :: BSD',
        'Operating System :: POSIX :: Linux',
        'Operating System :: POSIX :: SunOS/Solaris',
    ]
)
# Clean up the staging directory once the sdist has been produced.
# NOTE(review): staging was created at lang/python/stage relative to wt_dir,
# while this removes this_dir/'stage' -- verify both refer to the same path.
if pip_command == 'sdist':
    shutil.rmtree(os.path.join(this_dir, 'stage'))
| 39.202934 | 81 | 0.641574 |
36e6a531b83457a4c48394e73a9fc94d96c25f64 | 77 | py | Python | 2022-02-24-ftx-rest-api-python/local_settings.py | georgehaan/analyzingalpha | f1f821e8d74d64addf410bfd205cb089ddf5517e | [
"Unlicense"
] | null | null | null | 2022-02-24-ftx-rest-api-python/local_settings.py | georgehaan/analyzingalpha | f1f821e8d74d64addf410bfd205cb089ddf5517e | [
"Unlicense"
] | null | null | null | 2022-02-24-ftx-rest-api-python/local_settings.py | georgehaan/analyzingalpha | f1f821e8d74d64addf410bfd205cb089ddf5517e | [
"Unlicense"
] | null | null | null | ftxus = {
'api_key':'YOUR_API_KEY',
'api_secret':'YOUR_API_SECRET'
}
| 15.4 | 34 | 0.636364 |
36e87b1e11d644470443480a35f8b9e8b72438cd | 4,387 | py | Python | src/rechub/parameters.py | yusanshi/easy-rec | 86db0bbd1eb0caf94c4b0ec4198bf49e64f65f24 | [
"MIT"
] | null | null | null | src/rechub/parameters.py | yusanshi/easy-rec | 86db0bbd1eb0caf94c4b0ec4198bf49e64f65f24 | [
"MIT"
] | null | null | null | src/rechub/parameters.py | yusanshi/easy-rec | 86db0bbd1eb0caf94c4b0ec4198bf49e64f65f24 | [
"MIT"
] | null | null | null | import argparse
from distutils.util import strtobool
| 38.823009 | 99 | 0.56713 |
36e9553b230e4e00a0c8f9a0c28cdd825854c4a3 | 4,955 | py | Python | course_difficulty.py | ewang26/dailytimedschedule | 1d891828af67caab47ef6286051da7e84b980b2a | [
"MIT"
] | null | null | null | course_difficulty.py | ewang26/dailytimedschedule | 1d891828af67caab47ef6286051da7e84b980b2a | [
"MIT"
] | null | null | null | course_difficulty.py | ewang26/dailytimedschedule | 1d891828af67caab47ef6286051da7e84b980b2a | [
"MIT"
] | null | null | null | # Koki Kapoor
# CSC 630
# Course Difficulty.py file
# have each homework assignment be ranked based on difficulty of the course and on difficulty of the assignment itself
# list_of_courses_and_difficulty only takes into consideration the difficulty of the course, not the assignment
from array import *
# install numpy in terminal with:
# dictionaries mapping difficulty level to their assigned descriptions
# commented this out to redefine the course difficulty and workload separately
"""
difficulty_levels = {
1:'Easy and quick',
2:'Easy but time-consuming',
3:'Medium',
4:'Hard material, quick work',
5:'Hard, tedious, and time-consuming'
}
"""
# difficulty_levels2 refers to the difficulty of the course's material, not how much time it takes
# ie, there can be a very time-consuming course that has easy material
difficulty_levels = {
    1:'Easy',
    2:'Easy-Medium',
    3:'Medium',
    4:'Medium-Hard',
    5:'Hard'
}
#dictionary mapping the amount of time taken on a course's workload (which includes studying, tests, etc)
# NOTE(review): the 5.9-6 hour range falls between labels 5 and 6 and is not
# covered by any level -- confirm the intended boundaries.
workload_levels = {
    1:'1-1.9 hours',
    2:'1.9-2.9 hours',
    3:'2.9-3.9 hours',
    4:'3.9-4.9 hours',
    5:'4.9-5.9 hours',
    6:'6+ hours'
}
if __name__ == "__main__":
    # NOTE(review): these helpers are defined elsewhere in the original file
    # (not visible in this chunk).
    set_courses_and_difficulties()
    coursecheck()
# A refined way to obtain the "difficulty of an assignment in a numerical form
# The course difficulty can weigh heavier and then the assignment diffculty can be added
# The modified parameters of this method are difficulty_level (of the course material) and workload_level (how much time you need to spend on the course)
| 36.703704 | 166 | 0.70333 |
36e98b5ce7e26ba1ac762413ca0565df029c2001 | 1,826 | py | Python | src/esss_fix_format/hooks.py | nicoddemus/esss_fix_format | 1f46e0d1c05cc88fd47be2f0b0f120d8265a759e | [
"MIT"
] | 20 | 2016-12-05T12:09:27.000Z | 2021-11-23T21:57:59.000Z | src/esss_fix_format/hooks.py | nicoddemus/esss_fix_format | 1f46e0d1c05cc88fd47be2f0b0f120d8265a759e | [
"MIT"
] | 43 | 2016-07-20T12:21:16.000Z | 2022-03-14T20:31:07.000Z | src/esss_fix_format/hooks.py | nicoddemus/esss_fix_format | 1f46e0d1c05cc88fd47be2f0b0f120d8265a759e | [
"MIT"
] | 8 | 2016-09-27T20:02:44.000Z | 2021-04-16T14:58:08.000Z | import abc
import textwrap
def _add_hook(hook):
    """Register *hook* in the global ``_HOOKS`` registry.

    Raises ``KeyError`` when a hook with the same name is already registered.
    """
    hook_name = hook.name()
    if hook_name in _HOOKS:
        raise KeyError(f"A hook named '{hook_name}' already exists")
    _HOOKS[hook_name] = hook
# All hooks available by default
_HOOKS = {}
# `FixFormatGitHook` is defined earlier in the original file (not visible in
# this chunk); register it as the single built-in hook.
_add_hook(FixFormatGitHook())
def get_default_hook(name):
    """
    Return the built-in Git hook registered under ``name``.

    Raises ``KeyError`` if no hook with that name is registered.

    :param unicode name: Name of a hook.
    :rtype: GitHook
    :return: A Git hook object.
    """
    return _HOOKS[name]
| 23.714286 | 99 | 0.557503 |
36e9aa3443706da87ee4f539703a4f5d9195cf72 | 166 | py | Python | Solutions/print all subset.py | Adityasriram0901/Python-Thunder | 192920c48092ce1783d56c7b45cdd7e7a50246fa | [
"MIT"
] | 81 | 2020-09-25T15:02:11.000Z | 2020-10-12T14:20:31.000Z | Solutions/print all subset.py | Adityasriram0901/Python-Thunder | 192920c48092ce1783d56c7b45cdd7e7a50246fa | [
"MIT"
] | 196 | 2020-09-25T13:52:13.000Z | 2020-10-12T20:20:00.000Z | Solutions/print all subset.py | Adityasriram0901/Python-Thunder | 192920c48092ce1783d56c7b45cdd7e7a50246fa | [
"MIT"
] | 209 | 2020-09-25T16:15:46.000Z | 2020-10-12T20:08:08.000Z | a = [1, 2, 3, 4]
# `subset` is defined earlier in the original file (not visible in this
# chunk); presumably prints all subsets of `a` -- TODO confirm.
print(subset(a, n=4))
| 13.833333 | 52 | 0.415663 |
36eb37aac32d06e68b8f0f03ae15c8cd3b04fb1f | 49 | py | Python | trees/dasgupta/__init__.py | islamazhar/trees | 502565c5bf02503c7bece09cddd93f9368da02c3 | [
"MIT"
] | null | null | null | trees/dasgupta/__init__.py | islamazhar/trees | 502565c5bf02503c7bece09cddd93f9368da02c3 | [
"MIT"
] | null | null | null | trees/dasgupta/__init__.py | islamazhar/trees | 502565c5bf02503c7bece09cddd93f9368da02c3 | [
"MIT"
] | null | null | null | from trees.dasgupta.costtree import DasguptaTree
| 24.5 | 48 | 0.877551 |
36eb6ff512aad2f53a0ace07b5c62237d039ba4a | 11,810 | py | Python | examples/references/segmentation/pascal_voc2012/code/scripts/training.py | 1nF0rmed/ignite | cfcd667e3cb9d67b67d928d12fa3ccdac05f7a3e | [
"BSD-3-Clause"
] | null | null | null | examples/references/segmentation/pascal_voc2012/code/scripts/training.py | 1nF0rmed/ignite | cfcd667e3cb9d67b67d928d12fa3ccdac05f7a3e | [
"BSD-3-Clause"
] | null | null | null | examples/references/segmentation/pascal_voc2012/code/scripts/training.py | 1nF0rmed/ignite | cfcd667e3cb9d67b67d928d12fa3ccdac05f7a3e | [
"BSD-3-Clause"
] | null | null | null | # This a training script launched with py_config_runner
# It should obligatory contain `run(config, **kwargs)` method
import sys
from collections.abc import Mapping
from pathlib import Path
import torch
from apex import amp
from dataflow.datasets import VOCSegmentationOpencv
from py_config_runner.config_utils import TRAINVAL_CONFIG, assert_config, get_params
from py_config_runner.utils import set_seed
from utils import exp_tracking
from utils.handlers import predictions_gt_images_handler
import ignite
import ignite.distributed as idist
from ignite.contrib.engines import common
from ignite.engine import Engine, Events, create_supervised_evaluator
from ignite.handlers import DiskSaver
from ignite.metrics import ConfusionMatrix, IoU, mIoU
from ignite.utils import setup_logger
# Adds "code" folder to python path
sys.path.insert(0, Path(__file__).parent.parent.as_posix())
def run(config, **kwargs):
    """This is the main method to run the training. As this training script is launched with `py_config_runner`
    it should obligatory contain `run(config, **kwargs)` method.
    """
    # Fixed: the assert message used to be the boolean itself, which only
    # printed "False" on failure.
    assert torch.cuda.is_available(), "Training requires a CUDA-capable GPU"
    assert torch.backends.cudnn.enabled, "Nvidia/Amp requires cudnn backend to be enabled."

    with idist.Parallel(backend="nccl") as parallel:

        logger = setup_logger(name="Pascal-VOC12 Training", distributed_rank=idist.get_rank())

        assert_config(config, TRAINVAL_CONFIG)
        # The following attributes are automatically added by py_config_runner
        assert hasattr(config, "config_filepath") and isinstance(config.config_filepath, Path)
        assert hasattr(config, "script_filepath") and isinstance(config.script_filepath, Path)

        # Set up ClearML experiment tracking on the rank-0 process only.
        if idist.get_rank() == 0 and exp_tracking.has_clearml:
            try:
                from clearml import Task
            except ImportError:
                # Backwards-compatibility for legacy Trains SDK
                from trains import Task

            task = Task.init("Pascal-VOC12 Training", config.config_filepath.stem)
            task.connect_configuration(config.config_filepath.as_posix())

        log_basic_info(logger, config)

        config.output_path = Path(exp_tracking.get_output_path())
        # dump python files to reproduce the run
        exp_tracking.log_artifact(config.config_filepath.as_posix())
        exp_tracking.log_artifact(config.script_filepath.as_posix())
        exp_tracking.log_params(get_params(config, TRAINVAL_CONFIG))

        try:
            parallel.run(training, config, logger=logger)
        except KeyboardInterrupt:
            # Fixed typo: "Catched" -> "Caught".
            logger.info("Caught KeyboardInterrupt -> exit")
        except Exception as e:  # noqa
            logger.exception("")
            # Bare raise preserves the original traceback without adding
            # a re-raise frame.
            raise
| 35.896657 | 120 | 0.676545 |
36ec49281113de21af1d91215fc919058901c862 | 17,387 | py | Python | src/instaBot.py | pabloqb2000/py-instabot | 2bfdd51d588050d370d069db5d0352d29fd4560d | [
"Apache-2.0"
] | null | null | null | src/instaBot.py | pabloqb2000/py-instabot | 2bfdd51d588050d370d069db5d0352d29fd4560d | [
"Apache-2.0"
] | null | null | null | src/instaBot.py | pabloqb2000/py-instabot | 2bfdd51d588050d370d069db5d0352d29fd4560d | [
"Apache-2.0"
] | null | null | null | from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from time import sleep
from tqdm import tqdm
import random
from EmailSender import *
| 38.897092 | 256 | 0.572554 |
36ede9c0901ffceceb90ea9e2eb43efe24230727 | 813 | py | Python | BattleCity-NES/main.py | iOsnaaente/Kata-train_Code | 22cdf9d087bad879875c1f70029bda0771242c50 | [
"MIT"
] | null | null | null | BattleCity-NES/main.py | iOsnaaente/Kata-train_Code | 22cdf9d087bad879875c1f70029bda0771242c50 | [
"MIT"
] | null | null | null | BattleCity-NES/main.py | iOsnaaente/Kata-train_Code | 22cdf9d087bad879875c1f70029bda0771242c50 | [
"MIT"
] | null | null | null | #! usr/bin/dev python
from stages import Stages  # loads the game stages
from code import tanks  # handles the player's tanks
from images import imagens  # game images
import pygame
import random
# Board is 20x20 tiles of 32 px each.
screen_Dimension = [32*20, 32*20]
pygame.init()
screen = pygame.display.set_mode(screen_Dimension)
pygame.display.set_caption("My_Poor_NES_Batlle_City")
clock = pygame.time.Clock()

# Load stage 1 and place the player's tank.
Fase_1 = Stages.Stages(screen)
Fase_1.readStage(1)
Tank = tanks.PlayerTank(imagens.blueTank, [64, 64], screen)

while True:
    screen.fill([0, 0, 0])
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            # Fixed: previously only pygame.quit() was called and the loop
            # kept running, so the next pygame call crashed. Exit cleanly.
            pygame.quit()
            raise SystemExit
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_ESCAPE:
                pygame.quit()
                raise SystemExit
        Tank.move(event)
    Fase_1.plotStage()
    Tank.plot()
    pygame.display.update()
    clock.tick(60)
| 16.591837 | 62 | 0.719557 |
36ee554e3410e965f70042cc4e96c4361520515d | 6,271 | py | Python | n_queens.py | lkk7/n-queens-genetic-solver | d8b87b49970e58d4599618eb014c1b12e4f471fa | [
"MIT"
] | null | null | null | n_queens.py | lkk7/n-queens-genetic-solver | d8b87b49970e58d4599618eb014c1b12e4f471fa | [
"MIT"
] | null | null | null | n_queens.py | lkk7/n-queens-genetic-solver | d8b87b49970e58d4599618eb014c1b12e4f471fa | [
"MIT"
] | null | null | null | from typing import Dict
from numba import njit
import numpy as np
import matplotlib.pyplot as plt
plt.rcParams['image.cmap'] = 'binary'
def read_parameters(filename: str) -> Dict[str, float]:
    """Read parameters from a file to a dictionary and return it.

    Each non-blank line is expected to look like ``name = value``.
    The value is stored as an ``int`` when possible, as a ``float``
    otherwise.

    :param filename: path to the parameters file.
    :raises RuntimeError: if the file does not define exactly 6 parameters.
    """
    parameters = {}
    with open(filename, "r") as file:
        for line in file:
            line_split = line.split()
            # Skip blank *and* whitespace-only lines; the previous check
            # (line != '\n') crashed with IndexError on lines of spaces.
            if not line_split:
                continue
            try:
                parameters[line_split[0]] = int(line_split[2])
            except ValueError:
                parameters[line_split[0]] = float(line_split[2])
    if len(parameters) != 6:
        raise RuntimeError("Incorrect list of parameters in " + filename)
    return parameters
def random_population(population_size: int, board_size: int) -> np.ndarray:
    """Return a random population of solutions."""
    # Each member is a random permutation of 0..board_size-1,
    # i.e. one queen per column and per row.
    members = []
    for _ in range(population_size):
        members.append(np.random.permutation(board_size))
    return np.array(members, dtype=np.int32)
def plot_genome_expression(genome: np.ndarray) -> None:
    """Plot a solution represented by the given genome."""
    n = genome.shape[0]
    # Build an n x n board with a 1 wherever a queen stands.
    board = np.zeros((n, n))
    board[np.arange(n), genome] = 1
    _, axes = plt.subplots(figsize=(10, 10))
    axes.imshow(board, cmap='Purples')
    axes.grid(True)
    axes.set_xlim(-0.5, n - 0.5)
    axes.set_ylim(-0.5, n - 0.5)
    # Grid lines between cells, no tick labels.
    half_steps = [cell + 0.5 for cell in range(n)]
    axes.set_xticks(half_steps)
    axes.set_yticks(half_steps)
    axes.set_xticklabels([])
    axes.set_yticklabels([])
    plt.tick_params(axis='both', which='both', bottom=False, left=False)
    plt.title("$N = {}$".format(n), size=15)
    plt.show()
def main() -> None:
    # Entry point: run the genetic algorithm for the n-queens problem.
    # NOTE(review): `selection`, `crossover`, `mutation` and `fitness` are
    # defined elsewhere in the original file (not visible in this chunk);
    # fitness is presumably normalized so 1.0 marks a solved board -- confirm.
    parameters = read_parameters('parameters.txt')
    population = random_population(parameters['pop_size'], parameters['N'])
    generation_data = []  # per generation: [index, mean fitness, best fitness]
    best_member_id = 0
    winner_gen = parameters['generations']  # generation where a winner appeared
    for i in range(1, parameters['generations'] + 1):
        selected = selection(population, parameters['n_best'])
        population = crossover(population, selected)
        mutation(population)
        gen_fit = fitness(population)
        best_member_id = np.argmax(gen_fit)
        generation_data.append([i, gen_fit.mean(), gen_fit[best_member_id]])
        if gen_fit[best_member_id] == 1.0:
            # Perfect fitness: stop early and report the solution.
            print("\nWinner (gen. {}):\n{}".format(
                i, str(population[best_member_id])))
            winner_gen = i
            break
        if i % 50 == 0:
            # Progress report every 50 generations.
            print("Gen", i)
    if parameters['plot_winner_genome']:
        plot_genome_expression(population[best_member_id])
if __name__ == "__main__":
    main()
| 38.237805 | 76 | 0.591293 |
36f0a039978f0025fa6da35feb5807f99a23cd6a | 1,362 | py | Python | tests/plugins/test_ustreamtv.py | RomanKornev/streamlink | acdefee0822b9c10628b91a166f9abe084e44800 | [
"BSD-2-Clause"
] | 2 | 2019-09-17T15:01:47.000Z | 2019-09-21T16:26:50.000Z | tests/plugins/test_ustreamtv.py | RomanKornev/streamlink | acdefee0822b9c10628b91a166f9abe084e44800 | [
"BSD-2-Clause"
] | 1 | 2020-06-02T02:36:30.000Z | 2020-06-02T02:36:30.000Z | tests/plugins/test_ustreamtv.py | bumplzz69/streamlink | 34abc43875d7663ebafa241573dece272e93d88b | [
"BSD-2-Clause"
] | 1 | 2020-08-12T08:27:22.000Z | 2020-08-12T08:27:22.000Z | import unittest
from streamlink import Streamlink
try:
from unittest.mock import ANY, MagicMock, call
except ImportError:
from mock import ANY, MagicMock, call
from streamlink.plugins.ustreamtv import UStreamTV
| 30.954545 | 105 | 0.64978 |
36f2445925b38eafa6fa76d91317ba20cacff47f | 1,241 | py | Python | test/unit/object/test_collaboration_allowlist_entry.py | box/box-python-sdk | 5c6766a17bac0315bede7a1f5909c912d194a793 | [
"Apache-2.0"
] | 367 | 2015-02-10T05:55:45.000Z | 2022-03-16T23:39:58.000Z | test/unit/object/test_collaboration_allowlist_entry.py | box/box-python-sdk | 5c6766a17bac0315bede7a1f5909c912d194a793 | [
"Apache-2.0"
] | 686 | 2015-02-10T01:21:28.000Z | 2022-03-31T11:40:22.000Z | test/unit/object/test_collaboration_allowlist_entry.py | box/box-python-sdk | 5c6766a17bac0315bede7a1f5909c912d194a793 | [
"Apache-2.0"
] | 260 | 2015-02-16T17:35:06.000Z | 2022-03-20T17:45:28.000Z | # coding: utf-8
from __future__ import unicode_literals, absolute_import
from boxsdk.config import API
| 42.793103 | 118 | 0.767929 |
36f7aca45d40f82d8142db3d4804603a2675f264 | 1,463 | py | Python | jumpy/setup.py | bharadwaj1098/brax | 3108a0535b9b59725c97ef35732ed0378c0fd5cc | [
"Apache-2.0"
] | 1,162 | 2021-06-03T20:15:05.000Z | 2022-03-31T19:53:06.000Z | jumpy/setup.py | bharadwaj1098/brax | 3108a0535b9b59725c97ef35732ed0378c0fd5cc | [
"Apache-2.0"
] | 160 | 2021-06-05T02:32:39.000Z | 2022-03-31T11:39:58.000Z | jumpy/setup.py | bharadwaj1098/brax | 3108a0535b9b59725c97ef35732ed0378c0fd5cc | [
"Apache-2.0"
] | 117 | 2021-06-04T17:18:21.000Z | 2022-03-30T18:04:48.000Z | # Copyright 2021 The Brax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""setup.py for Jumpy.
Install for development:
  pip install -e .
"""
from setuptools import setup
setup(
    name="brax-jumpy",
    version="0.0.1",
    description=("Common backend for JAX or numpy."),
    author="Brax Authors",
    author_email="no-reply@google.com",
    # README.md is read relative to the current working directory, so the
    # package must be built from the directory containing this file.
    long_description=open("README.md").read(),
    long_description_content_type="text/markdown",
    url="http://github.com/google/brax",
    license="Apache 2.0",
    # The distribution ships a single top-level `jumpy` module.
    py_modules=["jumpy"],
    install_requires=[
        "jax",
        "jaxlib",
        "numpy",
    ],
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
)
| 29.857143 | 74 | 0.673274 |
36f7ebcb27998b0af7e58a152f1c6385a165aa9d | 33,411 | py | Python | simulation/Distance2.py | vivirodrigues/carrinheiros-heuristics | 92c8c4a8384f8e3a86e9c53b41bcb2ab001de5f5 | [
"MIT"
] | null | null | null | simulation/Distance2.py | vivirodrigues/carrinheiros-heuristics | 92c8c4a8384f8e3a86e9c53b41bcb2ab001de5f5 | [
"MIT"
] | null | null | null | simulation/Distance2.py | vivirodrigues/carrinheiros-heuristics | 92c8c4a8384f8e3a86e9c53b41bcb2ab001de5f5 | [
"MIT"
] | null | null | null | import json
import scipy.stats
import matplotlib.pyplot as plt
import scipy.stats as st
from decimal import Decimal, ROUND_HALF_UP
from xml.dom import minidom
import numpy as np
files = [
'../data/results/m38.49999603681327_m12.962358080558504_m38.47398437502447_m12.932893255527242_0_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.500671812913836_m12.96339552158351_m38.47352508877093_m12.932765988234031_1_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50194412971296_m12.961982380453897_m38.472997875909336_m12.933973466644028_2_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.5014109499298_m12.960872502034725_m38.47423998586774_m12.935033565792027_3_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50102106363388_m12.962638092503209_m38.474525144844954_m12.932374557163948_4_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.49922134252434_m12.962995897766534_m38.47172032605714_m12.933032796134958_5_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.49989452416794_m12.961981434109553_m38.47288011285585_m12.932171368514155_6_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50237887905613_m12.960648819826947_m38.472913582758295_m12.934273386456828_7_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.5015370998344_m12.962186005531471_m38.47261478466609_m12.934002015361491_8_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50073006631474_m12.961333960783888_m38.4725327574897_m12.932373724953635_9_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50096584572687_m12.96121100042776_m38.47440076442133_m12.934017719276726_10_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50082829471482_m12.960720017172312_m38.47384043859295_m12.933596799909374_11_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.501118552381065_m12.962947784137462_m38.47426226643149_m12.932564078786635_12_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.502373456830234_m12.962333491657414_m38.47477812160141_m12.93271906374045_13_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50148403583942_m12.965290796965846_m38.471909395581456_m12.932729360653218_14_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.501890924160584_m12.961062102765782_m38.4732392389171_m12.933884816602236_15_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.5007597052321_m12.961099590741043_m38.473517022103756_m12.933269493665131_16_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50151426278066_m12.96224952417061_m38.473343947418165_m12.932595128870267_17_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50340379765633_m12.963068504924866_m38.473898022861405_m12.932939179700924_18_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.501402782516365_m12.962743981859667_m38.47361068224981_m12.929892203606808_19_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.500951062259055_m12.964628446152132_m38.47375669394401_m12.93455351878407_20_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.500486678608006_m12.963212145332431_m38.474758327361364_m12.933328833777356_21_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50234447884447_m12.961648999633914_m38.474661277554_m12.93489642987398_22_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50229159113205_m12.961490473565357_m38.474209563384555_m12.933428060221484_23_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.500568338650666_m12.963562146885746_m38.47357849097421_m12.93225101151055_24_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50115701483925_m12.9612635544437_m38.47509217365817_m12.933188948092502_25_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50186554346796_m12.961718758432754_m38.47355380440904_m12.934289622568668_26_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50165434807298_m12.96187628063375_m38.47332172286755_m12.933277161490693_27_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50177737556065_m12.962596650290932_m38.472904517360526_m12.933331456516722_28_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50009702898103_m12.96036292373261_m38.47412281703678_m12.934711892250165_29_30_length_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.500734794836475_m12.961295117029927_m38.473498428492356_m12.932937589096973_30_30_length_heuristic_SPFA_nearest_neighbor.xml'
]
files_i = [
#'../../data/results/m43.96267779776494_m19.944747838679202_m43.929659815391865_m19.905049264605925_0_distance_heuristic_SPFA_ci_distance',
'../data/results/m38.49999603681327_m12.962358080558504_m38.47398437502447_m12.932893255527242_0_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.500671812913836_m12.96339552158351_m38.47352508877093_m12.932765988234031_1_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50194412971296_m12.961982380453897_m38.472997875909336_m12.933973466644028_2_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.5014109499298_m12.960872502034725_m38.47423998586774_m12.935033565792027_3_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50102106363388_m12.962638092503209_m38.474525144844954_m12.932374557163948_4_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.49922134252434_m12.962995897766534_m38.47172032605714_m12.933032796134958_5_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.49989452416794_m12.961981434109553_m38.47288011285585_m12.932171368514155_6_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50237887905613_m12.960648819826947_m38.472913582758295_m12.934273386456828_7_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.5015370998344_m12.962186005531471_m38.47261478466609_m12.934002015361491_8_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50073006631474_m12.961333960783888_m38.4725327574897_m12.932373724953635_9_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50096584572687_m12.96121100042776_m38.47440076442133_m12.934017719276726_10_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50082829471482_m12.960720017172312_m38.47384043859295_m12.933596799909374_11_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.501118552381065_m12.962947784137462_m38.47426226643149_m12.932564078786635_12_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.502373456830234_m12.962333491657414_m38.47477812160141_m12.93271906374045_13_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50148403583942_m12.965290796965846_m38.471909395581456_m12.932729360653218_14_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.501890924160584_m12.961062102765782_m38.4732392389171_m12.933884816602236_15_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.5007597052321_m12.961099590741043_m38.473517022103756_m12.933269493665131_16_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50151426278066_m12.96224952417061_m38.473343947418165_m12.932595128870267_17_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50340379765633_m12.963068504924866_m38.473898022861405_m12.932939179700924_18_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.501402782516365_m12.962743981859667_m38.47361068224981_m12.929892203606808_19_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.500951062259055_m12.964628446152132_m38.47375669394401_m12.93455351878407_20_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.500486678608006_m12.963212145332431_m38.474758327361364_m12.933328833777356_21_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50234447884447_m12.961648999633914_m38.474661277554_m12.93489642987398_22_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50229159113205_m12.961490473565357_m38.474209563384555_m12.933428060221484_23_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.500568338650666_m12.963562146885746_m38.47357849097421_m12.93225101151055_24_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50115701483925_m12.9612635544437_m38.47509217365817_m12.933188948092502_25_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50186554346796_m12.961718758432754_m38.47355380440904_m12.934289622568668_26_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50165434807298_m12.96187628063375_m38.47332172286755_m12.933277161490693_27_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50177737556065_m12.962596650290932_m38.472904517360526_m12.933331456516722_28_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50009702898103_m12.96036292373261_m38.47412281703678_m12.934711892250165_29_30_length_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.500734794836475_m12.961295117029927_m38.473498428492356_m12.932937589096973_30_30_length_heuristic_SPFA_closest_insertion.xml'
]
files_d = [#'../../data/results/m43.96267779776494_m19.944747838679202_m43.929659815391865_m19.905049264605925_0_distance_heuristic_SPFA_fi_distance'
'../data/results/m38.49999603681327_m12.962358080558504_m38.47398437502447_m12.932893255527242_0_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.500671812913836_m12.96339552158351_m38.47352508877093_m12.932765988234031_1_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50194412971296_m12.961982380453897_m38.472997875909336_m12.933973466644028_2_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.5014109499298_m12.960872502034725_m38.47423998586774_m12.935033565792027_3_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50102106363388_m12.962638092503209_m38.474525144844954_m12.932374557163948_4_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.49922134252434_m12.962995897766534_m38.47172032605714_m12.933032796134958_5_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.49989452416794_m12.961981434109553_m38.47288011285585_m12.932171368514155_6_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50237887905613_m12.960648819826947_m38.472913582758295_m12.934273386456828_7_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.5015370998344_m12.962186005531471_m38.47261478466609_m12.934002015361491_8_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50073006631474_m12.961333960783888_m38.4725327574897_m12.932373724953635_9_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50096584572687_m12.96121100042776_m38.47440076442133_m12.934017719276726_10_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50082829471482_m12.960720017172312_m38.47384043859295_m12.933596799909374_11_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.501118552381065_m12.962947784137462_m38.47426226643149_m12.932564078786635_12_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.502373456830234_m12.962333491657414_m38.47477812160141_m12.93271906374045_13_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50148403583942_m12.965290796965846_m38.471909395581456_m12.932729360653218_14_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.501890924160584_m12.961062102765782_m38.4732392389171_m12.933884816602236_15_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.5007597052321_m12.961099590741043_m38.473517022103756_m12.933269493665131_16_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50151426278066_m12.96224952417061_m38.473343947418165_m12.932595128870267_17_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50340379765633_m12.963068504924866_m38.473898022861405_m12.932939179700924_18_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.501402782516365_m12.962743981859667_m38.47361068224981_m12.929892203606808_19_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.500951062259055_m12.964628446152132_m38.47375669394401_m12.93455351878407_20_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.500486678608006_m12.963212145332431_m38.474758327361364_m12.933328833777356_21_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50234447884447_m12.961648999633914_m38.474661277554_m12.93489642987398_22_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50229159113205_m12.961490473565357_m38.474209563384555_m12.933428060221484_23_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.500568338650666_m12.963562146885746_m38.47357849097421_m12.93225101151055_24_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50115701483925_m12.9612635544437_m38.47509217365817_m12.933188948092502_25_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50186554346796_m12.961718758432754_m38.47355380440904_m12.934289622568668_26_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50165434807298_m12.96187628063375_m38.47332172286755_m12.933277161490693_27_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50177737556065_m12.962596650290932_m38.472904517360526_m12.933331456516722_28_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50009702898103_m12.96036292373261_m38.47412281703678_m12.934711892250165_29_30_length_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.500734794836475_m12.961295117029927_m38.473498428492356_m12.932937589096973_30_30_length_heuristic_SPFA_further_insertion.xml'
]
files_b = [#'../../data/results/m43.957018117658315_m19.931545102455843_m43.931890481507786_m19.907162672548026_0_distance_heuristic_SPFA_nn'
'../data/results/m38.49999603681327_m12.962358080558504_m38.47398437502447_m12.932893255527242_0_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.500671812913836_m12.96339552158351_m38.47352508877093_m12.932765988234031_1_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50194412971296_m12.961982380453897_m38.472997875909336_m12.933973466644028_2_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.5014109499298_m12.960872502034725_m38.47423998586774_m12.935033565792027_3_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50102106363388_m12.962638092503209_m38.474525144844954_m12.932374557163948_4_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.49922134252434_m12.962995897766534_m38.47172032605714_m12.933032796134958_5_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.49989452416794_m12.961981434109553_m38.47288011285585_m12.932171368514155_6_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50237887905613_m12.960648819826947_m38.472913582758295_m12.934273386456828_7_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.5015370998344_m12.962186005531471_m38.47261478466609_m12.934002015361491_8_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50073006631474_m12.961333960783888_m38.4725327574897_m12.932373724953635_9_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50096584572687_m12.96121100042776_m38.47440076442133_m12.934017719276726_10_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50082829471482_m12.960720017172312_m38.47384043859295_m12.933596799909374_11_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.501118552381065_m12.962947784137462_m38.47426226643149_m12.932564078786635_12_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.502373456830234_m12.962333491657414_m38.47477812160141_m12.93271906374045_13_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50148403583942_m12.965290796965846_m38.471909395581456_m12.932729360653218_14_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.501890924160584_m12.961062102765782_m38.4732392389171_m12.933884816602236_15_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.5007597052321_m12.961099590741043_m38.473517022103756_m12.933269493665131_16_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50151426278066_m12.96224952417061_m38.473343947418165_m12.932595128870267_17_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50340379765633_m12.963068504924866_m38.473898022861405_m12.932939179700924_18_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.501402782516365_m12.962743981859667_m38.47361068224981_m12.929892203606808_19_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.500951062259055_m12.964628446152132_m38.47375669394401_m12.93455351878407_20_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.500486678608006_m12.963212145332431_m38.474758327361364_m12.933328833777356_21_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50234447884447_m12.961648999633914_m38.474661277554_m12.93489642987398_22_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50229159113205_m12.961490473565357_m38.474209563384555_m12.933428060221484_23_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.500568338650666_m12.963562146885746_m38.47357849097421_m12.93225101151055_24_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50115701483925_m12.9612635544437_m38.47509217365817_m12.933188948092502_25_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50186554346796_m12.961718758432754_m38.47355380440904_m12.934289622568668_26_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50165434807298_m12.96187628063375_m38.47332172286755_m12.933277161490693_27_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50177737556065_m12.962596650290932_m38.472904517360526_m12.933331456516722_28_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.50009702898103_m12.96036292373261_m38.47412281703678_m12.934711892250165_29_30_weight_heuristic_SPFA_nearest_neighbor.xml',
'../data/results/m38.500734794836475_m12.961295117029927_m38.473498428492356_m12.932937589096973_30_30_weight_heuristic_SPFA_nearest_neighbor.xml'
]
files_i_b = [#'../../data/results/m43.957018117658315_m19.931545102455843_m43.931890481507786_m19.907162672548026_0_distance_heuristic_SPFA_nn'
'../data/results/m38.49999603681327_m12.962358080558504_m38.47398437502447_m12.932893255527242_0_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.500671812913836_m12.96339552158351_m38.47352508877093_m12.932765988234031_1_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50194412971296_m12.961982380453897_m38.472997875909336_m12.933973466644028_2_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.5014109499298_m12.960872502034725_m38.47423998586774_m12.935033565792027_3_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50102106363388_m12.962638092503209_m38.474525144844954_m12.932374557163948_4_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.49922134252434_m12.962995897766534_m38.47172032605714_m12.933032796134958_5_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.49989452416794_m12.961981434109553_m38.47288011285585_m12.932171368514155_6_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50237887905613_m12.960648819826947_m38.472913582758295_m12.934273386456828_7_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.5015370998344_m12.962186005531471_m38.47261478466609_m12.934002015361491_8_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50073006631474_m12.961333960783888_m38.4725327574897_m12.932373724953635_9_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50096584572687_m12.96121100042776_m38.47440076442133_m12.934017719276726_10_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50082829471482_m12.960720017172312_m38.47384043859295_m12.933596799909374_11_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.501118552381065_m12.962947784137462_m38.47426226643149_m12.932564078786635_12_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.502373456830234_m12.962333491657414_m38.47477812160141_m12.93271906374045_13_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50148403583942_m12.965290796965846_m38.471909395581456_m12.932729360653218_14_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.501890924160584_m12.961062102765782_m38.4732392389171_m12.933884816602236_15_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.5007597052321_m12.961099590741043_m38.473517022103756_m12.933269493665131_16_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50151426278066_m12.96224952417061_m38.473343947418165_m12.932595128870267_17_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50340379765633_m12.963068504924866_m38.473898022861405_m12.932939179700924_18_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.501402782516365_m12.962743981859667_m38.47361068224981_m12.929892203606808_19_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.500951062259055_m12.964628446152132_m38.47375669394401_m12.93455351878407_20_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.500486678608006_m12.963212145332431_m38.474758327361364_m12.933328833777356_21_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50234447884447_m12.961648999633914_m38.474661277554_m12.93489642987398_22_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50229159113205_m12.961490473565357_m38.474209563384555_m12.933428060221484_23_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.500568338650666_m12.963562146885746_m38.47357849097421_m12.93225101151055_24_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50115701483925_m12.9612635544437_m38.47509217365817_m12.933188948092502_25_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50186554346796_m12.961718758432754_m38.47355380440904_m12.934289622568668_26_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50165434807298_m12.96187628063375_m38.47332172286755_m12.933277161490693_27_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50177737556065_m12.962596650290932_m38.472904517360526_m12.933331456516722_28_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.50009702898103_m12.96036292373261_m38.47412281703678_m12.934711892250165_29_30_weight_heuristic_SPFA_closest_insertion.xml',
'../data/results/m38.500734794836475_m12.961295117029927_m38.473498428492356_m12.932937589096973_30_30_weight_heuristic_SPFA_closest_insertion.xml'
]
files_d_b = [#'../../data/results/m43.957018117658315_m19.931545102455843_m43.931890481507786_m19.907162672548026_0_distance_heuristic_SPFA_nn'
'../data/results/m38.49999603681327_m12.962358080558504_m38.47398437502447_m12.932893255527242_0_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.500671812913836_m12.96339552158351_m38.47352508877093_m12.932765988234031_1_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50194412971296_m12.961982380453897_m38.472997875909336_m12.933973466644028_2_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.5014109499298_m12.960872502034725_m38.47423998586774_m12.935033565792027_3_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50102106363388_m12.962638092503209_m38.474525144844954_m12.932374557163948_4_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.49922134252434_m12.962995897766534_m38.47172032605714_m12.933032796134958_5_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.49989452416794_m12.961981434109553_m38.47288011285585_m12.932171368514155_6_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50237887905613_m12.960648819826947_m38.472913582758295_m12.934273386456828_7_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.5015370998344_m12.962186005531471_m38.47261478466609_m12.934002015361491_8_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50073006631474_m12.961333960783888_m38.4725327574897_m12.932373724953635_9_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50096584572687_m12.96121100042776_m38.47440076442133_m12.934017719276726_10_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50082829471482_m12.960720017172312_m38.47384043859295_m12.933596799909374_11_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.501118552381065_m12.962947784137462_m38.47426226643149_m12.932564078786635_12_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.502373456830234_m12.962333491657414_m38.47477812160141_m12.93271906374045_13_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50148403583942_m12.965290796965846_m38.471909395581456_m12.932729360653218_14_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.501890924160584_m12.961062102765782_m38.4732392389171_m12.933884816602236_15_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.5007597052321_m12.961099590741043_m38.473517022103756_m12.933269493665131_16_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50151426278066_m12.96224952417061_m38.473343947418165_m12.932595128870267_17_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50340379765633_m12.963068504924866_m38.473898022861405_m12.932939179700924_18_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.501402782516365_m12.962743981859667_m38.47361068224981_m12.929892203606808_19_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.500951062259055_m12.964628446152132_m38.47375669394401_m12.93455351878407_20_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.500486678608006_m12.963212145332431_m38.474758327361364_m12.933328833777356_21_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50234447884447_m12.961648999633914_m38.474661277554_m12.93489642987398_22_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50229159113205_m12.961490473565357_m38.474209563384555_m12.933428060221484_23_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.500568338650666_m12.963562146885746_m38.47357849097421_m12.93225101151055_24_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50115701483925_m12.9612635544437_m38.47509217365817_m12.933188948092502_25_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50186554346796_m12.961718758432754_m38.47355380440904_m12.934289622568668_26_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50165434807298_m12.96187628063375_m38.47332172286755_m12.933277161490693_27_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50177737556065_m12.962596650290932_m38.472904517360526_m12.933331456516722_28_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.50009702898103_m12.96036292373261_m38.47412281703678_m12.934711892250165_29_30_weight_heuristic_SPFA_further_insertion.xml',
'../data/results/m38.500734794836475_m12.961295117029927_m38.473498428492356_m12.932937589096973_30_30_weight_heuristic_SPFA_further_insertion.xml'
]
values_t = []
values_i = []
values_d = []
values_t_b = []
values_i_b = []
values_d_b = []
for a in range(len(files)):
file = minidom.parse(files[a])
tag = file.getElementsByTagName('tripinfo')
duration = [float(node.attributes['routeLength'].value) for node in tag]
values_t.append(duration[0] / 1000)
file = minidom.parse(files_i[a])
tag = file.getElementsByTagName('tripinfo')
duration = [float(node.attributes['routeLength'].value) for node in tag]
values_i.append(duration[0] / 1000)
# 1, 13
file = minidom.parse(files_d[a])
tag = file.getElementsByTagName('tripinfo')
duration = [float(node.attributes['routeLength'].value) for node in tag]
values_d.append(duration[0] / 1000)
file = minidom.parse(files_b[a])
tag = file.getElementsByTagName('tripinfo')
duration = [float(node.attributes['routeLength'].value) for node in tag]
values_t_b.append(duration[0] / 1000)
file = minidom.parse(files_i_b[a])
tag = file.getElementsByTagName('tripinfo')
duration = [float(node.attributes['routeLength'].value) for node in tag]
values_i_b.append(duration[0] / 1000)
file = minidom.parse(files_d_b[a])
tag = file.getElementsByTagName('tripinfo')
duration = [float(node.attributes['routeLength'].value) for node in tag]
values_d_b.append(duration[0] / 1000)
m, h = mean_confidence_interval(values_t, 0.95)
m1, h1 = mean_confidence_interval(values_i, 0.95)
m2, h2 = mean_confidence_interval(values_d, 0.95)
m_b, h_b = mean_confidence_interval(values_t_b, 0.95)
m1_b, h1_b = mean_confidence_interval(values_i_b, 0.95)
m2_b, h2_b = mean_confidence_interval(values_d_b, 0.95)
medias = [m, m1, m2]
erros = [h, h1, h2]
medias_b = [m_b, m1_b, m2_b]
erros_b = [h_b, h1_b, h2_b]
print("medias, SDP", medias)
print('Nearest Neighbor', 'Closest Insertion', 'Further Insertion')
print("medias, LWP", medias_b)
print("erros, SDP", erros)
print("erros, LWP", erros_b)
# define sample data
# data = values # [12, 12, 13, 13, 15, 16, 17, 22, 23, 25, 26, 27, 28, 28, 29]
# create 95% confidence interval for population mean weight
# print(st.t.interval(alpha=0.95, df=len(data) - 1, loc=np.mean(data), scale=st.sem(data)))
labels = ['Nearest Neighbor', 'Closest Insertion', 'Further Insertion']
x = np.arange(len(labels)) # the label locations
width = 0.25 # 0.35 # the width of the bars
fig, ax = plt.subplots()
rects1 = ax.bar(x - width / 2, medias, width, yerr=erros, label='SDP', zorder=10)
r2 = ax.bar(x + width / 2, medias_b, width, yerr=erros_b, label='LWP', zorder=10)
# Add some text for labels, title and custom x-axis tick labels, etc.
# ax.set_ylabel('Potncia mdia (W)', fontdict='bold')
plt.ylabel('Time [h]', fontweight="bold", fontsize=11)
plt.ylim(0, max(medias) + 2)
plt.grid(True, which="both", ls="-", linewidth=0.1, color='0.10', zorder=0)
ax.set_xticks(x)
ax.set_xticklabels(labels)
ax.legend(numpoints=1, loc="upper left", ncol=2, prop={'size': 10})
fig.tight_layout()
plt.show()
| 103.12037 | 160 | 0.829427 |
36f815fa18399e9d17f81a9738794e259e786f45 | 9,134 | py | Python | spatial_interpolators/radial_basis.py | tsutterley/spatial-interpolators | 6949807dd3ee4cbc7cd9bd323dbf3304fbd19ca2 | [
"MIT"
] | 18 | 2018-09-14T04:12:01.000Z | 2021-08-03T11:14:45.000Z | spatial_interpolators/radial_basis.py | tsutterley/spatial-interpolators | 6949807dd3ee4cbc7cd9bd323dbf3304fbd19ca2 | [
"MIT"
] | 2 | 2021-07-08T16:17:10.000Z | 2022-01-04T16:26:55.000Z | spatial_interpolators/radial_basis.py | tsutterley/spatial-interpolators | 6949807dd3ee4cbc7cd9bd323dbf3304fbd19ca2 | [
"MIT"
] | 3 | 2018-09-19T06:34:42.000Z | 2019-10-03T12:22:23.000Z | #!/usr/bin/env python
u"""
radial_basis.py
Written by Tyler Sutterley (01/2022)
Interpolates data using radial basis functions
CALLING SEQUENCE:
ZI = radial_basis(xs, ys, zs, XI, YI, polynomial=0,
smooth=smooth, epsilon=epsilon, method='inverse')
INPUTS:
xs: scaled input X data
ys: scaled input Y data
zs: input data
XI: scaled grid X for output ZI
YI: scaled grid Y for output ZI
OUTPUTS:
ZI: interpolated data grid
OPTIONS:
smooth: smoothing weights
metric: distance metric to use (default euclidean)
epsilon: adjustable constant for distance functions
default is mean Euclidean distance
polynomial: polynomial order if augmenting radial basis functions
default None: no polynomials
method: radial basis function
multiquadric
inverse_multiquadric or inverse (default)
inverse_quadratic
gaussian
linear (first-order polyharmonic spline)
cubic (third-order polyharmonic spline)
quintic (fifth-order polyharmonic spline)
thin_plate: thin-plate spline
PYTHON DEPENDENCIES:
numpy: Scientific Computing Tools For Python (https://numpy.org)
scipy: Scientific Tools for Python (https://docs.scipy.org/doc/)
REFERENCES:
R. L. Hardy, Multiquadric equations of topography and other irregular
surfaces, J. Geophys. Res., 76(8), 1905-1915, 1971.
M. Buhmann, "Radial Basis Functions", Cambridge Monographs on Applied and
Computational Mathematics, 2003.
UPDATE HISTORY:
Updated 01/2022: added function docstrings
Updated 07/2021: using scipy spatial distance routines
Updated 09/2017: using rcond=-1 in numpy least-squares algorithms
Updated 01/2017: epsilon in polyharmonic splines (linear, cubic, quintic)
Updated 08/2016: using format text within ValueError, edit constant vector
added low-order polynomial option (previously used default constant)
Updated 01/2016: new hierarchical_radial_basis function
that first reduces to points within distance. added cutoff option
Updated 10/2014: added third dimension (spherical)
Written 08/2014
"""
from __future__ import print_function, division
import numpy as np
import scipy.spatial
def radial_basis(xs, ys, zs, XI, YI, smooth=0.0, metric='euclidean',
    epsilon=None, method='inverse', polynomial=None):
    """
    Interpolate scattered data onto new coordinates using radial basis
    functions, optionally augmented with low-order polynomial terms

    Parameters
    ----------
    xs: scaled input x-coordinates
    ys: scaled input y-coordinates
    zs: input data values at the (xs, ys) points
    XI: scaled output x-coordinates for the interpolated grid
    YI: scaled output y-coordinates for the interpolated grid
    smooth: smoothing weight added along the kernel matrix diagonal
    metric: distance metric passed to scipy.spatial.distance.cdist
        ('brute' instead selects the linear-algebra distance_matrix helper)
    epsilon: adjustable shape constant for the basis functions
        (default: mean euclidean distance between the data points)
    method: radial basis function
        - multiquadric
        - inverse_multiquadric or inverse (default)
        - inverse_quadratic
        - gaussian
        - linear (first-order polyharmonic spline)
        - cubic (third-order polyharmonic spline)
        - quintic (fifth-order polyharmonic spline)
        - thin_plate: thin-plate spline
    polynomial: polynomial order if augmenting the radial basis functions
        (default None: no polynomial augmentation)

    Returns
    -------
    ZI: interpolated values at the (XI, YI) coordinates
    """
    #-- drop any singleton dimensions from the inputs
    xs = np.squeeze(xs)
    ys = np.squeeze(ys)
    zs = np.squeeze(zs)
    XI = np.squeeze(XI)
    YI = np.squeeze(YI)
    #-- dimensions of the output (vector if 1d, grid if 2d)
    if (np.ndim(XI) == 1):
        nx = len(XI)
    else:
        nx,ny = np.shape(XI)
    #-- verify that input coordinates and data are consistent in size
    if (len(zs) != len(xs)) or (len(zs) != len(ys)):
        raise Exception('Length of X, Y, and Z must be equal')
    if (np.shape(XI) != np.shape(YI)):
        raise Exception('Size of XI and YI must be equal')
    #-- lookup table mapping method names to basis function formulas
    rbf_functions = {
        'multiquadric': multiquadric,
        'inverse_multiquadric': inverse_multiquadric,
        'inverse': inverse_multiquadric,
        'inverse_quadratic': inverse_quadratic,
        'gaussian': gaussian,
        'linear': poly_spline1,
        'cubic': poly_spline3,
        'quintic': poly_spline5,
        'thin_plate': thin_plate
    }
    #-- verify the requested formula is implemented
    RBF = rbf_functions.get(method)
    if RBF is None:
        raise ValueError("Method {0} not implemented".format(method))
    #-- data-to-data distance matrix
    data_xy = np.array([xs, ys])
    if (metric == 'brute'):
        #-- use linear algebra to compute euclidean distances
        Rd = distance_matrix(data_xy, data_xy)
    else:
        #-- use scipy spatial distance routines
        Rd = scipy.spatial.distance.cdist(data_xy.T, data_xy.T,
            metric=metric)
    #-- shape of the distance matrix
    N,M = np.shape(Rd)
    #-- default epsilon: mean euclidean distance between data points
    #-- (strictly lower-triangular entries to skip the zero diagonal)
    if epsilon is None:
        indx,indy = np.nonzero(np.tri(N, M=M, k=-1))
        epsilon = np.mean(Rd[indx, indy])
    #-- kernel matrix for data-to-data with smoothing on the diagonal
    kernel = RBF(epsilon, Rd) + np.eye(N, M=M)*smooth
    if polynomial is None:
        #-- no polynomial augmentation
        PHI = kernel
        DMAT = zs.copy()
    else:
        #-- number of polynomial coefficients for the requested order
        nt = (polynomial**2 + 3*polynomial)//2 + 1
        #-- embed the kernel within an augmented matrix
        PHI = np.zeros((N+nt, M+nt))
        PHI[:N,:M] = kernel
        #-- augmentation of the PHI matrix with polynomial vectors
        POLY = polynomial_matrix(xs, ys, polynomial)
        for t in range(nt):
            PHI[:N,M+t] = POLY[:,t]
            PHI[N+t,:M] = POLY[:,t]
        #-- extend the data vector with zeros for the polynomial rows
        DMAT = np.concatenate([zs, np.zeros((nt))], axis=0)
    #-- solve for the basis function weights in a least-squares sense
    w = np.linalg.lstsq(PHI, DMAT[:,np.newaxis], rcond=-1)[0]
    #-- data-to-mesh distance matrix
    mesh_xy = np.array([XI.flatten(), YI.flatten()])
    if (metric == 'brute'):
        #-- use linear algebra to compute euclidean distances
        Re = distance_matrix(mesh_xy, data_xy)
    else:
        #-- use scipy spatial distance routines
        Re = scipy.spatial.distance.cdist(mesh_xy.T, data_xy.T,
            metric=metric)
    #-- evaluation matrix for the output coordinates
    E = RBF(epsilon, Re)
    #-- augmentation of the evaluation matrix with polynomial vectors
    if polynomial is not None:
        P = polynomial_matrix(XI.flatten(), YI.flatten(), polynomial)
        E = np.concatenate([E, P], axis=1)
    #-- evaluate the interpolant and reshape to the output dimensions
    if (np.ndim(XI) == 1):
        return np.squeeze(np.dot(E, w))
    ZI = np.zeros((nx, ny))
    ZI[:,:] = np.dot(E, w).reshape(nx, ny)
    return ZI
#-- define radial basis function formulas
#-- calculate Euclidean distances between points as matrices
#-- calculate polynomial matrix to augment radial basis functions
| 33.214545 | 78 | 0.637837 |
36fae5ad374222c00d5bde1c50b8adc1fc9b19c3 | 465 | py | Python | oldstuff/api1.py | miusuarioamigo/python-Le | dbb653255dab7d11b87f25eec94bcce63a86aa42 | [
"MIT"
] | null | null | null | oldstuff/api1.py | miusuarioamigo/python-Le | dbb653255dab7d11b87f25eec94bcce63a86aa42 | [
"MIT"
] | null | null | null | oldstuff/api1.py | miusuarioamigo/python-Le | dbb653255dab7d11b87f25eec94bcce63a86aa42 | [
"MIT"
] | null | null | null | from flask import Flask, jsonify, request
app = Flask(__name__)
if __name__ == "__main__":
app.run(debug=True)
| 25.833333 | 51 | 0.612903 |
36fb1e4b44269afa44164c5c335b64583671d7bf | 5,129 | py | Python | tests/mock/tests/settings.py | magicjoey/django-knowledge | ce6faa904a88e5d4f565763bc1d5cd07e6b5c5bd | [
"ISC"
] | 199 | 2015-01-22T05:07:30.000Z | 2022-03-28T06:59:46.000Z | tests/mock/tests/settings.py | tzangms/django-knowledge | 8238b1f4c1c6e12acb7f3fc327346776379a7a68 | [
"0BSD"
] | 3 | 2015-10-20T09:48:58.000Z | 2018-03-14T21:16:29.000Z | tests/mock/tests/settings.py | tzangms/django-knowledge | 8238b1f4c1c6e12acb7f3fc327346776379a7a68 | [
"0BSD"
] | 78 | 2015-02-09T02:23:16.000Z | 2021-12-25T07:02:08.000Z | from mock.tests.base import TestCase
from django.test.client import Client
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.template.defaultfilters import slugify
from knowledge import settings
from knowledge.models import Question, Response
from knowledge.forms import QuestionForm, ResponseForm
| 32.66879 | 105 | 0.580425 |
36fd537a07164889366087995d08455fc14bd19e | 828 | py | Python | Batch_sentiment/spark_hashtag.py | malli3131/SparkApps | b24763eaf6411cba3c22a4c070a45d6fe96dfa1d | [
"Apache-2.0"
] | 3 | 2018-01-17T05:51:10.000Z | 2018-11-22T16:59:53.000Z | Batch_sentiment/spark_hashtag.py | malli3131/SparkApps | b24763eaf6411cba3c22a4c070a45d6fe96dfa1d | [
"Apache-2.0"
] | 2 | 2016-12-15T13:15:42.000Z | 2016-12-15T13:19:19.000Z | Batch_sentiment/spark_hashtag.py | malli3131/SparkApps | b24763eaf6411cba3c22a4c070a45d6fe96dfa1d | [
"Apache-2.0"
] | 4 | 2018-02-12T06:37:04.000Z | 2020-01-04T11:30:24.000Z | import re
import string
import sys
from pyspark import SparkContext
exclude = set(string.punctuation)
sc = SparkContext("local", "Finidng Hash Tags")
rmPunc = sc.broadcast(exclude)
mydata = sc.textFile("hdfs://<hostname>:<port>/path/to/parsedata<first job output>")
wordsRDD = mydata.flatMap( lambda line : line.split("\t")[1].split(" "))
tagsRDD = wordsRDD.map( lambda word : get_hash_tag(word, rmPunc.value))
hashtagsRDD = tagsRDD.filter( lambda word : word is not None)
hashtagsRDD.saveAsTextFile("hdfs://<hostname>:<port>/path/to/hashtags")
| 30.666667 | 84 | 0.695652 |
7fc0ed53e23bdf7182409dab9a83d9dcb7cb0ae5 | 417 | py | Python | backend/apps/risks/urls.py | intellisense/risks | e98b8c6e5694b895603f7ff1b3c04b6057aa1136 | [
"MIT"
] | null | null | null | backend/apps/risks/urls.py | intellisense/risks | e98b8c6e5694b895603f7ff1b3c04b6057aa1136 | [
"MIT"
] | null | null | null | backend/apps/risks/urls.py | intellisense/risks | e98b8c6e5694b895603f7ff1b3c04b6057aa1136 | [
"MIT"
] | null | null | null | from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
from . import views
# URL routes for the risks API, mapped to Django REST framework views:
#   /risks/        -> list of risk types
#   /risks/<pk>/   -> detail for a single risk type
#   /fields/       -> available field types
urlpatterns = [
    url(r'^risks/$', views.RiskTypeList.as_view(), name='risks_list'),
    url(r'^risks/(?P<pk>[0-9]+)/$', views.RiskTypeDetail.as_view(), name='risk_details'),
    url(r'^fields/$', views.FieldTypes.as_view(), name='field_types'),
]
# Allow optional format suffixes (e.g. .json) on each of the routes above
urlpatterns = format_suffix_patterns(urlpatterns)
| 34.75 | 89 | 0.729017 |
7fc0f798553336843920795f3c9cd1c0cfdb4288 | 534 | py | Python | src/main.py | sguzman/Dbase_Channel_Grab | 30a9e3abd72ed93cd3c7ea80d44b664a0a76d8af | [
"Unlicense"
] | null | null | null | src/main.py | sguzman/Dbase_Channel_Grab | 30a9e3abd72ed93cd3c7ea80d44b664a0a76d8af | [
"Unlicense"
] | null | null | null | src/main.py | sguzman/Dbase_Channel_Grab | 30a9e3abd72ed93cd3c7ea80d44b664a0a76d8af | [
"Unlicense"
] | null | null | null | import json
import bs4
import requests
url_base = 'https://dbase.tube/chart/channels/subscribers/all?page=%s&spf=navigate'
max_page = 19084
html_doc = requests.get(url_base).text
for i in range(max_page):
url = url_base % i
hot_bod = requests.get(url).text
json_blob = json.loads(hot_bod)
html_body = json_blob['body']['spf_content']
soup = bs4.BeautifulSoup(html_body, 'html.parser')
for j in soup.findAll('a', class_='list__item'):
channel_raw = j['href']
print(channel_raw.split('/')[2])
| 28.105263 | 83 | 0.687266 |
7fc44269a458fb1cbf6dc4894b2532e5211304c0 | 1,166 | py | Python | kanban_board/admin.py | Zeerooth/django-kanban-board | d390635017199a90da666bba3a74cafc86838884 | [
"BSD-3-Clause"
] | null | null | null | kanban_board/admin.py | Zeerooth/django-kanban-board | d390635017199a90da666bba3a74cafc86838884 | [
"BSD-3-Clause"
] | 2 | 2021-06-10T17:52:06.000Z | 2021-09-22T18:00:26.000Z | kanban_board/admin.py | Zeerooth/django-kanban-board | d390635017199a90da666bba3a74cafc86838884 | [
"BSD-3-Clause"
] | null | null | null | from django.contrib import admin
from ordered_model.admin import OrderedStackedInline, OrderedInlineModelAdminMixin
from kanban_board.models import KanbanBoard, KanbanBoardState, Workflow, KanbanBoardElement
# Register the kanban models with the Django admin site.
# KanbanBoardAdmin and WorkflowAdmin are ModelAdmin classes defined elsewhere
# in this module; KanbanBoardState uses the default ModelAdmin.
admin.site.register(KanbanBoard, KanbanBoardAdmin)
admin.site.register(KanbanBoardState)
admin.site.register(Workflow, WorkflowAdmin)
| 37.612903 | 101 | 0.762436 |
7fc4576c38452997b1f8bd1ddca0fc4d69cf33db | 16,705 | py | Python | certbot-nginx/certbot_nginx/tests/nginxparser_test.py | jcollie/certbot | 1df778859b7ace699c02039b269abd426058a237 | [
"Apache-2.0"
] | 4 | 2020-04-09T21:57:23.000Z | 2020-04-11T13:26:54.000Z | certbot-nginx/certbot_nginx/tests/nginxparser_test.py | jcollie/certbot | 1df778859b7ace699c02039b269abd426058a237 | [
"Apache-2.0"
] | 32 | 2019-02-20T14:51:48.000Z | 2019-02-27T10:11:34.000Z | certbot-nginx/certbot_nginx/tests/nginxparser_test.py | jcollie/certbot | 1df778859b7ace699c02039b269abd426058a237 | [
"Apache-2.0"
] | 1 | 2020-02-06T15:04:02.000Z | 2020-02-06T15:04:02.000Z | """Test for certbot_nginx.nginxparser."""
import copy
import operator
import tempfile
import unittest
from pyparsing import ParseException
from certbot_nginx.nginxparser import (
RawNginxParser, loads, load, dumps, dump, UnspacedList)
from certbot_nginx.tests import util
# Convenience getter used by the tests: FIRST(seq) returns seq[0]
FIRST = operator.itemgetter(0)
# Run the unittest suite when executed directly
if __name__ == '__main__':
    unittest.main()  # pragma: no cover
| 37.879819 | 95 | 0.449386 |
7fc49c5390bfb96b900f097bb43b1a2528a313d1 | 6,522 | py | Python | pysnmp-with-texts/Intel-Common-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/Intel-Common-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/Intel-Common-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module Intel-Common-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Intel-Common-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:54:14 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ConstraintsIntersection, ValueRangeConstraint, ValueSizeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ConstraintsIntersection", "ValueRangeConstraint", "ValueSizeConstraint", "SingleValueConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
ModuleIdentity, ObjectIdentity, iso, Integer32, Bits, Counter64, Counter32, Gauge32, NotificationType, Unsigned32, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, TimeTicks, enterprises = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "ObjectIdentity", "iso", "Integer32", "Bits", "Counter64", "Counter32", "Gauge32", "NotificationType", "Unsigned32", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "TimeTicks", "enterprises")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
# Machine-generated OID registrations (produced by pysmi from the ASN.1 MIB).
# Top-level Intel enterprise arcs under 1.3.6.1.4.1.343
intel = MibIdentifier((1, 3, 6, 1, 4, 1, 343))
identifiers = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1))
products = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2))
experimental = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 3))
information_technology = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 4)).setLabel("information-technology")
sysProducts = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 5))
mib2ext = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 6))
hw = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 7))
wekiva = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 111))
# identifiers subtree: system classes and communication methods
systems = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1))
objects = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 2))
comm_methods = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 3)).setLabel("comm-methods")
pc_systems = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 1)).setLabel("pc-systems")
proxy_systems = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 2)).setLabel("proxy-systems")
hub_systems = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 3)).setLabel("hub-systems")
switch_systems = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 4)).setLabel("switch-systems")
local_proxy_1 = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 3, 1)).setLabel("local-proxy-1")
pc_novell_1 = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 3, 2)).setLabel("pc-novell-1")
# hub-systems subtree: Express hub/switch models
express10_100Stack = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 3, 1)).setLabel("express10-100Stack")
express12TX = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 3, 2))
express24TX = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 3, 3))
expressReserved = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 3, 4))
expressBridge = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 3, 6))
express210_12 = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 3, 7)).setLabel("express210-12")
express210_24 = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 3, 8)).setLabel("express210-24")
express220_12 = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 3, 9)).setLabel("express220-12")
express220_24 = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 3, 10)).setLabel("express220-24")
express300Stack = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 3, 11))
express320_16 = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 3, 12)).setLabel("express320-16")
express320_24 = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 1, 1, 3, 13)).setLabel("express320-24")
# products subtree: product families
pc_products = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 1)).setLabel("pc-products")
hub_products = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 2)).setLabel("hub-products")
proxy = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 3))
print_products = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 4)).setLabel("print-products")
network_products = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 5)).setLabel("network-products")
snmp_agents = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 6)).setLabel("snmp-agents")
nic_products = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 7)).setLabel("nic-products")
server_management = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 10)).setLabel("server-management")
switch_products = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 11)).setLabel("switch-products")
i2o = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 120))
# individual products under the family arcs
express110 = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 2, 1))
netport_1 = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 4, 1)).setLabel("netport-1")
netport_2 = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 4, 2)).setLabel("netport-2")
netport_express = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 4, 3)).setLabel("netport-express")
lanDesk = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 5, 1))
ld_alarms = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 5, 1, 1)).setLabel("ld-alarms")
internetServer_2 = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 5, 2)).setLabel("internetServer-2")
iS_alarms = MibIdentifier((1, 3, 6, 1, 4, 1, 343, 2, 5, 2, 1)).setLabel("iS-alarms")
mibBuilder.exportSymbols("Intel-Common-MIB", express220_24=express220_24, express110=express110, snmp_agents=snmp_agents, switch_systems=switch_systems, objects=objects, proxy=proxy, lanDesk=lanDesk, express12TX=express12TX, mib2ext=mib2ext, experimental=experimental, express210_24=express210_24, sysProducts=sysProducts, netport_1=netport_1, internetServer_2=internetServer_2, intel=intel, pc_novell_1=pc_novell_1, products=products, express320_24=express320_24, proxy_systems=proxy_systems, express320_16=express320_16, identifiers=identifiers, express300Stack=express300Stack, wekiva=wekiva, express10_100Stack=express10_100Stack, hub_systems=hub_systems, ld_alarms=ld_alarms, server_management=server_management, switch_products=switch_products, i2o=i2o, netport_express=netport_express, network_products=network_products, expressBridge=expressBridge, express220_12=express220_12, local_proxy_1=local_proxy_1, systems=systems, comm_methods=comm_methods, express210_12=express210_12, pc_products=pc_products, hub_products=hub_products, expressReserved=expressReserved, netport_2=netport_2, pc_systems=pc_systems, hw=hw, express24TX=express24TX, print_products=print_products, information_technology=information_technology, iS_alarms=iS_alarms, nic_products=nic_products)
| 103.52381 | 1,274 | 0.713891 |
7fc5dfd088a228987587fd982a1eb94c9c4b2b71 | 4,430 | py | Python | src/python/pants/jvm/resolve/coursier_setup.py | Eric-Arellano/pants | aaa9756bc4f2cc97bb97851a4295a0de85f374b1 | [
"Apache-2.0"
] | null | null | null | src/python/pants/jvm/resolve/coursier_setup.py | Eric-Arellano/pants | aaa9756bc4f2cc97bb97851a4295a0de85f374b1 | [
"Apache-2.0"
] | 12 | 2022-01-06T23:20:22.000Z | 2022-03-17T05:06:37.000Z | src/python/pants/jvm/resolve/coursier_setup.py | Eric-Arellano/pants | aaa9756bc4f2cc97bb97851a4295a0de85f374b1 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import textwrap
from dataclasses import dataclass
from typing import ClassVar, Iterable
from pants.core.util_rules.external_tool import (
DownloadedExternalTool,
ExternalToolRequest,
TemplatedExternalTool,
)
from pants.engine.fs import CreateDigest, Digest, FileContent, MergeDigests
from pants.engine.platform import Platform
from pants.engine.rules import Get, MultiGet, collect_rules, rule
COURSIER_POST_PROCESSING_SCRIPT = textwrap.dedent(
"""\
import json
import sys
from pathlib import PurePath
from shutil import copyfile
report = json.load(open(sys.argv[1]))
classpath = set()
for dep in report['dependencies']:
file_path = PurePath(dep['file'])
classpath_dest = f"classpath/{file_path.name}"
if classpath_dest in classpath:
raise Exception(f"Found duplicate jar name {file_path.name}, which isn't currently supported")
classpath.add(classpath_dest)
copyfile(file_path, classpath_dest)
"""
)
COURSIER_WRAPPER_SCRIPT = textwrap.dedent(
"""\
set -eux
coursier_exe="$1"
shift
json_output_file="$1"
shift
"$coursier_exe" fetch --json-output-file="$json_output_file" "$@"
/bin/mkdir -p classpath
/usr/bin/python3 coursier_post_processing_script.py "$json_output_file"
"""
)
def rules():
    """Return the rules collected from this module as a list."""
    return list(collect_rules())
| 31.41844 | 106 | 0.66614 |