hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c4fbf075eb619be044717172093dd9b2c09291e8 | 1,579 | py | Python | dbms/tests/integration/helpers/test_tools.py | qqiangwu/ClickHouse | 4700d375d8238ca3d7f39a1d0001f173272bbf3a | [
"Apache-2.0"
] | 4 | 2020-04-27T13:03:31.000Z | 2020-10-15T09:51:13.000Z | dbms/tests/integration/helpers/test_tools.py | RCcode/ClickHouse | ccfb51b8dd680a8ad2863e0b2f4e32364b86daf2 | [
"Apache-2.0"
] | 8 | 2018-11-21T09:45:25.000Z | 2018-11-21T13:53:40.000Z | dbms/tests/integration/helpers/test_tools.py | RCcode/ClickHouse | ccfb51b8dd680a8ad2863e0b2f4e32364b86daf2 | [
"Apache-2.0"
] | 2 | 2018-12-17T13:08:09.000Z | 2022-01-26T08:50:20.000Z | import difflib
import time
| 38.512195 | 159 | 0.645978 |
c4fd54fcbdcdf92be66e00442b256526af744c5c | 1,799 | py | Python | nlp/name_extractor.py | Karthik-Venkatesh/ATOM | d369d8436b71b3af0f5810200c0927d0097f4330 | [
"Apache-2.0"
] | 1 | 2022-02-23T14:54:12.000Z | 2022-02-23T14:54:12.000Z | nlp/name_extractor.py | Karthik-Venkatesh/atom | d369d8436b71b3af0f5810200c0927d0097f4330 | [
"Apache-2.0"
] | 21 | 2018-12-27T04:47:17.000Z | 2019-01-16T06:00:53.000Z | nlp/name_extractor.py | Karthik-Venkatesh/ATOM | d369d8436b71b3af0f5810200c0927d0097f4330 | [
"Apache-2.0"
] | null | null | null | #
# name_extractor.py
# ATOM
#
# Created by Karthik V.
# Updated copyright on 16/1/19 5:54 PM.
#
# Copyright 2019 Karthik Venkatesh. All rights reserved.
#
# Reference
# Question link: https://stackoverflow.com/questions/20290870/improving-the-extraction-of-human-names-with-nltk
# Answer link: https://stackoverflow.com/a/49500219/5019015
import nltk
from nltk.corpus import wordnet
| 27.676923 | 111 | 0.581434 |
c4fdb48e5cd50b1b15ab2ef20eb3414a3c36712f | 1,402 | py | Python | tfx/dsl/components/base/node_registry.py | johnPertoft/tfx | c6335684a54651adbcbe50aa52918b9b9948326e | [
"Apache-2.0"
] | null | null | null | tfx/dsl/components/base/node_registry.py | johnPertoft/tfx | c6335684a54651adbcbe50aa52918b9b9948326e | [
"Apache-2.0"
] | null | null | null | tfx/dsl/components/base/node_registry.py | johnPertoft/tfx | c6335684a54651adbcbe50aa52918b9b9948326e | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Node registry."""
import threading
from typing import Any, FrozenSet
# To resolve circular dependency caused by type annotations.
base_node = Any # base_node.py imports this module.
_node_registry = _NodeRegistry()
def register_node(node: 'base_node.BaseNode'):
"""Register a node in the local thread."""
_node_registry.register(node)
def registered_nodes() -> FrozenSet['base_node.BaseNode']:
"""Get registered nodes in the local thread."""
return frozenset(_node_registry.registered_nodes())
| 30.478261 | 74 | 0.745364 |
c4ffd1431af57438c9e464015a0b8f4bd42bff19 | 20,283 | py | Python | tests/unit/api/test_api.py | Mattlk13/datadogpy | 80c8711359e45b2091c230f726da201616d5e4c9 | [
"BSD-3-Clause"
] | null | null | null | tests/unit/api/test_api.py | Mattlk13/datadogpy | 80c8711359e45b2091c230f726da201616d5e4c9 | [
"BSD-3-Clause"
] | null | null | null | tests/unit/api/test_api.py | Mattlk13/datadogpy | 80c8711359e45b2091c230f726da201616d5e4c9 | [
"BSD-3-Clause"
] | null | null | null | # stdlib
from copy import deepcopy
from functools import wraps
import os
import tempfile
from time import time
# 3p
import mock
# datadog
from datadog import initialize, api, util
from datadog.api import (
Distribution,
Metric,
ServiceCheck
)
from datadog.api.exceptions import ApiError, ApiNotInitialized
from datadog.util.compat import is_p3k
from tests.unit.api.helper import (
DatadogAPIWithInitialization,
DatadogAPINoInitialization,
MyCreatable,
MyUpdatable,
MyDeletable,
MyGetable,
MyListable,
MyListableSubResource,
MyAddableSubResource,
MyUpdatableSubResource,
MyDeletableSubResource,
MyActionable,
API_KEY,
APP_KEY,
API_HOST,
HOST_NAME,
FAKE_PROXY
)
from tests.util.contextmanagers import EnvVars
| 35.521891 | 117 | 0.598087 |
f201ebc36fc057f334b79aad55bb830784413968 | 1,420 | py | Python | keras_train.py | jmeisele/mlflow_demo | 4414e9251fda0ab8c59e8bc776d04ef1f9fede10 | [
"MIT"
] | null | null | null | keras_train.py | jmeisele/mlflow_demo | 4414e9251fda0ab8c59e8bc776d04ef1f9fede10 | [
"MIT"
] | null | null | null | keras_train.py | jmeisele/mlflow_demo | 4414e9251fda0ab8c59e8bc776d04ef1f9fede10 | [
"MIT"
] | null | null | null | """
Author: Jason Eisele
Email: jeisele@shipt.com
Date: August 1, 2020
"""
import argparse
import keras
import tensorflow as tf
import cloudpickle
parser = argparse.ArgumentParser(
description='Train a Keras feed-forward network for MNIST classification')
parser.add_argument('--batch-size', '-b', type=int, default=128)
parser.add_argument('--epochs', '-e', type=int, default=1)
parser.add_argument('--learning_rate', '-l', type=float, default=0.05)
parser.add_argument('--num-hidden-units', '-n', type=float, default=512)
parser.add_argument('--dropout', '-d', type=float, default=0.25)
parser.add_argument('--momentum', '-m', type=float, default=0.85)
args = parser.parse_args()
mnist = keras.datasets.mnist
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train, X_test = X_train / 255.0, X_test / 255.0
model = keras.models.Sequential([
keras.layers.Flatten(input_shape=X_train[0].shape),
keras.layers.Dense(args.num_hidden_units, activation=tf.nn.relu),
keras.layers.Dropout(args.dropout),
keras.layers.Dense(10, activation=tf.nn.softmax)
])
optimizer = keras.optimizers.SGD(lr=args.learning_rate, momentum=args.momentum)
model.compile(optimizer=optimizer,
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model.fit(X_train, y_train, epochs=args.epochs, batch_size=args.batch_size)
test_loss, test_acc = model.evaluate(X_test, y_test, verbose=2)
| 34.634146 | 79 | 0.738028 |
f203d39ef60e2f5a44270e70b2db9a749876f722 | 5,788 | py | Python | 4USCityEmotion/get_flickr_photos.py | HCH2CHO/EmotionMap | bc572b4182637dcdd65e9a13c92f2fa0d9a3d680 | [
"MIT"
] | 3 | 2021-07-15T15:58:52.000Z | 2021-07-16T13:22:47.000Z | 4USCityEmotion/get_flickr_photos.py | HCH2CHO/EmotionMap | bc572b4182637dcdd65e9a13c92f2fa0d9a3d680 | [
"MIT"
] | null | null | null | 4USCityEmotion/get_flickr_photos.py | HCH2CHO/EmotionMap | bc572b4182637dcdd65e9a13c92f2fa0d9a3d680 | [
"MIT"
] | 4 | 2017-08-04T12:41:06.000Z | 2019-01-31T14:55:10.000Z | # coding:utf-8
# version:python3.5.1
# author:kyh
import flickrapi
import datetime
import psycopg2
import time
# flickr
#
def db_connect():
try:
connection = psycopg2.connect(database="PlaceEmotion", user="postgres",
password="postgres", host="127.0.0.1", port="5432")
cursor = connection.cursor()
print("Database Connection has been opened completely!")
return connection, cursor
except Exception as e:
with open('log.txt','a') as log:
log.writelines(str(e))
#
# flickr api
#
#
#
#
if __name__ == '__main__':
db_connection, db_cursor = db_connect()
flickr, api_key = query_api(db_connection, db_cursor)
location, lat, lon= query_location(db_connection, db_cursor)
while location is not None:
compute_time(db_connection, db_cursor, location, lat, lon, flickr)
location, lat, lon= query_location(db_connection, db_cursor)
print("All locations have been recorded!")
release_api(db_connection, db_cursor, api_key)
close_connection(db_connection)
| 35.292683 | 118 | 0.604008 |
f204e6f22d0c9b479799a0897aaa41e742212566 | 5,767 | py | Python | Lianjia/LianjiaErShouFang.py | Detailscool/YHSpider | ab1276c9167f70fed3ccff17e02fb62d51e4a469 | [
"MIT"
] | 1 | 2017-05-04T08:10:34.000Z | 2017-05-04T08:10:34.000Z | Lianjia/LianjiaErShouFang.py | Detailscool/YHSpider | ab1276c9167f70fed3ccff17e02fb62d51e4a469 | [
"MIT"
] | null | null | null | Lianjia/LianjiaErShouFang.py | Detailscool/YHSpider | ab1276c9167f70fed3ccff17e02fb62d51e4a469 | [
"MIT"
] | null | null | null | # -*- coding:utf-8 -*-
import requests
from bs4 import BeautifulSoup
import sys
import csv
reload(sys)
sys.setdefaultencoding('utf-8')
if __name__ == '__main__':
url_main = 'http://gz.lianjia.com'
f = open(u'.csv', 'wb')
f.write(unicode('\xEF\xBB\xBF', 'utf-8')) #
writer = csv.writer(f)
writer.writerow(['', '', '', '', '()', '(/)',
'', '', '', '', '', '', ''])
res = requests.get(url_main+'ershoufang')
res = res.text.encode(res.encoding).decode('utf-8')
soup = BeautifulSoup(res, 'html.parser')
# print soup.prettify()
districts = soup.find(name='div', attrs={'data-role':'ershoufang'}) # <div data-role="ershoufang">
# soup.select()
for district in districts.find_all(name='a'):
print district['title']
district_name = district.text # '', '', '', ''......
url = '%s%s' % (url_main, district['href'])
# print url
res = requests.get(url)
res = res.text.encode(res.encoding).decode('utf-8')
soup = BeautifulSoup(res,'html.parser')
# print soup.prettify()
page = soup.find('div', {'class':'page-box house-lst-page-box'})
if not page: #
continue
total_pages = dict(eval(page['page-data']))['totalPage'] #
# print total_pages
for j in range(1, total_pages+1):
url_page = '%spg%d/' % (url, j)
res = requests.get(url_page)
res = res.text.encode(res.encoding).decode('utf-8')
soup = BeautifulSoup(res, 'html.parser')
# print soup.prettify()
sells = soup.find(name='ul', attrs={'class':'sellListContent', 'log-mod':'list'})
if not sells:
continue
# <a class="title" data-bl="list" data-el="ershoufang" data-log_index="1" href="XX" target="_blank">
titles = soup.find_all(name='a', attrs={'class':'title', 'data-bl':'list', 'data-el':'ershoufang'})
# <a data-el="region" data-log_index="1" href="X" target="_blank">
regions = sells.find_all(name='a', attrs={'data-el':'region'})
infos = sells.find_all(name='div', class_='houseInfo') # <div class="houseInfo">
infos2 = sells.find_all(name='div', class_='positionInfo') # <div class="positionInfo">
prices = sells.find_all(name='div', class_='totalPrice') # <div class="totalPrice">
unit_prices = sells.find_all(name='div', class_='unitPrice') # <div class="unitPrice" data-hid="X" data-price="X" data-rid="X">
subways = sells.find_all(name='span', class_='subway') # <span class="subway">
taxs = sells.find_all(name='span', class_='taxfree') # <span class="taxfree">
N = max(len(titles), len(regions), len(prices), len(unit_prices), len(subways), len(taxs), len(infos), len(infos2))
# for title, region, price, unit_price, subway, tax, info, info2 in zip(titles, regions, prices, unit_prices, subways, taxs, infos, infos2):
for i in range(N):
room_type = area = orientation = decoration = elevator = floor = year = slab_tower = None
title = titles[i] if len(titles) > i else None
region = regions[i] if len(regions) > i else None
price = prices[i] if len(prices) > i else None
unit_price = unit_prices[i] if len(unit_prices) > i else None
subway = subways[i] if len(subways) > i else None
tax = taxs[i] if len(taxs) > i else None
info = infos[i] if len(infos) > i else None
info2 = infos2[i] if len(infos2) > i else None
if title:
print 'Title: ', title.text
if region:
region = region.text
if price:
price = price.text
price = price[:price.find('')]
if unit_price:
unit_price = unit_price.span.text.strip()
unit_price = unit_price[:unit_price.find('/')]
if unit_price.find('') != -1:
unit_price = unit_price[2:]
if subway:
subway = subway.text.strip()
if tax:
tax = tax.text.strip()
if info:
info = info.text.split('|')
room_type = info[1].strip() #
area = info[2].strip() #
area = area[:area.find('')]
orientation = info[3].strip().replace(' ', '') #
decoration = '-'
if len(info) > 4: #
decoration = info[4].strip() #
elevator = ''
if len(info) > 5:
elevator = info[5].strip() #
if info2:
info2 = filter(not_empty, info2.text.split(' '))
floor = info2[0].strip()
info2 = info2[1]
year = info2[:info2.find('')]
slab_tower = info2[info2.find('')+1:]
print district_name, region, room_type, area, price, unit_price, tax, orientation, decoration, elevator, floor, year, slab_tower
writer.writerow([district_name, region, room_type, area, price, unit_price, tax, orientation, decoration, elevator, floor, year, slab_tower])
# break
# break
# break
f.close()
| 50.147826 | 157 | 0.521935 |
f20509812fdd5299a74f5fce1649f221aa576b8e | 1,165 | py | Python | test/test_main.py | bluesheeptoken/PyGolf | 421117d1fa1c197b475112e5655fbf7693d475a2 | [
"MIT"
] | 7 | 2020-04-25T19:54:01.000Z | 2022-03-10T21:54:51.000Z | test/test_main.py | bluesheeptoken/PyGolf | 421117d1fa1c197b475112e5655fbf7693d475a2 | [
"MIT"
] | 1 | 2020-04-28T08:13:12.000Z | 2020-04-28T08:19:00.000Z | test/test_main.py | bluesheeptoken/PyGolf | 421117d1fa1c197b475112e5655fbf7693d475a2 | [
"MIT"
] | 1 | 2020-04-25T20:35:57.000Z | 2020-04-25T20:35:57.000Z | import argparse
import tempfile
import unittest
from pygolf.__main__ import get_arguments_warning, read_input_code, shorten
| 33.285714 | 80 | 0.661803 |
f205b580166717e0b19f49119e8357e063a3858d | 545 | py | Python | Session_01/koch.py | UP-RS-ESP/GEW-DAP05-2018 | 04ca0327b4a4ea5b6869e3e985672639651771e8 | [
"MIT"
] | 2 | 2018-11-16T12:44:33.000Z | 2021-12-20T06:34:22.000Z | Session_01/koch.py | UP-RS-ESP/GEW-DAP05-2018 | 04ca0327b4a4ea5b6869e3e985672639651771e8 | [
"MIT"
] | null | null | null | Session_01/koch.py | UP-RS-ESP/GEW-DAP05-2018 | 04ca0327b4a4ea5b6869e3e985672639651771e8 | [
"MIT"
] | null | null | null | import sys
import numpy as np
from matplotlib import pyplot as pl
xr = [1,]
yr = [1,]
koch(xr[0], yr[0], 1, 0, 5)
pl.plot(xr, yr, 'r.-', lw = 0.5)
ax = pl.gca()
ax.set_aspect('equal')
pl.grid()
pl.show()
| 20.185185 | 64 | 0.519266 |
f206882462f0a5905d5255d1814f64fdc9855a48 | 2,995 | py | Python | core/views.py | xuhang57/atmosphere | f53fea2a74ee89ccc8852906799b1d9a7e9178b7 | [
"BSD-3-Clause"
] | null | null | null | core/views.py | xuhang57/atmosphere | f53fea2a74ee89ccc8852906799b1d9a7e9178b7 | [
"BSD-3-Clause"
] | null | null | null | core/views.py | xuhang57/atmosphere | f53fea2a74ee89ccc8852906799b1d9a7e9178b7 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Core views to provide custom operations
"""
import uuid
from datetime import datetime
from django.http import HttpResponseRedirect
from threepio import logger
from atmosphere import settings
from django_cyverse_auth.decorators import atmo_login_required
from django_cyverse_auth.models import Token as AuthToken
from core.models import AtmosphereUser as DjangoUser
| 39.933333 | 89 | 0.642738 |
f207285596d5e7ef8253ffc7d5fe5e11b93828ce | 1,714 | py | Python | indico/util/serializer.py | jgrigera/indico | b5538f2755bc38a02313d079bac831ee3dfb44ab | [
"MIT"
] | 1 | 2018-11-12T21:29:26.000Z | 2018-11-12T21:29:26.000Z | indico/util/serializer.py | jgrigera/indico | b5538f2755bc38a02313d079bac831ee3dfb44ab | [
"MIT"
] | 9 | 2020-09-08T09:25:57.000Z | 2022-01-13T02:59:05.000Z | indico/util/serializer.py | jgrigera/indico | b5538f2755bc38a02313d079bac831ee3dfb44ab | [
"MIT"
] | 3 | 2020-07-20T09:09:44.000Z | 2020-10-19T00:29:49.000Z | # This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from enum import Enum
from indico.core.errors import IndicoError
from indico.core.logger import Logger
| 35.708333 | 92 | 0.524504 |
f2082b7572a268703ff36753a9f8e86b4e7ec828 | 814 | py | Python | step/lambdas/get_image_status.py | mbeacom/cloudendure-python | b854b1b2ea47c18a8ef03908abcdd653b77684ac | [
"MIT"
] | 7 | 2019-06-28T23:30:47.000Z | 2019-08-23T16:57:12.000Z | step/lambdas/get_image_status.py | mbeacom/cloudendure-python | b854b1b2ea47c18a8ef03908abcdd653b77684ac | [
"MIT"
] | 27 | 2019-06-14T20:39:10.000Z | 2019-08-30T17:20:40.000Z | step/lambdas/get_image_status.py | mbeacom/cloudendure-python | b854b1b2ea47c18a8ef03908abcdd653b77684ac | [
"MIT"
] | 2 | 2019-08-23T16:50:20.000Z | 2019-08-30T18:22:23.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Check the state of an AWS AMI."""
from __future__ import annotations
import json
from typing import Any, Dict
import boto3
print("Loading function get_image_status")
ec2_client = boto3.client("ec2")
# {
# "instance_id": "i-identifier",
# "kms_id": "KMS ID",
# "account": "account_number",
# "instance_status": "should be there if in loop"
# "migrated_ami_id": "ami-identifier"
# }
def lambda_handler(event: Dict[str, Any], context: Any) -> str:
"""Handle signaling and entry into the AWS Lambda."""
print("Received event: " + json.dumps(event, indent=2))
migrated_ami_id: str = event["migrated_ami_id"]
ami_state: Dict[str, Any] = ec2_client.describe_images(ImageIds=[migrated_ami_id])
return ami_state["Images"][0]["State"]
| 24.666667 | 86 | 0.683047 |
f2095b25bea143e9b82c7fbfb9522beac7c96f69 | 344 | py | Python | starfish/types.py | kne42/starfish | 78b348c9756f367221dcca725cfa5107e5520b33 | [
"MIT"
] | null | null | null | starfish/types.py | kne42/starfish | 78b348c9756f367221dcca725cfa5107e5520b33 | [
"MIT"
] | null | null | null | starfish/types.py | kne42/starfish | 78b348c9756f367221dcca725cfa5107e5520b33 | [
"MIT"
] | null | null | null | # constants
from starfish.core.types import ( # noqa: F401
Axes,
Clip,
Coordinates,
CORE_DEPENDENCIES,
Features,
LOG,
OverlapStrategy,
PHYSICAL_COORDINATE_DIMENSION,
PhysicalCoordinateTypes,
STARFISH_EXTRAS_KEY,
TransformType,
)
from starfish.core.types import CoordinateValue, Number # noqa: F401
| 21.5 | 69 | 0.715116 |
f209fda8f0cfe43f72b6eb3a30447ef4d992f64f | 6,764 | py | Python | python/alertsActor/rules/dangerKey.py | sdss/twistedAlertsActor | 857588f6da39b7716263f8bd8e3f1be8bb4ce0f7 | [
"BSD-3-Clause"
] | null | null | null | python/alertsActor/rules/dangerKey.py | sdss/twistedAlertsActor | 857588f6da39b7716263f8bd8e3f1be8bb4ce0f7 | [
"BSD-3-Clause"
] | null | null | null | python/alertsActor/rules/dangerKey.py | sdss/twistedAlertsActor | 857588f6da39b7716263f8bd8e3f1be8bb4ce0f7 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# encoding: utf-8
#
# dangerKey.py
#
# Created by John Donor on 10 April 2019
import re, time
from yaml import YAMLObject
from alertsActor import log
| 29.797357 | 95 | 0.563128 |
f20a036a9143b93d4e11c864b212d417d5d17645 | 22,382 | py | Python | jsonsubschema/old/_jsonschema.py | lukeenterprise/json-subschema | f273d62ed1517f5a83a57abf148232ed927a771a | [
"Apache-2.0"
] | 1 | 2019-08-01T15:28:26.000Z | 2019-08-01T15:28:26.000Z | jsonsubschema/old/_jsonschema.py | lukeenterprise/json-subschema | f273d62ed1517f5a83a57abf148232ed927a771a | [
"Apache-2.0"
] | null | null | null | jsonsubschema/old/_jsonschema.py | lukeenterprise/json-subschema | f273d62ed1517f5a83a57abf148232ed927a771a | [
"Apache-2.0"
] | null | null | null | '''
Created on June 24, 2019
@author: Andrew Habib
'''
import copy
import json
import sys
import math
import numbers
import intervals as I
from abc import ABC, abstractmethod
from greenery.lego import parse
from intervals import inf as infinity
import config
import _constants
from canoncalization import canoncalize_object
from _normalizer import lazy_normalize
from _utils import (
validate_schema,
print_db,
is_sub_interval_from_optional_ranges,
is_num,
is_list,
is_dict,
is_empty_dict_or_none,
is_dict_or_true,
one
)
def JSONNumericFactory(s):
if s.get("type") == "number":
if s.get("multipleOf") and float(s.get("multipleOf")).is_integer():
s["type"] = "integer"
if s.get("minimum") != None: # -I.inf:
s["minimum"] = math.floor(s.get("minimum")) if s.get(
"exclusiveMinimum") else math.ceil(s.get("minimum"))
if s.get("maximum") != None: # I.inf:
s["maximum"] = math.ceil(s.get("maximum")) if s.get(
"exclusiveMaximum") else math.floor(s.get("maximum"))
return JSONTypeInteger(s)
else:
return JSONTypeNumber(s)
else:
return JSONTypeInteger(s)
typeToConstructor = {
"string": JSONTypeString,
"integer": JSONNumericFactory,
"number": JSONNumericFactory,
"boolean": JSONTypeBoolean,
"null": JSONTypeNull,
"array": JSONTypeArray,
"object": JSONTypeObject
}
boolToConstructor = {
"anyOf": JSONanyOf,
"allOf": JSONallOf,
"oneOf": JSONoneOf,
"not": JSONnot
}
if __name__ == "__main__":
s1_file = sys.argv[1]
s2_file = sys.argv[2]
print("Loading json schemas from:\n{}\n{}\n".format(s1_file, s2_file))
#######################################
with open(s1_file, 'r') as f1:
s1 = json.load(f1, cls=JSONSchemaSubtypeFactory)
with open(s2_file, 'r') as f2:
s2 = json.load(f2, cls=JSONSchemaSubtypeFactory)
print(s1)
print(s2)
print("Usage scenario 1:", s1.isSubtype(s2))
#######################################
with open(s1_file, 'r') as f1:
s1 = json.load(f1)
with open(s2_file, 'r') as f2:
s2 = json.load(f2)
print(s1)
print(s2)
print("Usage scenario 2:", JSONSubtypeChecker(s1, s2).isSubtype()) | 33.556222 | 148 | 0.524752 |
f20a9c6a0a0f41308a9f256ea4ec3d2997af5cd5 | 6,388 | py | Python | eruditio/shared_apps/django_community/utils.py | genghisu/eruditio | 5f8f3b682ac28fd3f464e7a993c3988c1a49eb02 | [
"BSD-3-Clause",
"MIT"
] | null | null | null | eruditio/shared_apps/django_community/utils.py | genghisu/eruditio | 5f8f3b682ac28fd3f464e7a993c3988c1a49eb02 | [
"BSD-3-Clause",
"MIT"
] | null | null | null | eruditio/shared_apps/django_community/utils.py | genghisu/eruditio | 5f8f3b682ac28fd3f464e7a993c3988c1a49eb02 | [
"BSD-3-Clause",
"MIT"
] | null | null | null | """
Various utilities functions used by django_community and
other apps to perform authentication related tasks.
"""
import hashlib, re
import django.forms as forms
from django.core.exceptions import ObjectDoesNotExist
from django.forms import ValidationError
import django.http as http
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.contrib.auth import logout as auth_logout
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login
from django_community.models import UserOpenID, UserProfile
def openid_logout(request):
"""
Clears session which effectively logs out the current
OpenId user.
"""
request.session.flush()
def handle_logout(request):
"""
Log out.
"""
auth_logout(request)
def get_logged_user(request):
"""
Returns the current user who is logged in, checks for openid user first,
then for regular user, return None if no user is currently logged in
"""
if settings.OPENID_ENABLED and hasattr(request, 'openid'):
user = UserOpenID.objects.get_for_openid(request, request.openid)
if not user:
user = request.user
return user
def handle_login(request, data):
"""
Logs the user in based on form data from django_community.LoginForm.
"""
user = authenticate(username = data.get('username', None),
password = data.get('password', None))
user_object = User.objects.get(username = data.get('username', None))
if user is not None:
login(request, user)
return user
def handle_signup(request, data):
"""
Signs a user up based on form data from django_community.SignupForm.
"""
from django.contrib.auth.models import get_hexdigest
username = data.get('username', None)
email = data.get('email', None)
password = data.get('password', None)
try:
user = User.objects.get(username = username, email = email)
except ObjectDoesNotExist:
user = User(username = username, email = email)
user.save()
user.set_password(password)
user_profile = UserProfile.objects.get_user_profile(user)
user = authenticate(username = username, password = password)
login(request, user)
return user
def get_or_create_from_openid(openid):
"""
Returns an User with the given openid or
creates a new user and associates openid with that user.
"""
try:
user = User.objects.get(username = openid)
except ObjectDoesNotExist:
password = hashlib.sha256(openid).hexdigest()
user = User(username = openid, email = '', password = password)
user.save()
user.display_name = "%s_%s" % ('user', str(user.id))
user.save()
return user
def generate_random_user_name():
"""
Generates a random user name user_{user_id}_{salt}
to be used for creating new users.
"""
import random
current_users = User.objects.all().order_by('-id')
if current_users:
next_id = current_users[0].id + 1
else:
next_id = 1
random_salt = random.randint(1, 5000)
return 'user_%s_%s' % (str(next_id), str(random_salt))
def create_user_from_openid(request, openid):
"""
Creates a new User object associated with the given
openid.
"""
from django_community.config import OPENID_FIELD_MAPPING
from django_utils.request_helpers import get_ip
username = generate_random_user_name()
profile_attributes = {}
for attribute in OPENID_FIELD_MAPPING.keys():
mapped_attribute = OPENID_FIELD_MAPPING[attribute]
if openid.sreg and openid.sreg.get(attribute, ''):
profile_attributes[mapped_attribute] = openid.sreg.get(attribute, '')
new_user = User(username = username)
new_user.save()
new_openid = UserOpenID(openid = openid.openid, user = new_user)
new_openid.save()
new_user_profile = UserProfile.objects.get_user_profile(new_user)
for filled_attribute in profile_attributes.keys():
setattr(new_user, filled_attribute, profile_attributes[filled_attribute])
new_user_profile.save()
return new_user
def get_anon_user(request):
"""
Returns an anonmymous user corresponding to this IP address if one exists.
Else create an anonymous user and return it.
"""
try:
anon_user = User.objects.get(username = generate_anon_user_name(request))
except ObjectDoesNotExist:
anon_user = create_anon_user(request)
return anon_user
def create_anon_user(request):
"""
Creates a new anonymous user based on the ip provided by the request
object.
"""
anon_user_name = generate_anon_user_name(request)
anon_user = User(username = anon_user_name)
anon_user.save()
user_profile = UserProfile(user = anon_user, display_name = 'anonymous')
user_profile.save()
return anon_user
def generate_anon_user_name(request):
"""
Generate an anonymous user name based on and ip address.
"""
from django_utils.request_helpers import get_ip
ip = get_ip(request)
return "anon_user_%s" % (str(ip))
def is_anon_user(user):
"""
Determine if an user is anonymous or not.
"""
return user.username[0:10] == 'anon_user_'
def is_random(name):
"""
Determine if a user has a randomly generated display name.
"""
if len(name.split('_')) and name.startswith('user'):
return True
else:
return False
def process_ax_data(user, ax_data):
"""
Process OpenID AX data.
"""
import django_openidconsumer.config
emails = ax_data.get(django_openidconsumer.config.URI_GROUPS.get('email').get('type_uri', ''), '')
display_names = ax_data.get(django_openidconsumer.config.URI_GROUPS.get('alias').get('type_uri', ''), '')
if emails and not user.email.strip():
user.email = emails[0]
user.save()
if not user.profile.display_name.strip() or is_random(user.profile.display_name):
if display_names:
user.profile.display_name = display_names[0]
elif emails:
user.profile.display_name = emails[0].split('@')[0]
user.profile.save() | 32.262626 | 109 | 0.681277 |
f20ad7ae21fec4c62f9a2ffdfad7aa4815cb96a9 | 1,909 | py | Python | launch/test_motion.launch.py | RoboJackets/robocup-software | ae2920b8b98213e625d0565dd67005e7a8595fac | [
"Apache-2.0"
] | 200 | 2015-01-26T01:45:34.000Z | 2022-03-19T13:05:31.000Z | launch/test_motion.launch.py | RoboJackets/robocup-software | ae2920b8b98213e625d0565dd67005e7a8595fac | [
"Apache-2.0"
] | 1,254 | 2015-01-03T01:57:35.000Z | 2022-03-16T06:32:21.000Z | launch/test_motion.launch.py | RoboJackets/robocup-software | ae2920b8b98213e625d0565dd67005e7a8595fac | [
"Apache-2.0"
] | 206 | 2015-01-21T02:03:18.000Z | 2022-02-01T17:57:46.000Z | import os
from pathlib import Path
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch.actions import IncludeLaunchDescription, SetEnvironmentVariable, Shutdown
from launch.launch_description_sources import PythonLaunchDescriptionSource
from launch_ros.actions import Node
| 36.711538 | 85 | 0.677842 |
f20b33fbb1accee4936549d7e876ab92878ab6ba | 1,856 | py | Python | demo.py | nikp29/eDensiometer | e85a861c2faefb1911cf7b9cf10ee180afe85f13 | [
"FSFAP"
] | 2 | 2020-03-23T20:08:21.000Z | 2021-06-06T21:15:40.000Z | demo.py | nikp29/eDensiometer | e85a861c2faefb1911cf7b9cf10ee180afe85f13 | [
"FSFAP"
] | 1 | 2022-02-12T03:46:54.000Z | 2022-02-12T03:46:54.000Z | demo.py | nikp29/eDensiometer | e85a861c2faefb1911cf7b9cf10ee180afe85f13 | [
"FSFAP"
] | null | null | null | # A Rapid Proof of Concept for the eDensiometer
# Copyright 2018, Nikhil Patel. All Rights Reserved. Created with contributions from Billy Pierce.
# Imports
from PIL import Image
from pprint import pprint
import numpy as np
import time as time_
start = millis()
# Constants
# BRIGHT_CUTOFF = 175
RED_CUTOFF = 200
GREEN_CUTOFF = 150
BLUE_CUTOFF = 200
# Pull from test.jpg image in local directory
temp = np.asarray(Image.open('test.jpg'))
print(temp.shape)
# Variable Initialization
result = np.zeros((temp.shape[0], temp.shape[1], temp.shape[2]))
temp_bright = np.zeros((temp.shape[0], temp.shape[1]))
count_total = 0
count_open = 0
# Cycle through image
for row in range(0, temp.shape[0]):
for element in range(0, temp.shape[1]):
count_total += 1
temp_bright[row, element] = (int(temp[row][element][0]) + int(temp[row][element][1]) + int(temp[row][element][2]))/3
# bright = temp_bright[row][element] > BRIGHT_CUTOFF
red_enough = temp[row][element][0] > RED_CUTOFF
green_enough = temp[row][element][1] > GREEN_CUTOFF
blue_enough = temp[row][element][2] > BLUE_CUTOFF
if red_enough and green_enough and blue_enough:
# print(temp[row, element])
count_open += 1
result[row, element] = [255, 255, 255]
# Save filtered image as final.jpg
final = Image.fromarray(result.astype('uint8'), 'RGB')
final.save('final.jpg')
# Return/Print Percent Coverage
percent_open = count_open/count_total
percent_cover = 1 - percent_open
end = millis()
print("Percent Open: " + str(percent_open))
print("Percent Cover: " + str(percent_cover))
runtime = end-start
print("Runtime in MS: " + str(runtime)) | 30.933333 | 124 | 0.696659 |
f20c450c0dce05186c845a952d08081cb7846ab5 | 1,833 | py | Python | chart/script/provenance_ycsb_thruput.py | RUAN0007/nusthesis | 932367195171da2d1c82870cc5b96c0e760b4ca8 | [
"MIT"
] | null | null | null | chart/script/provenance_ycsb_thruput.py | RUAN0007/nusthesis | 932367195171da2d1c82870cc5b96c0e760b4ca8 | [
"MIT"
] | null | null | null | chart/script/provenance_ycsb_thruput.py | RUAN0007/nusthesis | 932367195171da2d1c82870cc5b96c0e760b4ca8 | [
"MIT"
] | null | null | null | import sys
import os
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
import matplotlib as mpl
import config
if __name__ == "__main__":
sys.exit(main()) | 30.55 | 135 | 0.651391 |
f20d43c8664dcca2ef65c9dd2e88a696d94a4ea3 | 3,157 | py | Python | core/handlers/filters_chat.py | Smashulica/nebula8 | 010df165e3cc61e0154d20310fa972482ec0e7be | [
"Apache-2.0"
] | null | null | null | core/handlers/filters_chat.py | Smashulica/nebula8 | 010df165e3cc61e0154d20310fa972482ec0e7be | [
"Apache-2.0"
] | null | null | null | core/handlers/filters_chat.py | Smashulica/nebula8 | 010df165e3cc61e0154d20310fa972482ec0e7be | [
"Apache-2.0"
] | null | null | null | from core.utilities.functions import delete_message
from core.utilities.message import message
from core.database.repository.group import GroupRepository
"""
This function allows you to terminate the type
of file that contains a message on telegram and filter it
""" | 44.464789 | 105 | 0.631929 |
f20eb1617a65a8d8e7031e114930d28913b16142 | 4,287 | py | Python | mipsplusplus/parser.py | alexsocha/mipsplusplus | ee7f87605682fe0b219f754069bf11da80c0312a | [
"MIT"
] | 1 | 2021-01-04T09:35:50.000Z | 2021-01-04T09:35:50.000Z | mipsplusplus/parser.py | alexsocha/mipsplusplus | ee7f87605682fe0b219f754069bf11da80c0312a | [
"MIT"
] | null | null | null | mipsplusplus/parser.py | alexsocha/mipsplusplus | ee7f87605682fe0b219f754069bf11da80c0312a | [
"MIT"
] | null | null | null | from mipsplusplus import utils
from mipsplusplus import operations
# Operators grouped by precedence, highest-binding group first; each inner
# list holds operators of equal precedence.
OPERATOR_ORDERING = [
    ['addressof', 'not', 'neg'],
    ['*', '/', '%'],
    ['+', '-'],
    ['<<', '>>', '<<<', '>>>'],
    ['<', '>', '<=', '>='],
    ['==', '!='],
    ['and', 'or', 'xor', 'nor'],
    ['as']
]
# Flat set of every expression operator token, plus the parentheses.
# (Set comprehension with a union replaces the original
# set(list-comprehension + list) construction.)
EXPR_OPERATORS = {op for ops in OPERATOR_ORDERING for op in ops} | {'(', ')'}
| 32.233083 | 106 | 0.585258 |
f20f24fc882a6bfe17d609d7a92bcf0cfdf1dd3a | 464 | py | Python | learn-to-code-with-python/10-Lists-Iteration/iterate-in-reverse-with-the-reversed-function.py | MaciejZurek/python_practicing | 0a426f2aed151573e1f8678e0239ff596d92bbde | [
"MIT"
] | null | null | null | learn-to-code-with-python/10-Lists-Iteration/iterate-in-reverse-with-the-reversed-function.py | MaciejZurek/python_practicing | 0a426f2aed151573e1f8678e0239ff596d92bbde | [
"MIT"
] | null | null | null | learn-to-code-with-python/10-Lists-Iteration/iterate-in-reverse-with-the-reversed-function.py | MaciejZurek/python_practicing | 0a426f2aed151573e1f8678e0239ff596d92bbde | [
"MIT"
] | null | null | null | the_simpsons = ["Homer", "Marge", "Bart", "Lisa", "Maggie"]
# Reversing with a slice builds a brand-new reversed list up front.
print(the_simpsons[::-1])
for char in the_simpsons[::-1]:
    print(f"{char} has a total of {len(char)} characters.")

# reversed() does not return a list; printing it shows the iterator object.
print(reversed(the_simpsons))
print(type(reversed(the_simpsons)))  # a lazy reverse iterator
# reversed() yields one element per iteration instead of materialising the
# whole reversed list, which is preferable for large lists.
for char in reversed(the_simpsons):
    print(f"{char} has a total of {len(char)} characters.")
| 33.142857 | 135 | 0.709052 |
f20f3b3cdb095ea301a3efa6ea5c8c922e9be8db | 640 | py | Python | ghiaseddin/scripts/download-dataset-lfw10.py | yassersouri/ghiaseddin | a575f2375729e7586ae7c682f8505dbb7619e622 | [
"MIT"
] | 44 | 2016-09-07T11:04:10.000Z | 2022-03-14T07:38:17.000Z | ghiaseddin/scripts/download-dataset-lfw10.py | yassersouri/ghiaseddin | a575f2375729e7586ae7c682f8505dbb7619e622 | [
"MIT"
] | 1 | 2016-09-06T23:33:54.000Z | 2016-09-06T23:33:54.000Z | ghiaseddin/scripts/download-dataset-lfw10.py | yassersouri/ghiaseddin | a575f2375729e7586ae7c682f8505dbb7619e622 | [
"MIT"
] | 13 | 2016-09-17T15:31:06.000Z | 2021-05-22T07:28:46.000Z | from subprocess import call
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
import settings
data_zip_path = os.path.join(settings.lfw10_root, "LFW10.zip")
data_url = "http://cvit.iiit.ac.in/images/Projects/relativeParts/LFW10.zip"
# Downloading the data zip and extracting it
call(["wget",
"--continue", # do not download things again
"--tries=0", # try many times to finish the download
"--output-document=%s" % data_zip_path, # save it to the appropriate place
data_url])
call(["unzip -d %s %s" % (settings.lfw10_root, data_zip_path)], shell=True)
| 33.684211 | 85 | 0.714063 |
f210443ae14873f6d0154e4872180eb345a39221 | 9,874 | py | Python | hops/dist_allreduce.py | Limmen/hops-util-py | 99263edcd052dbb554f0cde944fbdc748dc95f06 | [
"Apache-2.0"
] | null | null | null | hops/dist_allreduce.py | Limmen/hops-util-py | 99263edcd052dbb554f0cde944fbdc748dc95f06 | [
"Apache-2.0"
] | null | null | null | hops/dist_allreduce.py | Limmen/hops-util-py | 99263edcd052dbb554f0cde944fbdc748dc95f06 | [
"Apache-2.0"
] | null | null | null | """
Utility functions to retrieve information about available services and setting up security for the Hops platform.
These utils facilitates development by hiding complexity for programs interacting with Hops services.
"""
import pydoop.hdfs
import subprocess
import os
import stat
import sys
import threading
import time
import socket
from hops import hdfs as hopshdfs
from hops import tensorboard
from hops import devices
from hops import util
import coordination_server
# Monotonically increasing identifier for this module's TensorFlow runs.
run_id = 0


def launch(spark_session, notebook):
    """Run the notebook pointed to in HopsFS as a python file in mpirun.

    Args:
        :spark_session: SparkSession object
        :notebook: The path in HopsFS to the notebook
    """
    global run_id
    print('\nStarting TensorFlow job, follow your progress on TensorBoard in Jupyter UI! \n')
    sys.stdout.flush()

    spark_context = spark_session.sparkContext
    app_id = str(spark_context.applicationId)
    num_executors = int(spark_context._conf.get("spark.executor.instances"))

    # One RDD partition per executor, so each TF task runs on its own executor.
    node_rdd = spark_context.parallelize(range(num_executors), num_executors)

    # Coordination server the distributed tasks use to rendezvous.
    coordinator = coordination_server.Server(num_executors)
    coordinator_addr = coordinator.start()

    # Force execution on the executors, since the GPUs are located there.
    node_rdd.foreachPartition(prepare_func(app_id, run_id, notebook, coordinator_addr))

    print('Finished TensorFlow job \n')
    print('Make sure to check /Logs/TensorFlow/' + app_id + '/runId.' + str(run_id) + ' for logfile and TensorBoard logdir')
def get_ip_address():
    """Return the host's outward-facing IP address.

    "Connecting" a UDP socket to a public address makes the OS pick the
    local interface that would route there; the local address is then read
    back with getsockname(). The original version leaked the socket — it
    is now always closed via try/finally.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect(("8.8.8.8", 80))
        return s.getsockname()[0]
    finally:
        s.close()
# The code generated by this function will be called in an eval, which changes the working_dir and cuda_visible_devices for process running mpirun | 36.435424 | 295 | 0.594896 |
f21153cc2731f7ee87405f5ca13164bed51c9656 | 713 | py | Python | api/migrations/versions/e956985ff509_.py | SnSation/Pokemart | c91dcd155ae3abe343781b3d26211d2463d41ff3 | [
"MIT"
] | null | null | null | api/migrations/versions/e956985ff509_.py | SnSation/Pokemart | c91dcd155ae3abe343781b3d26211d2463d41ff3 | [
"MIT"
] | null | null | null | api/migrations/versions/e956985ff509_.py | SnSation/Pokemart | c91dcd155ae3abe343781b3d26211d2463d41ff3 | [
"MIT"
] | null | null | null | """empty message
Revision ID: e956985ff509
Revises: 4b471bbc0004
Create Date: 2020-12-02 22:47:08.536332
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e956985ff509'
down_revision = '4b471bbc0004'
branch_labels = None
depends_on = None
| 24.586207 | 123 | 0.71108 |
f2125363bb0906c29b0070780b0f856daaf2354c | 926 | py | Python | tests/features/steps/ahk_steps.py | epth/ahk | 3a09830b10bf93d6dabda5f055665024570ff6c8 | [
"MIT"
] | 1 | 2021-02-16T14:16:58.000Z | 2021-02-16T14:16:58.000Z | tests/features/steps/ahk_steps.py | epth/ahk | 3a09830b10bf93d6dabda5f055665024570ff6c8 | [
"MIT"
] | null | null | null | tests/features/steps/ahk_steps.py | epth/ahk | 3a09830b10bf93d6dabda5f055665024570ff6c8 | [
"MIT"
] | null | null | null | from behave.matchers import RegexMatcher
from ahk import AHK
from behave_classy import step_impl_base
Base = step_impl_base()
AHKSteps().register() | 29.870968 | 86 | 0.596112 |
f2160ab0d4f01e332dfeaf82b6dd74a2d6cbaae5 | 1,703 | py | Python | snakewm/apps/games/pong/bat.py | sigmaister/snakeware_os | 4a821c2a0dc7762c4ab35053286f5e23125386d0 | [
"MIT"
] | 1,621 | 2020-05-29T06:49:27.000Z | 2022-03-15T08:20:08.000Z | snakewm/apps/games/pong/bat.py | sigmaister/snakeware_os | 4a821c2a0dc7762c4ab35053286f5e23125386d0 | [
"MIT"
] | 119 | 2020-05-29T17:10:45.000Z | 2021-12-19T23:43:17.000Z | snakewm/apps/games/pong/bat.py | sigmaister/snakeware_os | 4a821c2a0dc7762c4ab35053286f5e23125386d0 | [
"MIT"
] | 125 | 2020-05-29T07:43:22.000Z | 2022-03-18T22:13:08.000Z | import pygame
from pygame.locals import *
| 28.383333 | 88 | 0.570757 |
f21845fdd846667effc17afb28dfd50fe6d29229 | 3,527 | py | Python | mimic/model/rackspace_image_store.py | ksheedlo/mimic | c84b6a0d336e8a37a685b5d71537aec5e44d9a8f | [
"Apache-2.0"
] | 141 | 2015-01-07T19:28:31.000Z | 2022-02-11T06:04:13.000Z | mimic/model/rackspace_image_store.py | ksheedlo/mimic | c84b6a0d336e8a37a685b5d71537aec5e44d9a8f | [
"Apache-2.0"
] | 575 | 2015-01-04T20:23:08.000Z | 2019-10-04T08:20:04.000Z | mimic/model/rackspace_image_store.py | ksheedlo/mimic | c84b6a0d336e8a37a685b5d71537aec5e44d9a8f | [
"Apache-2.0"
] | 63 | 2015-01-09T20:39:41.000Z | 2020-07-06T14:20:56.000Z | """
An image store representing Rackspace specific images
"""
from __future__ import absolute_import, division, unicode_literals
import attr
from six import iteritems
from mimic.model.rackspace_images import (RackspaceWindowsImage,
RackspaceCentOSPVImage, RackspaceCentOSPVHMImage,
RackspaceCoreOSImage, RackspaceDebianImage,
RackspaceFedoraImage, RackspaceFreeBSDImage,
RackspaceGentooImage, RackspaceOpenSUSEImage,
RackspaceRedHatPVImage, RackspaceRedHatPVHMImage,
RackspaceUbuntuPVImage, RackspaceUbuntuPVHMImage,
RackspaceVyattaImage, RackspaceScientificImage,
RackspaceOnMetalCentOSImage, RackspaceOnMetalCoreOSImage,
RackspaceOnMetalDebianImage, RackspaceOnMetalFedoraImage,
RackspaceOnMetalUbuntuImage)
from mimic.model.rackspace_images import create_rackspace_images
| 48.986111 | 99 | 0.588602 |
f218482525c6f07411100d66a18c105ea0a2d6c8 | 926 | py | Python | samples/noxfile_config.py | ikuleshov/python-analytics-admin | f3d6fa78292878e7470806be0c116c6ca589eec5 | [
"Apache-2.0"
] | null | null | null | samples/noxfile_config.py | ikuleshov/python-analytics-admin | f3d6fa78292878e7470806be0c116c6ca589eec5 | [
"Apache-2.0"
] | null | null | null | samples/noxfile_config.py | ikuleshov/python-analytics-admin | f3d6fa78292878e7470806be0c116c6ca589eec5 | [
"Apache-2.0"
] | null | null | null | TEST_CONFIG_OVERRIDE = {
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
# to use your own Cloud project.
"gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",
# 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
"envs": {
"GA_TEST_PROPERTY_ID": "276206997",
"GA_TEST_ACCOUNT_ID": "199820965",
"GA_TEST_USER_LINK_ID": "103401743041912607932",
"GA_TEST_PROPERTY_USER_LINK_ID": "105231969274497648555",
"GA_TEST_ANDROID_APP_DATA_STREAM_ID": "2828100949",
"GA_TEST_IOS_APP_DATA_STREAM_ID": "2828089289",
"GA_TEST_WEB_DATA_STREAM_ID": "2828068992",
},
}
| 46.3 | 70 | 0.712743 |
f219a6a5d2eba5cb99ad3a1f9f919f6e65e608c6 | 77 | py | Python | settings/channel_archiver/NIH.pressure_downstream_settings.py | bopopescu/Lauecollect | 60ae2b05ea8596ba0decf426e37aeaca0bc8b6be | [
"MIT"
] | null | null | null | settings/channel_archiver/NIH.pressure_downstream_settings.py | bopopescu/Lauecollect | 60ae2b05ea8596ba0decf426e37aeaca0bc8b6be | [
"MIT"
] | 1 | 2019-10-22T21:28:31.000Z | 2019-10-22T21:39:12.000Z | settings/channel_archiver/NIH.pressure_downstream_settings.py | bopopescu/Lauecollect | 60ae2b05ea8596ba0decf426e37aeaca0bc8b6be | [
"MIT"
] | 2 | 2019-06-06T15:06:46.000Z | 2020-07-20T02:03:22.000Z | filename = '//mx340hs/data/anfinrud_1903/Archive/NIH.pressure_downstream.txt' | 77 | 77 | 0.831169 |
f219b4c368dddd54cf8c1f93b4bad8299a4df851 | 17,181 | py | Python | sfepy/terms/terms_navier_stokes.py | vondrejc/sfepy | 8e427af699c4b2858eb096510057abb3ae7e28e8 | [
"BSD-3-Clause"
] | null | null | null | sfepy/terms/terms_navier_stokes.py | vondrejc/sfepy | 8e427af699c4b2858eb096510057abb3ae7e28e8 | [
"BSD-3-Clause"
] | null | null | null | sfepy/terms/terms_navier_stokes.py | vondrejc/sfepy | 8e427af699c4b2858eb096510057abb3ae7e28e8 | [
"BSD-3-Clause"
] | 2 | 2019-01-14T03:12:34.000Z | 2021-05-25T11:44:50.000Z | import numpy as nm
from sfepy.linalg import dot_sequences
from sfepy.terms.terms import Term, terms
from sfepy.terms.terms_diffusion import LaplaceTerm
| 28.635 | 79 | 0.521623 |
f21a51bd13a2f891e2303ec8e105009193f93ecb | 422 | py | Python | saleor/unurshop/crawler/migrations/0013_auto_20210921_0452.py | nlkhagva/saleor | 0d75807d08ac49afcc904733724ac870e8359c10 | [
"CC-BY-4.0"
] | null | null | null | saleor/unurshop/crawler/migrations/0013_auto_20210921_0452.py | nlkhagva/saleor | 0d75807d08ac49afcc904733724ac870e8359c10 | [
"CC-BY-4.0"
] | 1 | 2022-02-15T03:31:12.000Z | 2022-02-15T03:31:12.000Z | saleor/unurshop/crawler/migrations/0013_auto_20210921_0452.py | nlkhagva/ushop | abf637eb6f7224e2d65d62d72a0c15139c64bb39 | [
"CC-BY-4.0"
] | null | null | null | # Generated by Django 3.1.1 on 2021-09-21 04:52
from django.db import migrations, models
| 22.210526 | 80 | 0.618483 |
f21a701d87cd77a1cae7afc78f9f781cba559ff4 | 2,713 | py | Python | src/cli/examples/oss-fuzz-target.py | gdhuper/onefuzz | 6aca32ed9c8318aa81887eeaacff03c406c0e98c | [
"MIT"
] | 1 | 2020-10-27T08:05:57.000Z | 2020-10-27T08:05:57.000Z | src/cli/examples/oss-fuzz-target.py | gdhuper/onefuzz | 6aca32ed9c8318aa81887eeaacff03c406c0e98c | [
"MIT"
] | 1 | 2021-02-15T00:38:32.000Z | 2021-02-15T00:38:32.000Z | src/cli/examples/oss-fuzz-target.py | gdhuper/onefuzz | 6aca32ed9c8318aa81887eeaacff03c406c0e98c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import logging
import os
import sys
import tempfile
from subprocess import PIPE, CalledProcessError, check_call # nosec
from typing import List, Optional
from onefuzztypes.models import NotificationConfig
from onefuzztypes.primitives import PoolName
from onefuzz.api import Command, Onefuzz
from onefuzz.cli import execute_api
SANITIZERS = ["address", "dataflow", "memory", "undefined"]
if __name__ == "__main__":
sys.exit(main())
| 29.48913 | 87 | 0.557317 |
f21c03303c0e86780d94fa0daa72a6287b00df39 | 3,721 | py | Python | stubs/workspaces.py | claytonbrown/troposphere | bf0f1e48b14f578de0221d50f711467ad716ca87 | [
"BSD-2-Clause"
] | null | null | null | stubs/workspaces.py | claytonbrown/troposphere | bf0f1e48b14f578de0221d50f711467ad716ca87 | [
"BSD-2-Clause"
] | null | null | null | stubs/workspaces.py | claytonbrown/troposphere | bf0f1e48b14f578de0221d50f711467ad716ca87 | [
"BSD-2-Clause"
] | null | null | null | from . import AWSObject, AWSProperty
from .validators import *
from .constants import *
# -------------------------------------------
| 56.378788 | 228 | 0.685837 |
f21e91816629e68e43e8282cecfca50b522c0148 | 5,718 | py | Python | hystrix/command.py | grofers/hystrix-py | 9876b39980bc8dcb334fcb0ee8c15d6949112203 | [
"Apache-2.0"
] | 93 | 2015-01-29T10:10:49.000Z | 2021-12-05T08:45:04.000Z | hystrix/command.py | grofers/hystrix-py | 9876b39980bc8dcb334fcb0ee8c15d6949112203 | [
"Apache-2.0"
] | 1 | 2018-01-30T00:32:37.000Z | 2018-01-30T00:32:37.000Z | hystrix/command.py | grofers/hystrix-py | 9876b39980bc8dcb334fcb0ee8c15d6949112203 | [
"Apache-2.0"
] | 20 | 2015-09-18T02:04:24.000Z | 2020-03-25T10:31:07.000Z | """
Used to wrap code that will execute potentially risky functionality
(typically meaning a service call over the network) with fault and latency
tolerance, statistics and performance metrics capture, circuit breaker and
bulkhead functionality.
"""
from __future__ import absolute_import
import logging
import six
from hystrix.group import Group
from hystrix.command_metrics import CommandMetrics
from hystrix.command_properties import CommandProperties
log = logging.getLogger(__name__)
# TODO: Change this to an AbstractCommandMetaclass
# TODO: Change this to inherit from an AbstractCommand
| 38.635135 | 79 | 0.64603 |
f21f243b3b146cb9c4185deea25898637e21bb4c | 12,079 | py | Python | glue/viewers/table/qt/data_viewer.py | HPLegion/glue | 1843787ccb4de852dfe103ff58473da13faccf5f | [
"BSD-3-Clause"
] | 550 | 2015-01-08T13:51:06.000Z | 2022-03-31T11:54:47.000Z | glue/viewers/table/qt/data_viewer.py | HPLegion/glue | 1843787ccb4de852dfe103ff58473da13faccf5f | [
"BSD-3-Clause"
] | 1,362 | 2015-01-03T19:15:52.000Z | 2022-03-30T13:23:11.000Z | glue/viewers/table/qt/data_viewer.py | HPLegion/glue | 1843787ccb4de852dfe103ff58473da13faccf5f | [
"BSD-3-Clause"
] | 142 | 2015-01-08T13:08:00.000Z | 2022-03-18T13:25:57.000Z | import os
from functools import lru_cache
import numpy as np
from qtpy.QtCore import Qt
from qtpy import QtCore, QtGui, QtWidgets
from matplotlib.colors import ColorConverter
from glue.utils.qt import get_qapp
from glue.config import viewer_tool
from glue.core import BaseData, Data
from glue.utils.qt import load_ui
from glue.viewers.common.qt.data_viewer import DataViewer
from glue.viewers.common.qt.toolbar import BasicToolbar
from glue.viewers.common.tool import CheckableTool
from glue.viewers.common.layer_artist import LayerArtist
from glue.core.subset import ElementSubsetState
from glue.utils.colors import alpha_blend_colors
from glue.utils.qt import mpl_to_qt_color, messagebox_on_error
from glue.core.exceptions import IncompatibleAttribute
from glue.viewers.table.compat import update_table_viewer_state
try:
import dask.array as da
DASK_INSTALLED = True
except ImportError:
DASK_INSTALLED = False
__all__ = ['TableViewer', 'TableLayerArtist']
COLOR_CONVERTER = ColorConverter()
def get_layer_artist(self, cls, layer=None, layer_state=None):
return cls(self, self.state, layer=layer, layer_state=layer_state)
| 35.215743 | 92 | 0.637304 |
f21fb8769d6f7f12c55b09713729ab92490aa213 | 1,781 | py | Python | azure-servicefabric/azure/servicefabric/models/restore_partition_description.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2021-09-07T18:36:04.000Z | 2021-09-07T18:36:04.000Z | azure-servicefabric/azure/servicefabric/models/restore_partition_description.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 2 | 2019-10-02T23:37:38.000Z | 2020-10-02T01:17:31.000Z | azure-servicefabric/azure/servicefabric/models/restore_partition_description.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2018-10-16T13:08:23.000Z | 2018-10-16T13:08:23.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
| 37.893617 | 87 | 0.638967 |
f2222e7a4067aa3e2de0115ba3b31e143ef1fc7b | 5,438 | py | Python | psq/queue.py | Tomesco/bookshelf-demo-project | 9d422f3aa04edbb3312d3e177caf699653ed6a73 | [
"Apache-2.0"
] | 210 | 2015-07-29T16:50:01.000Z | 2022-03-02T15:24:52.000Z | psq/queue.py | Tomesco/bookshelf-demo-project | 9d422f3aa04edbb3312d3e177caf699653ed6a73 | [
"Apache-2.0"
] | 60 | 2015-12-03T23:15:57.000Z | 2021-01-21T09:25:42.000Z | psq/queue.py | Tomesco/bookshelf-demo-project | 9d422f3aa04edbb3312d3e177caf699653ed6a73 | [
"Apache-2.0"
] | 47 | 2015-12-21T06:09:36.000Z | 2021-09-04T13:20:21.000Z | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from contextlib import contextmanager
import functools
import logging
from uuid import uuid4
import google.cloud.exceptions
from .globals import queue_context
from .storage import Storage
from .task import Task, TaskResult
from .utils import dumps, measure_time, unpickle, UnpickleError
logger = logging.getLogger(__name__)
PUBSUB_OBJECT_PREFIX = 'psq'
| 34.417722 | 79 | 0.658698 |
f22234640a48085e7e67ec5bc155d8fda74563b6 | 2,453 | py | Python | mac/google-cloud-sdk/lib/surface/access_context_manager/levels/update.py | bopopescu/cndw | ee432efef88a4351b355f3d6d5350defc7f4246b | [
"Apache-2.0"
] | null | null | null | mac/google-cloud-sdk/lib/surface/access_context_manager/levels/update.py | bopopescu/cndw | ee432efef88a4351b355f3d6d5350defc7f4246b | [
"Apache-2.0"
] | 4 | 2020-07-21T12:51:46.000Z | 2022-01-22T10:29:25.000Z | mac/google-cloud-sdk/lib/surface/access_context_manager/levels/update.py | bopopescu/cndw | ee432efef88a4351b355f3d6d5350defc7f4246b | [
"Apache-2.0"
] | 1 | 2020-07-25T18:17:57.000Z | 2020-07-25T18:17:57.000Z | # -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""`gcloud access-context-manager levels update` command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.accesscontextmanager import levels as levels_api
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.accesscontextmanager import levels
from googlecloudsdk.command_lib.accesscontextmanager import policies
| 32.706667 | 76 | 0.772523 |
f2235aff62f649f7be3dedbcbc6809a427c1c2ca | 775 | py | Python | convert.py | lordcodingsound/autodj | dc43c8a8bd07006d02a5a7d5d2ae74d2eb9bf685 | [
"MIT"
] | null | null | null | convert.py | lordcodingsound/autodj | dc43c8a8bd07006d02a5a7d5d2ae74d2eb9bf685 | [
"MIT"
] | null | null | null | convert.py | lordcodingsound/autodj | dc43c8a8bd07006d02a5a7d5d2ae74d2eb9bf685 | [
"MIT"
] | null | null | null | import wave
import struct
import subprocess
import os
import opusenc
import base64
import zlib
import sys
tmp = sys.argv[1] + ".wav"
subprocess.Popen(["ffmpeg", "-i", sys.argv[1], "-ar", "48000", "-ac", "2", "-y", tmp], stdout=subprocess.PIPE, stderr=subprocess.PIPE).wait()
f = open(sys.argv[2], "wb")
e = zlib.compressobj(9)
c = 0
b = ""
opusenc.initialize(256000)
wf = wave.open(tmp)
while True:
rc = wf.readframes(480)
if len(rc) != 1920:
break
opus = opusenc.encode(rc)
b += base64.b64encode(opus).decode("utf-8") + "\n"
c += 1
if c >= 100:
c = 0
f.write(e.compress(b.encode()) + e.flush(zlib.Z_SYNC_FLUSH))
b = ""
f.write(e.compress(b.encode()) + e.flush(zlib.Z_SYNC_FLUSH))
f.close()
wf.close()
os.remove(tmp)
| 20.394737 | 142 | 0.616774 |
f2243459193cb30ca1ec87a1cec0d50174acfaea | 170 | py | Python | polliwog/tri/__init__.py | lace/polliwog | 7744ce171738e4739e391fcff4f4689d9f177196 | [
"BSD-2-Clause"
] | 18 | 2019-05-03T02:08:12.000Z | 2022-03-24T11:49:59.000Z | polliwog/tri/__init__.py | lace/polliwog | 7744ce171738e4739e391fcff4f4689d9f177196 | [
"BSD-2-Clause"
] | 76 | 2019-04-03T15:24:01.000Z | 2022-03-01T14:07:04.000Z | polliwog/tri/__init__.py | lace/polliwog | 7744ce171738e4739e391fcff4f4689d9f177196 | [
"BSD-2-Clause"
] | 3 | 2019-11-04T16:22:07.000Z | 2022-03-09T08:50:52.000Z | from . import functions as _functions
from .functions import * # noqa: F401,F403
from .quad_faces import quads_to_tris
__all__ = _functions.__all__ + ["quads_to_tris"]
| 28.333333 | 48 | 0.776471 |
f224a1293fdc148ee28c3d5f42f88c489aa0c477 | 10,961 | py | Python | packages/pegasus-api/src/Pegasus/api/replica_catalog.py | spxiwh/pegasus | ebe3e205ae34c1721c540465712da557979c7437 | [
"Apache-2.0"
] | null | null | null | packages/pegasus-api/src/Pegasus/api/replica_catalog.py | spxiwh/pegasus | ebe3e205ae34c1721c540465712da557979c7437 | [
"Apache-2.0"
] | null | null | null | packages/pegasus-api/src/Pegasus/api/replica_catalog.py | spxiwh/pegasus | ebe3e205ae34c1721c540465712da557979c7437 | [
"Apache-2.0"
] | null | null | null | from collections import OrderedDict
from pathlib import Path
from typing import Dict, Optional, Set, Union
from ._utils import _chained
from .errors import DuplicateError
from .mixins import MetadataMixin
from .writable import Writable, _filter_out_nones
PEGASUS_VERSION = "5.0"
__all__ = ["File", "ReplicaCatalog"]
| 35.244373 | 186 | 0.560624 |
f225855419247f9e8048a49c1d9c71b3af0a2082 | 4,513 | py | Python | words.py | ashutoshkrris/Terminal-Wordle | edafc99a6adb12824495e53dd0c5be6dc89b8839 | [
"MIT"
] | null | null | null | words.py | ashutoshkrris/Terminal-Wordle | edafc99a6adb12824495e53dd0c5be6dc89b8839 | [
"MIT"
] | null | null | null | words.py | ashutoshkrris/Terminal-Wordle | edafc99a6adb12824495e53dd0c5be6dc89b8839 | [
"MIT"
] | null | null | null | word_list = ['ABOUT', 'ABOVE', 'ABUSE', 'ACTOR', 'ACUTE', 'ADMIT', 'ADOPT', 'ADULT', 'AFTER', 'AGAIN', 'AGENT', 'AGREE', 'AHEAD', 'ALARM', 'ALBUM', 'ALERT', 'ALIKE', 'ALIVE', 'ALLOW', 'ALONE', 'ALONG', 'ALTER', 'AMONG', 'ANGER', 'ANGLE', 'ANGRY', 'APART', 'APPLE', 'APPLY', 'ARENA', 'ARGUE', 'ARISE', 'ARRAY', 'ASIDE', 'ASSET', 'AUDIO', 'AUDIT', 'AVOID', 'AWARD', 'AWARE', 'BADLY', 'BAKER', 'BASES', 'BASIC', 'BASIS', 'BEACH', 'BEGAN', 'BEGIN', 'BEGUN', 'BEING', 'BELOW', 'BENCH', 'BILLY', 'BIRTH', 'BLACK', 'BLAME', 'BLIND', 'BLOCK', 'BLOOD', 'BOARD', 'BOOST', 'BOOTH', 'BOUND', 'BRAIN', 'BRAND', 'BREAD', 'BREAK', 'BREED', 'BRIEF', 'BRING', 'BROAD', 'BROKE', 'BROWN', 'BUILD', 'BUILT', 'BUYER', 'CABLE', 'CALIF', 'CARRY', 'CATCH', 'CAUSE', 'CHAIN', 'CHAIR', 'CHART', 'CHASE', 'CHEAP', 'CHECK', 'CHEST', 'CHIEF', 'CHILD', 'CHINA', 'CHOSE', 'CIVIL', 'CLAIM', 'CLASS', 'CLEAN', 'CLEAR', 'CLICK', 'CLOCK', 'CLOSE', 'COACH', 'COAST', 'COULD', 'COUNT', 'COURT', 'COVER', 'CRAFT', 'CRASH', 'CREAM', 'CRIME', 'CROSS', 'CROWD', 'CROWN', 'CURVE', 'CYCLE', 'DAILY', 'DANCE', 'DATED', 'DEALT', 'DEATH', 'DEBUT', 'DELAY', 'DEPTH', 'DOING', 'DOUBT', 'DOZEN', 'DRAFT', 'DRAMA', 'DRAWN', 'DREAM', 'DRESS', 'DRILL', 'DRINK', 'DRIVE', 'DROVE', 'DYING', 'EAGER', 'EARLY', 'EARTH', 'EIGHT', 'ELITE', 'EMPTY', 'ENEMY', 'ENJOY', 'ENTER', 'ENTRY', 'EQUAL', 'ERROR', 'EVENT', 'EVERY', 'EXACT', 'EXIST', 'EXTRA', 'FAITH', 'FALSE', 'FAULT', 'FIBER', 'FIELD', 'FIFTH', 'FIFTY', 'FIGHT', 'FINAL', 'FIRST', 'FIXED', 'FLASH', 'FLEET', 'FLOOR', 'FLUID', 'FOCUS', 'FORCE', 'FORTH', 'FORTY', 'FORUM', 'FOUND', 'FRAME', 'FRANK', 'FRAUD', 'FRESH', 'FRONT', 'FRUIT', 'FULLY', 'FUNNY', 'GIANT', 'GIVEN', 'GLASS', 'GLOBE', 'GOING', 'GRACE', 'GRADE', 'GRAND', 'GRANT', 'GRASS', 'GREAT', 'GREEN', 'GROSS', 'GROUP', 'GROWN', 'GUARD', 'GUESS', 'GUEST', 'GUIDE', 'HAPPY', 'HARRY', 'HEART', 'HEAVY', 'HENCE', 'HENRY', 'HORSE', 'HOTEL', 'HOUSE', 'HUMAN', 'IDEAL', 'IMAGE', 'INDEX', 'INNER', 'INPUT', 'ISSUE', 'JAPAN', 
'JIMMY', 'JOINT', 'JONES', 'JUDGE', 'KNOWN', 'LABEL', 'LARGE', 'LASER', 'LATER', 'LAUGH', 'LAYER', 'LEARN', 'LEASE', 'LEAST', 'LEAVE', 'LEGAL', 'LEVEL', 'LEWIS', 'LIGHT', 'LIMIT', 'LINKS', 'LIVES', 'LOCAL', 'LOGIC', 'LOOSE', 'LOWER', 'LUCKY', 'LUNCH', 'LYING', 'MAGIC', 'MAJOR', 'MAKER', 'MARCH', 'MARIA', 'MATCH', 'MAYBE', 'MAYOR', 'MEANT', 'MEDIA', 'METAL', 'MIGHT', 'MINOR', 'MINUS', 'MIXED', 'MODEL', 'MONEY', 'MONTH', 'MORAL', 'MOTOR', 'MOUNT', 'MOUSE', 'MOUTH', 'MOVIE', 'MUSIC', 'NEEDS', 'NEVER', 'NEWLY', 'NIGHT', 'NOISE', 'NORTH', 'NOTED', 'NOVEL', 'NURSE', 'OCCUR', 'OCEAN', 'OFFER', 'OFTEN', 'ORDER', 'OTHER', 'OUGHT', 'PAINT', 'PANEL', 'PAPER', 'PARTY', 'PEACE', 'PETER', 'PHASE', 'PHONE', 'PHOTO', 'PIECE', 'PILOT', 'PITCH', 'PLACE', 'PLAIN', 'PLANE', 'PLANT', 'PLATE', 'POINT', 'POUND', 'POWER', 'PRESS', 'PRICE', 'PRIDE', 'PRIME', 'PRINT', 'PRIOR', 'PRIZE', 'PROOF', 'PROUD', 'PROVE', 'QUEEN', 'QUICK', 'QUIET', 'QUITE', 'RADIO', 'RAISE', 'RANGE', 'RAPID', 'RATIO', 'REACH', 'READY', 'REFER', 'RIGHT', 'RIVAL', 'RIVER', 'ROBIN', 'ROGER', 'ROMAN', 'ROUGH', 'ROUND', 'ROUTE', 'ROYAL', 'RURAL', 'SCALE', 'SCENE', 'SCOPE', 'SCORE', 'SENSE', 'SERVE', 'SEVEN', 'SHALL', 'SHAPE', 'SHARE', 'SHARP', 'SHEET', 'SHELF', 'SHELL', 'SHIFT', 'SHIRT', 'SHOCK', 'SHOOT', 'SHORT', 'SHOWN', 'SIGHT', 'SINCE', 'SIXTH', 'SIXTY', 'SIZED', 'SKILL', 'SLEEP', 'SLIDE', 'SMALL', 'SMART', 'SMILE', 'SMITH', 'SMOKE', 'SOLID', 'SOLVE', 'SORRY', 'SOUND', 'SOUTH', 'SPACE', 'SPARE', 'SPEAK', 'SPEED', 'SPEND', 'SPENT', 'SPLIT', 'SPOKE', 'SPORT', 'STAFF', 'STAGE', 'STAKE', 'STAND', 'START', 'STATE', 'STEAM', 'STEEL', 'STICK', 'STILL', 'STOCK', 'STONE', 'STOOD', 'STORE', 'STORM', 'STORY', 'STRIP', 'STUCK', 'STUDY', 'STUFF', 'STYLE', 'SUGAR', 'SUITE', 'SUPER', 'SWEET', 'TABLE', 'TAKEN', 'TASTE', 'TAXES', 'TEACH', 'TEETH', 'TERRY', 'TEXAS', 'THANK', 'THEFT', 'THEIR', 'THEME', 'THERE', 'THESE', 'THICK', 'THING', 'THINK', 'THIRD', 'THOSE', 'THREE', 'THREW', 'THROW', 'TIGHT', 'TIMES', 'TIRED', 'TITLE', 'TODAY', 
'TOPIC', 'TOTAL', 'TOUCH', 'TOUGH', 'TOWER', 'TRACK', 'TRADE', 'TRAIN', 'TREAT', 'TREND', 'TRIAL', 'TRIED', 'TRIES', 'TRUCK', 'TRULY', 'TRUST', 'TRUTH', 'TWICE', 'UNDER', 'UNDUE', 'UNION', 'UNITY', 'UNTIL', 'UPPER', 'UPSET', 'URBAN', 'USAGE', 'USUAL', 'VALID', 'VALUE', 'VIDEO', 'VIRUS', 'VISIT', 'VITAL', 'VOICE', 'WASTE', 'WATCH', 'WATER', 'WHEEL', 'WHERE', 'WHICH', 'WHILE', 'WHITE', 'WHOLE', 'WHOSE', 'WOMAN', 'WOMEN', 'WORLD', 'WORRY', 'WORSE', 'WORST', 'WORTH', 'WOULD', 'WOUND', 'WRITE', 'WRONG', 'WROTE', 'YIELD', 'YOUNG', 'YOUTH']
| 2,256.5 | 4,512 | 0.555949 |
f225fb07e45aa0d878182c25737de8508c76dbb0 | 6,759 | py | Python | importanize/groups.py | xiachufang/importanize | 594e33b7827a9619c15aaacbe03b8cdf42a5c7a0 | [
"MIT"
] | null | null | null | importanize/groups.py | xiachufang/importanize | 594e33b7827a9619c15aaacbe03b8cdf42a5c7a0 | [
"MIT"
] | null | null | null | importanize/groups.py | xiachufang/importanize | 594e33b7827a9619c15aaacbe03b8cdf42a5c7a0 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import itertools
import operator
from collections import OrderedDict, defaultdict
from functools import reduce
import six
from .formatters import DEFAULT_FORMATTER, DEFAULT_LENGTH
from .utils import is_site_package, is_std_lib
# -- RemainderGroup goes last and catches everything left over
# Ordered mapping of configuration group names to their group classes;
# the ordering matters, so RemainderGroup must remain the final entry.
GROUP_MAPPING = OrderedDict(
    (
        ("stdlib", StdLibGroup),
        ("sitepackages", SitePackagesGroup),
        ("packages", PackagesGroup),
        ("local", LocalGroup),
        ("remainder", RemainderGroup),
    )
)
| 28.884615 | 80 | 0.557627 |
f22667e27e25306a81ed4197a3f3283e37b3daea | 846 | py | Python | NLP4CCB/migrations/0005_auto_20170415_2236.py | rossmechanic/know_your_nyms | 805ca845121fa93a38088f09cd0a430ddb9f95cf | [
"BSD-3-Clause"
] | 1 | 2020-02-12T13:24:15.000Z | 2020-02-12T13:24:15.000Z | NLP4CCB/migrations/0005_auto_20170415_2236.py | rossmechanic/know_your_nyms | 805ca845121fa93a38088f09cd0a430ddb9f95cf | [
"BSD-3-Clause"
] | null | null | null | NLP4CCB/migrations/0005_auto_20170415_2236.py | rossmechanic/know_your_nyms | 805ca845121fa93a38088f09cd0a430ddb9f95cf | [
"BSD-3-Clause"
] | 1 | 2017-10-25T11:24:51.000Z | 2017-10-25T11:24:51.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-04-15 22:36
from django.db import migrations, models
| 26.4375 | 78 | 0.582742 |
f226b3e74e0c07da106f197b5ad2bd3632fb47b8 | 2,198 | py | Python | synchCams/start_server.py | ateshkoul/synchCams | 3f73cf593e27d57b72f65d453d13cc535646e86d | [
"MIT"
] | null | null | null | synchCams/start_server.py | ateshkoul/synchCams | 3f73cf593e27d57b72f65d453d13cc535646e86d | [
"MIT"
] | null | null | null | synchCams/start_server.py | ateshkoul/synchCams | 3f73cf593e27d57b72f65d453d13cc535646e86d | [
"MIT"
] | null | null | null | import socket
import json
import pdb
import copy
# def server_read(host='',port=30):
# # host = '' # Symbolic name meaning all available interfaces
# # port = 30 # Arbitrary non-privileged port
# s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# s.bind((host, port))
# print(host , port)
# s.listen(1)
# conn, addr = s.accept()
# print('Connected by', addr)
# return_data = {}
# while True:
# try:
# in_data = conn.recv(1024)
# # pdb.set_trace()
# if in_data: return_data = copy.deepcopy(in_data)
# if not in_data: break
# print("Client Says: "+return_data.decode("utf-8"))
# conn.sendall(b"Server Says:hi")
# except socket.error:
# print("Error Occured.")
# break
# conn.close()
# return(bytes_to_dict(return_data))
# # return(return_data) | 27.475 | 73 | 0.556415 |
f226f9e28b1182a033e88cc3340054c8eee83b4e | 2,243 | py | Python | 9.part2.py | elp2/advent_of_code_2020 | 71e12e25769aa7d5154213077ffae595ad9a4019 | [
"Apache-2.0"
] | 1 | 2021-12-02T15:19:36.000Z | 2021-12-02T15:19:36.000Z | 2020/9.part2.py | elp2/advent_of_code | 600e2db9a7d5b576937c9b39c5c6805db406f57b | [
"Apache-2.0"
] | null | null | null | 2020/9.part2.py | elp2/advent_of_code | 600e2db9a7d5b576937c9b39c5c6805db406f57b | [
"Apache-2.0"
] | null | null | null | from collections import defaultdict
CHALLENGE_DAY = "9"
REAL = open(CHALLENGE_DAY + ".txt").read()
assert len(REAL) > 1
SAMPLE = open(CHALLENGE_DAY + ".sample.txt").read()
SAMPLE_EXPECTED = 127
# SAMPLE_EXPECTED =
test_parsing(parse_lines(SAMPLE))
print("^^^^^^^^^PARSED SAMPLE SAMPLE^^^^^^^^^")
# sample = solve(SAMPLE)
# if SAMPLE_EXPECTED is None:
# print("*** SKIPPING SAMPLE! ***")
# else:
# assert sample == SAMPLE_EXPECTED
# print("*** SAMPLE PASSED ***")
solved = solve(REAL)
print("SOLUTION: ", solved)
# assert solved
| 25.202247 | 82 | 0.543914 |
f227f1a6050eb38656085af87d1b77f4623a92c4 | 3,091 | py | Python | exif_address_finder/ExifAddressFinderManager.py | jonathanlurie/ExifAddressFinder | ddb3e526040a80534f2f72246f1b9f96c9c5d0b0 | [
"MIT"
] | null | null | null | exif_address_finder/ExifAddressFinderManager.py | jonathanlurie/ExifAddressFinder | ddb3e526040a80534f2f72246f1b9f96c9c5d0b0 | [
"MIT"
] | null | null | null | exif_address_finder/ExifAddressFinderManager.py | jonathanlurie/ExifAddressFinder | ddb3e526040a80534f2f72246f1b9f96c9c5d0b0 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
'''
Author : Jonathan Lurie
Email : lurie.jo@gmail.com
Version : 0.1
Licence : MIT
description : The entry point to the library.
'''
import GeoToolbox
import exifread
import piexif
from IFD_KEYS_REFERENCE import *
import exifWriter
import os
| 30.60396 | 136 | 0.620835 |
1ee6c5cf51fc01113d2c8df3b5c4886a89607d63 | 1,402 | py | Python | src/anaplan_api/Model.py | jeswils-ap/anaplan-api | e08ea75828a60e96024d596b2f30184c18fa31d3 | [
"BSD-2-Clause"
] | 2 | 2021-09-23T08:49:40.000Z | 2022-03-28T08:40:02.000Z | src/anaplan_api/Model.py | jeswils-ap/anaplan-api | e08ea75828a60e96024d596b2f30184c18fa31d3 | [
"BSD-2-Clause"
] | 3 | 2021-11-06T09:58:03.000Z | 2021-11-11T14:00:40.000Z | src/anaplan_api/Model.py | jeswils-ap/anaplan-api | e08ea75828a60e96024d596b2f30184c18fa31d3 | [
"BSD-2-Clause"
] | 1 | 2022-02-13T15:59:42.000Z | 2022-02-13T15:59:42.000Z | import json
import logging
import requests
from typing import List
from requests.exceptions import HTTPError, ConnectionError, SSLError, Timeout, ConnectTimeout, ReadTimeout
from .User import User
from .ModelDetails import ModelDetails
logger = logging.getLogger(__name__)
| 32.604651 | 106 | 0.733951 |
1ee821fc6ac5eced71be163ce2b2c80f9de72828 | 1,757 | py | Python | reproduction/Summarization/BertSum/model.py | KuNyaa/fastNLP | 22f9b87c54a4eebec7352c7ff772cd24685c7186 | [
"Apache-2.0"
] | 1 | 2019-10-05T06:02:44.000Z | 2019-10-05T06:02:44.000Z | reproduction/Summarization/BertSum/model.py | awesomemachinelearning/fastNLP | 945b30bb6174751130744231aa26119bf9bb2601 | [
"Apache-2.0"
] | 1 | 2019-12-09T06:34:44.000Z | 2019-12-09T06:34:44.000Z | reproduction/Summarization/BertSum/model.py | awesomemachinelearning/fastNLP | 945b30bb6174751130744231aa26119bf9bb2601 | [
"Apache-2.0"
] | 2 | 2020-04-21T06:17:59.000Z | 2020-05-05T11:22:11.000Z | import torch
from torch import nn
from torch.nn import init
from fastNLP.modules.encoder.bert import BertModel
| 33.788462 | 122 | 0.647126 |
1ee8df8ae43bb7100b118b6ba8aa926ea9cbaa1d | 2,226 | py | Python | p4p2p/dht/constants.py | ntoll/p4p2p | 189a35ae964bef7e6db094283f3ead79c6356a6c | [
"MIT"
] | 8 | 2015-03-08T11:02:58.000Z | 2020-04-20T15:36:24.000Z | p4p2p/dht/constants.py | ntoll/p4p2p | 189a35ae964bef7e6db094283f3ead79c6356a6c | [
"MIT"
] | null | null | null | p4p2p/dht/constants.py | ntoll/p4p2p | 189a35ae964bef7e6db094283f3ead79c6356a6c | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Defines constants used by P4P2P. Usually these are based upon concepts from
the Kademlia DHT and where possible naming is derived from the original
Kademlia paper as are the suggested default values.
"""
#: Represents the degree of parallelism in network calls.
ALPHA = 3
#: The maximum number of contacts stored in a bucket. Must be an even number.
K = 20
#: The default maximum time a NodeLookup is allowed to take (in seconds).
LOOKUP_TIMEOUT = 600
#: The timeout for network connections (in seconds).
RPC_TIMEOUT = 5
#: The timeout for receiving complete message once a connection is made (in
#: seconds). Ensures there are no stale deferreds in the node's _pending
#: dictionary.
RESPONSE_TIMEOUT = 1800 # half an hour
#: How long to wait before an unused bucket is refreshed (in seconds).
REFRESH_TIMEOUT = 3600 # 1 hour
#: How long to wait before a node replicates any data it stores (in seconds).
REPLICATE_INTERVAL = REFRESH_TIMEOUT
#: How long to wait before a node checks whether any buckets need refreshing or
#: data needs republishing (in seconds).
REFRESH_INTERVAL = int(REFRESH_TIMEOUT / 6) # Every 10 minutes.
#: The number of failed remote procedure calls allowed for a peer node. If this
#: is equalled or exceeded then the contact is removed from the routing table.
ALLOWED_RPC_FAILS = 5
#: The number of nodes to attempt to use to store a value in the network.
DUPLICATION_COUNT = K
#: The duration (in seconds) that is added to a value's creation time in order
#: to work out its expiry timestamp. -1 denotes no expiry point.
EXPIRY_DURATION = -1
#: Defines the errors that can be reported between nodes in the network.
ERRORS = {
# The message simply didn't make any sense.
1: 'Bad message',
# The message was parsed but not recognised.
2: 'Unknown message type',
# The message was parsed and recognised but the node encountered a problem
# when dealing with it.
3: 'Internal error',
# The message was too big for the node to handle.
4: 'Message too big',
# Unsupported version of the protocol.
5: 'Unsupported protocol',
# The message could not be cryptographically verified.
6: 'Unverifiable provenance'
}
| 35.903226 | 79 | 0.737646 |
1eea6f3a21c71f86ef8549a937b8ac0d9d222692 | 600 | py | Python | turtle-crossing/car_manager.py | twbm/Git-Learning-Thingy | 7dce0d4f1329df911e1e7008800f843217a5a9a2 | [
"MIT"
] | 1 | 2022-03-20T17:00:32.000Z | 2022-03-20T17:00:32.000Z | turtle-crossing/car_manager.py | Theodor45/Projects | e311e4a3ae047d6d01d24b3b868ee05ac595f391 | [
"MIT"
] | null | null | null | turtle-crossing/car_manager.py | Theodor45/Projects | e311e4a3ae047d6d01d24b3b868ee05ac595f391 | [
"MIT"
] | null | null | null | from turtle import Turtle
import random
COLORS = ["red", "orange", "yellow", "green", "blue", "purple"]
STARTING_MOVE_DISTANCE = 5
MOVE_INCREMENT = 10
| 22.222222 | 63 | 0.598333 |
1eec0296b555ebcc98cbc7b360b616946e53db82 | 941 | py | Python | manabi/apps/flashcards/permissions.py | aehlke/manabi | 1dfdd4ecb9c1214b6a70268be0dcfeda9da8754b | [
"MIT"
] | 14 | 2015-10-03T07:34:28.000Z | 2021-09-20T07:10:29.000Z | manabi/apps/flashcards/permissions.py | aehlke/manabi | 1dfdd4ecb9c1214b6a70268be0dcfeda9da8754b | [
"MIT"
] | 23 | 2019-10-25T08:47:23.000Z | 2022-01-30T02:00:45.000Z | manabi/apps/flashcards/permissions.py | aehlke/manabi | 1dfdd4ecb9c1214b6a70268be0dcfeda9da8754b | [
"MIT"
] | 7 | 2016-10-04T08:10:36.000Z | 2021-09-20T07:10:33.000Z | from django.shortcuts import get_object_or_404
from rest_framework import permissions
from manabi.apps.flashcards.models import Deck
WRITE_ACTIONS = ['create', 'update', 'partial_update', 'delete']
| 30.354839 | 75 | 0.679065 |
1eef8337b3089adedce496c555766805e7a14c76 | 365 | py | Python | Scripts/create_phone_number.py | yogeshwaran01/Mini-Projects | c1a8790079d904405d49c71d6903ca4daaa77b38 | [
"MIT"
] | 4 | 2020-09-30T17:18:13.000Z | 2021-06-11T21:02:10.000Z | Scripts/create_phone_number.py | yogeshwaran01/Mini-Projects | c1a8790079d904405d49c71d6903ca4daaa77b38 | [
"MIT"
] | null | null | null | Scripts/create_phone_number.py | yogeshwaran01/Mini-Projects | c1a8790079d904405d49c71d6903ca4daaa77b38 | [
"MIT"
] | 1 | 2021-04-02T14:51:00.000Z | 2021-04-02T14:51:00.000Z | """
Function convert lists of 10 elements
into in the format of phone number
Example,
(123) 456-789
"""
def create_phone_number(n: list) -> str:
"""
>>> create_phone_number([1,2,3,4,5,6,7,8,9,0])
'(123) 456-7890'
"""
return "({}{}{}) {}{}{}-{}{}{}{}".format(*n)
if __name__ == "__main__":
import doctest
doctest.testmod()
| 15.869565 | 50 | 0.556164 |
1eefb7ddb845bc64282cda5039ab52bf01d96b1d | 1,539 | py | Python | pointscan/scan.py | gtfierro/point_label_sharing | add0db472ec0bade566c3c1bf9428786c759d980 | [
"BSD-3-Clause"
] | 5 | 2019-08-19T10:16:49.000Z | 2021-12-19T17:18:18.000Z | pointscan/scan.py | gtfierro/point_label_sharing | add0db472ec0bade566c3c1bf9428786c759d980 | [
"BSD-3-Clause"
] | null | null | null | pointscan/scan.py | gtfierro/point_label_sharing | add0db472ec0bade566c3c1bf9428786c759d980 | [
"BSD-3-Clause"
] | 1 | 2019-10-11T15:48:42.000Z | 2019-10-11T15:48:42.000Z | import click
import logging
import pandas as pd
from pathlib import Path
if __name__ == '__main__':
main()
| 28.5 | 74 | 0.57245 |
1ef0038cb2a91e1c8d60b3c9d94b61a72a9905a8 | 569 | py | Python | var/spack/repos/builtin/packages/py-jdatetime/package.py | adrianjhpc/spack | 0a9e4fcee57911f2db586aa50c8873d9cca8de92 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 1 | 2019-09-15T23:55:48.000Z | 2019-09-15T23:55:48.000Z | var/spack/repos/builtin/packages/py-jdatetime/package.py | adrianjhpc/spack | 0a9e4fcee57911f2db586aa50c8873d9cca8de92 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | null | null | null | var/spack/repos/builtin/packages/py-jdatetime/package.py | adrianjhpc/spack | 0a9e4fcee57911f2db586aa50c8873d9cca8de92 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 1 | 2017-01-21T17:19:32.000Z | 2017-01-21T17:19:32.000Z | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
| 35.5625 | 95 | 0.755712 |
1ef103ee8055d6489e89b6cf03c9f9136b33632a | 646 | py | Python | interview/leet/147_Insertion_Sort_List_Challenge.py | eroicaleo/LearningPython | 297d46eddce6e43ce0c160d2660dff5f5d616800 | [
"MIT"
] | 1 | 2020-10-12T13:33:29.000Z | 2020-10-12T13:33:29.000Z | interview/leet/147_Insertion_Sort_List_Challenge.py | eroicaleo/LearningPython | 297d46eddce6e43ce0c160d2660dff5f5d616800 | [
"MIT"
] | null | null | null | interview/leet/147_Insertion_Sort_List_Challenge.py | eroicaleo/LearningPython | 297d46eddce6e43ce0c160d2660dff5f5d616800 | [
"MIT"
] | 1 | 2016-11-09T07:28:45.000Z | 2016-11-09T07:28:45.000Z | #!/usr/bin/env python
from linklist import *
sol = Solution()
nodeStringList = [
'[4,2,1,3]',
'[-1,5,3,4,0]',
'[3,2]',
'[23]',
'[]'
]
for nodeString in nodeStringList:
head = linkListBuilder(nodeString)
traverse(head)
traverse(sol.insertionSortList(head))
| 22.275862 | 62 | 0.53096 |
1ef14d5232899017df5a28aea662b5304b5bbc53 | 976 | py | Python | robocrm/migrations/0020_auto_20141027_0145.py | CMU-Robotics-Club/roboticsclub.org | 5f2ad4a15dc62160c6d03c87c121e934cacb8228 | [
"MIT"
] | null | null | null | robocrm/migrations/0020_auto_20141027_0145.py | CMU-Robotics-Club/roboticsclub.org | 5f2ad4a15dc62160c6d03c87c121e934cacb8228 | [
"MIT"
] | 16 | 2015-01-01T03:42:36.000Z | 2016-06-21T05:14:16.000Z | robocrm/migrations/0020_auto_20141027_0145.py | CMU-Robotics-Club/roboticsclub.org | 5f2ad4a15dc62160c6d03c87c121e934cacb8228 | [
"MIT"
] | 2 | 2015-07-23T14:37:16.000Z | 2021-09-11T01:23:25.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
| 29.575758 | 158 | 0.599385 |
1ef2ba31fbb403bcb4ce6125ac2b8a6fd53306d0 | 527 | py | Python | src/tests/flow.py | SeleSchaefer/super_resolution | bf28a959fb150ceeadbd9f0bcfc12f3025cf82f4 | [
"MIT"
] | 5 | 2019-11-11T10:01:52.000Z | 2020-12-08T11:56:33.000Z | src/tests/flow.py | SeleSchaefer/super_resolution | bf28a959fb150ceeadbd9f0bcfc12f3025cf82f4 | [
"MIT"
] | 1 | 2020-06-13T06:39:44.000Z | 2020-06-13T06:39:44.000Z | src/tests/flow.py | SeleSchaefer/super_resolution | bf28a959fb150ceeadbd9f0bcfc12f3025cf82f4 | [
"MIT"
] | 1 | 2020-07-16T23:07:28.000Z | 2020-07-16T23:07:28.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import cv2
import imageio
import numpy as np
from tar.miscellaneous import convert_flow_to_color
prev = imageio.imread("ressources/1_1.png")
prev = cv2.cvtColor(prev, cv2.COLOR_RGB2GRAY)
curr = imageio.imread("ressources/1_2.png")
curr = cv2.cvtColor(curr, cv2.COLOR_RGB2GRAY)
flow = cv2.calcOpticalFlowFarneback(prev, curr, None, 0.9, 15, 20, 100, 10, 1.5, cv2.OPTFLOW_FARNEBACK_GAUSSIAN)
rgb = convert_flow_to_color(flow)
imageio.imsave("/Users/sele/Desktop/test.png", rgb)
| 29.277778 | 112 | 0.759013 |
1ef394d030837a85a23ff0b3c23491f9f879dcc0 | 998 | py | Python | assignment4/utils.py | nicedi/ML_course_projects | 136a18ec8615ae72bb60b4d60e920beb77728115 | [
"Apache-2.0"
] | null | null | null | assignment4/utils.py | nicedi/ML_course_projects | 136a18ec8615ae72bb60b4d60e920beb77728115 | [
"Apache-2.0"
] | null | null | null | assignment4/utils.py | nicedi/ML_course_projects | 136a18ec8615ae72bb60b4d60e920beb77728115 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
| 27.722222 | 69 | 0.625251 |
1ef4bd40a0edef859ca09644504d0ac02de309a6 | 746 | py | Python | post/migrations/0009_auto_20171207_2320.py | silvareal/personal-blog | 9ed8ac48864510cd5b3227b7b0f7d335beb648de | [
"MIT"
] | 2 | 2018-03-15T16:53:11.000Z | 2020-01-17T15:56:33.000Z | post/migrations/0009_auto_20171207_2320.py | silvareal/personal-blog | 9ed8ac48864510cd5b3227b7b0f7d335beb648de | [
"MIT"
] | null | null | null | post/migrations/0009_auto_20171207_2320.py | silvareal/personal-blog | 9ed8ac48864510cd5b3227b7b0f7d335beb648de | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-12-07 22:20
from __future__ import unicode_literals
from django.db import migrations, models
| 26.642857 | 177 | 0.577748 |
1ef4eeb144b92d317488e7746cdc05ddecffcf45 | 3,018 | py | Python | tests/test_utils_project.py | FingerCrunch/scrapy | 3225de725720bba246ba8c9845fe4b84bc0c82e7 | [
"BSD-3-Clause"
] | 41,267 | 2015-01-01T07:39:25.000Z | 2022-03-31T20:09:40.000Z | tests/test_utils_project.py | FingerCrunch/scrapy | 3225de725720bba246ba8c9845fe4b84bc0c82e7 | [
"BSD-3-Clause"
] | 4,420 | 2015-01-02T09:35:38.000Z | 2022-03-31T22:53:32.000Z | tests/test_utils_project.py | FingerCrunch/scrapy | 3225de725720bba246ba8c9845fe4b84bc0c82e7 | [
"BSD-3-Clause"
] | 11,080 | 2015-01-01T18:11:30.000Z | 2022-03-31T15:33:19.000Z | import unittest
import os
import tempfile
import shutil
import contextlib
from pytest import warns
from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.utils.project import data_path, get_project_settings
| 30.795918 | 70 | 0.629556 |
1ef590187d92be6eb1062c6742984e4a21a536f0 | 212 | py | Python | trainer/__init__.py | Greeser/gate-decorator-pruning | 1069fc89099100091412b6f89ead0519d382c518 | [
"Apache-2.0"
] | 192 | 2019-09-18T10:02:16.000Z | 2022-03-24T16:31:18.000Z | trainer/__init__.py | pawopawo/gate-decorator-pruning | d89021802fa56b1eba97921db3d8cadcacdd2073 | [
"Apache-2.0"
] | 25 | 2019-09-24T10:53:51.000Z | 2022-01-18T07:13:52.000Z | trainer/__init__.py | pawopawo/gate-decorator-pruning | d89021802fa56b1eba97921db3d8cadcacdd2073 | [
"Apache-2.0"
] | 33 | 2019-09-19T02:21:58.000Z | 2022-03-31T10:04:20.000Z | from trainer.normal import NormalTrainer
from config import cfg
| 19.272727 | 40 | 0.679245 |
1ef715bb94a229f05900f1d0c867b3d0fe21f76d | 776 | py | Python | Tasks/Community/ts_scriptExamples/pythonLogging.py | nneul/Velocity-assets | 9be7cd6f483754871c5a541d0083fbe933dfb456 | [
"MIT"
] | 4 | 2019-05-27T23:36:34.000Z | 2020-11-12T17:08:04.000Z | Tasks/Community/ts_scriptExamples/pythonLogging.py | nneul/Velocity-assets | 9be7cd6f483754871c5a541d0083fbe933dfb456 | [
"MIT"
] | 12 | 2019-04-17T02:47:25.000Z | 2021-04-02T09:15:37.000Z | Tasks/Community/ts_scriptExamples/pythonLogging.py | nneul/Velocity-assets | 9be7cd6f483754871c5a541d0083fbe933dfb456 | [
"MIT"
] | 15 | 2018-04-26T05:18:12.000Z | 2021-11-06T04:44:58.000Z | #!/usr/bin/python
import logging
# create logger
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create file handler which and set level to debug
fh = logging.FileHandler('pythonLogging.log')
fh.setLevel(logging.WARNING)
# create formatter
formatter = logging.Formatter("%(asctime)s %(levelname)-8s %(message)s")
# add formatter to ch and fh
ch.setFormatter(formatter)
fh.setFormatter(formatter)
# add ch and fh to logger
logger.addHandler(ch)
logger.addHandler(fh)
# "application" code
logger.debug("debug message")
logger.info("info message")
logger.warn("warn message")
logger.error("error message")
logger.critical("critical message")
print('\nDone')
| 25.866667 | 72 | 0.76933 |
1ef7b25dff5a6ddf0729f1a5e0bea3ab89df1ed3 | 3,565 | py | Python | google/datalab/commands/_datalab.py | freyrsae/pydatalab | 9aba1ac6bbe8e1384e7a4b07c5042af84348797d | [
"Apache-2.0"
] | 198 | 2016-07-14T19:47:52.000Z | 2022-03-15T08:45:21.000Z | google/datalab/commands/_datalab.py | freyrsae/pydatalab | 9aba1ac6bbe8e1384e7a4b07c5042af84348797d | [
"Apache-2.0"
] | 534 | 2016-07-15T19:12:43.000Z | 2022-03-11T23:11:39.000Z | google/datalab/commands/_datalab.py | freyrsae/pydatalab | 9aba1ac6bbe8e1384e7a4b07c5042af84348797d | [
"Apache-2.0"
] | 86 | 2016-07-13T17:39:05.000Z | 2021-11-03T03:39:41.000Z | # Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Platform library - datalab cell magic."""
from __future__ import absolute_import
from __future__ import unicode_literals
try:
import IPython
import IPython.core.display
import IPython.core.magic
except ImportError:
raise Exception('This module can only be loaded in ipython.')
import google.datalab.utils.commands
| 33.317757 | 76 | 0.748387 |
1ef7c90725f50f509ebf7ce67bf02498f0dcedf7 | 181 | py | Python | src/server/__main__.py | ENDERZOMBI102/chatapp | 3f54e72a8d3b10457cf88ec5f87b2984cc84a51f | [
"MIT"
] | 1 | 2021-06-20T05:47:53.000Z | 2021-06-20T05:47:53.000Z | src/server/__main__.py | ENDERZOMBI102/chatapp | 3f54e72a8d3b10457cf88ec5f87b2984cc84a51f | [
"MIT"
] | null | null | null | src/server/__main__.py | ENDERZOMBI102/chatapp | 3f54e72a8d3b10457cf88ec5f87b2984cc84a51f | [
"MIT"
] | null | null | null | from sys import argv
from server.AServer import AServer
if '--old' in argv:
from server.server import Server
Server()
else:
AServer( websocket='--websocket' in argv ).Start()
| 16.454545 | 51 | 0.729282 |
1ef930c42df2781ea2ef6709774093b794cfc83e | 3,081 | py | Python | testing/tests/registers.py | Wynjones1/gbvhdl | 46cef04cef308967ea4764eeeaf7d611dc783ae4 | [
"MIT"
] | null | null | null | testing/tests/registers.py | Wynjones1/gbvhdl | 46cef04cef308967ea4764eeeaf7d611dc783ae4 | [
"MIT"
] | null | null | null | testing/tests/registers.py | Wynjones1/gbvhdl | 46cef04cef308967ea4764eeeaf7d611dc783ae4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python2.7
from common import *
from random import randint, choice
registers = {\
"a" : int("0000", 2),
"f" : int("0001", 2),
"b" : int("0010", 2),
"c" : int("0011", 2),
"d" : int("0100", 2),
"e" : int("0101", 2),
"h" : int("0110", 2),
"l" : int("0111", 2),
"af" : int("1000", 2),
"bc" : int("1001", 2),
"de" : int("1010", 2),
"hl" : int("1011", 2),
"sp" : int("1100", 2),
"pc" : int("1101", 2),
}
if __name__ == "__main__":
main()
| 31.121212 | 79 | 0.477118 |
1efa2e6d895702b8d443cbba288ae926b3327dee | 290 | py | Python | DiscordRPC/__init__.py | EterNomm/discord-rpc | 86bdf35a75df9ab8971763042d19f2f820e08a51 | [
"Apache-2.0"
] | 4 | 2021-12-13T13:26:00.000Z | 2022-02-20T17:11:19.000Z | DiscordRPC/__init__.py | LyQuid12/discord-rpc | 86bdf35a75df9ab8971763042d19f2f820e08a51 | [
"Apache-2.0"
] | null | null | null | DiscordRPC/__init__.py | LyQuid12/discord-rpc | 86bdf35a75df9ab8971763042d19f2f820e08a51 | [
"Apache-2.0"
] | null | null | null | from .presence import *
from .button import button
from .exceptions import *
#from .get_current_app import GCAR (Disabling due to a bug)
__title__ = "Discord-RPC"
__version__ = "3.5"
__authors__ = "LyQuid"
__license__ = "Apache License 2.0"
__copyright__ = "Copyright 2021-present LyQuid"
| 26.363636 | 59 | 0.762069 |
1efa6932e5014bf06e1e937b4bacbf01a0d855e1 | 11,392 | py | Python | brax/training/ars.py | benelot/brax | 6b74009face5a12ae3e47b87cdb1abc45181040e | [
"Apache-2.0"
] | 1 | 2021-09-27T18:38:49.000Z | 2021-09-27T18:38:49.000Z | brax/training/ars.py | benelot/brax | 6b74009face5a12ae3e47b87cdb1abc45181040e | [
"Apache-2.0"
] | null | null | null | brax/training/ars.py | benelot/brax | 6b74009face5a12ae3e47b87cdb1abc45181040e | [
"Apache-2.0"
] | 1 | 2021-09-27T18:38:57.000Z | 2021-09-27T18:38:57.000Z | # Copyright 2021 The Brax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Augmented Random Search training.
See: https://arxiv.org/pdf/1803.07055.pdf
"""
import time
from typing import Any, Callable, Dict, Optional
from absl import logging
from brax import envs
from brax.training import env
from brax.training import networks
from brax.training import normalization
import flax
import jax
import jax.numpy as jnp
import optax
Params = Any
epochs_per_step = (epochs + log_frequency - 1) // log_frequency
training_state = TrainingState(key=key,
normalizer_params=normalizer_params,
policy_params=policy_params)
training_walltime = 0
eval_walltime = 0
sps = 0
eval_sps = 0
metrics = {}
summary = {}
state = first_state
for it in range(log_frequency + 1):
logging.info('starting iteration %s %s', it, time.time() - xt)
t = time.time()
if process_id == 0:
eval_state = run_eval(eval_first_state,
training_state.policy_params,
training_state.normalizer_params)
eval_state.completed_episodes.block_until_ready()
eval_walltime += time.time() - t
eval_sps = (
episode_length * eval_first_state.core.reward.shape[0] /
(time.time() - t))
avg_episode_length = (
eval_state.completed_episodes_steps / eval_state.completed_episodes)
metrics = dict(
dict({
f'eval/episode_{name}': value / eval_state.completed_episodes
for name, value in eval_state.completed_episodes_metrics.items()
}),
**dict({
f'train/{name}': value for name, value in summary.items()
}),
**dict({
'eval/completed_episodes': eval_state.completed_episodes,
'eval/episode_length': avg_episode_length,
'speed/sps': sps,
'speed/eval_sps': eval_sps,
'speed/training_walltime': training_walltime,
'speed/eval_walltime': eval_walltime,
'speed/timestamp': training_walltime,
}))
logging.info('Step %s metrics %s',
int(training_state.normalizer_params[0]) * action_repeat,
metrics)
if progress_fn:
progress_fn(int(training_state.normalizer_params[0]) * action_repeat,
metrics)
if it == log_frequency:
break
t = time.time()
# optimization
state, training_state, summary = run_ars(state, training_state)
jax.tree_map(lambda x: x.block_until_ready(), training_state)
sps = episode_length * num_envs * epochs_per_step / (
time.time() - t)
training_walltime += time.time() - t
_, inference = make_params_and_inference_fn(core_env.observation_size,
core_env.action_size,
normalize_observations,
head_type)
params = training_state.normalizer_params, training_state.policy_params
return (inference, params, metrics)
def make_params_and_inference_fn(observation_size, action_size,
normalize_observations, head_type=None):
"""Creates params and inference function for the ES agent."""
obs_normalizer_params, obs_normalizer_apply_fn = normalization.make_data_and_apply_fn(
observation_size, normalize_observations, apply_clipping=False)
policy_head = get_policy_head(head_type)
policy_model = make_ars_model(action_size, observation_size)
params = (obs_normalizer_params, policy_model.init(jax.random.PRNGKey(0)))
return params, inference_fn
| 36.512821 | 88 | 0.684077 |
1efb1e54ed275e79479018453c75c13bf653026c | 330 | py | Python | docs/api/conf.py | kagemeka/selext | 1882e518f8698f6d257549cdb36c79e05e801d39 | [
"MIT"
] | 1 | 2022-02-15T12:02:02.000Z | 2022-02-15T12:02:02.000Z | docs/api/conf.py | kagemeka/filesystem-python | 565beb128326f5ee41a5bb2b3a751788d4a02e4c | [
"MIT"
] | 6 | 2022-01-05T09:15:54.000Z | 2022-01-09T05:48:43.000Z | docs/api/conf.py | kagemeka/python-algorithms | dface89b8c618845cf524429aa8e97c4b2b10ceb | [
"MIT"
] | null | null | null | import os
import sys
sys.path.append(find_docs_root())
from _rtd_conf import *
from _sphinx_conf import *
| 20.625 | 45 | 0.70303 |
1efbf1d335ee13e467149f16bec6b633d71434fe | 1,314 | py | Python | src/graph/cli/server.py | clayman-micro/graph | 742015c276f89841310794e952280a06c24fe8ef | [
"MIT"
] | null | null | null | src/graph/cli/server.py | clayman-micro/graph | 742015c276f89841310794e952280a06c24fe8ef | [
"MIT"
] | null | null | null | src/graph/cli/server.py | clayman-micro/graph | 742015c276f89841310794e952280a06c24fe8ef | [
"MIT"
] | null | null | null | import socket
import click
import uvicorn # type: ignore
| 22.655172 | 70 | 0.590563 |
1efec6ae65507f91537c6a7a371e02ca57452f0d | 175 | py | Python | settings/libs.py | skylifewww/pangolinreact | 8d8a45fd15c442618f2ed1ecab15e2e2ab4b7a3a | [
"MIT"
] | null | null | null | settings/libs.py | skylifewww/pangolinreact | 8d8a45fd15c442618f2ed1ecab15e2e2ab4b7a3a | [
"MIT"
] | null | null | null | settings/libs.py | skylifewww/pangolinreact | 8d8a45fd15c442618f2ed1ecab15e2e2ab4b7a3a | [
"MIT"
] | null | null | null | # grappelli
GRAPPELLI_ADMIN_TITLE = 'pangolin - Administration panel'
# rest framework
# REST_FRAMEWORK = {
# 'PAGINATE_BY_PARAM': 'limit',
# 'SEARCH_PARAM': 'q'
# }
| 19.444444 | 57 | 0.68 |
1efed2c2a7cb93434e5d67d1db9954f3a5ff1653 | 1,543 | py | Python | kubespawner/clients.py | moskiGithub/spawner_test | 405f088041054080f53b620b68fe040e5e0b091a | [
"BSD-3-Clause"
] | null | null | null | kubespawner/clients.py | moskiGithub/spawner_test | 405f088041054080f53b620b68fe040e5e0b091a | [
"BSD-3-Clause"
] | null | null | null | kubespawner/clients.py | moskiGithub/spawner_test | 405f088041054080f53b620b68fe040e5e0b091a | [
"BSD-3-Clause"
] | null | null | null | """Shared clients for kubernetes
avoids creating multiple kubernetes client objects,
each of which spawns an unused max-size thread pool
"""
from unittest.mock import Mock
import weakref
import kubernetes.client
from kubernetes.client import api_client
# FIXME: remove when instantiating a kubernetes client
# doesn't create N-CPUs threads unconditionally.
# monkeypatch threadpool in kubernetes api_client
# to avoid instantiating ThreadPools.
# This is known to work for kubernetes-4.0
# and may need updating with later kubernetes clients
_dummy_pool = Mock()
api_client.ThreadPool = lambda *args, **kwargs: _dummy_pool
_client_cache = {}
def shared_client(ClientType, *args, **kwargs):
"""Return a single shared kubernetes client instance
A weak reference to the instance is cached,
so that concurrent calls to shared_client
will all return the same instance until
all references to the client are cleared.
"""
kwarg_key = tuple((key, kwargs[key]) for key in sorted(kwargs))
cache_key = (ClientType, args, kwarg_key)
client = None
if cache_key in _client_cache:
# resolve cached weakref
# client can still be None after this!
client = _client_cache[cache_key]()
if client is None:
Client = getattr(kubernetes.client, ClientType)
client = Client(*args, **kwargs)
# cache weakref so that clients can be garbage collected
_client_cache[cache_key] = weakref.ref(client)
return client
| 32.829787 | 68 | 0.711601 |
1effabe25e75813c61f0401ae397020afd635812 | 8,956 | py | Python | src/syncgitlab2msproject/gitlab_issues.py | lcv3/SyncGitlab2MSProject | 4a81191b7deb6974e893d44f3b04fcfc1da36571 | [
"MIT"
] | null | null | null | src/syncgitlab2msproject/gitlab_issues.py | lcv3/SyncGitlab2MSProject | 4a81191b7deb6974e893d44f3b04fcfc1da36571 | [
"MIT"
] | null | null | null | src/syncgitlab2msproject/gitlab_issues.py | lcv3/SyncGitlab2MSProject | 4a81191b7deb6974e893d44f3b04fcfc1da36571 | [
"MIT"
] | null | null | null | import dateutil.parser
from datetime import datetime
from functools import lru_cache
from gitlab import Gitlab
from gitlab.v4.objects import Project
from logging import getLogger
from typing import Dict, List, Optional, Union
from .custom_types import GitlabIssue, GitlabUserDict
from .exceptions import MovedIssueNotDefined
from .funcions import warn_once
logger = getLogger(f"{__package__}.{__name__}")
def get_user_identifier(user_dict: GitlabUserDict) -> str:
"""
Return the user identifier
keep as separate function to allow easier changes later if required
"""
return str(user_dict["name"])
# **************************************************************
# *** Define some default properties to allow static typing ***
# **************************************************************
def _get_from_time_stats(self, key) -> Optional[float]:
"""
Somehow the python-gitlab API seems to be not 100% fixed,
see issue #9
:param key: key to query from time stats
:return: the value if existing or none
"""
query_dict: Dict[str, float]
if callable(self.obj.time_stats):
query_dict = self.obj.time_stats()
else:
query_dict = self.obj.time_stats
return query_dict.get(key, None)
def get_gitlab_class(server: str, personal_token: Optional[str] = None) -> Gitlab:
if personal_token is None:
return Gitlab(server, ssl_verify=False)
else:
return Gitlab(server, private_token=personal_token, ssl_verify=False)
def get_group_issues(gitlab: Gitlab, group_id: int) -> List[Issue]:
group = gitlab.groups.get(group_id, lazy=True)
return [Issue(issue) for issue in group.issues.list(all=True)]
def get_project_issues(gitlab: Gitlab, project_id: int) -> List[Issue]:
project = gitlab.projects.get(project_id)
return [
Issue(issue, fixed_group_id=get_group_id_from_gitlab_project(project))
for issue in project.issues.list(all=True)
]
| 30.154882 | 85 | 0.600715 |
48026c0db5235d96fe4f5a2a24dc36b8317b3710 | 964 | py | Python | pytest/track_test.py | Sergej91/TheiaSfM | e603e16888456c3e565a2c197fa9f8643c176175 | [
"BSD-3-Clause"
] | null | null | null | pytest/track_test.py | Sergej91/TheiaSfM | e603e16888456c3e565a2c197fa9f8643c176175 | [
"BSD-3-Clause"
] | null | null | null | pytest/track_test.py | Sergej91/TheiaSfM | e603e16888456c3e565a2c197fa9f8643c176175 | [
"BSD-3-Clause"
] | null | null | null | import pytheia as pt
import os
import numpy as np
if __name__ == "__main__":
test_track_set_descriptor_read_write() | 27.542857 | 56 | 0.701245 |
4805394e98503f43fbc6141c4232f0ba1a824264 | 1,732 | py | Python | jayk/util.py | alekratz/jayk | 87dc1aa4fd7be9ee1757ddee066dffb1bd7df09b | [
"ISC"
] | 1 | 2018-04-06T23:06:21.000Z | 2018-04-06T23:06:21.000Z | jayk/util.py | alekratz/jayk | 87dc1aa4fd7be9ee1757ddee066dffb1bd7df09b | [
"ISC"
] | 5 | 2017-10-12T12:13:11.000Z | 2018-05-15T22:32:04.000Z | jayk/util.py | alekratz/jayk | 87dc1aa4fd7be9ee1757ddee066dffb1bd7df09b | [
"ISC"
] | 5 | 2017-10-10T21:59:18.000Z | 2019-06-28T13:28:10.000Z | """Common utilities used through this codebase."""
import logging
import logging.config
| 30.928571 | 97 | 0.612587 |
48065a4ff80756ba525a0a9808129ee5012b319d | 224 | py | Python | experimentation/trap/statistics_calculator.py | GruppoPBDMNG-10/AIExam | 22fd4dad20bcff689deeae227f179267b92a60d8 | [
"MIT"
] | null | null | null | experimentation/trap/statistics_calculator.py | GruppoPBDMNG-10/AIExam | 22fd4dad20bcff689deeae227f179267b92a60d8 | [
"MIT"
] | 5 | 2018-09-24T13:13:19.000Z | 2018-09-24T18:34:22.000Z | experimentation/trap/statistics_calculator.py | GruppoPBDMNG-10/AIExam | 22fd4dad20bcff689deeae227f179267b92a60d8 | [
"MIT"
] | null | null | null | import experimentation.statistics.statistics as statistics
intersection = statistics.find_matches_from_file('result/experimentation/hmm/anomalous.json', 'result/experimentation/rnn/anomalous.json')
print(len(intersection)) | 44.8 | 138 | 0.852679 |
4807121b7fee14846de82fc2e6158a386836a6aa | 13,056 | py | Python | objects/CSCG/_3d/exact_solutions/status/incompressible_Navier_Stokes/Sin_Cos.py | mathischeap/mifem | 3242e253fb01ca205a76568eaac7bbdb99e3f059 | [
"MIT"
] | 1 | 2020-10-14T12:48:35.000Z | 2020-10-14T12:48:35.000Z | objects/CSCG/_3d/exact_solutions/status/incompressible_Navier_Stokes/Sin_Cos.py | mathischeap/mifem | 3242e253fb01ca205a76568eaac7bbdb99e3f059 | [
"MIT"
] | null | null | null | objects/CSCG/_3d/exact_solutions/status/incompressible_Navier_Stokes/Sin_Cos.py | mathischeap/mifem | 3242e253fb01ca205a76568eaac7bbdb99e3f059 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
@author: Yi Zhang.
Department of Aerodynamics
Faculty of Aerospace Engineering
TU Delft, Delft, Netherlands
"""
from numpy import sin, cos, pi
from objects.CSCG._3d.exact_solutions.status.incompressible_Navier_Stokes.base import incompressible_NavierStokes_Base
from objects.CSCG._3d.fields.vector.main import _3dCSCG_VectorField
# noinspection PyAbstractClass
# noinspection PyAbstractClass
# noinspection PyAbstractClass
# noinspection PyAbstractClass
# noinspection PyAbstractClass
| 30.72 | 118 | 0.555913 |
4807de81e5cd93efaec1325cded4f4d3e15bd5c9 | 93 | py | Python | aaem_summaries/components/transmission/__init__.py | gina-alaska/alaska_affordable_energy_model | 96fed0137152985ce280ea37e0affec131e3087f | [
"MIT-feh"
] | 1 | 2022-01-23T07:18:36.000Z | 2022-01-23T07:18:36.000Z | aaem_summaries/components/transmission/__init__.py | gina-alaska/alaska_affordable_energy_model | 96fed0137152985ce280ea37e0affec131e3087f | [
"MIT-feh"
] | 5 | 2017-07-14T21:56:46.000Z | 2017-07-14T21:59:15.000Z | aaem_summaries/components/transmission/__init__.py | gina-alaska/alaska_affordable_energy_model | 96fed0137152985ce280ea37e0affec131e3087f | [
"MIT-feh"
] | 2 | 2020-04-28T18:12:55.000Z | 2021-01-13T01:56:57.000Z | """
__init__.py
summary for
Transmission Line in a community
"""
from summary import *
| 11.625 | 32 | 0.698925 |
4808f358b59d11b04181bc6422be2976a2eb690f | 2,344 | py | Python | setup.py | doconce/preprocess | c09c61ca7571699c6ed8d93eaf4fac13ee614409 | [
"MIT"
] | 5 | 2020-05-23T14:56:13.000Z | 2021-07-30T15:26:27.000Z | setup.py | doconce/preprocess | c09c61ca7571699c6ed8d93eaf4fac13ee614409 | [
"MIT"
] | 2 | 2018-02-19T10:12:50.000Z | 2020-11-15T15:27:17.000Z | setup.py | doconce/preprocess | c09c61ca7571699c6ed8d93eaf4fac13ee614409 | [
"MIT"
] | 2 | 2018-06-10T11:51:52.000Z | 2020-09-02T20:41:46.000Z | #!/usr/bin/env python
# Copyright (c) 2002-2005 ActiveState Software Ltd.
"""preprocess: a multi-language preprocessor
There are millions of templating systems out there (most of them
developed for the web). This isn't one of those, though it does share
some basics: a markup syntax for templates that are processed to give
resultant text output. The main difference with `preprocess.py` is
that its syntax is hidden in comments (whatever the syntax for comments
maybe in the target filetype) so that the file can still have valid
syntax. A comparison with the C preprocessor is more apt.
`preprocess.py` is targetted at build systems that deal with many
types of files. Languages for which it works include: C++, Python,
Perl, Tcl, XML, JavaScript, CSS, IDL, TeX, Fortran, PHP, Java, Shell
scripts (Bash, CSH, etc.) and C#. Preprocess is usable both as a
command line app and as a Python module.
"""
import os
import sys
import distutils
import re
from setuptools import setup
version = '.'.join(re.findall('__version_info__ = \((\d+), (\d+), (\d+)\)',
open('lib/preprocess.py', 'r').read())[0])
classifiers = """\
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Programming Language :: Python
Operating System :: OS Independent
Topic :: Software Development :: Libraries :: Python Modules
Topic :: Text Processing :: Filters
"""
if sys.version_info < (2, 3):
# Distutils before Python 2.3 doesn't accept classifiers.
_setup = setup
doclines = __doc__.split("\n")
setup(
name="preprocess",
version=version,
author="Trent Mick",
author_email="trentm@gmail.com",
maintainer="Kristian Gregorius Hustad",
maintainer_email="krihus@ifi.uio.no",
url="http://github.com/doconce/preprocess/",
license="http://www.opensource.org/licenses/mit-license.php",
platforms=["any"],
py_modules=["preprocess"],
package_dir={"": "lib"},
entry_points={'console_scripts': ['preprocess = preprocess:main']},
install_requires=['future'],
description=doclines[0],
classifiers=filter(None, classifiers.split("\n")),
long_description="\n".join(doclines[2:]),
)
| 34.470588 | 75 | 0.700939 |
48098c0ad42aef816d1cc33d9f2b4cf7db1cf4ab | 1,504 | py | Python | bubblebbs/config.py | kawa-kokosowa/bubblebbs | 2b70fd66c27f717ed009cbe5efc0d0d5433f3423 | [
"MIT"
] | 7 | 2019-01-03T01:21:32.000Z | 2020-09-03T01:52:09.000Z | bubblebbs/config.py | lily-mayfield/bubblebbs | 2b70fd66c27f717ed009cbe5efc0d0d5433f3423 | [
"MIT"
] | 109 | 2018-05-04T10:32:23.000Z | 2018-06-24T03:35:01.000Z | bubblebbs/config.py | kawa-kokosowa/bubblebbs | 2b70fd66c27f717ed009cbe5efc0d0d5433f3423 | [
"MIT"
] | 3 | 2018-05-14T15:10:03.000Z | 2018-05-19T01:13:03.000Z | import os
BEHIND_REVERSE_PROXY = bool(os.environ.get('BBBS_BEHIND_REVERSE_PROXY', False))
POSTS_PER_PAGE = 25
TEMPLATES_AUTO_RELOAD = True
RECAPTCHA_ENABLED = os.environ.get('BBBS_RECAPTCHA_ENABLED', False)
RECAPTCHA_SITE_KEY = os.environ.get('BBBS_RECAPTCHA_SITE_KEY', 'CHANGEGME')
RECAPTCHA_SECRET_KEY = os.environ.get('BBS_RECAPTCHA_SECRET_KEY', 'CHANGEME')
SECRET_KEY = os.environ.get('BBBS_SECRET_KEY', 'PLEASE CHANGE ME')
SECRET_SALT = os.environ.get('BBBS_SECRET_SALT', 'CHANGEME')
SQLALCHEMY_DATABASE_URI = os.environ.get('BBBS_DB_STRING', 'sqlite:///test.db')
SITE_TAGLINE = os.environ.get('BBBS_SITE_TAGLINE', 'some tagline')
SITE_TITLE = os.environ.get('BBBS_SITE_TAGLINE', 'super title')
SITE_FOOTER = os.environ.get(
'BBBS_SITE_FOOTER',
'<a href="https://github.com/kawa-kokosowa/bubblebbs">Powered by BubbleBBS</a>',
)
RATELIMIT_STORAGE_URL = os.environ.get('BBBS_RATELIMIT_STORAGE_URL', 'redis://localhost:6379/1')
RATELIMIT_DEFAULT = "400 per day, 100 per hour"
RATELIMIT_ENABLED = True
RATELIMIT_LIST_THREADS = "20 per minute, 1 per second"
RATELIMIT_VIEW_SPECIFIC_POST = "20 per minute, 1 per second"
RATELIMIT_NEW_REPLY = "20 per hour, 1 per second, 2 per minute"
RATELIMIT_VIEW_TRIP_META = "50 per hour, 15 per minute"
RATELIMIT_EDIT_TRIP_META = "60 per hour, 1 per second, 4 per minute"
RATELIMIT_MANAGE_COOKIE = '60 per hour, 1 per second, 7 per minute'
RATELIMIT_CREATE_THREAD = '700 per hour, 100 per minute'
RATELIMIT_NEW_THREAD_FORM = '60 per hour, 1 per second'
| 42.971429 | 96 | 0.776596 |
480a52a59f5e6ca79a9056130cb2d9abb336a9ed | 11,497 | py | Python | sim_user/mailLib.py | silicom-hub/IS_simulator | 4d134a8051c3604a94c2552503ff24015a3e86ee | [
"MIT"
] | 4 | 2021-11-24T10:58:51.000Z | 2022-03-11T15:13:22.000Z | sim_user/mailLib.py | silicom-hub/IS_simulator | 4d134a8051c3604a94c2552503ff24015a3e86ee | [
"MIT"
] | 1 | 2021-11-24T09:16:08.000Z | 2021-11-30T16:19:41.000Z | sim_user/mailLib.py | silicom-hub/IS_simulator | 4d134a8051c3604a94c2552503ff24015a3e86ee | [
"MIT"
] | 1 | 2021-11-24T11:10:38.000Z | 2021-11-24T11:10:38.000Z | import os
import wget
import time
import glob
import getpass
import tarfile
import subprocess
import email.mime.multipart
import email.mime.text
import email.mime.image
import email.mime.audio
from datetime import datetime
from pprint import pprint
from colorama import Style, Fore
from smtplib import SMTP, SMTP_SSL
from imaplib import IMAP4_SSL, IMAP4
def smtp_connect(smtp_server, verbose=True):
""" Conection to smtp server.
smtp_server_ip (str): This value is the smtp server's ip.
verbose (boolean): Print information about function progress.
Returns:
None
"""
try:
smtp = SMTP_SSL(host=smtp_server)
smtp.ehlo()
if verbose:
print(Fore.GREEN+ " ==> [smtp_connect] with SSL" +Style.RESET_ALL)
return smtp
except:
try:
smtp = SMTP(host=smtp_server)
smtp.ehlo()
if verbose:
print(Fore.GREEN+ " ==> [smtp_connect] without SSL" +Style.RESET_ALL)
return smtp
except:
print(Fore.RED+ " ==> [smtp_connect] failed!" +Style.RESET_ALL)
return 1
def imap_connect(imap_server, username, password, verbose=True):
""" Connection to imp server.
imap_server_ip (str): This value is the imap server's ip.
verbose (boolean): Print information about function progress.
Returns:
None
"""
try:
imap = IMAP4_SSL(imap_server)
imap.login(username, password)
if verbose:
print(Fore.GREEN+ " ==> [imap_connect] with SSL" +Style.RESET_ALL)
return imap
except:
try:
imap = IMAP4(imap_server)
imap.login(username, password)
if verbose:
print(Fore.GREEN+ " ==> [imap_connect] without SSL" +Style.RESET_ALL)
return imap
except:
print(Fore.RED+ " ==> [imap_connect] failed!" +Style.RESET_ALL)
def send_mail(smtp_server, FROM="", TO="", subject="", msg="", attachements=[], verbose=True):
""" Send mail.
smtp_server_ip (str): This value is the smtp server's ip.
FROM (str): This value is the sender email address.
TO (list): This value is a list of multiple recipient
SUBJECT (str, Optional): This value is the email's subject content.
msg (str, Optional): This value is the email's message content.
attachements (list Optional):
verbose (boolean): Print information about function progress.
Returns:
None
"""
smtp = smtp_connect(smtp_server, verbose=False)
mail = email.mime.multipart.MIMEMultipart()
mail["Subject"] = "[ "+subject+" ]"
mail["From"] = FROM
mail["To"] = TO
msg = email.mime.text.MIMEText(msg, _subtype="plain")
msg.add_header("Content-Disposition", "email message")
mail.attach(msg)
for attachement in attachements:
if attachement[0] == "image":
img = email.mime.image.MIMEImage(open(attachement[1], "rb").read())
img.add_header("Content-Disposition", "attachement")
img.add_header("Attachement-type", "image")
img.add_header("Attachement-filename", attachement[1])
mail.attach(img)
if attachement[0] == "file":
text = email.mime.text.MIMEText(open(attachement[1], "r").read())
text.add_header("Content-Disposition", "attachement")
text.add_header("Attachement-type", "filetext")
text.add_header("Attachement-filename", attachement[1])
mail.attach(text)
try:
smtp.sendmail(mail["From"], mail["To"], mail.as_string())
if verbose:
print(Fore.GREEN+ " ==> [send_mail] "+mail["From"]+" --> "+mail["To"]+" {"+subject+"} -- "+ time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
smtp_logout(smtp, verbose=False)
except Exception as e:
print(Fore.RED+ " ==> [send_mail] failed! "+mail["From"]+" --> "+mail["To"]+" -- "+ time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
print(Fore.RED+str(e)+Style.RESET_ALL)
smtp_logout(smtp, verbose=False)
def read_mailbox(imap_server, username, password, verbose=True): # attribut [ _payload ]
""" Read email inbox
imap_server_ip (str): This value is the imap server's ip.
login (str): This value is the username login.
password (str): This value is the password login.
verbose (boolean): Print information about function progress.
Returns:
list of str: all emails content
"""
imap = imap_connect(imap_server, username, password, verbose=False)
all_mails = []
imap.select("INBOX")
status, mails = imap.search(None, "ALL")
for mail in mails[0].split():
status, data = imap.fetch(mail, "(RFC822)")
mail_content = email.message_from_string(data[0][1].decode("utf-8"))
all_mails.append(mail_content)
for part in mail_content.walk():
if not part.is_multipart():
pass
if verbose:
print(Fore.GREEN+ " ==> [read_mailbox] {"+str(len(mails)-1)+"} -- "+ time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
imap_logout(imap, verbose=False)
return all_mails
def read_mailbox_download_execute(imap_server, imap_login, imap_password):
""" Read email inbox and download link inside.
imap_server_ip (str): This value is the imap server's ip.
imap_login (str): This value is the username login.
imap_password (str): This value is the password login.
verbose (boolean): Print information about function progress.
Returns:
list of str: all emails content
"""
try:
path = None
mails = read_mailbox(imap_server, imap_login, imap_password, verbose=False)
if len(mails) <= 0:
print(Fore.YELLOW+ " ==> [read_mailbox_download_execute] {"+str(len(mails)-1)+"} -- "+ time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
return 0
for mail in mails:
for element in str(mail).replace("\n", " ").split(" "):
if "http" in element:
path = wget.download(element)
if path == None:
print(Fore.YELLOW+ " ==> [read_mailbox_download_execute] {"+str(len(mails)-1)+"} -- "+ time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
return 0
tarf_file = tarfile.open(path)
tarf_file.extractall(".")
tarf_file.close()
python_files = glob.glob("*/*maj*.py")
for python_script in python_files:
subprocess.getoutput("python3 "+python_script)
print(Fore.GREEN+ " ==> [read_mailbox_download_execute] {"+str(len(mails)-1)+"} -- "+ time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
return True
except Exception as e:
print(Fore.RED+ " ==> [read_mailbox_download_execute] failed during execution! -- "+ time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
print(e)
return False
def download_attachements(imap_server, username, password, verbose=True):
""" Read email inbox and download attachements.
imap_server_ip (str): This value is the imap server's ip.
imap_login (str): This value is the username login.
imap_password (str): This value is the password login.
verbose (boolean): Print information about function progress.
Returns:
list of str: all emails content
"""
imap = imap_connect(imap_server, username, password, verbose=False)
#INIT
if not os.path.isdir("/home/"+getpass.getuser()+"/Downloads"):
os.makedirs("/home/"+getpass.getuser()+"/Downloads")
mails = []
imap.select("INBOX")
status, mails = imap.search(None, "ALL")
for mail in mails[0].split():
status, data = imap.fetch(mail, "(RFC822)")
mail_content = email.message_from_string(data[0][1].decode("utf-8"))
for part in mail_content.walk():
if not part.is_multipart():
if part["Content-Disposition"] == "attachement" and part["Attachement-type"] == "filetext":
username = getpass.getuser()
file = open(part["Attachement-filename"],"w")
file.write(part._payload)
file.close()
imap_logout(imap, verbose=False)
print(Fore.GREEN+ " ==> [download_attachements] --- " + time.strftime("%H:%M:%S", time.localtime())+Style.RESET_ALL)
# In progress
def delete_emails(imap, mails):
""" Delete mails specified in attributs
imap (imap_object): This value is the imap server's object.
mails (list): This value is an email list to delete.
Returns:
list of str: all emails content
"""
for mail in mails:
imap.store(mail,"+FLAGS","\\Deleted")
imap.expunge()
def delete_all_emails(imap_server, username, password, verbose=True):
""" Delete all emails in INBOX.
imap_server_ip (str): This value is the imap server's ip.
imap_login (str): This value is the username login.
imap_password (str): This value is the password login.
verbose (boolean): Print information about function progress.
Returns:
list of str: all emails content
"""
imap = imap_connect(imap_server, username, password, verbose=False)
delete_messages = []
imap.select("INBOX")
status, mails = imap.search(None, "ALL")
for mail in mails[0].split():
delete_messages.append(mail)
delete_emails(imap, delete_messages)
status, mails = imap.search(None, "ALL")
if len(mails) == 1:
print(Fore.GREEN+ " ==> [delete_all_emails] was successfull --- " + time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
imap_logout(imap, verbose=False)
return 0
print(Fore.RED+ " ==> [delete_all_emails] failed! --- " + time.strftime("%H:%M:%S", time.localtime()) +Style.RESET_ALL)
imap_logout(imap, verbose=False)
return 1
def imap_logout(imap, verbose=True):
""" Logout out to the imap service
imap (imap_object): This value is the imap server's object.
Returns:
None
"""
try:
imap.close()
imap.logout()
if verbose:
print(Fore.GREEN+ " ==> [imap_logout] was successfull" +Style.RESET_ALL)
except:
print(Fore.RED+ " ==> [imap_logout] failed" +Style.RESET_ALL)
def smtp_logout(smtp, verbose=True):
""" Logout out to the smtp service
smtp (smtp_object): This value is the smtp server's object.
Returns:
None
"""
try:
smtp.quit()
if verbose:
print(Fore.GREEN+ " ==> [smtp_logout] was successfull" +Style.RESET_ALL)
except:
print(Fore.RED+ " ==> [smtp_logout] failed" +Style.RESET_ALL)
| 41.060714 | 168 | 0.609898 |
480ae2b25c3bc1935302502823a56a560d147572 | 736 | py | Python | setup.py | vishnumenon/pyims | d6b1403332e83477661baa6443fba82daaf10542 | [
"MIT"
] | 1 | 2018-05-21T01:38:31.000Z | 2018-05-21T01:38:31.000Z | setup.py | vishnumenon/pyims | d6b1403332e83477661baa6443fba82daaf10542 | [
"MIT"
] | 1 | 2019-03-01T09:40:15.000Z | 2019-03-01T09:40:15.000Z | setup.py | vishnumenon/pyims | d6b1403332e83477661baa6443fba82daaf10542 | [
"MIT"
] | null | null | null | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(name="pyims",
version='0.1.2',
description='A python wrapper for the IMS Word Sense Disambiguation tool (Zhong and Ng, 2010)',
url='http://github.com/vishnumenon/pyims',
author="Vishnu Menon",
author_email="me@vishnumenon.com",
long_description=long_description,
long_description_content_type="text/markdown",
license='MIT',
packages=setuptools.find_packages(),
install_requires=[
'nltk',
],
classifiers=(
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
),
zip_safe=False)
| 30.666667 | 99 | 0.658967 |
480af4411812ec81b6aa03218911dad18afb95a5 | 728 | py | Python | hass_apps/schedy/actor/__init__.py | weese/hass-apps | 1c0f7828213ab26924b571ad6d33514d52675ca0 | [
"Apache-2.0"
] | null | null | null | hass_apps/schedy/actor/__init__.py | weese/hass-apps | 1c0f7828213ab26924b571ad6d33514d52675ca0 | [
"Apache-2.0"
] | null | null | null | hass_apps/schedy/actor/__init__.py | weese/hass-apps | 1c0f7828213ab26924b571ad6d33514d52675ca0 | [
"Apache-2.0"
] | null | null | null | """
This package contains the various actor implementations.
"""
import typing as T
from .base import ActorBase
from .custom import CustomActor
from .generic import GenericActor
from .switch import SwitchActor
from .thermostat import ThermostatActor
__all__ = [
"ActorBase",
"CustomActor",
"GenericActor",
"SwitchActor",
"ThermostatActor",
]
| 22.75 | 77 | 0.703297 |
480bacc7e218e7aba2dc5770fad3518638833a8b | 11,372 | py | Python | triggmine_sdk/tests/test_client.py | TriggMineAdmin/TriggMine-Python-SDK | 9c5bd7c9ad2118bf5dc77796ccedc4eef0937df3 | [
"MIT"
] | null | null | null | triggmine_sdk/tests/test_client.py | TriggMineAdmin/TriggMine-Python-SDK | 9c5bd7c9ad2118bf5dc77796ccedc4eef0937df3 | [
"MIT"
] | null | null | null | triggmine_sdk/tests/test_client.py | TriggMineAdmin/TriggMine-Python-SDK | 9c5bd7c9ad2118bf5dc77796ccedc4eef0937df3 | [
"MIT"
] | null | null | null | # UnitTests of all triggmine events
import unittest
import datetime
from client import Client
if __name__ == '__main__':
unittest.main() | 73.367742 | 210 | 0.453922 |
480bf69db63d53cb496e110eff657c50a64491da | 1,605 | py | Python | lib/python2.7/site-packages/mpl_toolkits/tests/test_axes_grid.py | wfehrnstrom/harmonize | e5661d24b2021739e8ac4bf1d3a530eda4e155b3 | [
"MIT"
] | 1 | 2017-12-05T15:35:47.000Z | 2017-12-05T15:35:47.000Z | lib/python2.7/site-packages/mpl_toolkits/tests/test_axes_grid.py | wfehrnstrom/harmonize | e5661d24b2021739e8ac4bf1d3a530eda4e155b3 | [
"MIT"
] | 10 | 2017-07-13T00:24:03.000Z | 2017-07-17T07:39:03.000Z | lib/python2.7/site-packages/mpl_toolkits/tests/test_axes_grid.py | wfehrnstrom/harmonize | e5661d24b2021739e8ac4bf1d3a530eda4e155b3 | [
"MIT"
] | 7 | 2017-08-01T04:02:07.000Z | 2018-10-06T21:07:20.000Z |
from matplotlib.testing.decorators import image_comparison
from mpl_toolkits.axes_grid1 import ImageGrid
import numpy as np
import matplotlib.pyplot as plt
| 37.325581 | 68 | 0.556386 |
480cf2d366aba92e09518883be9b0629f1fbd5c8 | 4,985 | py | Python | src/resources/lib/listitem.py | ffoxin/kodi.kino.pub | f085beb99fcbab3da4efc698d56775d9553b9f28 | [
"BSD-3-Clause"
] | 59 | 2018-10-07T19:54:27.000Z | 2022-03-27T08:55:57.000Z | src/resources/lib/listitem.py | ffoxin/kodi.kino.pub | f085beb99fcbab3da4efc698d56775d9553b9f28 | [
"BSD-3-Clause"
] | 147 | 2018-10-07T19:02:13.000Z | 2022-03-05T17:15:16.000Z | src/resources/lib/listitem.py | ffoxin/kodi.kino.pub | f085beb99fcbab3da4efc698d56775d9553b9f28 | [
"BSD-3-Clause"
] | 22 | 2018-11-14T16:40:19.000Z | 2022-02-11T22:30:28.000Z | # -*- coding: utf-8 -*-
from xbmcgui import ListItem
| 39.88 | 98 | 0.620662 |
480d3d853c8689806f27c896226d9a525a7a54e6 | 3,451 | py | Python | static/firespread.py | thabat12/TetraNet | 48e98095e743f949fdb88491735acb3f19e27df7 | [
"MIT"
] | null | null | null | static/firespread.py | thabat12/TetraNet | 48e98095e743f949fdb88491735acb3f19e27df7 | [
"MIT"
] | null | null | null | static/firespread.py | thabat12/TetraNet | 48e98095e743f949fdb88491735acb3f19e27df7 | [
"MIT"
] | 1 | 2021-06-02T00:33:13.000Z | 2021-06-02T00:33:13.000Z | import numpy as np
import imageio
import tensorflow as tf
from keras.models import load_model
from PIL import Image, ImageOps
import numpy as np
from numpy import asarray
from matplotlib import pyplot as plt
from keras.utils import normalize
import os
import random
import azure_get_unet
import random
# for testing purposes only
| 29.75 | 73 | 0.541003 |
480d49129b8a557b65a1a726cce2b2b64435ab5e | 1,418 | py | Python | streamlitfront/tests/dummy_app.py | i2mint/streamlitfront | 6fbc03a42cdb7436dcda3da00fb9b42965bbb582 | [
"Apache-2.0"
] | null | null | null | streamlitfront/tests/dummy_app.py | i2mint/streamlitfront | 6fbc03a42cdb7436dcda3da00fb9b42965bbb582 | [
"Apache-2.0"
] | 1 | 2022-02-03T15:21:57.000Z | 2022-02-05T00:51:33.000Z | streamlitfront/tests/dummy_app.py | i2mint/streamlitfront | 6fbc03a42cdb7436dcda3da00fb9b42965bbb582 | [
"Apache-2.0"
] | null | null | null | from streamlitfront.base import get_pages_specs, get_func_args_specs, BasePageFunc
import streamlit as st
from pydantic import BaseModel
import streamlit_pydantic as sp
DFLT_PAGE_FACTORY = SimplePageFunc2
if __name__ == '__main__':
app = get_pages_specs([multiple_input], page_factory=DFLT_PAGE_FACTORY)
app['Multiple Input'](None)
| 28.938776 | 82 | 0.658674 |
480d62d6a3b8b59327c71459cf291592859ce935 | 367 | py | Python | app/build.py | dhost-project/build-microservice | 4376169a2753f37fe8c7985525bd3fd3af6f11e7 | [
"MIT"
] | null | null | null | app/build.py | dhost-project/build-microservice | 4376169a2753f37fe8c7985525bd3fd3af6f11e7 | [
"MIT"
] | null | null | null | app/build.py | dhost-project/build-microservice | 4376169a2753f37fe8c7985525bd3fd3af6f11e7 | [
"MIT"
] | null | null | null | from flask_restful import Resource, reqparse
parser = reqparse.RequestParser()
parser.add_argument('command', required=True)
parser.add_argument('docker', required=True)
| 21.588235 | 45 | 0.6703 |
480d6eb2f995a9bfa4e6589d0220badcbea502c9 | 1,224 | py | Python | src/unicon/plugins/windows/__init__.py | nielsvanhooy/unicon.plugins | 3416fd8223f070cbb67a2cbe604e3c5d13584318 | [
"Apache-2.0"
] | 18 | 2019-11-23T23:14:53.000Z | 2022-01-10T01:17:08.000Z | src/unicon/plugins/windows/__init__.py | nielsvanhooy/unicon.plugins | 3416fd8223f070cbb67a2cbe604e3c5d13584318 | [
"Apache-2.0"
] | 12 | 2020-11-09T20:39:25.000Z | 2022-03-22T12:46:59.000Z | src/unicon/plugins/windows/__init__.py | nielsvanhooy/unicon.plugins | 3416fd8223f070cbb67a2cbe604e3c5d13584318 | [
"Apache-2.0"
] | 32 | 2020-02-12T15:42:22.000Z | 2022-03-15T16:42:10.000Z | __copyright__ = "# Copyright (c) 2018 by cisco Systems, Inc. All rights reserved."
__author__ = "dwapstra"
from unicon.plugins.generic import GenericSingleRpConnection, service_implementation as svc
from unicon.plugins.generic.connection_provider import GenericSingleRpConnectionProvider
from unicon.plugins.generic import ServiceList, service_implementation as svc
from . import service_implementation as windows_svc
from .statemachine import WindowsStateMachine
from .settings import WindowsSettings
| 29.853659 | 91 | 0.763072 |
480db89fecb8063418ca5134d6a59815af7cc219 | 702 | py | Python | sera/commands/symlink.py | bretth/sera | 507976b9ace58bdf4c8055dbfcf2fc10840eacb2 | [
"Apache-2.0"
] | null | null | null | sera/commands/symlink.py | bretth/sera | 507976b9ace58bdf4c8055dbfcf2fc10840eacb2 | [
"Apache-2.0"
] | 12 | 2016-10-04T20:19:45.000Z | 2017-01-31T03:59:57.000Z | sera/commands/symlink.py | bretth/sera | 507976b9ace58bdf4c8055dbfcf2fc10840eacb2 | [
"Apache-2.0"
] | null | null | null | from pathlib import Path
from shutil import which
from subprocess import run, PIPE
import click
from .main import main, lprint
| 25.071429 | 60 | 0.649573 |
480e88801229fe7f9b5057dd51b7998fec8f0003 | 3,597 | py | Python | self-attention.py | dhkim2810/MaskedDatasetCondensation | f52144e9cd68e46b4ebdbcaf96829edb732b79ae | [
"Apache-2.0"
] | null | null | null | self-attention.py | dhkim2810/MaskedDatasetCondensation | f52144e9cd68e46b4ebdbcaf96829edb732b79ae | [
"Apache-2.0"
] | null | null | null | self-attention.py | dhkim2810/MaskedDatasetCondensation | f52144e9cd68e46b4ebdbcaf96829edb732b79ae | [
"Apache-2.0"
] | null | null | null | import torch
from torch import nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
from torchvision import transforms
from torchvision.models import resnet18
from torchvision.datasets import CIFAR10
from tqdm import tqdm
from torchvision.utils import save_image, make_grid
from matplotlib import pyplot as plt
from matplotlib.colors import hsv_to_rgb
from matplotlib.image import BboxImage
from matplotlib.transforms import Bbox, TransformedBbox
import numpy as np
from IPython import display
import requests
from io import BytesIO
from PIL import Image
from PIL import Image, ImageSequence
from IPython.display import HTML
import warnings
from matplotlib import rc
import gc
import matplotlib
matplotlib.rcParams['pdf.fonttype'] = 42
matplotlib.rcParams['ps.fonttype'] = 42
gc.enable()
plt.ioff()
if __name__ == "__main__":
main() | 34.92233 | 116 | 0.683903 |
480f4cbaf998ac30c959d68f9aa7a041e7838e2a | 1,100 | py | Python | src/libraries/maimai_plate.py | Blitz-Raynor/Kiba | a73b5b6212a5446d218a80f1a6aba108e0a1912b | [
"MIT"
] | 4 | 2022-01-24T05:33:34.000Z | 2022-03-25T06:29:19.000Z | src/libraries/maimai_plate.py | Blitz-Raynor/Kiba | a73b5b6212a5446d218a80f1a6aba108e0a1912b | [
"MIT"
] | null | null | null | src/libraries/maimai_plate.py | Blitz-Raynor/Kiba | a73b5b6212a5446d218a80f1a6aba108e0a1912b | [
"MIT"
] | 3 | 2022-02-08T13:24:59.000Z | 2022-03-13T06:42:40.000Z | from typing import Optional, Dict, List
import aiohttp
plate_to_version = {
'1': 'maimai',
'2': 'maimai PLUS',
'': 'maimai GreeN',
'': 'maimai GreeN PLUS',
'': 'maimai ORANGE',
'': 'maimai ORANGE PLUS',
'': 'maimai ORANGE PLUS',
'': 'maimai PiNK',
'': 'maimai PiNK PLUS',
'': 'maimai PiNK PLUS',
'': 'maimai MURASAKi',
'': 'maimai MURASAKi PLUS',
'': 'maimai MURASAKi PLUS',
'': 'maimai MiLK',
'': 'MiLK PLUS',
'': 'maimai FiNALE',
'': 'maimai FiNALE',
'': 'maimai ',
'': 'maimai PLUS',
'': 'maimai PLUS',
'': 'maimai Splash'
}
| 29.72973 | 123 | 0.53 |
48100064f1145ddaa5540b7d9cd09cc85ae092af | 6,390 | py | Python | env/lib/python2.7/site-packages/billiard/py2/reduction.py | jlwysf/onduty | 20d90583a6996d037912af08eb29a6d6fa06bf66 | [
"MIT"
] | 39 | 2016-12-05T14:36:37.000Z | 2021-07-29T18:22:34.000Z | env/lib/python2.7/site-packages/billiard/py2/reduction.py | jlwysf/onduty | 20d90583a6996d037912af08eb29a6d6fa06bf66 | [
"MIT"
] | 68 | 2016-12-12T20:38:47.000Z | 2020-07-26T18:28:49.000Z | p2p/lib/python2.7/site-packages/billiard/py2/reduction.py | sivaprakashniet/push_pull | 757be6fcdfdc8e73eeea5cb41a733d1916c7ae20 | [
"BSD-3-Clause"
] | 120 | 2016-08-18T14:53:03.000Z | 2020-06-16T13:27:20.000Z | #
# Module to allow connection and socket objects to be transferred
# between processes
#
# multiprocessing/reduction.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
from __future__ import absolute_import
import os
import sys
import socket
import threading
from pickle import Pickler
from .. import current_process
from .._ext import _billiard, win32
from ..util import register_after_fork, debug, sub_debug
is_win32 = sys.platform == 'win32'
is_pypy = hasattr(sys, 'pypy_version_info')
is_py3k = sys.version_info[0] == 3
if not(is_win32 or is_pypy or is_py3k or hasattr(_billiard, 'recvfd')):
raise ImportError('pickling of connections not supported')
close = win32.CloseHandle if sys.platform == 'win32' else os.close
__all__ = []
# globals set later
_listener = None
_lock = None
_cache = set()
#
# ForkingPickler
#
ForkingPickler.register(type(ForkingPickler.save), _reduce_method)
ForkingPickler.register(type(list.append), _reduce_method_descriptor)
ForkingPickler.register(type(int.__add__), _reduce_method_descriptor)
try:
from functools import partial
except ImportError:
pass
else:
ForkingPickler.register(partial, _reduce_partial)
#
# Platform specific definitions
#
if sys.platform == 'win32':
# XXX Should this subprocess import be here?
import _subprocess # noqa
else:
#
# Support for a per-process server thread which caches pickled handles
#
_reset(None)
register_after_fork(_reset, _reset)
#
# Functions to be used for pickling/unpickling objects with handles
#
#
# Register `_billiard.Connection` with `ForkingPickler`
#
# Register `socket.socket` with `ForkingPickler`
#
ForkingPickler.register(socket.socket, reduce_socket)
#
# Register `_billiard.PipeConnection` with `ForkingPickler`
#
if sys.platform == 'win32':
| 25.662651 | 74 | 0.680751 |
4810516c04a5fc1c2f18a86f01879ae7f2e15131 | 92 | py | Python | BOJ_Solved/BOJ-19698.py | CodingLeeSeungHoon/Python_Algorithm_TeamNote | 1e92986999b45aa9951e12e67b23062e410e9b36 | [
"MIT"
] | 7 | 2021-11-19T14:50:59.000Z | 2022-02-25T20:00:20.000Z | BOJ_Solved/BOJ-19698.py | CodingLeeSeungHoon/Python_Algorithm_TeamNote | 1e92986999b45aa9951e12e67b23062e410e9b36 | [
"MIT"
] | null | null | null | BOJ_Solved/BOJ-19698.py | CodingLeeSeungHoon/Python_Algorithm_TeamNote | 1e92986999b45aa9951e12e67b23062e410e9b36 | [
"MIT"
] | null | null | null | """
19698 :
"""
N, W, H, L = map(int, input().split())
print(min(W//L * H//L, N)) | 15.333333 | 38 | 0.48913 |
4811f993b7e4266adf0f540c90cd21e38ddb5532 | 13,723 | py | Python | .venv/lib/python2.7/site-packages/ansible/module_utils/nxos.py | Achraf-Ben/Ansible- | a271b4c32948a7f8726e3f3174e12fe6ff491619 | [
"MIT"
] | null | null | null | .venv/lib/python2.7/site-packages/ansible/module_utils/nxos.py | Achraf-Ben/Ansible- | a271b4c32948a7f8726e3f3174e12fe6ff491619 | [
"MIT"
] | null | null | null | .venv/lib/python2.7/site-packages/ansible/module_utils/nxos.py | Achraf-Ben/Ansible- | a271b4c32948a7f8726e3f3174e12fe6ff491619 | [
"MIT"
] | null | null | null | #
# This code is part of Ansible, but is an independent component.
#
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2017 Red Hat, Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import collections
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import env_fallback, return_values
from ansible.module_utils.network_common import to_list, ComplexList
from ansible.module_utils.connection import exec_command
from ansible.module_utils.six import iteritems
from ansible.module_utils.urls import fetch_url
_DEVICE_CONNECTION = None
nxos_provider_spec = {
'host': dict(),
'port': dict(type='int'),
'username': dict(fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
'password': dict(fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
'ssh_keyfile': dict(fallback=(env_fallback, ['ANSIBLE_NET_SSH_KEYFILE'])),
'use_ssl': dict(type='bool'),
'validate_certs': dict(type='bool'),
'timeout': dict(type='int'),
'transport': dict(default='cli', choices=['cli', 'nxapi'])
}
nxos_argument_spec = {
'provider': dict(type='dict', options=nxos_provider_spec),
}
nxos_argument_spec.update(nxos_provider_spec)
# Add argument's default value here
ARGS_DEFAULT_VALUE = {
'transport': 'cli'
}
def is_json(cmd):
return str(cmd).endswith('| json')
def is_text(cmd):
return not is_json(cmd)
def is_nxapi(module):
transport = module.params['transport']
provider_transport = (module.params['provider'] or {}).get('transport')
return 'nxapi' in (transport, provider_transport)
def to_command(module, commands):
if is_nxapi(module):
default_output = 'json'
else:
default_output = 'text'
transform = ComplexList(dict(
command=dict(key=True),
output=dict(default=default_output),
prompt=dict(),
answer=dict()
), module)
commands = transform(to_list(commands))
for item in commands:
if is_json(item['command']):
item['output'] = 'json'
return commands
def get_config(module, flags=[]):
conn = get_connection(module)
return conn.get_config(flags)
def run_commands(module, commands, check_rc=True):
conn = get_connection(module)
return conn.run_commands(to_command(module, commands), check_rc)
def load_config(module, config, return_error=False):
conn = get_connection(module)
return conn.load_config(config, return_error=return_error)
| 33.717445 | 130 | 0.602274 |
48123959326e46f67183f1e789974d0424c29d58 | 2,315 | py | Python | example/bin_provider.py | dell/dataiq-plugin-example | 7c323ec2d878ed705c4c74ab045e15595bd84b0b | [
"Apache-2.0"
] | 1 | 2021-04-12T09:32:21.000Z | 2021-04-12T09:32:21.000Z | example/bin_provider.py | dell/dataiq-plugin-example | 7c323ec2d878ed705c4c74ab045e15595bd84b0b | [
"Apache-2.0"
] | null | null | null | example/bin_provider.py | dell/dataiq-plugin-example | 7c323ec2d878ed705c4c74ab045e15595bd84b0b | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Dell Inc, or its subsidiaries.
#
# SPDX-License-Identifier: Apache-2.0
import csv
import datetime
import os
import sys
from abc import ABC, abstractmethod
from collections import namedtuple
from typing import Tuple, List, Iterator
from dateutil.relativedelta import relativedelta
from dataiq.plugin.user import User
# If LOCAL_DEV environment variable is not set, use ClarityNow API
if os.environ.get('LOCAL_DEV') is None:
try:
import claritynowapi
except ImportError:
sys.path.append('/usr/local/claritynow/scripts/python')
import claritynowapi
Bin = namedtuple('Bin', 'latest count')
| 32.605634 | 93 | 0.64622 |
48126352dee9a8d203347caa05ca59556d920c18 | 268 | py | Python | test.py | ewuerfel66/lambdata-mjh09 | 7f87f923ec351e7af139d098d84a320861632616 | [
"MIT"
] | null | null | null | test.py | ewuerfel66/lambdata-mjh09 | 7f87f923ec351e7af139d098d84a320861632616 | [
"MIT"
] | 4 | 2020-03-24T17:42:54.000Z | 2021-06-02T00:22:30.000Z | test.py | ewuerfel66/lambdata-mjh09 | 7f87f923ec351e7af139d098d84a320861632616 | [
"MIT"
] | 1 | 2019-08-14T15:17:22.000Z | 2019-08-14T15:17:22.000Z | import unittest
if __name__ == '__main__':
unittest.main() | 22.333333 | 56 | 0.634328 |
4812aa4cff01a349a2420a59bd686d36663cfba9 | 1,913 | py | Python | md5tosha256.py | yym68686/VirusTotalSpider | 7620b068e87964f6011d46003dbbf88be5c7dac9 | [
"MIT"
] | 2 | 2021-06-17T03:36:59.000Z | 2021-12-02T09:13:10.000Z | md5tosha256.py | yym68686/VirusTotalSpider | 7620b068e87964f6011d46003dbbf88be5c7dac9 | [
"MIT"
] | null | null | null | md5tosha256.py | yym68686/VirusTotalSpider | 7620b068e87964f6011d46003dbbf88be5c7dac9 | [
"MIT"
] | null | null | null | import os
import re
import time
import numpy as np
from msedge.selenium_tools import EdgeOptions, Edge
from selenium.webdriver.common.action_chains import ActionChains
headers = {
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.83 Safari/537.36 Edg/85.0.564.41'
}
print('load data...')
sha256set = np.loadtxt(os.getcwd() + "/Gorgon Group.csv", delimiter=",", usecols=(0), dtype=str, skiprows=1) # usecols=(0) 0hash0
print('finish data load...')
opt = EdgeOptions() # ChromiumMicrosoft Edge
opt.use_chromium = True
# opt.add_argument("headless") #
opt.add_argument("disable-gpu")
opt.add_experimental_option('excludeSwitches', ['enable-logging'])
driver = Edge(executable_path = os.getcwd() + "/msedgedriver.exe", options = opt) # msedgedriver.exewebdriver
for filehash in sha256set:
noerror = 1
while(noerror):
try:
fileurl = 'https://www.virustotal.com/gui/file/' + filehash + '/behavior/VirusTotal%20Cuckoofork'
driver.get(fileurl)
driver.implicitly_wait(7)
driver.find_element_by_tag_name('body')
time.sleep(1.5)
print(driver.current_url)
if driver.current_url == "https://www.virustotal.com/gui/captcha": # 60s
ActionChains(driver).move_by_offset(342, 146).click().perform() #
ActionChains(driver).move_by_offset(-342, -146).perform()
time.sleep(90) #
matchresult = re.findall(r"file.(.*?).detection", driver.current_url, re.M)
with open(os.getcwd() + '/sha256.txt', 'a+', encoding='UTF-8') as f: #
f.write(matchresult[0] + '\n')
f.close()
noerror = 0
except:
noerror = 1
| 44.488372 | 150 | 0.658129 |